summaryrefslogtreecommitdiffstats
path: root/src/tools
diff options
context:
space:
mode:
Diffstat (limited to 'src/tools')
-rw-r--r--src/tools/build-manifest/src/main.rs5
-rw-r--r--src/tools/bump-stage0/src/main.rs2
-rw-r--r--src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md7
-rw-r--r--src/tools/cargo/.github/renovate.json518
-rw-r--r--src/tools/cargo/.github/workflows/main.yml8
-rw-r--r--src/tools/cargo/CHANGELOG.md189
-rw-r--r--src/tools/cargo/Cargo.lock1097
-rw-r--r--src/tools/cargo/Cargo.toml72
-rw-r--r--src/tools/cargo/benches/README.md39
-rw-r--r--src/tools/cargo/benches/benchsuite/Cargo.toml9
-rw-r--r--src/tools/cargo/benches/benchsuite/benches/global_cache_tracker.rs159
-rw-r--r--src/tools/cargo/benches/benchsuite/global-cache-tracker/global-cache-samplebin0 -> 2449408 bytes
-rw-r--r--src/tools/cargo/benches/benchsuite/global-cache-tracker/random-sample500
-rw-r--r--src/tools/cargo/benches/benchsuite/src/bin/capture-last-use.rs148
-rw-r--r--src/tools/cargo/benches/benchsuite/src/lib.rs2
-rw-r--r--src/tools/cargo/benches/capture/Cargo.toml3
-rw-r--r--src/tools/cargo/benches/capture/src/main.rs3
-rw-r--r--src/tools/cargo/build.rs32
-rw-r--r--src/tools/cargo/clippy.toml3
-rw-r--r--src/tools/cargo/crates/cargo-platform/Cargo.toml5
-rw-r--r--src/tools/cargo/crates/cargo-platform/examples/matches.rs2
-rw-r--r--src/tools/cargo/crates/cargo-test-macro/Cargo.toml3
-rw-r--r--src/tools/cargo/crates/cargo-test-macro/src/lib.rs9
-rw-r--r--src/tools/cargo/crates/cargo-test-support/Cargo.toml4
-rw-r--r--src/tools/cargo/crates/cargo-test-support/build.rs2
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/compare.rs37
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/lib.rs27
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/paths.rs12
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/registry.rs59
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/tools.rs32
-rw-r--r--src/tools/cargo/crates/cargo-util/Cargo.toml6
-rw-r--r--src/tools/cargo/crates/cargo-util/src/du.rs78
-rw-r--r--src/tools/cargo/crates/cargo-util/src/lib.rs4
-rw-r--r--src/tools/cargo/crates/cargo-util/src/paths.rs13
-rw-r--r--src/tools/cargo/crates/crates-io/Cargo.toml5
-rw-r--r--src/tools/cargo/crates/crates-io/lib.rs11
-rw-r--r--src/tools/cargo/crates/home/CHANGELOG.md9
-rw-r--r--src/tools/cargo/crates/home/Cargo.toml7
-rw-r--r--src/tools/cargo/crates/home/src/lib.rs10
-rw-r--r--src/tools/cargo/crates/home/src/windows.rs23
-rw-r--r--src/tools/cargo/crates/mdman/Cargo.toml5
-rw-r--r--src/tools/cargo/crates/mdman/src/main.rs2
-rw-r--r--src/tools/cargo/crates/mdman/tests/compare.rs16
-rw-r--r--src/tools/cargo/crates/mdman/tests/invalid.rs6
-rw-r--r--src/tools/cargo/crates/resolver-tests/Cargo.toml3
-rw-r--r--src/tools/cargo/crates/resolver-tests/src/lib.rs174
-rw-r--r--src/tools/cargo/crates/resolver-tests/tests/resolve.rs225
-rw-r--r--src/tools/cargo/crates/rustfix/Cargo.toml34
-rw-r--r--src/tools/cargo/crates/rustfix/Changelog.md79
l---------src/tools/cargo/crates/rustfix/LICENSE-APACHE1
l---------src/tools/cargo/crates/rustfix/LICENSE-MIT1
-rw-r--r--src/tools/cargo/crates/rustfix/Readme.md29
-rw-r--r--src/tools/cargo/crates/rustfix/examples/fix-json.rs44
-rw-r--r--src/tools/cargo/crates/rustfix/proptest-regressions/replace.txt8
-rw-r--r--src/tools/cargo/crates/rustfix/src/diagnostics.rs115
-rw-r--r--src/tools/cargo/crates/rustfix/src/error.rs21
-rw-r--r--src/tools/cargo/crates/rustfix/src/lib.rs306
-rw-r--r--src/tools/cargo/crates/rustfix/src/replace.rs329
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/empty.json42
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/empty.rs0
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/indented_whitespace.json60
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.json33
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.rs1
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/out_of_bounds.recorded.json43
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge-cases/utf8_idents.recorded.json59
-rw-r--r--src/tools/cargo/crates/rustfix/tests/edge_cases.rs25
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/E0178.fixed.rs10
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/E0178.json70
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/E0178.rs10
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.fixed.rs10
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.json70
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.rs10
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.fixed.rs8
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.json68
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.rs8
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.fixed.rs7
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.json87
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.rs7
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.fixed.rs5
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.json114
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.rs5
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.fixed.rs3
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.json70
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.rs3
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.fixed.rs5
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.json218
-rw-r--r--src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.rs5
-rw-r--r--src/tools/cargo/crates/rustfix/tests/parse_and_replace.rs234
-rw-r--r--src/tools/cargo/crates/semver-check/Cargo.toml3
-rw-r--r--src/tools/cargo/crates/semver-check/src/main.rs2
-rw-r--r--src/tools/cargo/crates/xtask-build-man/Cargo.toml3
-rw-r--r--src/tools/cargo/crates/xtask-build-man/src/main.rs2
-rw-r--r--src/tools/cargo/crates/xtask-bump-check/Cargo.toml3
-rw-r--r--src/tools/cargo/crates/xtask-bump-check/src/xtask.rs21
-rw-r--r--src/tools/cargo/crates/xtask-stale-label/Cargo.toml3
-rw-r--r--src/tools/cargo/crates/xtask-stale-label/src/main.rs2
-rw-r--r--src/tools/cargo/credential/cargo-credential-1password/Cargo.toml3
-rw-r--r--src/tools/cargo/credential/cargo-credential-1password/src/main.rs3
-rw-r--r--src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml5
-rw-r--r--src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml5
-rw-r--r--src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs2
-rw-r--r--src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml5
-rw-r--r--src/tools/cargo/credential/cargo-credential/Cargo.toml5
-rw-r--r--src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs3
-rw-r--r--src/tools/cargo/credential/cargo-credential/src/lib.rs3
-rwxr-xr-xsrc/tools/cargo/publish.py1
-rw-r--r--src/tools/cargo/src/bin/cargo/cli.rs20
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/add.rs21
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/bench.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/build.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/check.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/clean.rs160
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/config.rs10
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/doc.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/fetch.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/fix.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/init.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/install.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/locate_project.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/login.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/logout.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/metadata.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/new.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/owner.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/package.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/pkgid.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/publish.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/read_manifest.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/remove.rs3
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/run.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/rustc.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/rustdoc.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/search.rs4
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/tree.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/uninstall.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/update.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/vendor.rs1
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/verify_project.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/version.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/yank.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/main.rs6
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs7
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs23
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/future_incompat.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/mod.rs69
-rw-r--r--src/tools/cargo/src/cargo/core/features.rs31
-rw-r--r--src/tools/cargo/src/cargo/core/gc.rs509
-rw-r--r--src/tools/cargo/src/cargo/core/global_cache_tracker.rs1827
-rw-r--r--src/tools/cargo/src/cargo/core/manifest.rs5
-rw-r--r--src/tools/cargo/src/cargo/core/mod.rs8
-rw-r--r--src/tools/cargo/src/cargo/core/package.rs12
-rw-r--r--src/tools/cargo/src/cargo/core/package_id.rs35
-rw-r--r--src/tools/cargo/src/cargo/core/package_id_spec.rs479
-rw-r--r--src/tools/cargo/src/cargo/core/profiles.rs38
-rw-r--r--src/tools/cargo/src/cargo/core/registry.rs48
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/context.rs205
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/dep_cache.rs25
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/encode.rs4
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/errors.rs14
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/mod.rs58
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/resolve.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/version_prefs.rs58
-rw-r--r--src/tools/cargo/src/cargo/core/source_id.rs207
-rw-r--r--src/tools/cargo/src/cargo/core/summary.rs85
-rw-r--r--src/tools/cargo/src/cargo/core/workspace.rs14
-rw-r--r--src/tools/cargo/src/cargo/lib.rs22
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs5
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_add/mod.rs64
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_clean.rs4
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs1
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs12
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_fetch.rs1
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs22
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_install.rs6
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_new.rs8
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_package.rs52
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_pkgid.rs4
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_uninstall.rs25
-rw-r--r--src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs46
-rw-r--r--src/tools/cargo/src/cargo/ops/mod.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/publish.rs1
-rw-r--r--src/tools/cargo/src/cargo/ops/resolve.rs13
-rw-r--r--src/tools/cargo/src/cargo/ops/tree/mod.rs2
-rw-r--r--src/tools/cargo/src/cargo/sources/directory.rs7
-rw-r--r--src/tools/cargo/src/cargo/sources/git/known_hosts.rs2
-rw-r--r--src/tools/cargo/src/cargo/sources/git/source.rs46
-rw-r--r--src/tools/cargo/src/cargo/sources/mod.rs4
-rw-r--r--src/tools/cargo/src/cargo/sources/path.rs14
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/download.rs18
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/http_remote.rs21
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/index.rs14
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/mod.rs40
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/remote.rs20
-rw-r--r--src/tools/cargo/src/cargo/sources/replaced.rs7
-rw-r--r--src/tools/cargo/src/cargo/sources/source.rs23
-rw-r--r--src/tools/cargo/src/cargo/util/command_prelude.rs31
-rw-r--r--src/tools/cargo/src/cargo/util/config/mod.rs34
-rw-r--r--src/tools/cargo/src/cargo/util/errors.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/graph.rs32
-rw-r--r--src/tools/cargo/src/cargo/util/mod.rs4
-rw-r--r--src/tools/cargo/src/cargo/util/restricted_names.rs151
-rw-r--r--src/tools/cargo/src/cargo/util/rustc.rs22
-rw-r--r--src/tools/cargo/src/cargo/util/semver_ext.rs47
-rw-r--r--src/tools/cargo/src/cargo/util/sqlite.rs118
-rw-r--r--src/tools/cargo/src/cargo/util/toml/embedded.rs3
-rw-r--r--src/tools/cargo/src/cargo/util/toml/mod.rs3241
-rw-r--r--src/tools/cargo/src/cargo/util/toml/targets.rs12
-rw-r--r--src/tools/cargo/src/cargo/util/toml_mut/dependency.rs39
-rw-r--r--src/tools/cargo/src/cargo/util/toml_mut/manifest.rs2
-rw-r--r--src/tools/cargo/src/cargo/util_schemas/core/mod.rs6
-rw-r--r--src/tools/cargo/src/cargo/util_schemas/core/package_id_spec.rs593
-rw-r--r--src/tools/cargo/src/cargo/util_schemas/core/source_kind.rs201
-rw-r--r--src/tools/cargo/src/cargo/util_schemas/manifest.rs (renamed from src/tools/cargo/src/cargo/util/toml/schema.rs)528
-rw-r--r--src/tools/cargo/src/cargo/util_schemas/mod.rs11
-rw-r--r--src/tools/cargo/src/cargo/util_schemas/restricted_names.rs218
-rw-r--r--src/tools/cargo/src/doc/contrib/src/issues.md8
-rw-r--r--src/tools/cargo/src/doc/contrib/src/process/release.md8
-rw-r--r--src/tools/cargo/src/doc/contrib/src/team.md25
-rw-r--r--src/tools/cargo/src/doc/man/cargo-search.md3
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt3
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-search.md3
-rw-r--r--src/tools/cargo/src/doc/src/guide/continuous-integration.md24
-rw-r--r--src/tools/cargo/src/doc/src/reference/environment-variables.md3
-rw-r--r--src/tools/cargo/src/doc/src/reference/manifest.md11
-rw-r--r--src/tools/cargo/src/doc/src/reference/pkgid-spec.md42
-rw-r--r--src/tools/cargo/src/doc/src/reference/profiles.md2
-rw-r--r--src/tools/cargo/src/doc/src/reference/registry-authentication.md10
-rw-r--r--src/tools/cargo/src/doc/src/reference/registry-web-api.md2
-rw-r--r--src/tools/cargo/src/doc/src/reference/semver.md2
-rw-r--r--src/tools/cargo/src/doc/src/reference/specifying-dependencies.md1
-rw-r--r--src/tools/cargo/src/doc/src/reference/unstable.md76
-rw-r--r--src/tools/cargo/src/doc/src/reference/workspaces.md2
-rw-r--r--src/tools/cargo/src/etc/cargo.bashcomp.sh4
-rw-r--r--src/tools/cargo/src/etc/man/cargo-search.12
-rw-r--r--src/tools/cargo/tests/build-std/main.rs45
-rw-r--r--src/tools/cargo/tests/testsuite/alt_registry.rs92
-rw-r--r--src/tools/cargo/tests/testsuite/artifact_dep.rs7
-rw-r--r--src/tools/cargo/tests/testsuite/build.rs236
-rw-r--r--src/tools/cargo/tests/testsuite/build_script.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/build_script_env.rs2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/Cargo.toml4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/dependency/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/primary/Cargo.toml7
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/in1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/mod.rs24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/out/Cargo.toml5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log17
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/mod.rs9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_public/in/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_public/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_public/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_public/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/no_public/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/out/Cargo.toml11
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml3
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/public/in/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/public/in/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/public/mod.rs26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/public/out/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/public/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/public/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/mod.rs2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/stderr.log5
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_features.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/empty_name/in/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/empty_name/mod.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/empty_name/out/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/empty_name/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/empty_name/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log4
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log2
-rw-r--r--src/tools/cargo/tests/testsuite/check.rs49
-rw-r--r--src/tools/cargo/tests/testsuite/check_cfg.rs85
-rw-r--r--src/tools/cargo/tests/testsuite/clean.rs55
-rw-r--r--src/tools/cargo/tests/testsuite/config.rs6
-rw-r--r--src/tools/cargo/tests/testsuite/custom_target.rs2
-rw-r--r--src/tools/cargo/tests/testsuite/doc.rs19
-rw-r--r--src/tools/cargo/tests/testsuite/docscrape.rs78
-rw-r--r--src/tools/cargo/tests/testsuite/features.rs37
-rw-r--r--src/tools/cargo/tests/testsuite/features_namespaced.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/fetch.rs2
-rw-r--r--src/tools/cargo/tests/testsuite/fix.rs164
-rw-r--r--src/tools/cargo/tests/testsuite/freshness.rs8
-rw-r--r--src/tools/cargo/tests/testsuite/generate_lockfile.rs4
-rw-r--r--src/tools/cargo/tests/testsuite/git.rs31
-rw-r--r--src/tools/cargo/tests/testsuite/git_auth.rs10
-rw-r--r--src/tools/cargo/tests/testsuite/global_cache_tracker.rs1862
-rw-r--r--src/tools/cargo/tests/testsuite/install.rs122
-rw-r--r--src/tools/cargo/tests/testsuite/main.rs7
-rw-r--r--src/tools/cargo/tests/testsuite/messages.rs2
-rw-r--r--src/tools/cargo/tests/testsuite/metabuild.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/package.rs239
-rw-r--r--src/tools/cargo/tests/testsuite/patch.rs83
-rw-r--r--src/tools/cargo/tests/testsuite/path.rs11
-rw-r--r--src/tools/cargo/tests/testsuite/pkgid.rs98
-rw-r--r--src/tools/cargo/tests/testsuite/profile_config.rs6
-rw-r--r--src/tools/cargo/tests/testsuite/profile_custom.rs16
-rw-r--r--src/tools/cargo/tests/testsuite/profile_trim_paths.rs267
-rw-r--r--src/tools/cargo/tests/testsuite/pub_priv.rs235
-rw-r--r--src/tools/cargo/tests/testsuite/publish_lockfile.rs12
-rw-r--r--src/tools/cargo/tests/testsuite/registry.rs23
-rw-r--r--src/tools/cargo/tests/testsuite/replace.rs14
-rw-r--r--src/tools/cargo/tests/testsuite/rust_version.rs81
-rw-r--r--src/tools/cargo/tests/testsuite/rustflags.rs6
-rw-r--r--src/tools/cargo/tests/testsuite/test.rs37
-rw-r--r--src/tools/cargo/tests/testsuite/update.rs94
-rw-r--r--src/tools/cargo/tests/testsuite/workspaces.rs1
-rw-r--r--src/tools/cargo/triagebot.toml1
-rw-r--r--src/tools/cargo/windows.manifest.xml28
-rw-r--r--src/tools/clippy/.github/workflows/clippy_bors.yml2
-rw-r--r--src/tools/clippy/.github/workflows/deploy.yml4
-rw-r--r--src/tools/clippy/CHANGELOG.md74
-rw-r--r--src/tools/clippy/CONTRIBUTING.md8
-rw-r--r--src/tools/clippy/Cargo.toml4
-rw-r--r--src/tools/clippy/book/src/development/adding_lints.md16
-rw-r--r--src/tools/clippy/book/src/development/defining_lints.md2
-rw-r--r--src/tools/clippy/book/src/development/type_checking.md2
-rw-r--r--src/tools/clippy/book/src/lint_configuration.md14
-rw-r--r--src/tools/clippy/clippy.toml6
-rw-r--r--src/tools/clippy/clippy_config/Cargo.toml2
-rw-r--r--src/tools/clippy/clippy_config/src/conf.rs6
-rw-r--r--src/tools/clippy/clippy_config/src/msrvs.rs1
-rw-r--r--src/tools/clippy/clippy_config/src/types.rs24
-rw-r--r--src/tools/clippy/clippy_dev/Cargo.toml2
-rw-r--r--src/tools/clippy/clippy_dev/src/lint.rs10
-rw-r--r--src/tools/clippy/clippy_dev/src/new_lint.rs8
-rw-r--r--src/tools/clippy/clippy_dev/src/serve.rs4
-rw-r--r--src/tools/clippy/clippy_lints/Cargo.toml7
-rw-r--r--src/tools/clippy/clippy_lints/src/absolute_paths.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/allow_attributes.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/almost_complete_range.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/approx_const.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/as_conversions.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/asm_syntax.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/assertions_on_constants.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/async_yields_async.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/attrs.rs166
-rw-r--r--src/tools/clippy/clippy_lints/src/await_holding_invalid.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/blocks_in_conditions.rs137
-rw-r--r--src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs142
-rw-r--r--src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/booleans.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/box_default.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs25
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs25
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs57
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs55
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs98
-rw-r--r--src/tools/clippy/clippy_lints/src/checked_conversions.rs113
-rw-r--r--src/tools/clippy/clippy_lints/src/cognitive_complexity.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/collapsible_if.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/collection_is_never_read.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/comparison_chain.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/copies.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/copy_iterator.rs40
-rw-r--r--src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/create_dir.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/dbg_macro.rs35
-rw-r--r--src/tools/clippy/clippy_lints/src/declared_lints.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/default.rs177
-rw-r--r--src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs124
-rw-r--r--src/tools/clippy/clippy_lints/src/default_union_representation.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/dereference.rs287
-rw-r--r--src/tools/clippy/clippy_lints/src/derivable_impls.rs124
-rw-r--r--src/tools/clippy/clippy_lints/src/derive.rs231
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_macros.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_methods.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_names.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_types.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/doc/link_with_quotes.rs20
-rw-r--r--src/tools/clippy/clippy_lints/src/doc/markdown.rs119
-rw-r--r--src/tools/clippy/clippy_lints/src/doc/missing_headers.rs86
-rw-r--r--src/tools/clippy/clippy_lints/src/doc/mod.rs (renamed from src/tools/clippy/clippy_lints/src/doc.rs)469
-rw-r--r--src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs135
-rw-r--r--src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs48
-rw-r--r--src/tools/clippy/clippy_lints/src/double_parens.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/drop_forget_ref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/duplicate_mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/else_if_without_else.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_drop.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_enum.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/endian_bytes.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/entry.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/enum_clike.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/equatable_if_let.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/error_impl_error.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/escape.rs20
-rw-r--r--src/tools/clippy/clippy_lints/src/eta_reduction.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/excessive_bools.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/excessive_nesting.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/exhaustive_items.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/exit.rs23
-rw-r--r--src/tools/clippy/clippy_lints/src/explicit_write.rs29
-rw-r--r--src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/fallible_impl_from.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/float_literal.rs110
-rw-r--r--src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs460
-rw-r--r--src/tools/clippy/clippy_lints/src/format.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/format_args.rs83
-rw-r--r--src/tools/clippy/clippy_lints/src/format_impl.rs117
-rw-r--r--src/tools/clippy/clippy_lints/src/format_push_string.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/formatting.rs215
-rw-r--r--src/tools/clippy/clippy_lints/src/four_forward_slashes.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/from_over_into.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/from_str_radix_10.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs82
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/mod.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/result.rs101
-rw-r--r--src/tools/clippy/clippy_lints/src/future_not_send.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/if_let_mutex.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/if_not_else.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/impl_hash_with_borrow_str_and_bytes.rs106
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_hasher.rs72
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_return.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs144
-rw-r--r--src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs86
-rw-r--r--src/tools/clippy/clippy_lints/src/index_refutable_slice.rs58
-rw-r--r--src/tools/clippy/clippy_lints/src/indexing_slicing.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/ineffective_open_options.rs95
-rw-r--r--src/tools/clippy/clippy_lints/src/infinite_iter.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/inherent_impl.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/inherent_to_string.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/init_numbered_fields.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/instant_subtraction.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/int_plus_one.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/item_name_repetitions.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/items_after_statements.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/items_after_test_module.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/iter_over_hash_type.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/large_const_arrays.rs68
-rw-r--r--src/tools/clippy/clippy_lints/src/large_enum_variant.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/large_futures.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/large_include_file.rs58
-rw-r--r--src/tools/clippy/clippy_lints/src/large_stack_arrays.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/large_stack_frames.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/len_zero.rs73
-rw-r--r--src/tools/clippy/clippy_lints/src/let_if_seq.rs161
-rw-r--r--src/tools/clippy/clippy_lints/src/let_underscore.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs40
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.rs206
-rw-r--r--src/tools/clippy/clippy_lints/src/lifetimes.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/literal_representation.rs113
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/infinite_loop.rs125
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_find.rs177
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs94
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs124
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/mod.rs49
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs172
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/never_loop.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/same_item_push.rs118
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/utils.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs27
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs89
-rw-r--r--src/tools/clippy/clippy_lints/src/macro_use.rs64
-rw-r--r--src/tools/clippy/clippy_lints/src/main_recursion.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_assert.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_async_fn.rs194
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_bits.rs84
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_clamp.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_float_methods.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_hash_one.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_let_else.rs118
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_range_patterns.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_retain.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_string_new.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_strip.rs171
-rw-r--r--src/tools/clippy/clippy_lints/src/map_unit_fn.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/match_result_ok.rs55
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs94
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs61
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_filter.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs103
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_utils.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs43
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs135
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs49
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs27
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs102
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/single_match.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/try_err.rs162
-rw-r--r--src/tools/clippy/clippy_lints/src/mem_replace.rs104
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs91
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bytecount.rs76
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs38
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/err_expect.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs29
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map.rs164
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_next.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs112
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/get_first.rs64
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs55
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs35
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs103
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/join_absolute_paths.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs72
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_clone.rs89
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_flatten.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_identity.rs32
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/mod.rs160
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/needless_collect.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/ok_expect.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_or_err_ok.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs43
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/result_map_or_else_none.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/search_is_some.rs103
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/str_splitn.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs56
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs25
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs38
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs85
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs121
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs112
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs312
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/useless_asref.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/utils.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/zst_offset.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/min_ident_chars.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/minmax.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/misc.rs124
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs102
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_assert_message.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_doc.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_inline.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_trait_methods.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/module_style.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/multi_assignments.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_key.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_mut.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_reference.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/mutex_atomic.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs93
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_bool.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_continue.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_else.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_for_each.rs96
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_if.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_late_init.rs47
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs205
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_question_mark.rs47
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_update.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/neg_multiply.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/new_without_default.rs125
-rw-r--r--src/tools/clippy/clippy_lints/src/no_effect.rs169
-rw-r--r--src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/non_canonical_impls.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/non_copy_const.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/non_expressive_names.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs130
-rw-r--r--src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs117
-rw-r--r--src/tools/clippy/clippy_lints/src/octal_escapes.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs68
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs143
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/eq_op.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/float_cmp.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/op_ref.rs57
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/option_env_unwrap.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/option_if_let_else.rs166
-rw-r--r--src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs63
-rw-r--r--src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/panic_unimplemented.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/partial_pub_fields.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/partialeq_to_none.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/precedence.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/ptr.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/pub_use.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/question_mark.rs153
-rw-r--r--src/tools/clippy/clippy_lints/src/question_mark_used.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/ranges.rs214
-rw-r--r--src/tools/clippy/clippy_lints/src/raw_strings.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs38
-rw-r--r--src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_async_block.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_clone.rs116
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_closure_call.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_else.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_field_names.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_locals.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_slicing.rs144
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_type_annotations.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/ref_option_ref.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/ref_patterns.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/reference.rs97
-rw-r--r--src/tools/clippy/clippy_lints/src/regex.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/renamed_lints.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/repeat_vec_with_capacity.rs114
-rw-r--r--src/tools/clippy/clippy_lints/src/reserve_after_initialization.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs56
-rw-r--r--src/tools/clippy/clippy_lints/src/returns.rs106
-rw-r--r--src/tools/clippy/clippy_lints/src/same_name_method.rs29
-rw-r--r--src/tools/clippy/clippy_lints/src/self_named_constructors.rs32
-rw-r--r--src/tools/clippy/clippy_lints/src/semicolon_block.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs48
-rw-r--r--src/tools/clippy/clippy_lints/src/serde_api.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/shadow.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/single_call_fn.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/single_component_path_imports.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs77
-rw-r--r--src/tools/clippy/clippy_lints/src/size_of_ref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs112
-rw-r--r--src/tools/clippy/clippy_lints/src/std_instead_of_core.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/strings.rs278
-rw-r--r--src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs80
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs95
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs107
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/swap.rs72
-rw-r--r--src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/temporary_assignment.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/to_digit_is_some.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/trailing_empty_array.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/trait_bounds.rs329
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/mod.rs81
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs17
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs110
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/types/borrowed_box.rs145
-rw-r--r--src/tools/clippy/clippy_lints/src/types/box_collection.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/types/mod.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/types/option_option.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/types/rc_mutex.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/types/utils.rs17
-rw-r--r--src/tools/clippy/clippy_lints/src/types/vec_box.rs84
-rw-r--r--src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/unicode.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/uninhabited_references.rs84
-rw-r--r--src/tools/clippy/clippy_lints/src/uninit_vec.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs95
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs35
-rw-r--r--src/tools/clippy/clippy_lints/src/unnamed_address.rs100
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_map_on_constructor.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs77
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_async.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_io_amount.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_peekable.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_rounding.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_self.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_unit.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/unwrap.rs194
-rw-r--r--src/tools/clippy/clippy_lints/src/unwrap_in_result.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/use_self.rs206
-rw-r--r--src/tools/clippy/clippy_lints/src/useless_conversion.rs115
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/author.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs79
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/compiler_lint_functions.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs166
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs93
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs25
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs35
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs199
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/vec.rs148
-rw-r--r--src/tools/clippy/clippy_lints/src/vec_init_then_push.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/visibility.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/wildcard_imports.rs108
-rw-r--r--src/tools/clippy/clippy_lints/src/write.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/zero_div_zero.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs38
-rw-r--r--src/tools/clippy/clippy_utils/Cargo.toml5
-rw-r--r--src/tools/clippy/clippy_utils/src/ast_utils.rs27
-rw-r--r--src/tools/clippy/clippy_utils/src/check_proc_macro.rs6
-rw-r--r--src/tools/clippy/clippy_utils/src/consts.rs176
-rw-r--r--src/tools/clippy/clippy_utils/src/diagnostics.rs12
-rw-r--r--src/tools/clippy/clippy_utils/src/eager_or_lazy.rs52
-rw-r--r--src/tools/clippy/clippy_utils/src/higher.rs77
-rw-r--r--src/tools/clippy/clippy_utils/src/hir_utils.rs118
-rw-r--r--src/tools/clippy/clippy_utils/src/lib.rs450
-rw-r--r--src/tools/clippy/clippy_utils/src/paths.rs9
-rw-r--r--src/tools/clippy/clippy_utils/src/sugg.rs7
-rw-r--r--src/tools/clippy/clippy_utils/src/ty.rs74
-rw-r--r--src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs25
-rw-r--r--src/tools/clippy/declare_clippy_lint/Cargo.toml4
-rw-r--r--src/tools/clippy/declare_clippy_lint/src/lib.rs2
-rw-r--r--src/tools/clippy/lintcheck/src/main.rs10
-rw-r--r--src/tools/clippy/rust-toolchain2
-rw-r--r--src/tools/clippy/src/driver.rs10
-rw-r--r--src/tools/clippy/src/main.rs2
-rw-r--r--src/tools/clippy/tests/headers.rs7
-rw-r--r--src/tools/clippy/tests/integration.rs8
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/Cargo.stderr2
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.stderr2
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.stderr2
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.stderr2
-rw-r--r--src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr2
-rw-r--r--src/tools/clippy/tests/ui-internal/default_lint.stderr2
-rw-r--r--src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.rs27
-rw-r--r--src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.stderr17
-rw-r--r--src/tools/clippy/tests/ui-internal/if_chain_style.rs97
-rw-r--r--src/tools/clippy/tests/ui-internal/if_chain_style.stderr86
-rw-r--r--src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr2
-rw-r--r--src/tools/clippy/tests/ui-internal/outer_expn_data.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/decimal_literal_representation/decimal_literal_representation.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/disallowed_names_replace/disallowed_names.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/disallowed_script_idents/disallowed_script_idents.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/duplicated_keys/duplicated_keys.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated/duplicated_keys.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated_2/duplicated_keys.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/excessive_nesting/excessive_nesting.rs2
-rw-r--r--src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/impl_trait_in_params/impl_trait_in_params.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/large_stack_frames/large_stack_frames.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/large_types_passed_by_value/large_types_passed_by_value.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/manual_let_else/manual_let_else.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/private-doc-errors/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.rs54
-rw-r--r--src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.stderr64
-rw-r--r--src/tools/clippy/tests/ui-toml/result_large_err/result_large_err.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/semicolon_block/semicolon_inside_block.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/too_large_for_stack/boxed_local.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/too_large_for_stack/useless_vec.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/too_many_arguments/too_many_arguments.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/type_complexity/type_complexity.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/type_repetition_in_bounds/main.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/unnecessary_box_returns/unnecessary_box_returns.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/verbose_bit_mask/verbose_bit_mask.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr2
-rw-r--r--src/tools/clippy/tests/ui/arc_with_non_send_sync.rs6
-rw-r--r--src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr32
-rw-r--r--src/tools/clippy/tests/ui/attrs.rs3
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed2
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs2
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_conditions.fixed (renamed from src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed)26
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_conditions.rs (renamed from src/tools/clippy/tests/ui/blocks_in_if_conditions.rs)26
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_conditions.stderr (renamed from src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr)33
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_conditions_closure.rs (renamed from src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs)21
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_conditions_closure.stderr (renamed from src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr)21
-rw-r--r--src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr2
-rw-r--r--src/tools/clippy/tests/ui/box_default.fixed14
-rw-r--r--src/tools/clippy/tests/ui/box_default.rs14
-rw-r--r--src/tools/clippy/tests/ui/cfg_features.fixed20
-rw-r--r--src/tools/clippy/tests/ui/cfg_features.rs20
-rw-r--r--src/tools/clippy/tests/ui/cfg_features.stderr42
-rw-r--r--src/tools/clippy/tests/ui/char_lit_as_u8.stderr2
-rw-r--r--src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr2
-rw-r--r--src/tools/clippy/tests/ui/copy_iterator.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-10148.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-11422.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-11803.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-11803.stderr26
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2774.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3717.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3891.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5497.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5835.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5872.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6254.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6255.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6256.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7169.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7868.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7869.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8250.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8821.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-9041.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-9445.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-96721.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/needless_pass_by_value-w-late-bound.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crate_in_macro_def.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr2
-rw-r--r--src/tools/clippy/tests/ui/dbg_macro/auxiliary/submodule.rs3
-rw-r--r--src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs (renamed from src/tools/clippy/tests/ui/dbg_macro.rs)4
-rw-r--r--src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr (renamed from src/tools/clippy/tests/ui/dbg_macro.stderr)54
-rw-r--r--src/tools/clippy/tests/ui/def_id_nocore.stderr2
-rw-r--r--src/tools/clippy/tests/ui/doc/doc-fixable.fixed3
-rw-r--r--src/tools/clippy/tests/ui/doc/doc-fixable.rs3
-rw-r--r--src/tools/clippy/tests/ui/doc/doc-fixable.stderr24
-rw-r--r--src/tools/clippy/tests/ui/doc_link_with_quotes.rs6
-rw-r--r--src/tools/clippy/tests/ui/doc_link_with_quotes.stderr8
-rw-r--r--src/tools/clippy/tests/ui/double_neg.stderr2
-rw-r--r--src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr2
-rw-r--r--src/tools/clippy/tests/ui/empty_enum.stderr2
-rw-r--r--src/tools/clippy/tests/ui/entry_btree.stderr2
-rw-r--r--src/tools/clippy/tests/ui/exit1.stderr2
-rw-r--r--src/tools/clippy/tests/ui/exit2.stderr2
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.fixed45
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.rs45
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.stderr32
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next.stderr2
-rw-r--r--src/tools/clippy/tests/ui/floating_point_mul_add.fixed18
-rw-r--r--src/tools/clippy/tests/ui/floating_point_mul_add.rs18
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr2
-rw-r--r--src/tools/clippy/tests/ui/functions_maxlines.stderr2
-rw-r--r--src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.rs136
-rw-r--r--src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.stderr41
-rw-r--r--src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr2
-rw-r--r--src/tools/clippy/tests/ui/ineffective_open_options.fixed41
-rw-r--r--src/tools/clippy/tests/ui/ineffective_open_options.rs41
-rw-r--r--src/tools/clippy/tests/ui/ineffective_open_options.stderr17
-rw-r--r--src/tools/clippy/tests/ui/infallible_destructuring_match.fixed2
-rw-r--r--src/tools/clippy/tests/ui/infallible_destructuring_match.rs2
-rw-r--r--src/tools/clippy/tests/ui/infinite_loops.rs366
-rw-r--r--src/tools/clippy/tests/ui/infinite_loops.stderr259
-rw-r--r--src/tools/clippy/tests/ui/inspect_for_each.stderr2
-rw-r--r--src/tools/clippy/tests/ui/issue-3145.stderr2
-rw-r--r--src/tools/clippy/tests/ui/issue_2356.stderr2
-rw-r--r--src/tools/clippy/tests/ui/items_after_test_module/in_submodule.stderr2
-rw-r--r--src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr2
-rw-r--r--src/tools/clippy/tests/ui/iter_kv_map.fixed43
-rw-r--r--src/tools/clippy/tests/ui/iter_kv_map.rs43
-rw-r--r--src/tools/clippy/tests/ui/iter_kv_map.stderr62
-rw-r--r--src/tools/clippy/tests/ui/iter_next_loop.stderr2
-rw-r--r--src/tools/clippy/tests/ui/iter_over_hash_type.rs74
-rw-r--r--src/tools/clippy/tests/ui/iter_over_hash_type.stderr109
-rw-r--r--src/tools/clippy/tests/ui/join_absolute_paths.rs30
-rw-r--r--src/tools/clippy/tests/ui/join_absolute_paths.stderr68
-rw-r--r--src/tools/clippy/tests/ui/lines_filter_map_ok.fixed6
-rw-r--r--src/tools/clippy/tests/ui/lines_filter_map_ok.rs6
-rw-r--r--src/tools/clippy/tests/ui/lines_filter_map_ok.stderr34
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.stderr12
-rw-r--r--src/tools/clippy/tests/ui/manual_filter.fixed2
-rw-r--r--src/tools/clippy/tests/ui/manual_filter.rs2
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else.rs113
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else.stderr172
-rw-r--r--src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs20
-rw-r--r--src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr31
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs2
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr23
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr22
-rw-r--r--src/tools/clippy/tests/ui/manual_ok_or.stderr11
-rw-r--r--src/tools/clippy/tests/ui/manual_try_fold.rs30
-rw-r--r--src/tools/clippy/tests/ui/map_err.stderr2
-rw-r--r--src/tools/clippy/tests/ui/map_identity.fixed42
-rw-r--r--src/tools/clippy/tests/ui/map_identity.rs42
-rw-r--r--src/tools/clippy/tests/ui/map_identity.stderr32
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or.stderr30
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr4
-rw-r--r--src/tools/clippy/tests/ui/mem_replace_macro.stderr2
-rw-r--r--src/tools/clippy/tests/ui/methods_fixable.stderr2
-rw-r--r--src/tools/clippy/tests/ui/methods_unfixable.stderr2
-rw-r--r--src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed15
-rw-r--r--src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs15
-rw-r--r--src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr54
-rw-r--r--src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs22
-rw-r--r--src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr21
-rw-r--r--src/tools/clippy/tests/ui/missing_doc_crate_missing.stderr2
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr2
-rw-r--r--src/tools/clippy/tests/ui/mut_mutex_lock.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_bitwise_bool.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_bool_assign.stderr3
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow.fixed51
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow.rs51
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow.stderr26
-rw-r--r--src/tools/clippy/tests/ui/needless_borrows_for_generic_args.fixed15
-rw-r--r--src/tools/clippy/tests/ui/needless_borrows_for_generic_args.rs15
-rw-r--r--src/tools/clippy/tests/ui/needless_else.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_if.fixed2
-rw-r--r--src/tools/clippy/tests/ui/needless_if.rs2
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.fixed2
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.rs2
-rw-r--r--src/tools/clippy/tests/ui/needless_option_take.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs7
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr62
-rw-r--r--src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed40
-rw-r--r--src/tools/clippy/tests/ui/needless_return_with_question_mark.rs40
-rw-r--r--src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr10
-rw-r--r--src/tools/clippy/tests/ui/needless_update.stderr2
-rw-r--r--src/tools/clippy/tests/ui/new_ret_no_self_overflow.stderr2
-rw-r--r--src/tools/clippy/tests/ui/no_effect.rs31
-rw-r--r--src/tools/clippy/tests/ui/no_effect.stderr58
-rw-r--r--src/tools/clippy/tests/ui/non_minimal_cfg2.stderr2
-rw-r--r--src/tools/clippy/tests/ui/obfuscated_if_else.stderr2
-rw-r--r--src/tools/clippy/tests/ui/option_as_ref_deref.stderr36
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.fixed26
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.rs34
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.stderr48
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_err_ok.fixed7
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_err_ok.rs7
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_err_ok.stderr11
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_none.stderr10
-rw-r--r--src/tools/clippy/tests/ui/partialeq_ne_impl.stderr2
-rw-r--r--src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr2
-rw-r--r--src/tools/clippy/tests/ui/permissions_set_readonly_false.stderr2
-rw-r--r--src/tools/clippy/tests/ui/proc_macro.stderr2
-rw-r--r--src/tools/clippy/tests/ui/ptr_arg.rs6
-rw-r--r--src/tools/clippy/tests/ui/ptr_arg.stderr50
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.fixed115
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.rs115
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.stderr146
-rw-r--r--src/tools/clippy/tests/ui/pub_use.stderr2
-rw-r--r--src/tools/clippy/tests/ui/question_mark_used.stderr2
-rw-r--r--src/tools/clippy/tests/ui/range.stderr2
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed18
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs18
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr20
-rw-r--r--src/tools/clippy/tests/ui/redundant_guards.fixed57
-rw-r--r--src/tools/clippy/tests/ui/redundant_guards.rs57
-rw-r--r--src/tools/clippy/tests/ui/redundant_guards.stderr86
-rw-r--r--src/tools/clippy/tests/ui/regex.rs4
-rw-r--r--src/tools/clippy/tests/ui/rename.fixed9
-rw-r--r--src/tools/clippy/tests/ui/rename.rs5
-rw-r--r--src/tools/clippy/tests/ui/rename.stderr134
-rw-r--r--src/tools/clippy/tests/ui/renamed_builtin_attr.stderr2
-rw-r--r--src/tools/clippy/tests/ui/repeat_vec_with_capacity.fixed38
-rw-r--r--src/tools/clippy/tests/ui/repeat_vec_with_capacity.rs38
-rw-r--r--src/tools/clippy/tests/ui/repeat_vec_with_capacity.stderr40
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.fixed7
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.rs7
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.stderr16
-rw-r--r--src/tools/clippy/tests/ui/seek_from_current.stderr2
-rw-r--r--src/tools/clippy/tests/ui/self_named_constructors.stderr2
-rw-r--r--src/tools/clippy/tests/ui/serde.stderr2
-rw-r--r--src/tools/clippy/tests/ui/should_panic_without_expect.stderr2
-rw-r--r--src/tools/clippy/tests/ui/single_element_loop.fixed12
-rw-r--r--src/tools/clippy/tests/ui/single_element_loop.stderr72
-rw-r--r--src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr2
-rw-r--r--src/tools/clippy/tests/ui/string_to_string.stderr2
-rw-r--r--src/tools/clippy/tests/ui/test_attr_in_doctest.rs51
-rw-r--r--src/tools/clippy/tests/ui/test_attr_in_doctest.stderr29
-rw-r--r--src/tools/clippy/tests/ui/tests_outside_test_module.stderr2
-rw-r--r--src/tools/clippy/tests/ui/track-diagnostics.stderr2
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed3
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs3
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr8
-rw-r--r--src/tools/clippy/tests/ui/transmute_ref_to_ref.rs18
-rw-r--r--src/tools/clippy/tests/ui/transmute_ref_to_ref.stderr26
-rw-r--r--src/tools/clippy/tests/ui/types.stderr2
-rw-r--r--src/tools/clippy/tests/ui/uninhabited_references.rs22
-rw-r--r--src/tools/clippy/tests/ui/uninhabited_references.stderr39
-rw-r--r--src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr2
-rw-r--r--src/tools/clippy/tests/ui/unknown_attribute.stderr2
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_fallible_conversions.stderr3
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_fallible_conversions_unfixable.stderr11
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed78
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs78
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr266
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.fixed19
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.rs19
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.stderr38
-rw-r--r--src/tools/clippy/tests/ui/upper_case_acronyms.fixed8
-rw-r--r--src/tools/clippy/tests/ui/upper_case_acronyms.rs8
-rw-r--r--src/tools/clippy/tests/ui/upper_case_acronyms.stderr8
-rw-r--r--src/tools/clippy/tests/ui/vec.fixed34
-rw-r--r--src/tools/clippy/tests/ui/vec.rs34
-rw-r--r--src/tools/clippy/tests/ui/vec.stderr8
-rw-r--r--src/tools/clippy/tests/ui/vec_box_sized.fixed57
-rw-r--r--src/tools/clippy/tests/ui/vec_box_sized.rs37
-rw-r--r--src/tools/clippy/tests/ui/vec_box_sized.stderr32
-rw-r--r--src/tools/clippy/tests/ui/vec_resize_to_zero.stderr2
-rw-r--r--src/tools/clippy/tests/ui/vtable_address_comparisons.rs52
-rw-r--r--src/tools/clippy/tests/ui/vtable_address_comparisons.stderr68
-rw-r--r--src/tools/clippy/triagebot.toml2
-rw-r--r--src/tools/collect-license-metadata/src/main.rs10
-rw-r--r--src/tools/collect-license-metadata/src/path_tree.rs56
-rw-r--r--src/tools/collect-license-metadata/src/reuse.rs8
-rw-r--r--src/tools/compiletest/src/common.rs51
-rw-r--r--src/tools/compiletest/src/header.rs2
-rw-r--r--src/tools/compiletest/src/header/cfg.rs12
-rw-r--r--src/tools/compiletest/src/header/tests.rs5
-rw-r--r--src/tools/compiletest/src/lib.rs3
-rw-r--r--src/tools/compiletest/src/runtest.rs88
-rw-r--r--src/tools/error_index_generator/main.rs5
-rw-r--r--src/tools/generate-copyright/src/main.rs37
-rw-r--r--src/tools/generate-windows-sys/Cargo.toml2
-rw-r--r--src/tools/jsondocck/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/Cargo.lock563
-rw-r--r--src/tools/rust-analyzer/Cargo.toml47
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/fixture.rs80
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/input.rs229
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/lib.rs16
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/span.rs208
-rw-r--r--src/tools/rust-analyzer/crates/cfg/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/lib.rs9
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/tests.rs34
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/src/lib.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml23
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs36
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs66
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs115
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs35
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expander.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/find_path.rs178
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs33
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs300
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs96
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs168
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs96
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lower.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs160
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs156
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs174
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs59
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs113
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs241
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs70
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/test_db.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/visibility.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs65
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs195
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs39
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs460
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs246
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs716
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs189
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/files.rs375
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs376
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs423
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs1027
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs85
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/quote.rs170
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/span.rs124
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml29
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs28
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs157
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs54
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs37
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs33
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs101
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs42
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs64
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout.rs138
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs234
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs134
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir.rs37
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs88
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs77
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs72
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs62
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs84
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs24
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs60
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs61
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs455
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs612
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs175
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs63
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs83
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs140
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs334
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs261
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs1675
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs76
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs889
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs241
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs35
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs161
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs216
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs202
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs58
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs74
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs172
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs52
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs136
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs181
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs30
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/config.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/item.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/lib.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs522
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs87
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests.rs21
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs252
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs126
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs42
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs270
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs486
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml14
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs8415
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs268
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs28
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs125
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/source_change.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs45
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt105
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt437
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs173
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs80
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs82
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs129
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs129
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs106
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs79
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs98
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs55
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs111
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs59
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs67
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/annotations.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/expand_macro.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/extend_selection.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs60
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs79
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs81
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs227
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs54
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs218
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/interpret_function.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/moniker.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/navigation_target.rs620
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/parent_module.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs129
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs100
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/signature_help.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/status.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html48
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs47
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_hir.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_mir.rs6
-rw-r--r--src/tools/rust-analyzer/crates/intern/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/intern/src/lib.rs14
-rw-r--r--src/tools/rust-analyzer/crates/limit/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/src/lib.rs38
-rw-r--r--src/tools/rust-analyzer/crates/mbe/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs62
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs48
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs175
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs338
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs159
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/parser.rs68
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs1022
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs15
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs4
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/token_map.rs156
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs48
-rw-r--r--src/tools/rust-analyzer/crates/parser/Cargo.toml5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/event.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs95
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/params.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lexed_str.rs95
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/shortcuts.rs36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast86
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast92
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs47
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast66
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs1
-rw-r--r--src/tools/rust-analyzer/crates/paths/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs41
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs121
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs196
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs31
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs4
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs12
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs46
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs85
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs49
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs64
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs98
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs26
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/profile/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs4
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/project_json.rs3
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/tests.rs56
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs67
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json140
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json66
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt7
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt7
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt7
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt16
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml43
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs19
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs4
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs17
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs20
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs236
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs42
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs173
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs69
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs7
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs8
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs61
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs43
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs28
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs6
-rw-r--r--src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml20
-rw-r--r--src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs48
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/stdx/Cargo.toml5
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/anymap.rs379
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/lib.rs73
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/macros.rs7
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/process.rs2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml14
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs191
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs93
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/make.rs42
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs22
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs8
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs3
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/lib.rs87
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs12
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ptr.rs26
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/token_text.rs2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/utils.rs42
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/validation.rs2
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs91
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/tt/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/lib.rs264
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml3
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/vfs/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/lib.rs23
-rw-r--r--src/tools/rust-analyzer/docs/dev/guide.md6
-rw-r--r--src/tools/rust-analyzer/docs/dev/lsp-extensions.md5
-rw-r--r--src/tools/rust-analyzer/docs/user/generated_config.adoc41
-rw-r--r--src/tools/rust-analyzer/docs/user/manual.adoc22
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/lib/line-index/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/lib/line-index/src/lib.rs5
-rw-r--r--src/tools/rust-analyzer/lib/line-index/src/tests.rs28
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/lib.rs171
-rw-r--r--src/tools/rust-analyzer/rust-bors.toml1
-rw-r--r--src/tools/rust-analyzer/triagebot.toml7
-rw-r--r--src/tools/rust-analyzer/xtask/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/xtask/src/flags.rs3
-rw-r--r--src/tools/rust-analyzer/xtask/src/main.rs2
-rw-r--r--src/tools/rust-analyzer/xtask/src/metrics.rs17
-rw-r--r--src/tools/rustdoc-js/tester.js35
-rw-r--r--src/tools/rustfmt/Cargo.toml2
-rw-r--r--src/tools/rustfmt/src/closures.rs30
-rw-r--r--src/tools/rustfmt/src/comment.rs3
-rw-r--r--src/tools/rustfmt/src/config/file_lines.rs2
-rw-r--r--src/tools/rustfmt/src/config/mod.rs2
-rw-r--r--src/tools/rustfmt/src/emitter/checkstyle.rs1
-rw-r--r--src/tools/rustfmt/src/emitter/diff.rs2
-rw-r--r--src/tools/rustfmt/src/emitter/json.rs2
-rw-r--r--src/tools/rustfmt/src/emitter/modified_lines.rs1
-rw-r--r--src/tools/rustfmt/src/emitter/stdout.rs1
-rw-r--r--src/tools/rustfmt/src/expr.rs4
-rw-r--r--src/tools/rustfmt/src/ignore_path.rs2
-rw-r--r--src/tools/rustfmt/src/imports.rs1
-rw-r--r--src/tools/rustfmt/src/items.rs19
-rw-r--r--src/tools/rustfmt/src/macros.rs6
-rw-r--r--src/tools/rustfmt/src/matches.rs8
-rw-r--r--src/tools/rustfmt/src/pairs.rs2
-rw-r--r--src/tools/rustfmt/src/parse/macros/cfg_if.rs2
-rw-r--r--src/tools/rustfmt/src/parse/macros/lazy_static.rs6
-rw-r--r--src/tools/rustfmt/src/parse/macros/mod.rs6
-rw-r--r--src/tools/rustfmt/src/parse/session.rs24
-rw-r--r--src/tools/rustfmt/src/patterns.rs5
-rw-r--r--src/tools/rustfmt/src/reorder.rs2
-rw-r--r--src/tools/rustfmt/src/spanned.rs7
-rw-r--r--src/tools/rustfmt/src/types.rs3
-rw-r--r--src/tools/rustfmt/src/utils.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-2927-2.rs2
-rw-r--r--src/tools/rustfmt/tests/source/issue-2927.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue-2927-2.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue-2927.rs2
-rw-r--r--src/tools/suggest-tests/src/lib.rs16
-rw-r--r--src/tools/suggest-tests/src/static_suggestions.rs31
-rw-r--r--src/tools/tidy/src/deps.rs49
-rw-r--r--src/tools/tidy/src/main.rs1
-rw-r--r--src/tools/tidy/src/ui_tests.rs2
1675 files changed, 62066 insertions, 26890 deletions
diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs
index aed6796fa..9ac97236f 100644
--- a/src/tools/build-manifest/src/main.rs
+++ b/src/tools/build-manifest/src/main.rs
@@ -50,7 +50,9 @@ static HOSTS: &[&str] = &[
static TARGETS: &[&str] = &[
"aarch64-apple-darwin",
+ "arm64e-apple-darwin",
"aarch64-apple-ios",
+ "arm64e-apple-ios",
"aarch64-apple-ios-sim",
"aarch64-unknown-fuchsia",
"aarch64-linux-android",
@@ -82,7 +84,6 @@ static TARGETS: &[&str] = &[
"armv7r-none-eabi",
"armv7r-none-eabihf",
"armv7s-apple-ios",
- "asmjs-unknown-emscripten",
"bpfeb-unknown-none",
"bpfel-unknown-none",
"i386-apple-ios",
@@ -123,6 +124,7 @@ static TARGETS: &[&str] = &[
"riscv32im-unknown-none-elf",
"riscv32imc-unknown-none-elf",
"riscv32imac-unknown-none-elf",
+ "riscv32imafc-unknown-none-elf",
"riscv32gc-unknown-linux-gnu",
"riscv64imac-unknown-none-elf",
"riscv64gc-unknown-hermit",
@@ -150,7 +152,6 @@ static TARGETS: &[&str] = &[
"x86_64-linux-android",
"x86_64-pc-windows-gnu",
"x86_64-pc-windows-msvc",
- "x86_64-sun-solaris",
"x86_64-pc-solaris",
"x86_64-unikraft-linux-musl",
"x86_64-unknown-freebsd",
diff --git a/src/tools/bump-stage0/src/main.rs b/src/tools/bump-stage0/src/main.rs
index b007f9a22..bd97b4eaa 100644
--- a/src/tools/bump-stage0/src/main.rs
+++ b/src/tools/bump-stage0/src/main.rs
@@ -4,7 +4,7 @@ use indexmap::IndexMap;
use std::collections::HashMap;
const PATH: &str = "src/stage0.json";
-const COMPILER_COMPONENTS: &[&str] = &["rustc", "rust-std", "cargo"];
+const COMPILER_COMPONENTS: &[&str] = &["rustc", "rust-std", "cargo", "clippy-preview"];
const RUSTFMT_COMPONENTS: &[&str] = &["rustfmt-preview", "rustc"];
struct Tool {
diff --git a/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md b/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md
index 141ac5ed4..f8ff10d15 100644
--- a/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md
+++ b/src/tools/cargo/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,13 +1,8 @@
<!-- homu-ignore:start -->
<!--
-NOTICE: Due to limited review capacity, the Cargo team is not accepting new
-features or major changes at this time. Please consult with the team before
-opening a new PR. Only issues that have been explicitly marked as accepted
-will be reviewed.
-
Thanks for submitting a pull request 🎉! Here are some tips for you:
-* If this is your first contribution, read "Cargo Contribution Guide":
+* If this is your first contribution, read "Cargo Contribution Guide" first:
https://doc.crates.io/contrib/
* Run `cargo fmt --all` to format your code changes.
* Small commits and pull requests are always preferable and easy to review.
diff --git a/src/tools/cargo/.github/renovate.json5 b/src/tools/cargo/.github/renovate.json5
index 03e6d8da8..ca9c2a813 100644
--- a/src/tools/cargo/.github/renovate.json5
+++ b/src/tools/cargo/.github/renovate.json5
@@ -6,7 +6,7 @@
configMigration: true,
dependencyDashboard: false,
ignorePaths: [
- "**/tests/**",
+ '**/tests/**',
],
customManagers: [
{
@@ -40,7 +40,7 @@
{
commitMessageTopic: 'MSRV (1 version)',
matchManagers: [
- 'regex',
+ 'custom.regex',
],
matchPackageNames: [
'MSRV:1',
@@ -53,12 +53,12 @@
{
commitMessageTopic: 'MSRV (3 versions)',
matchManagers: [
- 'regex',
+ 'custom.regex',
],
matchPackageNames: [
'MSRV:3',
],
- "extractVersion": "^(?<version>\\d+\\.\\d+)", // Drop the patch version
+ extractVersion: '^(?<version>\\d+\\.\\d+)', // Drop the patch version
schedule: [
'* * * * *',
],
@@ -66,6 +66,16 @@
internalChecksFilter: 'strict',
groupName: 'msrv',
},
+ {
+ matchManagers: [
+ 'cargo',
+ ],
+ matchPackageNames: [
+ 'gix-features-for-configuration-only',
+ 'gix',
+ ],
+ groupName: 'gix',
+ },
// Goals:
// - Rollup safe upgrades to reduce CI runner load
// - Have lockfile and manifest in-sync (implicit rules)
diff --git a/src/tools/cargo/.github/workflows/main.yml b/src/tools/cargo/.github/workflows/main.yml
index 7b8055223..a49ac0988 100644
--- a/src/tools/cargo/.github/workflows/main.yml
+++ b/src/tools/cargo/.github/workflows/main.yml
@@ -66,9 +66,7 @@ jobs:
- uses: actions/checkout@v4
- run: rustup update stable && rustup default stable
- run: rustup component add clippy
- # Only check cargo lib for now
- # TODO: check every members
- - run: cargo clippy -p cargo --lib --no-deps -- -D warnings
+ - run: cargo clippy --workspace --all-targets --no-deps -- -D warnings
stale-label:
runs-on: ubuntu-latest
@@ -110,8 +108,6 @@ jobs:
CARGO_PROFILE_TEST_DEBUG: 1
CARGO_INCREMENTAL: 0
CARGO_PUBLIC_NETWORK_TESTS: 1
- # Deny warnings on CI to avoid warnings getting into the codebase.
- RUSTFLAGS: -D warnings
strategy:
matrix:
include:
@@ -152,7 +148,7 @@ jobs:
- run: rustup target add ${{ matrix.other }}
- run: rustup component add rustc-dev llvm-tools-preview rust-docs
if: startsWith(matrix.rust, 'nightly')
- - run: sudo apt update -y && sudo apt install gcc-multilib libsecret-1-0 libsecret-1-dev -y
+ - run: sudo apt update -y && sudo apt install lldb gcc-multilib libsecret-1-0 libsecret-1-dev -y
if: matrix.os == 'ubuntu-latest'
- run: rustup component add rustfmt || echo "rustfmt not available"
- name: Configure extra test environment
diff --git a/src/tools/cargo/CHANGELOG.md b/src/tools/cargo/CHANGELOG.md
index 08be017a1..eb0e46d0b 100644
--- a/src/tools/cargo/CHANGELOG.md
+++ b/src/tools/cargo/CHANGELOG.md
@@ -1,23 +1,156 @@
# Changelog
+## Cargo 1.76 (2024-02-08)
+[6790a512...HEAD](https://github.com/rust-lang/cargo/compare/6790a512...HEAD)
+
+### Added
+
+### Changed
+
+### Fixed
+
+### Nightly only
+
+- 🔥 The `-Zgc` flag enables garbage collection for deleting old, unused files
+ in cargo's cache. That is, downloaded source files and registry index under
+ the `CARGO_HOME` directory.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#gc)
+ [#12634](https://github.com/rust-lang/cargo/pull/12634)
+ [#12958](https://github.com/rust-lang/cargo/pull/12958)
+
+### Documentation
+
+### Internal
+
+- Refactored `Cargo.toml` parsing code in preparation for extracting an official schema API.
+ [#12954](https://github.com/rust-lang/cargo/pull/12954)
+ [#12960](https://github.com/rust-lang/cargo/pull/12960)
+ [#12961](https://github.com/rust-lang/cargo/pull/12961)
+- Fixed `--quiet` being used with nested subcommands.
+ [#12959](https://github.com/rust-lang/cargo/pull/12959)
+
## Cargo 1.75 (2023-12-28)
-[59596f0f...HEAD](https://github.com/rust-lang/cargo/compare/59596f0f...HEAD)
+[59596f0f...rust-1.75.0](https://github.com/rust-lang/cargo/compare/59596f0f...rust-1.75.0)
### Added
+- `package.version` field in `Cargo.toml` is now optional and defaults to `0.0.0`.
+ Packages without the `package.version` field cannot be published.
+ [#12786](https://github.com/rust-lang/cargo/pull/12786)
+- Links in `--timings` and `cargo doc` outputs are clickable on supported terminals,
+ controllable through `term.hyperlinks` config value.
+ [#12889](https://github.com/rust-lang/cargo/pull/12889)
+- Print environment variables for build script executions with `-vv`.
+ [#12829](https://github.com/rust-lang/cargo/pull/12829)
+- cargo-new: add new packages to [workspace.members] automatically.
+ [#12779](https://github.com/rust-lang/cargo/pull/12779)
+- cargo-doc: print a new `Generated` status displaying the full path.
+ [#12859](https://github.com/rust-lang/cargo/pull/12859)
+
### Changed
+- cargo-new: warn if crate name doesn't follow snake_case or kebab-case.
+ [#12766](https://github.com/rust-lang/cargo/pull/12766)
+- cargo-install: clarify the arg `<crate>` to install is positional.
+ [#12841](https://github.com/rust-lang/cargo/pull/12841)
+- cargo-install: Suggest an alternative version on MSRV failure.
+ [#12798](https://github.com/rust-lang/cargo/pull/12798)
+- cargo-install: reports more detailed SemVer errors.
+ [#12924](https://github.com/rust-lang/cargo/pull/12924)
+- cargo-install: install only once if there are crates duplicated.
+ [#12868](https://github.com/rust-lang/cargo/pull/12868)
+- cargo-remove: Clarify flag behavior of different dependency kinds.
+ [#12823](https://github.com/rust-lang/cargo/pull/12823)
+- cargo-remove: suggest when the dependency to remove exists only in another section.
+ [#12865](https://github.com/rust-lang/cargo/pull/12865)
+- cargo-update: Do not call it "Downgrading" when difference is only build metadata.
+ [#12796](https://github.com/rust-lang/cargo/pull/12796)
+- Enhanced help text to clarify `--test` flag is for Cargo targets, not test functions.
+ [#12915](https://github.com/rust-lang/cargo/pull/12915)
+- Included package name/version in build script warnings.
+ [#12799](https://github.com/rust-lang/cargo/pull/12799)
+- Provide next steps for bad -Z flag.
+ [#12857](https://github.com/rust-lang/cargo/pull/12857)
+- Suggest `cargo search` when `cargo-<command>` cannot be found.
+ [#12840](https://github.com/rust-lang/cargo/pull/12840)
+- Do not allow empty feature name.
+ [#12928](https://github.com/rust-lang/cargo/pull/12928)
+- Added unsupported short flag suggestion for `--target` and `--exclude` flags.
+ [#12805](https://github.com/rust-lang/cargo/pull/12805)
+- Added unsupported short flag suggestion for `--out-dir` flag.
+ [#12755](https://github.com/rust-lang/cargo/pull/12755)
+- Added unsupported lowercase `-z` flag suggestion for `-Z` flag.
+ [#12788](https://github.com/rust-lang/cargo/pull/12788)
+- Added better suggestion for unsupported `--path` flag.
+ [#12811](https://github.com/rust-lang/cargo/pull/12811)
+- Added detailed message when target directory path is invalid.
+ [#12820](https://github.com/rust-lang/cargo/pull/12820)
+
### Fixed
- Fixed corruption when cargo was killed while writing to files.
[#12744](https://github.com/rust-lang/cargo/pull/12744)
+- cargo-add: Preserve more comments
+ [#12838](https://github.com/rust-lang/cargo/pull/12838)
+- cargo-fix: preserve jobserver file descriptors on rustc invocation.
+ [#12951](https://github.com/rust-lang/cargo/pull/12951)
+- cargo-remove: Preserve feature comments
+ [#12837](https://github.com/rust-lang/cargo/pull/12837)
+- Removed unnecessary backslash in timings HTML report when error happens.
+ [#12934](https://github.com/rust-lang/cargo/pull/12934)
+- Fixed an invalid error message stating that a feature name can contain `-`.
+ [#12939](https://github.com/rust-lang/cargo/pull/12939)
+- When there's a version of a dependency in the lockfile,
+ Cargo would use that "exact" version, including the build metadata.
+ [#12772](https://github.com/rust-lang/cargo/pull/12772)
### Nightly only
+- Added `Edition2024` unstable feature.
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#edition-2024)
+ [#12771](https://github.com/rust-lang/cargo/pull/12771)
+- 🔥 The `-Ztrim-paths` feature adds a new profile setting to control how paths
+ are sanitized in the resulting binary.
+ ([RFC 3127](https://github.com/rust-lang/rfcs/blob/master/text/3127-trim-paths.md))
+ ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-trim-paths-option))
+ [#12625](https://github.com/rust-lang/cargo/pull/12625)
+ [#12900](https://github.com/rust-lang/cargo/pull/12900)
+ [#12908](https://github.com/rust-lang/cargo/pull/12908)
+- `-Zcheck-cfg`: Adjusted for new rustc syntax and behavior.
+ [#12845](https://github.com/rust-lang/cargo/pull/12845)
+- `-Zcheck-cfg`: Remove outdated option to `-Zcheck-cfg` warnings.
+ [#12884](https://github.com/rust-lang/cargo/pull/12884)
+- `public-dependency`: Support `public` dependency configuration with workspace deps.
+ [#12817](https://github.com/rust-lang/cargo/pull/12817)
+
### Documentation
- profile: add missing `strip` info.
[#12754](https://github.com/rust-lang/cargo/pull/12754)
+- features: a note about the new limit on number of features.
+ [#12913](https://github.com/rust-lang/cargo/pull/12913)
+- crates-io: Add doc comment for `NewCrate` struct.
+ [#12782](https://github.com/rust-lang/cargo/pull/12782)
+- resolver: Highlight commands to answer dep resolution questions.
+ [#12903](https://github.com/rust-lang/cargo/pull/12903)
+- cargo-bench: `--bench` is passed in unconditionally to bench harnesses.
+ [#12850](https://github.com/rust-lang/cargo/pull/12850)
+- cargo-login: mention args after `--` in manpage.
+ [#12832](https://github.com/rust-lang/cargo/pull/12832)
+- cargo-vendor: clarify config to use vendored source is printed to stdout
+ [#12893](https://github.com/rust-lang/cargo/pull/12893)
+- manifest: update to SPDX 2.3 license expression and 3.20 license list.
+ [#12827](https://github.com/rust-lang/cargo/pull/12827)
+- contrib: Policy on manifest editing
+ [#12836](https://github.com/rust-lang/cargo/pull/12836)
+- contrib: use `AND` search terms in mdbook search and fixed broken links.
+ [#12812](https://github.com/rust-lang/cargo/pull/12812)
+ [#12813](https://github.com/rust-lang/cargo/pull/12813)
+ [#12814](https://github.com/rust-lang/cargo/pull/12814)
+- contrib: Describe how to add a new package
+ [#12878](https://github.com/rust-lang/cargo/pull/12878)
+- contrib: Removed review capacity notice.
+ [#12842](https://github.com/rust-lang/cargo/pull/12842)
### Internal
@@ -25,8 +158,61 @@
[#12759](https://github.com/rust-lang/cargo/pull/12759)
- Updated to `cargo_metadata` 0.18.0.
[#12758](https://github.com/rust-lang/cargo/pull/12758)
+- Updated to `curl-sys` 0.4.68, which corresponds to curl 8.4.0.
+ [#12808](https://github.com/rust-lang/cargo/pull/12808)
+- Updated to `toml` 0.8.2.
+ [#12760](https://github.com/rust-lang/cargo/pull/12760)
+- Updated to `toml_edit` 0.20.2.
+ [#12761](https://github.com/rust-lang/cargo/pull/12761)
+- Updated to `gix` 0.55.2.
+ [#12906](https://github.com/rust-lang/cargo/pull/12906)
- Disabled the `custom_target::custom_bin_target` test on windows-gnu.
[#12763](https://github.com/rust-lang/cargo/pull/12763)
+- Refactored `Cargo.toml` parsing code in preparation of extracting an official
+ schema API.
+ [#12768](https://github.com/rust-lang/cargo/pull/12768)
+ [#12881](https://github.com/rust-lang/cargo/pull/12881)
+ [#12902](https://github.com/rust-lang/cargo/pull/12902)
+ [#12911](https://github.com/rust-lang/cargo/pull/12911)
+ [#12948](https://github.com/rust-lang/cargo/pull/12948)
+- Split out SemVer logic to its own module.
+ [#12926](https://github.com/rust-lang/cargo/pull/12926)
+ [#12940](https://github.com/rust-lang/cargo/pull/12940)
+- source: Prepare for new `PackageIDSpec` syntax
+ [#12938](https://github.com/rust-lang/cargo/pull/12938)
+- resolver: Consolidate logic in `VersionPreferences`
+ [#12930](https://github.com/rust-lang/cargo/pull/12930)
+- Make the `SourceId::precise` field an Enum.
+ [#12849](https://github.com/rust-lang/cargo/pull/12849)
+- shell: Write at once rather than in fragments.
+ [#12880](https://github.com/rust-lang/cargo/pull/12880)
+- Move up looking at index summary enum
+ [#12749](https://github.com/rust-lang/cargo/pull/12749)
+ [#12923](https://github.com/rust-lang/cargo/pull/12923)
+- Generate redirection HTML pages in CI for Cargo Contributor Guide.
+ [#12846](https://github.com/rust-lang/cargo/pull/12846)
+- Add new package cache lock modes.
+ [#12706](https://github.com/rust-lang/cargo/pull/12706)
+- Add regression test for issue 6915: features and transitive dev deps.
+ [#12907](https://github.com/rust-lang/cargo/pull/12907)
+- Auto-labeling when PR review state changes.
+ [#12856](https://github.com/rust-lang/cargo/pull/12856)
+- credential: include license files in all published crates.
+ [#12953](https://github.com/rust-lang/cargo/pull/12953)
+- credential: Filter `cargo-credential-*` dependencies by OS.
+ [#12949](https://github.com/rust-lang/cargo/pull/12949)
+- ci: bump cargo-semver-checks to 0.24.0
+ [#12795](https://github.com/rust-lang/cargo/pull/12795)
+- ci: set and verify all MSRVs for Cargo's crates automatically.
+ [#12767](https://github.com/rust-lang/cargo/pull/12767)
+ [#12654](https://github.com/rust-lang/cargo/pull/12654)
+- ci: use separate concurrency group for publishing Cargo Contributor Book.
+ [#12834](https://github.com/rust-lang/cargo/pull/12834)
+ [#12835](https://github.com/rust-lang/cargo/pull/12835)
+- ci: update `actions/checkout` action to v4
+ [#12762](https://github.com/rust-lang/cargo/pull/12762)
+- cargo-search: improved the margin calculation for the output.
+ [#12890](https://github.com/rust-lang/cargo/pull/12890)
## Cargo 1.74 (2023-11-16)
[80eca0e5...rust-1.74.0](https://github.com/rust-lang/cargo/compare/80eca0e5...rust-1.74.0)
@@ -70,6 +256,7 @@
- Allowed incomplete versions when they are unambiguous for flags like `--package`.
[#12591](https://github.com/rust-lang/cargo/pull/12591)
[#12614](https://github.com/rust-lang/cargo/pull/12614)
+ [#12806](https://github.com/rust-lang/cargo/pull/12806)
### Changed
diff --git a/src/tools/cargo/Cargo.lock b/src/tools/cargo/Cargo.lock
index a2d339b0c..36cf8804a 100644
--- a/src/tools/cargo/Cargo.lock
+++ b/src/tools/cargo/Cargo.lock
@@ -9,15 +9,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
+name = "ahash"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "version_check",
+ "zerocopy",
+]
+
+[[package]]
name = "aho-corasick"
-version = "1.0.2"
+version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
+checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [
"memchr",
]
[[package]]
+name = "allocator-api2"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+
+[[package]]
name = "anes"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -25,9 +43,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstream"
-version = "0.6.4"
+version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44"
+checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6"
dependencies = [
"anstyle",
"anstyle-parse",
@@ -45,30 +63,30 @@ checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
[[package]]
name = "anstyle-parse"
-version = "0.2.0"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e765fd216e48e067936442276d1d57399e37bce53c264d6fefbe298080cb57ee"
+checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
-version = "1.0.0"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
+checksum = "a3a318f1f38d2418400f8209655bfd825785afd25aa30bb7ba6cc792e4596748"
dependencies = [
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
name = "anstyle-wincon"
-version = "3.0.1"
+version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628"
+checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
dependencies = [
"anstyle",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -112,8 +130,10 @@ name = "benchsuite"
version = "0.0.0"
dependencies = [
"cargo",
+ "cargo-util",
"criterion",
"flate2",
+ "rand",
"tar",
"url",
]
@@ -141,9 +161,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.4.0"
+version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
+checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "bitmaps"
@@ -165,12 +185,12 @@ dependencies = [
[[package]]
name = "bstr"
-version = "1.6.2"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c2f7349907b712260e64b0afe2f84692af14a454be26187d9df565c7f69266a"
+checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c"
dependencies = [
"memchr",
- "regex-automata 0.3.8",
+ "regex-automata 0.4.3",
"serde",
]
@@ -185,15 +205,15 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.13.0"
+version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
+checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
[[package]]
name = "bytes"
-version = "1.4.0"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
+checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
[[package]]
name = "bytesize"
@@ -203,9 +223,9 @@ checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc"
[[package]]
name = "camino"
-version = "1.1.4"
+version = "1.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2"
+checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c"
dependencies = [
"serde",
]
@@ -222,7 +242,7 @@ dependencies = [
[[package]]
name = "cargo"
-version = "0.76.0"
+version = "0.77.0"
dependencies = [
"anstream",
"anstyle",
@@ -233,7 +253,7 @@ dependencies = [
"cargo-credential-libsecret",
"cargo-credential-macos-keychain",
"cargo-credential-wincred",
- "cargo-platform 0.1.6",
+ "cargo-platform 0.1.7",
"cargo-test-macro",
"cargo-test-support",
"cargo-util",
@@ -257,7 +277,7 @@ dependencies = [
"ignore",
"im-rc",
"indexmap",
- "itertools 0.11.0",
+ "itertools 0.12.0",
"jobserver",
"lazycell",
"libc",
@@ -270,6 +290,8 @@ dependencies = [
"pathdiff",
"pulldown-cmark",
"rand",
+ "regex",
+ "rusqlite",
"rustfix",
"same-file",
"semver",
@@ -282,7 +304,7 @@ dependencies = [
"shell-escape",
"snapbox",
"supports-hyperlinks",
- "syn 2.0.38",
+ "syn 2.0.39",
"tar",
"tempfile",
"time",
@@ -295,12 +317,12 @@ dependencies = [
"unicode-xid",
"url",
"walkdir",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
name = "cargo-credential"
-version = "0.4.1"
+version = "0.4.2"
dependencies = [
"anyhow",
"libc",
@@ -309,7 +331,7 @@ dependencies = [
"snapbox",
"thiserror",
"time",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -323,7 +345,7 @@ dependencies = [
[[package]]
name = "cargo-credential-libsecret"
-version = "0.4.1"
+version = "0.4.2"
dependencies = [
"anyhow",
"cargo-credential",
@@ -332,7 +354,7 @@ dependencies = [
[[package]]
name = "cargo-credential-macos-keychain"
-version = "0.4.1"
+version = "0.4.2"
dependencies = [
"cargo-credential",
"security-framework",
@@ -340,24 +362,24 @@ dependencies = [
[[package]]
name = "cargo-credential-wincred"
-version = "0.4.1"
+version = "0.4.2"
dependencies = [
"cargo-credential",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
name = "cargo-platform"
-version = "0.1.2"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+checksum = "e34637b3140142bdf929fb439e8aa4ebad7651ebf7b1080b3930aa16ac1459ff"
dependencies = [
"serde",
]
[[package]]
name = "cargo-platform"
-version = "0.1.6"
+version = "0.1.7"
dependencies = [
"serde",
]
@@ -380,7 +402,7 @@ dependencies = [
"flate2",
"git2",
"glob",
- "itertools 0.11.0",
+ "itertools 0.12.0",
"pasetors",
"serde",
"serde_json",
@@ -389,17 +411,19 @@ dependencies = [
"time",
"toml",
"url",
- "windows-sys",
+ "walkdir",
+ "windows-sys 0.52.0",
]
[[package]]
name = "cargo-util"
-version = "0.2.8"
+version = "0.2.9"
dependencies = [
"anyhow",
"core-foundation",
"filetime",
"hex",
+ "ignore",
"jobserver",
"libc",
"miow",
@@ -409,7 +433,7 @@ dependencies = [
"tempfile",
"tracing",
"walkdir",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -419,7 +443,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037"
dependencies = [
"camino",
- "cargo-platform 0.1.2",
+ "cargo-platform 0.1.5",
"semver",
"serde",
"serde_json",
@@ -434,11 +458,12 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
-version = "1.0.79"
+version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
+checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
dependencies = [
"jobserver",
+ "libc",
]
[[package]]
@@ -476,18 +501,18 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.4.7"
+version = "4.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b"
+checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
-version = "4.4.7"
+version = "4.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663"
+checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb"
dependencies = [
"anstream",
"anstyle",
@@ -537,9 +562,9 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "const-oid"
-version = "0.9.2"
+version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913"
+checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f"
[[package]]
name = "content_inspector"
@@ -552,9 +577,9 @@ dependencies = [
[[package]]
name = "core-foundation"
-version = "0.9.3"
+version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146"
+checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys",
"libc",
@@ -562,22 +587,22 @@ dependencies = [
[[package]]
name = "core-foundation-sys"
-version = "0.8.4"
+version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
+checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
[[package]]
name = "cpufeatures"
-version = "0.2.7"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58"
+checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
dependencies = [
"libc",
]
[[package]]
name = "crates-io"
-version = "0.39.1"
+version = "0.39.2"
dependencies = [
"curl",
"percent-encoding",
@@ -677,9 +702,9 @@ dependencies = [
[[package]]
name = "crypto-bigint"
-version = "0.5.2"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15"
+checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
dependencies = [
"generic-array",
"rand_core",
@@ -720,9 +745,9 @@ dependencies = [
[[package]]
name = "curl-sys"
-version = "0.4.68+curl-8.4.0"
+version = "0.4.70+curl-8.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4a0d18d88360e374b16b2273c832b5e57258ffc1d4aa4f96b108e0738d5752f"
+checksum = "3c0333d8849afe78a4c8102a429a446bfdd055832af071945520e835ae2d841e"
dependencies = [
"cc",
"libc",
@@ -731,14 +756,14 @@ dependencies = [
"openssl-sys",
"pkg-config",
"vcpkg",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
name = "der"
-version = "0.7.6"
+version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56acb310e15652100da43d130af8d97b509e95af61aab1c5a7939ef24337ee17"
+checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c"
dependencies = [
"const-oid",
"pem-rfc7468",
@@ -747,20 +772,15 @@ dependencies = [
[[package]]
name = "deranged"
-version = "0.3.8"
+version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946"
+checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc"
dependencies = [
+ "powerfmt",
"serde",
]
[[package]]
-name = "diff"
-version = "0.1.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
-
-[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -780,9 +800,9 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b"
[[package]]
name = "ecdsa"
-version = "0.16.7"
+version = "0.16.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0997c976637b606099b9985693efa3581e84e41f5c11ba5255f88711058ad428"
+checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
dependencies = [
"der",
"digest",
@@ -803,15 +823,15 @@ dependencies = [
[[package]]
name = "either"
-version = "1.8.1"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
+checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "elliptic-curve"
-version = "0.13.5"
+version = "0.13.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b"
+checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
dependencies = [
"base16ct",
"crypto-bigint",
@@ -854,23 +874,12 @@ dependencies = [
[[package]]
name = "errno"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a"
-dependencies = [
- "errno-dragonfly",
- "libc",
- "windows-sys",
-]
-
-[[package]]
-name = "errno-dragonfly"
-version = "0.1.2"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
- "cc",
"libc",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -886,6 +895,18 @@ dependencies = [
]
[[package]]
+name = "fallible-iterator"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
+
+[[package]]
+name = "fallible-streaming-iterator"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
+
+[[package]]
name = "faster-hex"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -895,10 +916,19 @@ dependencies = [
]
[[package]]
+name = "faster-hex"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183"
+dependencies = [
+ "serde",
+]
+
+[[package]]
name = "fastrand"
-version = "2.0.0"
+version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"
+checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "ff"
@@ -912,20 +942,20 @@ dependencies = [
[[package]]
name = "fiat-crypto"
-version = "0.1.20"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77"
+checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7"
[[package]]
name = "filetime"
-version = "0.2.22"
+version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
+checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
dependencies = [
"cfg-if",
"libc",
- "redox_syscall 0.3.5",
- "windows-sys",
+ "redox_syscall",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -962,9 +992,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "form_urlencoded"
-version = "1.2.0"
+version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
+checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = [
"percent-encoding",
]
@@ -982,9 +1012,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.10"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
+checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if",
"js-sys",
@@ -999,7 +1029,7 @@ version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbf97ba92db08df386e10c8ede66a2a0369bd277090afd8710e19e38de9ec0cd"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"libc",
"libgit2-sys",
"log",
@@ -1022,19 +1052,20 @@ dependencies = [
[[package]]
name = "gix"
-version = "0.55.2"
+version = "0.56.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "002667cd1ebb789313d0d0afe3d23b2821cf3b0e91605095f0e6d8751f0ceeea"
+checksum = "5b0dcdc9c60d66535897fa40a7ea2a635e72f99456b1d9ae86b7e170e80618cb"
dependencies = [
"gix-actor",
"gix-attributes",
+ "gix-command",
"gix-commitgraph",
"gix-config",
"gix-credentials",
"gix-date",
"gix-diff",
"gix-discover",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-filter",
"gix-fs",
"gix-glob",
@@ -1076,23 +1107,23 @@ dependencies = [
[[package]]
name = "gix-actor"
-version = "0.28.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "948a5f9e43559d16faf583694f1c742eb401ce24ce8e6f2238caedea7486433c"
+checksum = "2eadca029ef716b4378f7afb19f7ee101fde9e58ba1f1445971315ac866db417"
dependencies = [
"bstr",
"btoi",
"gix-date",
- "itoa 1.0.6",
+ "itoa 1.0.9",
"thiserror",
"winnow",
]
[[package]]
name = "gix-attributes"
-version = "0.20.0"
+version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dca120f0c6562d2d7cae467f2466e576d9f7f189beec2af2e026145107c729e2"
+checksum = "0f395469d38c76ec47cd1a6c5a53fbc3f13f737b96eaf7535f4e6b367e643381"
dependencies = [
"bstr",
"gix-glob",
@@ -1107,40 +1138,43 @@ dependencies = [
[[package]]
name = "gix-bitmap"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ccab4bc576844ddb51b78d81b4a42d73e6229660fa614dfc3d3999c874d1959"
+checksum = "d49e1a13a30d3f88be4bceae184dd13a2d3fb9ffa7515f7ed7ae771b857f4916"
dependencies = [
"thiserror",
]
[[package]]
name = "gix-chunk"
-version = "0.4.4"
+version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b42ea64420f7994000130328f3c7a2038f639120518870436d31b8bde704493"
+checksum = "d411ecd9b558b0c20b3252b7e409eec48eabc41d18324954fe526bac6e2db55f"
dependencies = [
"thiserror",
]
[[package]]
name = "gix-command"
-version = "0.2.10"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c576cfbf577f72c097b5f88aedea502cd62952bdc1fb3adcab4531d5525a4c7"
+checksum = "b3b54c1d8d63e6ef2adbd9b94d6e49ff168858510b44d3811cdd02dfacc4f0c9"
dependencies = [
"bstr",
+ "gix-path",
+ "gix-trace",
+ "shell-words",
]
[[package]]
name = "gix-commitgraph"
-version = "0.22.0"
+version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e8bc78b1a6328fa6d8b3a53b6c73997af37fd6bfc1d6c49f149e63bda5cbb36"
+checksum = "85a7007ba021f059803afaf6f8a48872422abc20550ac12ede6ddea2936cec36"
dependencies = [
"bstr",
"gix-chunk",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-hash",
"memmap2",
"thiserror",
@@ -1148,13 +1182,13 @@ dependencies = [
[[package]]
name = "gix-config"
-version = "0.31.0"
+version = "0.32.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cae98c6b4c66c09379bc35274b172587d6b0ac369a416c39128ad8c6454f9bb"
+checksum = "0341471d55d8676e98b88e121d7065dfa4c9c5acea4b6d6ecdd2846e85cce0c3"
dependencies = [
"bstr",
"gix-config-value",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-glob",
"gix-path",
"gix-ref",
@@ -1169,11 +1203,11 @@ dependencies = [
[[package]]
name = "gix-config-value"
-version = "0.14.0"
+version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea7505b97f4d8e7933e29735a568ba2f86d8de466669d9f0e8321384f9972f47"
+checksum = "6419db582ea84dfb58c7e7b0af7fd62c808aa14954af2936a33f89b0f4ed018e"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"bstr",
"gix-path",
"libc",
@@ -1182,9 +1216,9 @@ dependencies = [
[[package]]
name = "gix-credentials"
-version = "0.21.0"
+version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c5c5d74069b842a1861e581027ac6b7ad9ff66f5911c89b9f45484d7ebda6a4"
+checksum = "513dac42450b27946bd0a0535a3a5a88e473d6522e5e3439a129cab779c88f3d"
dependencies = [
"bstr",
"gix-command",
@@ -1192,28 +1226,30 @@ dependencies = [
"gix-path",
"gix-prompt",
"gix-sec",
+ "gix-trace",
"gix-url",
"thiserror",
]
[[package]]
name = "gix-date"
-version = "0.8.0"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc7df669639582dc7c02737642f76890b03b5544e141caba68a7d6b4eb551e0d"
+checksum = "468dfbe411f335f01525a1352271727f8e7772075a93fa747260f502086b30be"
dependencies = [
"bstr",
- "itoa 1.0.6",
+ "itoa 1.0.9",
"thiserror",
"time",
]
[[package]]
name = "gix-diff"
-version = "0.37.0"
+version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "931394f69fb8c9ed6afc0aae3487bd869e936339bcc13ed8884472af072e0554"
+checksum = "8119a985887cfe68f4bdf92e51bd64bc758a73882d82fcfc03ebcb164441c85d"
dependencies = [
+ "bstr",
"gix-hash",
"gix-object",
"thiserror",
@@ -1221,9 +1257,9 @@ dependencies = [
[[package]]
name = "gix-discover"
-version = "0.26.0"
+version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a45d5cf0321178883e38705ab2b098f625d609a7d4c391b33ac952eff2c490f2"
+checksum = "6fad89416ebe0b3b7df78464124e2a02417b6cd3743d48ad93df86f4d2929c07"
dependencies = [
"bstr",
"dunce",
@@ -1249,9 +1285,9 @@ dependencies = [
[[package]]
name = "gix-features"
-version = "0.36.0"
+version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51f4365ba17c4f218d7fd9ec102b8d2d3cb0ca200a835e81151ace7778aec827"
+checksum = "4d46a4a5c6bb5bebec9c0d18b65ada20e6517dbd7cf855b87dd4bbdce3a771b2"
dependencies = [
"bytes",
"crc32fast",
@@ -1268,9 +1304,9 @@ dependencies = [
[[package]]
name = "gix-filter"
-version = "0.6.0"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92f674d3fdb6b1987b04521ec9a5b7be8650671f2c4bbd17c3c81e2a364242ff"
+checksum = "6d6a5c9d8e55c364e7c226919c19c9a28be1392d6208b5008059fa94ff7e2bf0"
dependencies = [
"bstr",
"encoding_rs",
@@ -1282,46 +1318,47 @@ dependencies = [
"gix-path",
"gix-quote",
"gix-trace",
+ "gix-utils",
"smallvec",
"thiserror",
]
[[package]]
name = "gix-fs"
-version = "0.8.0"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8cd171c0cae97cd0dc57e7b4601cb1ebf596450e263ef3c02be9107272c877bd"
+checksum = "20e86eb040f5776a5ade092282e51cdcad398adb77d948b88d17583c2ae4e107"
dependencies = [
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
]
[[package]]
name = "gix-glob"
-version = "0.14.0"
+version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fac08925dbc14d414bd02eb45ffb4cecd912d1fce3883f867bd0103c192d3e4"
+checksum = "5db19298c5eeea2961e5b3bf190767a2d1f09b8802aeb5f258e42276350aff19"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"bstr",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-path",
]
[[package]]
name = "gix-hash"
-version = "0.13.1"
+version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1884c7b41ea0875217c1be9ce91322f90bde433e91d374d0e1276073a51ccc60"
+checksum = "1f8cf8c2266f63e582b7eb206799b63aa5fa68ee510ad349f637dfe2d0653de0"
dependencies = [
- "faster-hex",
+ "faster-hex 0.9.0",
"thiserror",
]
[[package]]
name = "gix-hashtable"
-version = "0.4.0"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "409268480841ad008e81c17ca5a293393fbf9f2b6c2f85b8ab9de1f0c5176a16"
+checksum = "feb61880816d7ec4f0b20606b498147d480860ddd9133ba542628df2f548d3ca"
dependencies = [
"gix-hash",
"hashbrown",
@@ -1330,9 +1367,9 @@ dependencies = [
[[package]]
name = "gix-ignore"
-version = "0.9.0"
+version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e73c07763a8005ae02cb5cf83040729cea9bb70c7cef68ec6c24159904c499a"
+checksum = "a215cc8cf21645bca131fcf6329d3ebd46299c47dbbe27df71bb1ca9e328b879"
dependencies = [
"bstr",
"gix-glob",
@@ -1342,32 +1379,34 @@ dependencies = [
[[package]]
name = "gix-index"
-version = "0.26.0"
+version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c83a4fcc121b2f2e109088f677f89f85e7a8ebf39e8e6659c0ae54d4283b1650"
+checksum = "f3f308f5cd2992e96a274b0d1931e9a0e44fdcba87695ead3f6df30d8a697e9c"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"bstr",
"btoi",
"filetime",
"gix-bitmap",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-fs",
"gix-hash",
"gix-lock",
"gix-object",
"gix-traverse",
- "itoa 1.0.6",
+ "itoa 1.0.9",
+ "libc",
"memmap2",
+ "rustix",
"smallvec",
"thiserror",
]
[[package]]
name = "gix-lock"
-version = "11.0.0"
+version = "11.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4feb1dcd304fe384ddc22edba9dd56a42b0800032de6537728cea2f033a4f37"
+checksum = "7e5c65e6a29830a435664891ced3f3c1af010f14900226019590ee0971a22f37"
dependencies = [
"gix-tempfile",
"gix-utils",
@@ -1376,22 +1415,22 @@ dependencies = [
[[package]]
name = "gix-macros"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d8acb5ee668d55f0f2d19a320a3f9ef67a6999ad483e11135abcc2464ed18b6"
+checksum = "02a5bcaf6704d9354a3071cede7e77d366a5980c7352e102e2c2f9b645b1d3ae"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
]
[[package]]
name = "gix-negotiate"
-version = "0.9.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a5cdcf491ecc9ce39dcc227216c540355fe0024ae7c38e94557752ca5ebb67f"
+checksum = "979f6accd9c051b3dd018b50adf29c0a2459edddf6105cc70b767976cd6f8014"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"gix-commitgraph",
"gix-date",
"gix-hash",
@@ -1403,18 +1442,18 @@ dependencies = [
[[package]]
name = "gix-object"
-version = "0.38.0"
+version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "740f2a44267f58770a1cb3a3d01d14e67b089c7136c48d4bddbb3cfd2bf86a51"
+checksum = "febf79c5825720c1c63fe974c7bbe695d0cb54aabad73f45671c60ce0e501e33"
dependencies = [
"bstr",
"btoi",
"gix-actor",
"gix-date",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-hash",
"gix-validate",
- "itoa 1.0.6",
+ "itoa 1.0.9",
"smallvec",
"thiserror",
"winnow",
@@ -1422,13 +1461,13 @@ dependencies = [
[[package]]
name = "gix-odb"
-version = "0.54.0"
+version = "0.55.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8630b56cb80d8fa684d383dad006a66401ee8314e12fbf0e566ddad8c115143b"
+checksum = "1fae5f971540c99c6ecc8d4368ecc9d18a9dc8b9391025c68c4399747dc93bac"
dependencies = [
"arc-swap",
"gix-date",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-hash",
"gix-object",
"gix-pack",
@@ -1441,13 +1480,13 @@ dependencies = [
[[package]]
name = "gix-pack"
-version = "0.44.0"
+version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1431ba2e30deff1405920693d54ab231c88d7c240dd6ccc936ee223d8f8697c3"
+checksum = "4569491c92446fddf373456ff360aff9a9effd627b40a70f2d7914dcd75a3205"
dependencies = [
"clru",
"gix-chunk",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-hash",
"gix-hashtable",
"gix-object",
@@ -1461,31 +1500,33 @@ dependencies = [
[[package]]
name = "gix-packetline"
-version = "0.16.7"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a8384b1e964151aff0d5632dd9b191059d07dff358b96bd940f1b452600d7ab"
+checksum = "03b9fcc4425bd64c585440d14e5d2405a399f323429401571ba56a2c6d111865"
dependencies = [
"bstr",
- "faster-hex",
+ "faster-hex 0.8.1",
+ "gix-trace",
"thiserror",
]
[[package]]
name = "gix-packetline-blocking"
-version = "0.16.6"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d8395f7501c84d6a1fe902035fdfd8cd86d89e2dd6be0200ec1a72fd3c92d39"
+checksum = "50052c0f76c5af5acda41177fb55b60c1e484cc246ae919d8d21129cd1000a4e"
dependencies = [
"bstr",
- "faster-hex",
+ "faster-hex 0.8.1",
+ "gix-trace",
"thiserror",
]
[[package]]
name = "gix-path"
-version = "0.10.0"
+version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a1d370115171e3ae03c5c6d4f7d096f2981a40ddccb98dfd704c773530ba73b"
+checksum = "d86d6fac2fabe07b67b7835f46d07571f68b11aa1aaecae94fe722ea4ef305e1"
dependencies = [
"bstr",
"gix-trace",
@@ -1496,11 +1537,11 @@ dependencies = [
[[package]]
name = "gix-pathspec"
-version = "0.4.0"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9cc7194fdcf43b4a1ccfa13ffae1d79f83beb4becff7761d88dd99faeafe625"
+checksum = "1dbbb92f75a38ef043c8bb830b339b38d0698d7f3746968b5fcbade7a880494d"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"bstr",
"gix-attributes",
"gix-config-value",
@@ -1511,9 +1552,9 @@ dependencies = [
[[package]]
name = "gix-prompt"
-version = "0.7.0"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c9a913769516f5e9d937afac206fb76428e3d7238e538845842887fda584678"
+checksum = "4967b921304a5355e65a6257280eddf6e0f9ce3df111256531460adca3771305"
dependencies = [
"gix-command",
"gix-config-value",
@@ -1524,15 +1565,15 @@ dependencies = [
[[package]]
name = "gix-protocol"
-version = "0.41.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "391e3feabdfa5f90dad6673ce59e3291ac28901b2ff248d86c5a7fbde0391e0e"
+checksum = "95736ef407db0bd15a5bdea791fbfcf523b9f13b96c852c240cd86a9ee0ef817"
dependencies = [
"bstr",
"btoi",
"gix-credentials",
"gix-date",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-hash",
"gix-transport",
"maybe-async",
@@ -1542,9 +1583,9 @@ dependencies = [
[[package]]
name = "gix-quote"
-version = "0.4.7"
+version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "475c86a97dd0127ba4465fbb239abac9ea10e68301470c9791a6dd5351cdc905"
+checksum = "4f84845efa535468bc79c5a87b9d29219f1da0313c8ecf0365a5daa7e72786f2"
dependencies = [
"bstr",
"btoi",
@@ -1553,13 +1594,13 @@ dependencies = [
[[package]]
name = "gix-ref"
-version = "0.38.0"
+version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ec2f6d07ac88d2fb8007ee3fa3e801856fb9d82e7366ec0ca332eb2c9d74a52"
+checksum = "1ac23ed741583c792f573c028785db683496a6dfcd672ec701ee54ba6a77e1ff"
dependencies = [
"gix-actor",
"gix-date",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-fs",
"gix-hash",
"gix-lock",
@@ -1574,9 +1615,9 @@ dependencies = [
[[package]]
name = "gix-refspec"
-version = "0.19.0"
+version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ccb0974cc41dbdb43a180c7f67aa481e1c1e160fcfa8f4a55291fd1126c1a6e7"
+checksum = "76d9d3b82e1ee78fc0dc1c37ea5ea76c2dbc73f407db155f0dfcea285e583bee"
dependencies = [
"bstr",
"gix-hash",
@@ -1588,9 +1629,9 @@ dependencies = [
[[package]]
name = "gix-revision"
-version = "0.23.0"
+version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ca97ac73459a7f3766aa4a5638a6e37d56d4c7962bc1986fbaf4883d0772588"
+checksum = "fe5dd51710ce5434bc315ea30394fab483c5377276494edd79222b321a5a9544"
dependencies = [
"bstr",
"gix-date",
@@ -1604,9 +1645,9 @@ dependencies = [
[[package]]
name = "gix-revwalk"
-version = "0.9.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a16d8c892e4cd676d86f0265bf9d40cefd73d8d94f86b213b8b77d50e77efae0"
+checksum = "69d4ed2493ca94a475fdf147138e1ef8bab3b6ebb56abf3d9bda1c05372ec1dd"
dependencies = [
"gix-commitgraph",
"gix-date",
@@ -1619,11 +1660,11 @@ dependencies = [
[[package]]
name = "gix-sec"
-version = "0.10.0"
+version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92b9542ac025a8c02ed5d17b3fc031a111a384e859d0be3532ec4d58c40a0f28"
+checksum = "a36ea2c5907d64a9b4b5d3cc9f430e6c30f0509646b5e38eb275ca57c5bf29e2"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"gix-path",
"libc",
"windows",
@@ -1631,9 +1672,9 @@ dependencies = [
[[package]]
name = "gix-submodule"
-version = "0.5.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bba78c8d12aa24370178453ec3a472ff08dfaa657d116229f57f2c9cd469a1c2"
+checksum = "02a3d7f60a95bdcaeb8981663c99d1c9f4de42aab1169524c949e948989809f9"
dependencies = [
"bstr",
"gix-config",
@@ -1646,9 +1687,9 @@ dependencies = [
[[package]]
name = "gix-tempfile"
-version = "11.0.0"
+version = "11.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05cc2205cf10d99f70b96e04e16c55d4c7cf33efc151df1f793e29fd12a931f8"
+checksum = "388dd29114a86ec69b28d1e26d6d63a662300ecf61ab3f4cc578f7d7dc9e7e23"
dependencies = [
"gix-fs",
"libc",
@@ -1659,22 +1700,22 @@ dependencies = [
[[package]]
name = "gix-trace"
-version = "0.1.3"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96b6d623a1152c3facb79067d6e2ecdae48130030cf27d6eb21109f13bd7b836"
+checksum = "b686a35799b53a9825575ca3f06481d0a053a409c4d97ffcf5ddd67a8760b497"
[[package]]
name = "gix-transport"
-version = "0.38.0"
+version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f209a93364e24f20319751bc11092272e2f3fe82bb72592b2822679cf5be752"
+checksum = "f731cfefc4d62468c6dd2053f5c6707828256a6d2f5488c1811e3f42c178b144"
dependencies = [
"base64",
"bstr",
"curl",
"gix-command",
"gix-credentials",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-packetline",
"gix-quote",
"gix-sec",
@@ -1684,9 +1725,9 @@ dependencies = [
[[package]]
name = "gix-traverse"
-version = "0.34.0"
+version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14d050ec7d4e1bb76abf0636cf4104fb915b70e54e3ced9a4427c999100ff38a"
+checksum = "df2112088122a0206592c84fbd42020db63b2ccaed66a0293779f2e5fbf80474"
dependencies = [
"gix-commitgraph",
"gix-date",
@@ -1700,12 +1741,12 @@ dependencies = [
[[package]]
name = "gix-url"
-version = "0.25.1"
+version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1b9ac8ed32ad45f9fc6c5f8c0be2ed911e544a5a19afd62d95d524ebaa95671"
+checksum = "0c427a1a11ccfa53a4a2da47d9442c2241deee63a154bc15cc14b8312fbc4005"
dependencies = [
"bstr",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-path",
"home 0.5.5",
"thiserror",
@@ -1714,18 +1755,18 @@ dependencies = [
[[package]]
name = "gix-utils"
-version = "0.1.5"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b85d89dc728613e26e0ed952a19583744e7f5240fcd4aa30d6c824ffd8b52f0f"
+checksum = "9f82c41937f00e15a1f6cb0b55307f0ca1f77f4407ff2bf440be35aa688c6a3e"
dependencies = [
"fastrand",
]
[[package]]
name = "gix-validate"
-version = "0.8.0"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e05cab2b03a45b866156e052aa38619f4ece4adcb2f79978bfc249bc3b21b8c5"
+checksum = "75b7d8e4274be69f284bbc7e6bb2ccf7065dbcdeba22d8c549f2451ae426883f"
dependencies = [
"bstr",
"thiserror",
@@ -1733,13 +1774,13 @@ dependencies = [
[[package]]
name = "gix-worktree"
-version = "0.27.0"
+version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ddaf79e721dba64fe726a42f297a3c8ed42e55cdc0d81ca68452f2def3c2d7fd"
+checksum = "7f1d0ae01dee14abe8c8117d78d7518f9a507de2dc4522546fbf4c444e9860b4"
dependencies = [
"bstr",
"gix-attributes",
- "gix-features 0.36.0",
+ "gix-features 0.36.1",
"gix-fs",
"gix-glob",
"gix-hash",
@@ -1757,15 +1798,15 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
-version = "0.4.13"
+version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d"
+checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
dependencies = [
"aho-corasick",
"bstr",
- "fnv",
"log",
- "regex",
+ "regex-automata 0.4.3",
+ "regex-syntax 0.8.2",
]
[[package]]
@@ -1802,24 +1843,28 @@ dependencies = [
[[package]]
name = "hashbrown"
-version = "0.14.0"
+version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
+dependencies = [
+ "ahash",
+ "allocator-api2",
+]
[[package]]
-name = "hermit-abi"
-version = "0.2.6"
+name = "hashlink"
+version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
- "libc",
+ "hashbrown",
]
[[package]]
name = "hermit-abi"
-version = "0.3.2"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
+checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
[[package]]
name = "hex"
@@ -1851,14 +1896,14 @@ version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
dependencies = [
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
name = "home"
-version = "0.5.8"
+version = "0.5.9"
dependencies = [
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -1878,9 +1923,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "idna"
-version = "0.4.0"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
+checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -1888,17 +1933,16 @@ dependencies = [
[[package]]
name = "ignore"
-version = "0.4.20"
+version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492"
+checksum = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060"
dependencies = [
+ "crossbeam-deque",
"globset",
- "lazy_static",
"log",
"memchr",
- "regex",
+ "regex-automata 0.4.3",
"same-file",
- "thread_local",
"walkdir",
"winapi-util",
]
@@ -1919,9 +1963,9 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "2.0.0"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
+checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
"equivalent",
"hashbrown",
@@ -1933,9 +1977,9 @@ version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
- "hermit-abi 0.3.2",
+ "hermit-abi",
"rustix",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -1949,9 +1993,9 @@ dependencies = [
[[package]]
name = "itertools"
-version = "0.11.0"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
dependencies = [
"either",
]
@@ -1964,9 +2008,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
-version = "1.0.6"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
+checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "jobserver"
@@ -1979,9 +2023,9 @@ dependencies = [
[[package]]
name = "js-sys"
-version = "0.3.64"
+version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a"
+checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca"
dependencies = [
"wasm-bindgen",
]
@@ -2015,9 +2059,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "libc"
-version = "0.2.149"
+version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b"
+checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "libgit2-sys"
@@ -2040,26 +2084,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161"
dependencies = [
"cfg-if",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
name = "libm"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4"
+checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "libnghttp2-sys"
-version = "0.1.7+1.45.0"
+version = "0.1.8+1.55.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f"
+checksum = "4fae956c192dadcdb5dace96db71fa0b827333cce7c7b38dc71446f024d8a340"
dependencies = [
"cc",
"libc",
]
[[package]]
+name = "libsqlite3-sys"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716"
+dependencies = [
+ "cc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
name = "libssh2-sys"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2075,9 +2130,9 @@ dependencies = [
[[package]]
name = "libz-sys"
-version = "1.1.9"
+version = "1.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db"
+checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b"
dependencies = [
"cc",
"libc",
@@ -2087,15 +2142,15 @@ dependencies = [
[[package]]
name = "linux-raw-sys"
-version = "0.4.10"
+version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
+checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456"
[[package]]
name = "lock_api"
-version = "0.4.10"
+version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
+checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
dependencies = [
"autocfg",
"scopeguard",
@@ -2103,9 +2158,9 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.19"
+version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
+checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "matchers"
@@ -2133,10 +2188,10 @@ version = "0.0.0"
dependencies = [
"anyhow",
"handlebars",
- "pretty_assertions",
"pulldown-cmark",
"same-file",
"serde_json",
+ "snapbox",
"url",
]
@@ -2148,9 +2203,9 @@ checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "memmap2"
-version = "0.7.1"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f49388d20533534cd19360ad3d6a7dadc885944aa802ba3995040c5ec11288c6"
+checksum = "deaba38d7abf1d4cca21cc89e932e542ba2b9258664d2a9ef0e61512039c9375"
dependencies = [
"libc",
]
@@ -2185,7 +2240,7 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
dependencies = [
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -2210,7 +2265,7 @@ version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec60c60a693226186f5d6edf073232bfb6464ed97eb22cf3b01c1e8198fd97f5"
dependencies = [
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -2225,25 +2280,15 @@ dependencies = [
[[package]]
name = "num-traits"
-version = "0.2.15"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
dependencies = [
"autocfg",
"libm",
]
[[package]]
-name = "num_cpus"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
-dependencies = [
- "hermit-abi 0.2.6",
- "libc",
-]
-
-[[package]]
name = "num_threads"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2281,7 +2326,7 @@ version = "0.10.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"cfg-if",
"foreign-types",
"libc",
@@ -2298,7 +2343,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
]
[[package]]
@@ -2309,9 +2354,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-src"
-version = "111.26.0+1.1.1u"
+version = "111.28.1+1.1.1w"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efc62c9f12b22b8f5208c23a7200a442b2e5999f8bdf80233852122b5a4f6f37"
+checksum = "4bf7e82ffd6d3d6e6524216a0bfd85509f68b5b28354e8e7800057e44cefa9b4"
dependencies = [
"cc",
]
@@ -2331,18 +2376,18 @@ dependencies = [
[[package]]
name = "ordered-float"
-version = "2.10.0"
+version = "2.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87"
+checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
dependencies = [
"num-traits",
]
[[package]]
name = "orion"
-version = "0.17.4"
+version = "0.17.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbe74a766292f94f7e69db5a7bf010eadd944f24186c463fe578a7e637582066"
+checksum = "7abdb10181903c8c4b016ba45d6d6d5af1a1e2a461aa4763a83b87f5df4695e5"
dependencies = [
"fiat-crypto",
"subtle",
@@ -2390,15 +2435,15 @@ dependencies = [
[[package]]
name = "parking_lot_core"
-version = "0.9.8"
+version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
+checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
dependencies = [
"cfg-if",
"libc",
- "redox_syscall 0.3.5",
+ "redox_syscall",
"smallvec",
- "windows-targets",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -2459,25 +2504,26 @@ dependencies = [
[[package]]
name = "percent-encoding"
-version = "2.3.0"
+version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
+checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
-version = "2.6.0"
+version = "2.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e68e84bfb01f0507134eac1e9b410a12ba379d064eab48c50ba4ce329a527b70"
+checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5"
dependencies = [
+ "memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
-version = "2.6.0"
+version = "2.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b79d4c71c865a25a4322296122e3924d30bc8ee0834c8bfc8b95f7f054afbfb"
+checksum = "81d78524685f5ef2a3b3bd1cafbc9fcabb036253d9b1463e726a91cd16e2dfc2"
dependencies = [
"pest",
"pest_generator",
@@ -2485,22 +2531,22 @@ dependencies = [
[[package]]
name = "pest_generator"
-version = "2.6.0"
+version = "2.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c435bf1076437b851ebc8edc3a18442796b30f1728ffea6262d59bbe28b077e"
+checksum = "68bd1206e71118b5356dae5ddc61c8b11e28b09ef6a31acbd15ea48a28e0c227"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
]
[[package]]
name = "pest_meta"
-version = "2.6.0"
+version = "2.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "745a452f8eb71e39ffd8ee32b3c5f51d03845f99786fa9b68db6ff509c505411"
+checksum = "7c747191d4ad9e4a4ab9c8798f1e82a39affe7ef9648390b7e5548d18e099de6"
dependencies = [
"once_cell",
"pest",
@@ -2509,9 +2555,9 @@ dependencies = [
[[package]]
name = "pin-project-lite"
-version = "0.2.10"
+version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57"
+checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
[[package]]
name = "pkcs8"
@@ -2531,9 +2577,9 @@ checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
[[package]]
name = "plotters"
-version = "0.3.4"
+version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97"
+checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45"
dependencies = [
"num-traits",
"plotters-backend",
@@ -2544,49 +2590,45 @@ dependencies = [
[[package]]
name = "plotters-backend"
-version = "0.3.4"
+version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142"
+checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609"
[[package]]
name = "plotters-svg"
-version = "0.3.3"
+version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f"
+checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab"
dependencies = [
"plotters-backend",
]
[[package]]
-name = "ppv-lite86"
-version = "0.2.17"
+name = "powerfmt"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
-name = "pretty_assertions"
-version = "1.4.0"
+name = "ppv-lite86"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
-dependencies = [
- "diff",
- "yansi",
-]
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "primeorder"
-version = "0.13.2"
+version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3"
+checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6"
dependencies = [
"elliptic-curve",
]
[[package]]
name = "proc-macro2"
-version = "1.0.67"
+version = "1.0.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328"
+checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b"
dependencies = [
"unicode-ident",
]
@@ -2602,19 +2644,19 @@ dependencies = [
[[package]]
name = "proptest"
-version = "1.3.1"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e"
+checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf"
dependencies = [
"bit-set",
"bit-vec",
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"lazy_static",
"num-traits",
"rand",
"rand_chacha",
"rand_xorshift",
- "regex-syntax 0.7.2",
+ "regex-syntax 0.8.2",
"rusty-fork",
"tempfile",
"unarray",
@@ -2645,9 +2687,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quote"
-version = "1.0.32"
+version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
+checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
@@ -2702,9 +2744,9 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.7.0"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
+checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1"
dependencies = [
"either",
"rayon-core",
@@ -2712,23 +2754,12 @@ dependencies = [
[[package]]
name = "rayon-core"
-version = "1.11.0"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
+checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
dependencies = [
- "crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
- "num_cpus",
-]
-
-[[package]]
-name = "redox_syscall"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
-dependencies = [
- "bitflags 1.3.2",
]
[[package]]
@@ -2742,13 +2773,14 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.8.4"
+version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
+checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
dependencies = [
"aho-corasick",
"memchr",
- "regex-syntax 0.7.2",
+ "regex-automata 0.4.3",
+ "regex-syntax 0.8.2",
]
[[package]]
@@ -2762,9 +2794,14 @@ dependencies = [
[[package]]
name = "regex-automata"
-version = "0.3.8"
+version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795"
+checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax 0.8.2",
+]
[[package]]
name = "regex-syntax"
@@ -2774,9 +2811,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
-version = "0.7.2"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
+checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "resolver-tests"
@@ -2799,6 +2836,20 @@ dependencies = [
]
[[package]]
+name = "rusqlite"
+version = "0.30.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a78046161564f5e7cd9008aff3b2990b3850dc8e0349119b98e8f251e099f24d"
+dependencies = [
+ "bitflags 2.4.1",
+ "fallible-iterator",
+ "fallible-streaming-iterator",
+ "hashlink",
+ "libsqlite3-sys",
+ "smallvec",
+]
+
+[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2806,27 +2857,30 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustfix"
-version = "0.6.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecd2853d9e26988467753bd9912c3a126f642d05d229a4b53f5752ee36c56481"
+version = "0.7.0"
dependencies = [
"anyhow",
- "log",
+ "proptest",
"serde",
"serde_json",
+ "similar",
+ "tempfile",
+ "thiserror",
+ "tracing",
+ "tracing-subscriber",
]
[[package]]
name = "rustix"
-version = "0.38.21"
+version = "0.38.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
+checksum = "9470c4bf8246c8daf25f9598dca807fb6510347b1e1cfa55749113850c79d88a"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"errno",
"libc",
"linux-raw-sys",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -2843,9 +2897,9 @@ dependencies = [
[[package]]
name = "ryu"
-version = "1.0.13"
+version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
+checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
[[package]]
name = "same-file"
@@ -2862,20 +2916,20 @@ version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
dependencies = [
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
name = "scopeguard"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "sec1"
-version = "0.7.2"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0aec48e813d6b90b15f0b8948af3c63483992dee44c03e9930b3eebdabe046e"
+checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
"base16ct",
"der",
@@ -2900,9 +2954,9 @@ dependencies = [
[[package]]
name = "security-framework-sys"
-version = "2.9.0"
+version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7"
+checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a"
dependencies = [
"core-foundation-sys",
"libc",
@@ -2926,9 +2980,9 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.190"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7"
+checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
dependencies = [
"serde_derive",
]
@@ -2955,13 +3009,13 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.190"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3"
+checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
]
[[package]]
@@ -2979,7 +3033,7 @@ version = "1.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
dependencies = [
- "itoa 1.0.6",
+ "itoa 1.0.9",
"ryu",
"serde",
]
@@ -3023,9 +3077,9 @@ dependencies = [
[[package]]
name = "sharded-slab"
-version = "0.1.4"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
@@ -3037,10 +3091,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f"
[[package]]
+name = "shell-words"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
+
+[[package]]
name = "signature"
-version = "2.1.0"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500"
+checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"digest",
"rand_core",
@@ -3048,9 +3108,9 @@ dependencies = [
[[package]]
name = "similar"
-version = "2.2.1"
+version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf"
+checksum = "2aeaf503862c419d66959f5d7ca015337d864e9c49485d771b732e2a20453597"
[[package]]
name = "sized-chunks"
@@ -3064,9 +3124,9 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.10.0"
+version = "1.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
[[package]]
name = "snapbox"
@@ -3098,9 +3158,9 @@ dependencies = [
[[package]]
name = "socket2"
-version = "0.4.9"
+version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
+checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
dependencies = [
"libc",
"winapi",
@@ -3108,9 +3168,9 @@ dependencies = [
[[package]]
name = "spki"
-version = "0.7.2"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a"
+checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
@@ -3156,9 +3216,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.38"
+version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
+checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [
"proc-macro2",
"quote",
@@ -3195,9 +3255,9 @@ checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
dependencies = [
"cfg-if",
"fastrand",
- "redox_syscall 0.4.1",
+ "redox_syscall",
"rustix",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -3207,7 +3267,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7"
dependencies = [
"rustix",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -3227,7 +3287,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
]
[[package]]
@@ -3242,14 +3302,15 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.29"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe"
+checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
dependencies = [
"deranged",
- "itoa 1.0.6",
+ "itoa 1.0.9",
"libc",
"num_threads",
+ "powerfmt",
"serde",
"time-core",
"time-macros",
@@ -3297,9 +3358,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
-version = "0.8.6"
+version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ff9e3abce27ee2c9a37f9ad37238c1bdd4e789c84ba37df76aa4d528f5072cc"
+checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35"
dependencies = [
"serde",
"serde_spanned",
@@ -3318,9 +3379,9 @@ dependencies = [
[[package]]
name = "toml_edit"
-version = "0.20.7"
+version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81"
+checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03"
dependencies = [
"indexmap",
"serde",
@@ -3348,7 +3409,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
]
[[package]]
@@ -3363,20 +3424,20 @@ dependencies = [
[[package]]
name = "tracing-log"
-version = "0.1.3"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
- "lazy_static",
"log",
+ "once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
-version = "0.3.17"
+version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
+checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
dependencies = [
"matchers",
"nu-ansi-term",
@@ -3392,15 +3453,15 @@ dependencies = [
[[package]]
name = "typenum"
-version = "1.16.0"
+version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
+checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ucd-trie"
-version = "0.1.5"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81"
+checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "unarray"
@@ -3425,15 +3486,15 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
[[package]]
name = "unicode-bom"
-version = "2.0.2"
+version = "2.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98e90c70c9f0d4d1ee6d0a7d04aa06cb9bbd53d8cfbdd62a0269a7c2eb640552"
+checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217"
[[package]]
name = "unicode-ident"
-version = "1.0.9"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-normalization"
@@ -3458,9 +3519,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "url"
-version = "2.4.1"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
+checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
dependencies = [
"form_urlencoded",
"idna",
@@ -3605,9 +3666,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
-version = "0.2.87"
+version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
+checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@@ -3615,24 +3676,24 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
-version = "0.2.87"
+version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
+checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-macro"
-version = "0.2.87"
+version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
+checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3640,28 +3701,28 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
-version = "0.2.87"
+version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
+checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.39",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
-version = "0.2.87"
+version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
+checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f"
[[package]]
name = "web-sys"
-version = "0.3.64"
+version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b"
+checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -3685,9 +3746,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
-version = "0.1.5"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
dependencies = [
"winapi",
]
@@ -3704,7 +3765,7 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
dependencies = [
- "windows-targets",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -3713,71 +3774,137 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
- "windows-targets",
+ "windows-targets 0.48.5",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.0",
]
[[package]]
name = "windows-targets"
-version = "0.48.0"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+dependencies = [
+ "windows_aarch64_gnullvm 0.48.5",
+ "windows_aarch64_msvc 0.48.5",
+ "windows_i686_gnu 0.48.5",
+ "windows_i686_msvc 0.48.5",
+ "windows_x86_64_gnu 0.48.5",
+ "windows_x86_64_gnullvm 0.48.5",
+ "windows_x86_64_msvc 0.48.5",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
+checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
+ "windows_aarch64_gnullvm 0.52.0",
+ "windows_aarch64_msvc 0.52.0",
+ "windows_i686_gnu 0.52.0",
+ "windows_i686_msvc 0.52.0",
+ "windows_x86_64_gnu 0.52.0",
+ "windows_x86_64_gnullvm 0.52.0",
+ "windows_x86_64_msvc 0.52.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
-version = "0.48.0"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
[[package]]
name = "windows_aarch64_msvc"
-version = "0.48.0"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
[[package]]
name = "windows_i686_gnu"
-version = "0.48.0"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
[[package]]
name = "windows_i686_msvc"
-version = "0.48.0"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
[[package]]
name = "windows_x86_64_gnu"
-version = "0.48.0"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
[[package]]
name = "windows_x86_64_gnullvm"
-version = "0.48.0"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
[[package]]
name = "windows_x86_64_msvc"
-version = "0.48.0"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
[[package]]
name = "winnow"
-version = "0.5.15"
+version = "0.5.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
+checksum = "b7e87b8dfbe3baffbe687eef2e164e32286eff31a5ee16463ce03d991643ec94"
dependencies = [
"memchr",
]
@@ -3808,13 +3935,27 @@ dependencies = [
]
[[package]]
-name = "yansi"
-version = "0.5.1"
+name = "zerocopy"
+version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.39",
+]
[[package]]
name = "zeroize"
-version = "1.6.0"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
+checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
diff --git a/src/tools/cargo/Cargo.toml b/src/tools/cargo/Cargo.toml
index 3fb36b44e..c07b0047b 100644
--- a/src/tools/cargo/Cargo.toml
+++ b/src/tools/cargo/Cargo.toml
@@ -16,33 +16,33 @@ edition = "2021"
license = "MIT OR Apache-2.0"
[workspace.dependencies]
-anstream = "0.6.4"
+anstream = "0.6.5"
anstyle = "1.0.4"
anyhow = "1.0.75"
base64 = "0.21.5"
bytesize = "1.3"
cargo = { path = "" }
-cargo-credential = { version = "0.4.1", path = "credential/cargo-credential" }
-cargo-credential-libsecret = { version = "0.4.1", path = "credential/cargo-credential-libsecret" }
-cargo-credential-macos-keychain = { version = "0.4.1", path = "credential/cargo-credential-macos-keychain" }
-cargo-credential-wincred = { version = "0.4.1", path = "credential/cargo-credential-wincred" }
+cargo-credential = { version = "0.4.2", path = "credential/cargo-credential" }
+cargo-credential-libsecret = { version = "0.4.2", path = "credential/cargo-credential-libsecret" }
+cargo-credential-macos-keychain = { version = "0.4.2", path = "credential/cargo-credential-macos-keychain" }
+cargo-credential-wincred = { version = "0.4.2", path = "credential/cargo-credential-wincred" }
cargo-platform = { path = "crates/cargo-platform", version = "0.1.4" }
cargo-test-macro = { path = "crates/cargo-test-macro" }
cargo-test-support = { path = "crates/cargo-test-support" }
cargo-util = { version = "0.2.6", path = "crates/cargo-util" }
cargo_metadata = "0.18.1"
-clap = "4.4.7"
+clap = "4.4.10"
color-print = "0.3.5"
-core-foundation = { version = "0.9.3", features = ["mac_os_10_7_support"] }
+core-foundation = { version = "0.9.4", features = ["mac_os_10_7_support"] }
crates-io = { version = "0.39.0", path = "crates/crates-io" }
criterion = { version = "0.5.1", features = ["html_reports"] }
curl = "0.4.44"
-curl-sys = "0.4.68"
+curl-sys = "0.4.70"
filetime = "0.2.22"
flate2 = { version = "1.0.28", default-features = false, features = ["zlib"] }
git2 = "0.18.1"
git2-curl = "0.19.0"
-gix = { version = "0.55.2", default-features = false, features = ["blocking-http-transport-curl", "progress-tree", "revision"] }
+gix = { version = "0.56.0", default-features = false, features = ["blocking-http-transport-curl", "progress-tree", "revision"] }
gix-features-for-configuration-only = { version = "0.35.0", package = "gix-features", features = [ "parallel" ] }
glob = "0.3.1"
handlebars = { version = "3.5.5", features = ["dir_source"] }
@@ -51,33 +51,34 @@ hmac = "0.12.1"
home = "0.5.5"
http-auth = { version = "0.1.8", default-features = false }
humantime = "2.1.0"
-ignore = "0.4.20"
+ignore = "0.4.21"
im-rc = "15.1.0"
indexmap = "2"
-itertools = "0.11.0"
+itertools = "0.12.0"
jobserver = "0.1.27"
lazycell = "1.3.0"
-libc = "0.2.149"
+libc = "0.2.150"
libgit2-sys = "0.16.1"
libloading = "0.8.1"
memchr = "2.6.4"
miow = "0.6.0"
opener = "0.6.1"
-openssl ="0.10.57"
+openssl = "0.10.57"
os_info = "3.7.0"
pasetors = { version = "0.6.7", features = ["v3", "paserk", "std", "serde"] }
pathdiff = "0.2"
percent-encoding = "2.3"
pkg-config = "0.3.27"
-pretty_assertions = "1.4.0"
-proptest = "1.3.1"
+proptest = "1.4.0"
pulldown-cmark = { version = "0.9.3", default-features = false }
rand = "0.8.5"
-rustfix = "0.6.1"
+regex = "1.10.2"
+rusqlite = { version = "0.30.0", features = ["bundled"] }
+rustfix = { version = "0.7.0", path = "crates/rustfix" }
same-file = "1.0.6"
security-framework = "2.9.2"
semver = { version = "1.0.20", features = ["serde"] }
-serde = "1.0.190"
+serde = "1.0.193"
serde-untagged = "0.1.1"
serde-value = "0.7.0"
serde_ignored = "0.1.9"
@@ -87,26 +88,40 @@ sha2 = "0.10.8"
shell-escape = "0.1.5"
supports-hyperlinks = "2.1.0"
snapbox = { version = "0.4.14", features = ["diff", "path"] }
-syn = { version = "2.0.38", features = ["extra-traits", "full"] }
+syn = { version = "2.0.39", features = ["extra-traits", "full"] }
tar = { version = "0.4.40", default-features = false }
tempfile = "3.8.1"
thiserror = "1.0.50"
time = { version = "0.3", features = ["parsing", "formatting", "serde"] }
-toml = "0.8.6"
-toml_edit = { version = "0.20.7", features = ["serde"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
+toml = "0.8.8"
+toml_edit = { version = "0.21.0", features = ["serde"] }
+tracing = "0.1.37" # be compatible with rustc_log: https://github.com/rust-lang/rust/blob/e51e98dde6a/compiler/rustc_log/Cargo.toml#L9
+tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
unicase = "2.7.0"
unicode-width = "0.1.11"
unicode-xid = "0.2.4"
-url = "2.4.1"
+url = "2.5.0"
varisat = "0.2.2"
walkdir = "2.4.0"
-windows-sys = "0.48"
+windows-sys = "0.52"
+
+[workspace.lints.rust]
+rust_2018_idioms = "warn" # TODO: could this be removed?
+
+[workspace.lints.rustdoc]
+private_intra_doc_links = "allow"
+
+[workspace.lints.clippy]
+all = { level = "allow", priority = -1 }
+dbg_macro = "warn"
+disallowed_methods = "warn"
+print_stderr = "warn"
+print_stdout = "warn"
+self_named_module_files = "warn"
[package]
name = "cargo"
-version = "0.76.0"
+version = "0.77.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true
@@ -162,6 +177,8 @@ pasetors.workspace = true
pathdiff.workspace = true
pulldown-cmark.workspace = true
rand.workspace = true
+regex.workspace = true
+rusqlite.workspace = true
rustfix.workspace = true
semver.workspace = true
serde = { workspace = true, features = ["derive"] }
@@ -229,4 +246,7 @@ doc = false
vendored-openssl = ["openssl/vendored"]
vendored-libgit2 = ["libgit2-sys/vendored"]
# This is primarily used by rust-lang/rust distributing cargo the executable.
-all-static = ['vendored-openssl', 'curl/static-curl', 'curl/force-system-lib-on-osx']
+all-static = ['vendored-openssl', 'curl/static-curl', 'curl/force-system-lib-on-osx', 'vendored-libgit2']
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/benches/README.md b/src/tools/cargo/benches/README.md
index b4b8b190a..900bf084c 100644
--- a/src/tools/cargo/benches/README.md
+++ b/src/tools/cargo/benches/README.md
@@ -9,7 +9,23 @@ cd benches/benchsuite
cargo bench
```
-The tests involve downloading the index and benchmarking against some
+However, running all benchmarks would take many minutes, so in most cases it
+is recommended to just run the benchmarks relevant to whatever section of code
+you are working on.
+
+## Benchmarks
+
+There are several different kinds of benchmarks in the `benchsuite/benches` directory:
+
+* `global_cache_tracker` — Benchmarks saving data to the global cache tracker
+ database using samples of real-world data.
+* `resolve` — Benchmarks the resolver against simulations of real-world workspaces.
+* `workspace_initialization` — Benchmarks initialization of a workspace
+ against simulations of real-world workspaces.
+
+### Resolve benchmarks
+
+The resolve benchmarks involve downloading the index and benchmarking against some
real-world and artificial workspaces located in the [`workspaces`](workspaces)
directory.
@@ -21,7 +37,7 @@ faster. You can (and probably should) specify individual benchmarks to run to
narrow it down to a more reasonable set, for example:
```sh
-cargo bench -- resolve_ws/rust
+cargo bench -p benchsuite --bench resolve -- resolve_ws/rust
```
This will only download what's necessary for the rust-lang/rust workspace
@@ -29,7 +45,24 @@ This will only download what's necessary for the rust-lang/rust workspace
about a minute). To get a list of all the benchmarks, run:
```sh
-cargo bench -- --list
+cargo bench -p benchsuite --bench resolve -- --list
+```
+
+### Global cache tracker
+
+The `global_cache_tracker` benchmark tests saving data to the global cache
+tracker database using samples of real-world data. This benchmark should run
+relatively quickly.
+
+The real-world data is based on a capture of my personal development
+environment which has accumulated a large cache. So it is somewhat arbitrary,
+but hopefully representative of a challenging environment. Capturing of the
+data is done with the `capture-last-use` binary, which you can run if you need
+to rebuild the database. Just try to run on a system with a relatively full
+cache in your cargo home directory.
+
+```sh
+cargo bench -p benchsuite --bench global_cache_tracker
```
## Viewing reports
diff --git a/src/tools/cargo/benches/benchsuite/Cargo.toml b/src/tools/cargo/benches/benchsuite/Cargo.toml
index 81413e761..25c160438 100644
--- a/src/tools/cargo/benches/benchsuite/Cargo.toml
+++ b/src/tools/cargo/benches/benchsuite/Cargo.toml
@@ -11,8 +11,10 @@ publish = false
[dependencies]
cargo.workspace = true
+cargo-util.workspace = true
criterion.workspace = true
flate2.workspace = true
+rand.workspace = true
tar.workspace = true
url.workspace = true
@@ -26,3 +28,10 @@ harness = false
[[bench]]
name = "workspace_initialization"
harness = false
+
+[[bench]]
+name = "global_cache_tracker"
+harness = false
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/benches/benchsuite/benches/global_cache_tracker.rs b/src/tools/cargo/benches/benchsuite/benches/global_cache_tracker.rs
new file mode 100644
index 000000000..71d5d5262
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/benches/global_cache_tracker.rs
@@ -0,0 +1,159 @@
+//! Benchmarks for the global cache tracker.
+
+use cargo::core::global_cache_tracker::{self, DeferredGlobalLastUse, GlobalCacheTracker};
+use cargo::util::cache_lock::CacheLockMode;
+use cargo::util::interning::InternedString;
+use cargo::util::Config;
+use criterion::{criterion_group, criterion_main, Criterion};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+// Samples of real-world data.
+const GLOBAL_CACHE_SAMPLE: &str = "global-cache-tracker/global-cache-sample";
+const GLOBAL_CACHE_RANDOM: &str = "global-cache-tracker/random-sample";
+
+/// A scratch directory where the benchmark can place some files.
+fn root() -> PathBuf {
+ let mut p = PathBuf::from(env!("CARGO_TARGET_TMPDIR"));
+ p.push("bench_global_cache_tracker");
+ p
+}
+
+fn cargo_home() -> PathBuf {
+ let mut p = root();
+ p.push("chome");
+ p
+}
+
+fn initialize_config() -> Config {
+ // Set up config.
+ let shell = cargo::core::Shell::new();
+ let homedir = cargo_home();
+ if !homedir.exists() {
+ fs::create_dir_all(&homedir).unwrap();
+ }
+ let cwd = homedir.clone();
+ let mut config = Config::new(shell, cwd, homedir);
+ config.nightly_features_allowed = true;
+ config.set_search_stop_path(root());
+ config
+ .configure(
+ 0,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["gc".to_string()],
+ &[],
+ )
+ .unwrap();
+ // Set up database sample.
+ let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked();
+ if db_path.exists() {
+ fs::remove_file(&db_path).unwrap();
+ }
+ let sample = Path::new(env!("CARGO_MANIFEST_DIR")).join(GLOBAL_CACHE_SAMPLE);
+ fs::copy(sample, &db_path).unwrap();
+ config
+}
+
+/// Benchmarks how long it takes to initialize `GlobalCacheTracker` with an already
+/// existing full database.
+fn global_tracker_init(c: &mut Criterion) {
+ let config = initialize_config();
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)
+ .unwrap();
+ c.bench_function("global_tracker_init", |b| {
+ b.iter(|| {
+ GlobalCacheTracker::new(&config).unwrap();
+ })
+ });
+}
+
+/// Benchmarks how long it takes to save a `GlobalCacheTracker` when there are zero
+/// updates.
+fn global_tracker_empty_save(c: &mut Criterion) {
+ let config = initialize_config();
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)
+ .unwrap();
+ let mut deferred = DeferredGlobalLastUse::new();
+ let mut tracker = GlobalCacheTracker::new(&config).unwrap();
+
+ c.bench_function("global_tracker_empty_save", |b| {
+ b.iter(|| {
+ deferred.save(&mut tracker).unwrap();
+ })
+ });
+}
+
+fn load_random_sample() -> Vec<(InternedString, InternedString, u64)> {
+ let path = Path::new(env!("CARGO_MANIFEST_DIR")).join(GLOBAL_CACHE_RANDOM);
+ fs::read_to_string(path)
+ .unwrap()
+ .lines()
+ .map(|s| {
+ let mut s = s.split(',');
+ (
+ s.next().unwrap().into(),
+ s.next().unwrap().into(),
+ s.next().unwrap().parse().unwrap(),
+ )
+ })
+ .collect()
+}
+
+/// Tests performance of updating the last-use timestamps in an already
+/// populated database.
+///
+/// This runs for different sizes of number of crates to update (selecting
+/// from the random sample stored on disk).
+fn global_tracker_update(c: &mut Criterion) {
+ let config = initialize_config();
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)
+ .unwrap();
+ let sample = Path::new(env!("CARGO_MANIFEST_DIR")).join(GLOBAL_CACHE_SAMPLE);
+ let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked();
+
+ let random_sample = load_random_sample();
+
+ let mut group = c.benchmark_group("global_tracker_update");
+ for size in [1, 10, 100, 500] {
+ if db_path.exists() {
+ fs::remove_file(&db_path).unwrap();
+ }
+
+ fs::copy(&sample, &db_path).unwrap();
+ let mut deferred = DeferredGlobalLastUse::new();
+ let mut tracker = GlobalCacheTracker::new(&config).unwrap();
+ group.bench_with_input(size.to_string(), &size, |b, &size| {
+ b.iter(|| {
+ for (encoded_registry_name, name, size) in &random_sample[..size] {
+ deferred.mark_registry_crate_used(global_cache_tracker::RegistryCrate {
+ encoded_registry_name: *encoded_registry_name,
+ crate_filename: format!("{}.crate", name).into(),
+ size: *size,
+ });
+ deferred.mark_registry_src_used(global_cache_tracker::RegistrySrc {
+ encoded_registry_name: *encoded_registry_name,
+ package_dir: *name,
+ size: Some(*size),
+ });
+ }
+ deferred.save(&mut tracker).unwrap();
+ })
+ });
+ }
+}
+
+criterion_group!(
+ benches,
+ global_tracker_init,
+ global_tracker_empty_save,
+ global_tracker_update
+);
+criterion_main!(benches);
diff --git a/src/tools/cargo/benches/benchsuite/global-cache-tracker/global-cache-sample b/src/tools/cargo/benches/benchsuite/global-cache-tracker/global-cache-sample
new file mode 100644
index 000000000..dc134f538
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/global-cache-tracker/global-cache-sample
Binary files differ
diff --git a/src/tools/cargo/benches/benchsuite/global-cache-tracker/random-sample b/src/tools/cargo/benches/benchsuite/global-cache-tracker/random-sample
new file mode 100644
index 000000000..62b611cff
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/global-cache-tracker/random-sample
@@ -0,0 +1,500 @@
+github.com-1ecc6299db9ec823,tungstenite-0.18.0,218740
+github.com-1ecc6299db9ec823,integer-encoding-1.1.5,30672
+github.com-1ecc6299db9ec823,tungstenite-0.14.0,315676
+github.com-1ecc6299db9ec823,oxcable-0.5.1,163196
+github.com-1ecc6299db9ec823,swc_ecma_transforms_typescript-0.32.0,245522
+github.com-1ecc6299db9ec823,hyper-0.12.35,601153
+github.com-1ecc6299db9ec823,resiter-0.4.0,59880
+github.com-1ecc6299db9ec823,net2-0.2.37,115813
+github.com-1ecc6299db9ec823,str_inflector-0.12.0,182460
+github.com-1ecc6299db9ec823,derive_builder_macro-0.10.2,16441
+github.com-1ecc6299db9ec823,smol_str-0.1.23,42436
+github.com-1ecc6299db9ec823,wasm-bindgen-multi-value-xform-0.2.83,35347
+github.com-1ecc6299db9ec823,time-macros-0.1.0,1620
+github.com-1ecc6299db9ec823,unicode-bidi-0.3.7,140153
+github.com-1ecc6299db9ec823,socket2-0.4.0,167295
+github.com-1ecc6299db9ec823,ppv-lite86-0.2.10,125234
+github.com-1ecc6299db9ec823,tracing-wasm-0.2.1,31449
+github.com-1ecc6299db9ec823,eframe-0.19.0,158130
+github.com-1ecc6299db9ec823,block-modes-0.7.0,42530
+github.com-1ecc6299db9ec823,rangemap-0.1.11,144157
+github.com-1ecc6299db9ec823,metal-0.23.1,1038699
+github.com-1ecc6299db9ec823,os_str_bytes-6.0.1,86390
+github.com-1ecc6299db9ec823,plotters-backend-0.3.4,53018
+github.com-1ecc6299db9ec823,spidev-0.4.0,45301
+github.com-1ecc6299db9ec823,axum-macros-0.2.3,102058
+github.com-1ecc6299db9ec823,embedded-time-0.12.1,246450
+github.com-1ecc6299db9ec823,envmnt-0.10.4,2328079
+github.com-1ecc6299db9ec823,camino-1.1.1,133976
+github.com-1ecc6299db9ec823,siphasher-0.3.5,46666
+github.com-1ecc6299db9ec823,lexical-write-integer-0.8.5,388374
+github.com-1ecc6299db9ec823,reqwest-0.11.14,686608
+github.com-1ecc6299db9ec823,enum-map-2.4.1,51184
+github.com-1ecc6299db9ec823,sentry-panic-0.29.0,18211
+github.com-1ecc6299db9ec823,msf-srtp-0.2.0,73164
+github.com-1ecc6299db9ec823,near-sandbox-utils-0.4.1,7543
+github.com-1ecc6299db9ec823,ablescript-0.5.2,129318
+github.com-1ecc6299db9ec823,apecs-derive-0.2.3,10620
+github.com-1ecc6299db9ec823,libc-0.2.133,3417382
+github.com-1ecc6299db9ec823,tracing-0.1.35,380627
+github.com-1ecc6299db9ec823,serde-wasm-bindgen-0.3.1,55371
+github.com-1ecc6299db9ec823,compiler_builtins-0.1.71,692853
+github.com-1ecc6299db9ec823,mockito-0.7.2,1179718
+github.com-1ecc6299db9ec823,tonic-0.5.2,420299
+github.com-1ecc6299db9ec823,tracing-core-0.1.30,240058
+github.com-1ecc6299db9ec823,tower-timeout-0.3.0-alpha.2,7486
+github.com-1ecc6299db9ec823,js-intern-0.3.1,7026
+github.com-1ecc6299db9ec823,json-ld-context-processing-0.12.1,78101
+github.com-1ecc6299db9ec823,generic-array-0.14.6,67349
+github.com-1ecc6299db9ec823,synstructure-0.12.3,93523
+github.com-1ecc6299db9ec823,version-compare-0.0.10,74950
+github.com-1ecc6299db9ec823,dirs-1.0.5,51075
+github.com-1ecc6299db9ec823,worker-kv-0.5.1,67351
+github.com-1ecc6299db9ec823,vsimd-0.8.0,170805
+github.com-1ecc6299db9ec823,mockall-0.9.1,187734
+github.com-1ecc6299db9ec823,nan-preserving-float-0.1.0,6341
+github.com-1ecc6299db9ec823,wasmer-types-2.3.0,192436
+github.com-1ecc6299db9ec823,sodiumoxide-0.2.7,5131115
+github.com-1ecc6299db9ec823,tracing-attributes-0.1.11,74857
+github.com-1ecc6299db9ec823,treediff-4.0.2,72588
+github.com-1ecc6299db9ec823,wiggle-generate-5.0.0,103044
+github.com-1ecc6299db9ec823,lapin-1.6.6,497368
+github.com-1ecc6299db9ec823,cranelift-entity-0.93.1,114206
+github.com-1ecc6299db9ec823,pcap-parser-0.13.3,184131
+github.com-1ecc6299db9ec823,rustfft-5.1.1,1638221
+github.com-1ecc6299db9ec823,string_cache-0.7.5,75074
+github.com-1ecc6299db9ec823,maybe-uninit-2.0.0,38492
+github.com-1ecc6299db9ec823,diesel_full_text_search-2.0.0,10179
+github.com-1ecc6299db9ec823,quinn-proto-0.8.4,687565
+github.com-1ecc6299db9ec823,semver-0.5.1,73365
+github.com-1ecc6299db9ec823,rocket_http-0.5.0-rc.2,409939
+github.com-1ecc6299db9ec823,dialoguer-0.7.1,95159
+github.com-1ecc6299db9ec823,fallible_collections-0.4.5,244152
+github.com-1ecc6299db9ec823,parking_lot_core-0.9.0,138932
+github.com-1ecc6299db9ec823,relative-path-1.6.0,103315
+github.com-1ecc6299db9ec823,lua52-sys-0.1.2,584054
+github.com-1ecc6299db9ec823,actix-files-0.6.0,126121
+github.com-1ecc6299db9ec823,crates-io-0.35.1,29498
+github.com-1ecc6299db9ec823,sentry-backtrace-0.19.1,20268
+github.com-1ecc6299db9ec823,text_unit-0.1.10,26100
+github.com-1ecc6299db9ec823,ascii-1.0.0,143025
+github.com-1ecc6299db9ec823,crossbeam-utils-0.8.6,169542
+github.com-1ecc6299db9ec823,nelf-0.1.0,28868
+github.com-1ecc6299db9ec823,colorsys-0.6.5,86989
+github.com-1ecc6299db9ec823,enum-iterator-1.2.0,31042
+github.com-1ecc6299db9ec823,ansi-str-0.7.2,111689
+github.com-1ecc6299db9ec823,anyhow-1.0.68,209123
+github.com-1ecc6299db9ec823,gix-lock-5.0.1,65110
+github.com-1ecc6299db9ec823,nom-supreme-0.8.0,147530
+github.com-1ecc6299db9ec823,path-slash-0.1.4,28655
+github.com-1ecc6299db9ec823,crates-io-0.35.0,29406
+github.com-1ecc6299db9ec823,stb_truetype-0.2.8,22939
+github.com-1ecc6299db9ec823,proc-macro2-1.0.50,185288
+github.com-1ecc6299db9ec823,snapbox-0.4.1,169526
+github.com-1ecc6299db9ec823,hyper-0.14.9,764075
+github.com-1ecc6299db9ec823,ab_glyph-0.2.15,61722
+github.com-1ecc6299db9ec823,uuid-0.1.18,47889
+github.com-1ecc6299db9ec823,data-url-0.2.0,123480
+github.com-1ecc6299db9ec823,threadpool-1.7.1,59558
+github.com-1ecc6299db9ec823,thiserror-impl-1.0.29,65149
+github.com-1ecc6299db9ec823,sha1-0.6.0,31102
+github.com-1ecc6299db9ec823,tokio-tls-0.2.1,51467
+github.com-1ecc6299db9ec823,locspan-derive-0.6.0,59360
+github.com-1ecc6299db9ec823,ureq-1.5.1,249335
+github.com-1ecc6299db9ec823,protoc-rust-2.24.1,13459
+github.com-1ecc6299db9ec823,serde-1.0.159,509060
+github.com-1ecc6299db9ec823,unescape-0.1.0,6047
+github.com-1ecc6299db9ec823,data-encoding-2.2.0,113191
+github.com-1ecc6299db9ec823,bytestring-1.1.0,23705
+github.com-1ecc6299db9ec823,ab_glyph_rasterizer-0.1.8,34773
+github.com-1ecc6299db9ec823,syn-0.12.15,912964
+github.com-1ecc6299db9ec823,reqwest-0.11.9,656209
+github.com-1ecc6299db9ec823,rustls-0.17.0,903717
+github.com-1ecc6299db9ec823,term_size-0.3.2,36226
+github.com-1ecc6299db9ec823,ordered-float-3.1.0,91357
+github.com-1ecc6299db9ec823,cookie-0.2.5,44912
+github.com-1ecc6299db9ec823,debugid-0.8.0,44521
+github.com-1ecc6299db9ec823,conrod-0.51.1,2154016
+github.com-1ecc6299db9ec823,indexmap-1.6.1,247801
+github.com-1ecc6299db9ec823,target-spec-1.3.1,68315
+github.com-1ecc6299db9ec823,lexical-parse-integer-0.8.6,139671
+github.com-1ecc6299db9ec823,time-0.1.38,131629
+github.com-1ecc6299db9ec823,glib-macros-0.14.1,102959
+github.com-1ecc6299db9ec823,metrics-macros-0.6.0,37750
+github.com-1ecc6299db9ec823,structopt-0.3.12,224213
+github.com-1ecc6299db9ec823,criterion-0.3.2,439241
+github.com-1ecc6299db9ec823,lyon_path-0.17.7,186745
+github.com-1ecc6299db9ec823,miette-5.5.0,312945
+github.com-1ecc6299db9ec823,tokio-codec-0.2.0-alpha.6,118193
+github.com-1ecc6299db9ec823,structopt-derive-0.4.14,84883
+github.com-1ecc6299db9ec823,objekt-0.1.2,24191
+github.com-1ecc6299db9ec823,sqlx-macros-0.5.7,110890
+github.com-1ecc6299db9ec823,systemstat-0.1.10,127295
+github.com-1ecc6299db9ec823,colorful-0.2.2,99698
+github.com-1ecc6299db9ec823,quick-xml-0.20.0,645935
+github.com-1ecc6299db9ec823,selinux-sys-0.6.2,27060
+github.com-1ecc6299db9ec823,vsmtp-mail-parser-1.4.0-rc.10,137699
+github.com-1ecc6299db9ec823,sec1-0.7.2,64870
+github.com-1ecc6299db9ec823,nix-0.22.1,1161830
+github.com-1ecc6299db9ec823,snow-0.9.0,2658286
+github.com-1ecc6299db9ec823,per_test_directory_macros-0.1.0,2962
+github.com-1ecc6299db9ec823,syn-helpers-0.4.3,58801
+github.com-1ecc6299db9ec823,terminal_size-0.2.2,29633
+github.com-1ecc6299db9ec823,bevy_hierarchy-0.7.0,41018
+github.com-1ecc6299db9ec823,dynamic_reload-0.4.0,74455
+github.com-1ecc6299db9ec823,http-signature-normalization-actix-0.5.0-beta.14,126857
+github.com-1ecc6299db9ec823,http-body-0.4.1,24138
+github.com-1ecc6299db9ec823,gix-index-0.13.0,207795
+github.com-1ecc6299db9ec823,darling_macro-0.13.1,4156
+github.com-1ecc6299db9ec823,serde_json-1.0.66,543072
+github.com-1ecc6299db9ec823,minreq-1.4.1,41355
+github.com-1ecc6299db9ec823,sct-0.6.1,60974
+github.com-1ecc6299db9ec823,openssl-0.10.50,1173941
+github.com-1ecc6299db9ec823,bevy_pbr-0.6.0,201163
+github.com-1ecc6299db9ec823,security-framework-2.3.1,290512
+github.com-1ecc6299db9ec823,pin-project-internal-0.4.30,128419
+github.com-1ecc6299db9ec823,serde_yaml-0.7.5,158524
+github.com-1ecc6299db9ec823,cid-0.3.2,17269
+github.com-1ecc6299db9ec823,plotters-backend-0.3.0,51995
+github.com-1ecc6299db9ec823,serde_yaml-0.8.12,179579
+github.com-1ecc6299db9ec823,cosmwasm-schema-derive-1.1.9,34956
+github.com-1ecc6299db9ec823,docopt-0.6.86,175553
+github.com-1ecc6299db9ec823,git-testament-0.2.4,27685
+github.com-1ecc6299db9ec823,htmlescape-0.3.1,143378
+github.com-1ecc6299db9ec823,is_proc_translated-0.1.1,16533
+github.com-1ecc6299db9ec823,futures-macro-0.3.4,33147
+github.com-1ecc6299db9ec823,futures-intrusive-0.4.2,520476
+github.com-1ecc6299db9ec823,rustix-0.35.13,1581355
+github.com-1ecc6299db9ec823,glsl-layout-0.3.2,75515
+github.com-1ecc6299db9ec823,darling-0.12.0,67446
+github.com-1ecc6299db9ec823,blake3-0.1.5,394136
+github.com-1ecc6299db9ec823,async-stripe-0.15.0,3157635
+github.com-1ecc6299db9ec823,hbs-common-sys-0.2.1,1034
+github.com-1ecc6299db9ec823,base58-0.1.0,7019
+github.com-1ecc6299db9ec823,time-0.2.23,342720
+github.com-1ecc6299db9ec823,memoffset-0.5.6,27595
+github.com-1ecc6299db9ec823,colored-1.9.3,85161
+github.com-1ecc6299db9ec823,lrpar-0.13.1,153317
+github.com-1ecc6299db9ec823,clap-2.34.0,975823
+github.com-1ecc6299db9ec823,chalk-engine-0.55.0,203718
+github.com-1ecc6299db9ec823,cosmic-space-0.3.6,800331
+github.com-1ecc6299db9ec823,syn-1.0.93,1886902
+github.com-1ecc6299db9ec823,futures-core-0.3.5,43430
+github.com-1ecc6299db9ec823,prost-derive-0.11.6,99428
+github.com-1ecc6299db9ec823,toml_edit-0.15.0,491549
+github.com-1ecc6299db9ec823,pcb-llvm-0.2.0,17328
+github.com-1ecc6299db9ec823,rusticata-macros-2.1.0,35537
+github.com-1ecc6299db9ec823,rustyline-with-hint-fix-10.1.0,548833
+github.com-1ecc6299db9ec823,sharded-slab-0.1.1,239224
+github.com-1ecc6299db9ec823,literally-0.1.3,20415
+github.com-1ecc6299db9ec823,riff-1.0.1,20582
+github.com-1ecc6299db9ec823,futures-macro-0.3.23,38691
+github.com-1ecc6299db9ec823,criterion-0.3.1,431723
+github.com-1ecc6299db9ec823,atty-0.2.14,14567
+github.com-1ecc6299db9ec823,vergen-3.1.0,49089
+github.com-1ecc6299db9ec823,peeking_take_while-0.1.2,18604
+github.com-1ecc6299db9ec823,serde_derive-1.0.156,316173
+github.com-1ecc6299db9ec823,geo-0.23.1,1022596
+github.com-1ecc6299db9ec823,persy-1.4.3,778219
+github.com-1ecc6299db9ec823,futures-lite-1.13.0,214632
+github.com-1ecc6299db9ec823,ms_dtyp-0.0.3,44387
+github.com-1ecc6299db9ec823,thiserror-1.0.33,66618
+github.com-1ecc6299db9ec823,marksman_escape-0.1.2,587235
+github.com-1ecc6299db9ec823,serde_derive-1.0.101,289156
+github.com-1ecc6299db9ec823,gix-ref-0.29.0,214105
+github.com-1ecc6299db9ec823,der-0.7.5,384316
+github.com-1ecc6299db9ec823,promptly-0.3.0,35216
+github.com-1ecc6299db9ec823,libc-0.2.115,3166629
+github.com-1ecc6299db9ec823,ppv-lite86-0.1.2,33514
+github.com-1ecc6299db9ec823,gfx-hal-0.6.0,254453
+github.com-1ecc6299db9ec823,as-slice-0.1.3,20306
+github.com-1ecc6299db9ec823,gpu-alloc-0.3.0,78823
+github.com-1ecc6299db9ec823,arc-swap-0.4.8,167950
+github.com-1ecc6299db9ec823,libusb1-sys-0.5.0,1458763
+github.com-1ecc6299db9ec823,sysinfo-0.26.8,609932
+github.com-1ecc6299db9ec823,refinery-macros-0.8.7,6514
+github.com-1ecc6299db9ec823,assert_float_eq-1.1.3,38445
+github.com-1ecc6299db9ec823,tinyvec-1.1.0,363582
+github.com-1ecc6299db9ec823,predicates-1.0.7,1168580
+github.com-1ecc6299db9ec823,pulldown-cmark-0.9.3,595681
+github.com-1ecc6299db9ec823,aws-sigv4-0.46.0,97885
+github.com-1ecc6299db9ec823,fastrand-1.5.0,39175
+github.com-1ecc6299db9ec823,futures-channel-0.3.17,131816
+github.com-1ecc6299db9ec823,usbd_scsi-0.1.0,172205
+github.com-1ecc6299db9ec823,tinyvec-1.4.0,379505
+github.com-1ecc6299db9ec823,structsy-0.5.1,513822
+github.com-1ecc6299db9ec823,aws-sdk-ssm-0.21.0,9755619
+github.com-1ecc6299db9ec823,pin-project-lite-0.1.1,63942
+github.com-1ecc6299db9ec823,tokio-rustls-0.13.0,78252
+github.com-1ecc6299db9ec823,tinyvec_macros-0.1.0,2912
+github.com-1ecc6299db9ec823,extended_matrix_float-1.0.0,6233
+github.com-1ecc6299db9ec823,displaydoc-0.2.3,68676
+github.com-1ecc6299db9ec823,typed-arena-2.0.2,43549
+github.com-1ecc6299db9ec823,cranelift-0.86.1,16294
+github.com-1ecc6299db9ec823,modular-bitfield-impl-0.10.0,64389
+github.com-1ecc6299db9ec823,schemafy_core-0.5.2,7696
+github.com-1ecc6299db9ec823,sea-orm-macros-0.8.0,86930
+github.com-1ecc6299db9ec823,core-foundation-sys-0.4.6,61859
+github.com-1ecc6299db9ec823,move-symbol-pool-0.3.2,14473
+github.com-1ecc6299db9ec823,glutin-0.25.1,300518
+github.com-1ecc6299db9ec823,postcard-cobs-0.2.0,41524
+github.com-1ecc6299db9ec823,quote-0.6.11,69636
+github.com-1ecc6299db9ec823,encoding_rs-0.8.32,5022316
+github.com-1ecc6299db9ec823,clap-2.32.0,946148
+github.com-1ecc6299db9ec823,term-0.6.1,181220
+github.com-1ecc6299db9ec823,enumset-1.0.12,85911
+github.com-1ecc6299db9ec823,ctest2-0.4.1,100745
+github.com-1ecc6299db9ec823,serde-xml-any-0.0.3,70554
+github.com-1ecc6299db9ec823,proc-macro-hack-0.5.11,39025
+github.com-1ecc6299db9ec823,remove_dir_all-0.5.1,23418
+github.com-1ecc6299db9ec823,weezl-0.1.5,134218
+github.com-1ecc6299db9ec823,windows_x86_64_gnullvm-0.42.1,3254874
+github.com-1ecc6299db9ec823,rocket-0.5.0-rc.2,1225987
+github.com-1ecc6299db9ec823,pin-project-0.4.27,282004
+github.com-1ecc6299db9ec823,criterion-cycles-per-byte-0.1.3,18296
+github.com-1ecc6299db9ec823,coco-0.1.1,107143
+github.com-1ecc6299db9ec823,solana-bloom-1.15.1,22207
+github.com-1ecc6299db9ec823,qoqo_calculator-1.1.1,163666
+github.com-1ecc6299db9ec823,aes-gcm-0.9.4,381036
+github.com-1ecc6299db9ec823,blowfish-0.9.1,39658
+github.com-1ecc6299db9ec823,pango-0.14.3,258440
+github.com-1ecc6299db9ec823,clap_derive-3.0.0,129105
+github.com-1ecc6299db9ec823,content_inspector-0.2.4,27568
+github.com-1ecc6299db9ec823,jsona-0.2.0,104104
+github.com-1ecc6299db9ec823,gix-quote-0.4.3,32314
+github.com-1ecc6299db9ec823,bcs-0.1.3,93194
+github.com-1ecc6299db9ec823,statrs-0.14.0,681982
+github.com-1ecc6299db9ec823,cw-controllers-0.16.0,32195
+github.com-1ecc6299db9ec823,hyper-0.12.36,578470
+github.com-1ecc6299db9ec823,argon2-0.4.1,112707
+github.com-1ecc6299db9ec823,fraction-0.12.2,482976
+github.com-1ecc6299db9ec823,quickcheck-0.7.2,89884
+github.com-1ecc6299db9ec823,typetag-0.1.8,135149
+github.com-1ecc6299db9ec823,object-0.20.0,916661
+github.com-1ecc6299db9ec823,pest_derive-2.2.1,60318
+github.com-1ecc6299db9ec823,coremidi-sys-3.1.0,40849
+github.com-1ecc6299db9ec823,either-1.6.0,48881
+github.com-1ecc6299db9ec823,tarpc-0.29.0,244416
+github.com-1ecc6299db9ec823,num-integer-0.1.42,88403
+github.com-1ecc6299db9ec823,oid-registry-0.6.0,46996
+github.com-1ecc6299db9ec823,historian-3.0.11,23818
+github.com-1ecc6299db9ec823,ui-sys-0.1.3,1784250
+github.com-1ecc6299db9ec823,cranelift-frontend-0.92.0,166902
+github.com-1ecc6299db9ec823,pin-project-lite-0.1.12,77882
+github.com-1ecc6299db9ec823,piston2d-gfx_graphics-0.72.0,91826
+github.com-1ecc6299db9ec823,stylist-macros-0.9.2,78647
+github.com-1ecc6299db9ec823,valico-3.4.0,1394467
+github.com-1ecc6299db9ec823,inventory-0.3.3,40329
+github.com-1ecc6299db9ec823,wrapping_arithmetic-0.1.0,8774
+github.com-1ecc6299db9ec823,serde-1.0.138,502921
+github.com-1ecc6299db9ec823,ra_common-0.1.3,16920
+github.com-1ecc6299db9ec823,markup5ever-0.10.0,213742
+github.com-1ecc6299db9ec823,libp2p-core-0.20.1,460422
+github.com-1ecc6299db9ec823,inout-0.1.2,40474
+github.com-1ecc6299db9ec823,flatbuffers-23.1.21,103944
+github.com-1ecc6299db9ec823,gdk-pixbuf-sys-0.10.0,42914
+github.com-1ecc6299db9ec823,miniz_oxide-0.5.1,223551
+github.com-1ecc6299db9ec823,merge-0.1.0,70214
+github.com-1ecc6299db9ec823,pagecache-0.6.0,260742
+github.com-1ecc6299db9ec823,ritelinked-0.3.2,142063
+github.com-1ecc6299db9ec823,ethers-contract-1.0.2,589452
+github.com-1ecc6299db9ec823,color_quant-1.1.0,21284
+github.com-1ecc6299db9ec823,libykpers-sys-0.3.1,14270
+github.com-1ecc6299db9ec823,cgmath-0.17.0,367702
+github.com-1ecc6299db9ec823,clap-4.0.18,1096299
+github.com-1ecc6299db9ec823,ears-0.5.1,165152
+github.com-1ecc6299db9ec823,h2-0.2.5,765073
+github.com-1ecc6299db9ec823,image-0.22.5,725576
+github.com-1ecc6299db9ec823,digest-0.10.1,83013
+github.com-1ecc6299db9ec823,js-sys-0.3.46,410849
+github.com-1ecc6299db9ec823,psl-types-2.0.11,25329
+github.com-1ecc6299db9ec823,apub-core-0.2.0,52434
+github.com-1ecc6299db9ec823,thiserror-1.0.22,59077
+github.com-1ecc6299db9ec823,num-complex-0.4.3,139539
+github.com-1ecc6299db9ec823,autocfg-1.0.1,41521
+github.com-1ecc6299db9ec823,amethyst_locale-0.15.3,4896
+github.com-1ecc6299db9ec823,tokio-timer-0.2.11,167147
+github.com-1ecc6299db9ec823,pipe-trait-0.2.1,11031
+github.com-1ecc6299db9ec823,http-muncher-0.3.2,259101
+github.com-1ecc6299db9ec823,thin-dst-1.1.0,46297
+github.com-1ecc6299db9ec823,float-ord-0.2.0,21145
+github.com-1ecc6299db9ec823,trust-dns-proto-0.21.2,1312809
+github.com-1ecc6299db9ec823,ordered-multimap-0.4.3,178966
+github.com-1ecc6299db9ec823,bitflags-0.4.0,33932
+github.com-1ecc6299db9ec823,windows_x86_64_gnullvm-0.42.0,3240134
+github.com-1ecc6299db9ec823,cargo-util-0.1.2,72189
+github.com-1ecc6299db9ec823,serde_with_macros-1.5.2,72325
+github.com-1ecc6299db9ec823,wasmer-2.3.0,529984
+github.com-1ecc6299db9ec823,tokio-codec-0.1.2,30428
+github.com-1ecc6299db9ec823,pico-args-0.5.0,54991
+github.com-1ecc6299db9ec823,migformatting-0.1.1,1680
+github.com-1ecc6299db9ec823,lexical-core-0.6.7,2382284
+github.com-1ecc6299db9ec823,katex-wasmbind-0.10.0,274096
+github.com-1ecc6299db9ec823,blender-armature-0.0.1,51371
+github.com-1ecc6299db9ec823,twoway-0.2.1,129719
+github.com-1ecc6299db9ec823,sha3-0.10.0,540582
+github.com-1ecc6299db9ec823,ringbuf-0.2.8,92733
+github.com-1ecc6299db9ec823,pest_meta-2.1.3,175833
+github.com-1ecc6299db9ec823,selectme-macros-0.7.1,79130
+github.com-1ecc6299db9ec823,secp256k1-sys-0.7.0,5303296
+github.com-1ecc6299db9ec823,panic-probe-0.3.0,18841
+github.com-1ecc6299db9ec823,ron-0.6.6,208755
+github.com-1ecc6299db9ec823,defmt-macros-0.3.3,78405
+github.com-1ecc6299db9ec823,winapi-x86_64-pc-windows-gnu-0.4.0,53158182
+github.com-1ecc6299db9ec823,aph-0.2.0,30088
+github.com-1ecc6299db9ec823,winnow-0.4.6,959730
+github.com-1ecc6299db9ec823,syntex_syntax-0.54.0,1272567
+github.com-1ecc6299db9ec823,prost-derive-0.11.9,99428
+github.com-1ecc6299db9ec823,commoncrypto-sys-0.2.0,16095
+github.com-1ecc6299db9ec823,yew-router-macro-0.15.0,42667
+github.com-1ecc6299db9ec823,http-range-header-0.3.0,29647
+github.com-1ecc6299db9ec823,crossbeam-queue-0.2.3,60131
+github.com-1ecc6299db9ec823,slice-deque-0.3.0,271889
+github.com-1ecc6299db9ec823,libc-0.2.65,2334946
+github.com-1ecc6299db9ec823,minidom-0.14.0,102507
+github.com-1ecc6299db9ec823,tokio-native-tls-0.3.0,60313
+github.com-1ecc6299db9ec823,glam-0.17.3,1191013
+github.com-1ecc6299db9ec823,semver-1.0.6,114819
+github.com-1ecc6299db9ec823,cortex-m-rtfm-macros-0.5.1,112048
+github.com-1ecc6299db9ec823,bitvec-1.0.0,1006982
+github.com-1ecc6299db9ec823,gfx-backend-metal-0.6.5,660301
+github.com-1ecc6299db9ec823,object-0.30.1,1467041
+github.com-1ecc6299db9ec823,proc-macro-error-attr-0.4.11,18220
+github.com-1ecc6299db9ec823,proteus-0.5.0,179567
+github.com-1ecc6299db9ec823,crunchy-0.1.6,6678
+github.com-1ecc6299db9ec823,once_cell-1.7.2,121632
+github.com-1ecc6299db9ec823,rel-0.2.0,14524
+github.com-1ecc6299db9ec823,lexical-core-0.7.5,2355166
+github.com-1ecc6299db9ec823,windows_x86_64_gnu-0.42.1,10581222
+github.com-1ecc6299db9ec823,thread_local-1.1.5,49409
+github.com-1ecc6299db9ec823,openssl-sys-0.9.63,285709
+github.com-1ecc6299db9ec823,simplelog-0.11.2,85170
+github.com-1ecc6299db9ec823,thiserror-impl-1.0.25,55249
+github.com-1ecc6299db9ec823,quanta-0.10.0,82241
+github.com-1ecc6299db9ec823,vsmtp-common-1.4.0-rc.10,122740
+github.com-1ecc6299db9ec823,tonic-0.1.0-alpha.6,302938
+github.com-1ecc6299db9ec823,ecdsa-0.16.1,121203
+github.com-1ecc6299db9ec823,deltae-0.3.0,2871017
+github.com-1ecc6299db9ec823,phf_shared-0.11.1,30454
+github.com-1ecc6299db9ec823,trustfall-rustdoc-adapter-22.5.2,5348192
+github.com-1ecc6299db9ec823,mockall_derive-0.11.0,227736
+github.com-1ecc6299db9ec823,wasm-bindgen-0.2.64,584320
+github.com-1ecc6299db9ec823,sg-std-0.12.0,27020
+github.com-1ecc6299db9ec823,chalk-ir-0.87.0,288472
+github.com-1ecc6299db9ec823,environment-0.1.1,9957
+github.com-1ecc6299db9ec823,crash-handler-0.3.3,125183
+github.com-1ecc6299db9ec823,bindgen-0.59.2,958852
+github.com-1ecc6299db9ec823,serde_path_to_error-0.1.7,101591
+github.com-1ecc6299db9ec823,tinyvec-0.3.3,77508
+github.com-1ecc6299db9ec823,precomputed-hash-0.1.1,2853
+github.com-1ecc6299db9ec823,rustc-rayon-core-0.4.1,264995
+github.com-1ecc6299db9ec823,gix-sec-0.6.2,57428
+github.com-1ecc6299db9ec823,pistoncore-input-0.19.0,83490
+github.com-1ecc6299db9ec823,gloo-utils-0.1.5,15602
+github.com-1ecc6299db9ec823,redox_intelflash-0.1.3,28056
+github.com-1ecc6299db9ec823,block2-0.2.0-alpha.6,39192
+github.com-1ecc6299db9ec823,fastly-shared-0.9.1,19292
+github.com-1ecc6299db9ec823,ibc-chain-registry-0.1.0,48243
+github.com-1ecc6299db9ec823,socket2-0.4.4,205035
+github.com-1ecc6299db9ec823,futures-channel-0.3.19,132274
+github.com-1ecc6299db9ec823,structopt-0.3.16,217443
+github.com-1ecc6299db9ec823,rusty-fork-0.2.2,64570
+github.com-1ecc6299db9ec823,parking_lot_core-0.9.7,139601
+github.com-1ecc6299db9ec823,async-lock-2.6.0,99844
+github.com-1ecc6299db9ec823,bindgen-0.56.0,923373
+github.com-1ecc6299db9ec823,quad-rand-0.2.1,9108
+github.com-1ecc6299db9ec823,wasmflow-codec-0.10.0,12343
+github.com-1ecc6299db9ec823,gix-0.38.0,883190
+github.com-1ecc6299db9ec823,futures-macro-0.3.27,38519
+github.com-1ecc6299db9ec823,portable-atomic-0.3.13,549649
+github.com-1ecc6299db9ec823,portable-atomic-1.3.2,799707
+github.com-1ecc6299db9ec823,bevy-crevice-derive-0.6.0,16165
+github.com-1ecc6299db9ec823,gltf-json-0.15.2,118263
+github.com-1ecc6299db9ec823,struple-impl-0.1.0,4096
+github.com-1ecc6299db9ec823,annotate-snippets-0.9.1,153174
+github.com-1ecc6299db9ec823,futures-core-0.3.28,46207
+github.com-1ecc6299db9ec823,wezterm-bidi-0.2.2,361283
+github.com-1ecc6299db9ec823,mildew-0.1.2,3002
+github.com-1ecc6299db9ec823,bytecount-0.6.3,46567
+github.com-1ecc6299db9ec823,numext-fixed-hash-core-0.1.6,7403
+github.com-1ecc6299db9ec823,bytesize-1.1.0,34012
+github.com-1ecc6299db9ec823,oxsdatatypes-0.1.0,174662
+github.com-1ecc6299db9ec823,hostname-0.1.5,4811
+github.com-1ecc6299db9ec823,io-lifetimes-1.0.4,207652
+github.com-1ecc6299db9ec823,derive_builder_core-0.11.2,135502
+github.com-1ecc6299db9ec823,ttf-parser-0.15.2,711615
+github.com-1ecc6299db9ec823,tracing-opentelemetry-0.17.4,187675
+github.com-1ecc6299db9ec823,ab_glyph_rasterizer-0.1.7,34278
+github.com-1ecc6299db9ec823,bevy_diagnostic-0.6.0,14396
+github.com-1ecc6299db9ec823,toml_datetime-0.5.0,34801
+github.com-1ecc6299db9ec823,wasm-parser-0.1.7,39726
+github.com-1ecc6299db9ec823,ppv-null-0.1.2,26098
+github.com-1ecc6299db9ec823,ci_info-0.10.2,1197933
+github.com-1ecc6299db9ec823,jobserver-0.1.21,72720
+github.com-1ecc6299db9ec823,sentencepiece-sys-0.10.0,10055292
+github.com-1ecc6299db9ec823,zstd-sys-2.0.1+zstd.1.5.2,3387955
+github.com-1ecc6299db9ec823,byte-strings-proc_macros-0.2.2,7886
+github.com-1ecc6299db9ec823,snapbox-0.4.11,193312
+github.com-1ecc6299db9ec823,ron-0.6.4,198516
+github.com-1ecc6299db9ec823,gix-object-0.28.0,102536
+github.com-1ecc6299db9ec823,strum_macros-0.23.1,87403
+github.com-1ecc6299db9ec823,defmt-0.3.2,93568
+github.com-1ecc6299db9ec823,openssl-0.10.35,971227
+github.com-1ecc6299db9ec823,gtk-sys-0.14.0,1376726
+github.com-1ecc6299db9ec823,gpu-alloc-0.4.7,99476
+github.com-1ecc6299db9ec823,colored-2.0.0,91075
+github.com-1ecc6299db9ec823,fixedbitset-0.4.2,67872
+github.com-1ecc6299db9ec823,argparse-0.2.2,95032
+github.com-1ecc6299db9ec823,bevy_mod_raycast-0.6.2,456756
+github.com-1ecc6299db9ec823,byte-strings-0.2.2,35209
+github.com-1ecc6299db9ec823,mem_tools-0.1.0,937956
+github.com-1ecc6299db9ec823,deno_core-0.167.0,11067700
+github.com-1ecc6299db9ec823,rocksdb-0.19.0,628015
+github.com-1ecc6299db9ec823,num-traits-0.2.12,231414
+github.com-1ecc6299db9ec823,type-info-derive-0.2.0,56221
+github.com-1ecc6299db9ec823,structopt-derive-0.3.4,68017
+github.com-1ecc6299db9ec823,extendr-macros-0.3.1,49695
+github.com-1ecc6299db9ec823,secret-cosmwasm-std-1.0.0,632711
+github.com-1ecc6299db9ec823,skim-0.7.0,380243
+github.com-1ecc6299db9ec823,serde-1.0.135,501463
+github.com-1ecc6299db9ec823,lock_api-0.1.5,109183
+github.com-1ecc6299db9ec823,cw-multi-test-0.16.2,445599
+github.com-1ecc6299db9ec823,quote-1.0.10,120640
+github.com-1ecc6299db9ec823,safemem-0.3.2,17382
+github.com-1ecc6299db9ec823,gloo-dialogs-0.1.1,4653
+github.com-1ecc6299db9ec823,dashmap-4.0.2,105438
+github.com-1ecc6299db9ec823,oorandom-11.1.0,31893
+github.com-1ecc6299db9ec823,polars-core-0.21.1,1678691
+github.com-1ecc6299db9ec823,claxon-0.4.2,259276
+github.com-1ecc6299db9ec823,cc-1.0.35,179169
+github.com-1ecc6299db9ec823,cocoa-0.19.1,296083
+github.com-1ecc6299db9ec823,tokio-1.9.0,2490393
+github.com-1ecc6299db9ec823,gix-refspec-0.10.1,105495
+github.com-1ecc6299db9ec823,futures-task-0.3.12,39561
+github.com-1ecc6299db9ec823,sqlx-core-0.4.2,1064795
+github.com-1ecc6299db9ec823,futures-task-0.3.14,39566
+github.com-1ecc6299db9ec823,datastore_grpc-0.4.0,18233399
+github.com-1ecc6299db9ec823,directories-4.0.1,74013
+github.com-1ecc6299db9ec823,wgpu-hal-0.15.1,1201034
+github.com-1ecc6299db9ec823,discard-1.0.4,14342
+github.com-1ecc6299db9ec823,tinytga-0.1.0,102322
+github.com-1ecc6299db9ec823,prost-types-0.10.1,126121
+github.com-1ecc6299db9ec823,assert2-0.3.6,36145
+github.com-1ecc6299db9ec823,syn-inline-mod-0.5.0,35740
+github.com-1ecc6299db9ec823,bat-0.22.1,5407476
+github.com-1ecc6299db9ec823,minidumper-child-0.1.0,32329
+github.com-1ecc6299db9ec823,libp2p-kad-0.21.0,416675
+github.com-1ecc6299db9ec823,asn1_der-0.6.3,1102166
+github.com-1ecc6299db9ec823,h2-0.2.4,764682
+github.com-1ecc6299db9ec823,ena-0.14.2,90713
+github.com-1ecc6299db9ec823,prost-build-0.8.0,31248726
+github.com-1ecc6299db9ec823,wasmer-compiler-cranelift-3.1.1,300456
+github.com-1ecc6299db9ec823,gfx-hal-0.7.0,238750
+github.com-1ecc6299db9ec823,nom-4.2.3,644514
+github.com-1ecc6299db9ec823,os_str_bytes-2.4.0,52159
+github.com-1ecc6299db9ec823,sourcemap-6.2.1,135303
+github.com-1ecc6299db9ec823,actix-router-0.5.1,150753
+github.com-1ecc6299db9ec823,markup5ever-0.9.0,229731
+github.com-1ecc6299db9ec823,gloo-worker-0.2.1,31624
+github.com-1ecc6299db9ec823,object-0.25.3,1313095
+github.com-1ecc6299db9ec823,rustversion-1.0.0,41602
diff --git a/src/tools/cargo/benches/benchsuite/src/bin/capture-last-use.rs b/src/tools/cargo/benches/benchsuite/src/bin/capture-last-use.rs
new file mode 100644
index 000000000..3034d49ac
--- /dev/null
+++ b/src/tools/cargo/benches/benchsuite/src/bin/capture-last-use.rs
@@ -0,0 +1,148 @@
+//! Utility for capturing a global cache last-use database based on the files
+//! on a real-world system.
+//!
+//! This will look in the CARGO_HOME of the current system and record last-use
+//! data for all files in the cache. This is intended to provide a real-world
+//! example for a benchmark that should be close to what a real set of data
+//! should look like.
+//!
+//! See `benches/global_cache_tracker.rs` for the benchmark that uses this
+//! data.
+//!
+//! The database is kept in git. It usually shouldn't need to be re-generated
+//! unless there is a change in the schema or the benchmark.
+
+use cargo::core::global_cache_tracker::{self, DeferredGlobalLastUse, GlobalCacheTracker};
+use cargo::util::cache_lock::CacheLockMode;
+use cargo::util::interning::InternedString;
+use cargo::Config;
+use rand::prelude::SliceRandom;
+use std::collections::HashMap;
+use std::fs;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+
+/// Scans the current machine's real `CARGO_HOME` and regenerates the two
+/// committed benchmark fixtures: the `global-cache-sample` database and the
+/// `random-sample` list of registry src entries.
+fn main() {
+    // Set up config.
+    // Use the benchsuite's `global-cache-tracker` directory as both
+    // CARGO_HOME and cwd so the generated database lands next to the
+    // benchmark fixtures.
+    let shell = cargo::core::Shell::new();
+    let homedir = Path::new(env!("CARGO_MANIFEST_DIR")).join("global-cache-tracker");
+    let cwd = homedir.clone();
+    let mut config = Config::new(shell, cwd, homedir.clone());
+    config
+        .configure(
+            0,
+            false,
+            None,
+            false,
+            false,
+            false,
+            &None,
+            // Presumably enables the unstable `gc` feature needed by the
+            // tracker — TODO confirm against `Config::configure`'s signature.
+            &["gc".to_string()],
+            &[],
+        )
+        .unwrap();
+    // Start from a clean slate so rows from a previous run don't leak in.
+    let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked();
+    if db_path.exists() {
+        fs::remove_file(&db_path).unwrap();
+    }
+
+    // Writing to the tracker requires holding the package-cache lock.
+    let _lock = config
+        .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)
+        .unwrap();
+    let mut deferred = DeferredGlobalLastUse::new();
+    let mut tracker = GlobalCacheTracker::new(&config).unwrap();
+
+    // The user's actual cargo home is the source of the sample data.
+    let real_home = cargo::util::homedir(&std::env::current_dir().unwrap()).unwrap();
+
+    // Record one entry per downloaded `.crate` file, using the file's
+    // modification time as the last-use timestamp.
+    let cache_dir = real_home.join("registry/cache");
+    for dir_ent in fs::read_dir(cache_dir).unwrap() {
+        let registry = dir_ent.unwrap();
+        let encoded_registry_name = InternedString::new(&registry.file_name().to_string_lossy());
+        for krate in fs::read_dir(registry.path()).unwrap() {
+            let krate = krate.unwrap();
+            let meta = krate.metadata().unwrap();
+            deferred.mark_registry_crate_used_stamp(
+                global_cache_tracker::RegistryCrate {
+                    encoded_registry_name,
+                    crate_filename: krate.file_name().to_string_lossy().as_ref().into(),
+                    size: meta.len(),
+                },
+                Some(&meta.modified().unwrap()),
+            );
+        }
+    }
+
+    // Src entries are also kept in memory for the random sample below.
+    let mut src_entries = Vec::new();
+
+    // Record extracted source directories; directory sizes require a
+    // recursive disk-usage walk (`cargo_util::du`).
+    let cache_dir = real_home.join("registry/src");
+    for dir_ent in fs::read_dir(cache_dir).unwrap() {
+        let registry = dir_ent.unwrap();
+        let encoded_registry_name = InternedString::new(&registry.file_name().to_string_lossy());
+        for krate in fs::read_dir(registry.path()).unwrap() {
+            let krate = krate.unwrap();
+            let meta = krate.metadata().unwrap();
+            let src = global_cache_tracker::RegistrySrc {
+                encoded_registry_name,
+                package_dir: krate.file_name().to_string_lossy().as_ref().into(),
+                size: Some(cargo_util::du(&krate.path(), &[]).unwrap()),
+            };
+            src_entries.push(src.clone());
+            let timestamp = meta.modified().unwrap();
+            deferred.mark_registry_src_used_stamp(src, Some(&timestamp));
+        }
+    }
+
+    // Record git checkout directories the same way.
+    let git_co_dir = real_home.join("git/checkouts");
+    for dir_ent in fs::read_dir(git_co_dir).unwrap() {
+        let git_source = dir_ent.unwrap();
+        let encoded_git_name = InternedString::new(&git_source.file_name().to_string_lossy());
+        for co in fs::read_dir(git_source.path()).unwrap() {
+            let co = co.unwrap();
+            let meta = co.metadata().unwrap();
+            deferred.mark_git_checkout_used_stamp(
+                global_cache_tracker::GitCheckout {
+                    encoded_git_name,
+                    short_name: co.file_name().to_string_lossy().as_ref().into(),
+                    size: Some(cargo_util::du(&co.path(), &[]).unwrap()),
+                },
+                Some(&meta.modified().unwrap()),
+            );
+        }
+    }
+
+    // Flush everything to the database, then release it so the file can be
+    // renamed into place as the committed fixture.
+    deferred.save(&mut tracker).unwrap();
+    drop(deferred);
+    drop(tracker);
+    fs::rename(&db_path, homedir.join("global-cache-sample")).unwrap();
+    // Clean up the lock file created above.
+    fs::remove_file(homedir.join(".package-cache")).unwrap();
+
+    // Save a random sample of crates that the benchmark should update.
+    // Pick whichever registry has the most entries. This is to be somewhat
+    // realistic for the common case that all dependencies come from one
+    // registry (crates.io).
+    let mut counts = HashMap::new();
+    for src in &src_entries {
+        let c: &mut u32 = counts.entry(src.encoded_registry_name).or_default();
+        *c += 1;
+    }
+    // Sort (count, name) pairs; the last element is the registry with the
+    // most src entries.
+    let mut counts: Vec<_> = counts.into_iter().map(|(k, v)| (v, k)).collect();
+    counts.sort();
+    let biggest = counts.last().unwrap().1;
+
+    src_entries.retain(|src| src.encoded_registry_name == biggest);
+    let mut rng = &mut rand::thread_rng();
+    // 500 entries matches the largest batch size used by the benchmark.
+    let sample: Vec<_> = src_entries.choose_multiple(&mut rng, 500).collect();
+    let mut f = File::create(homedir.join("random-sample")).unwrap();
+    // One comma-separated line per entry: registry,package_dir,size.
+    for src in sample {
+        writeln!(
+            f,
+            "{},{},{}",
+            src.encoded_registry_name,
+            src.package_dir,
+            src.size.unwrap()
+        )
+        .unwrap();
+    }
+}
diff --git a/src/tools/cargo/benches/benchsuite/src/lib.rs b/src/tools/cargo/benches/benchsuite/src/lib.rs
index e470a03b9..f27710841 100644
--- a/src/tools/cargo/benches/benchsuite/src/lib.rs
+++ b/src/tools/cargo/benches/benchsuite/src/lib.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::disallowed_methods)]
+
use cargo::Config;
use std::fs;
use std::path::{Path, PathBuf};
diff --git a/src/tools/cargo/benches/capture/Cargo.toml b/src/tools/cargo/benches/capture/Cargo.toml
index e300815d5..16f2c5071 100644
--- a/src/tools/cargo/benches/capture/Cargo.toml
+++ b/src/tools/cargo/benches/capture/Cargo.toml
@@ -12,3 +12,6 @@ cargo_metadata.workspace = true
flate2.workspace = true
tar.workspace = true
toml.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/benches/capture/src/main.rs b/src/tools/cargo/benches/capture/src/main.rs
index f6f02c4ba..dcded3b1a 100644
--- a/src/tools/cargo/benches/capture/src/main.rs
+++ b/src/tools/cargo/benches/capture/src/main.rs
@@ -4,6 +4,9 @@
//! Use the `-f` flag to allow it to overwrite existing captures.
//! The workspace will be saved in a `.tgz` file in the `../workspaces` directory.
+#![allow(clippy::disallowed_methods)]
+#![allow(clippy::print_stderr)]
+
use flate2::{Compression, GzBuilder};
use std::fs;
use std::path::{Path, PathBuf};
diff --git a/src/tools/cargo/build.rs b/src/tools/cargo/build.rs
index 752221f8c..60fda40e3 100644
--- a/src/tools/cargo/build.rs
+++ b/src/tools/cargo/build.rs
@@ -7,6 +7,7 @@ use std::process::Command;
fn main() {
commit_info();
compress_man();
+ windows_manifest();
// ALLOWED: Accessing environment during build time shouldn't be prohibited.
#[allow(clippy::disallowed_methods)]
let target = std::env::var("TARGET").unwrap();
@@ -50,6 +51,14 @@ fn commit_info() {
if !Path::new(".git").exists() {
return;
}
+
+ // Var set by bootstrap whenever omit-git-hash is enabled in rust-lang/rust's config.toml.
+ println!("cargo:rerun-if-env-changed=CFG_OMIT_GIT_HASH");
+ #[allow(clippy::disallowed_methods)]
+ if std::env::var_os("CFG_OMIT_GIT_HASH").is_some() {
+ return;
+ }
+
let output = match Command::new("git")
.arg("log")
.arg("-1")
@@ -68,3 +77,26 @@ fn commit_info() {
println!("cargo:rustc-env=CARGO_COMMIT_SHORT_HASH={}", next());
println!("cargo:rustc-env=CARGO_COMMIT_DATE={}", next())
}
+
+#[allow(clippy::disallowed_methods)]
+fn windows_manifest() {
+ use std::env;
+ let target_os = env::var("CARGO_CFG_TARGET_OS");
+ let target_env = env::var("CARGO_CFG_TARGET_ENV");
+ if Ok("windows") == target_os.as_deref() && Ok("msvc") == target_env.as_deref() {
+ static WINDOWS_MANIFEST_FILE: &str = "windows.manifest.xml";
+
+ let mut manifest = env::current_dir().unwrap();
+ manifest.push(WINDOWS_MANIFEST_FILE);
+
+ println!("cargo:rerun-if-changed={WINDOWS_MANIFEST_FILE}");
+ // Embed the Windows application manifest file.
+ println!("cargo:rustc-link-arg-bin=cargo=/MANIFEST:EMBED");
+ println!(
+ "cargo:rustc-link-arg-bin=cargo=/MANIFESTINPUT:{}",
+ manifest.to_str().unwrap()
+ );
+ // Turn linker warnings into errors.
+ println!("cargo:rustc-link-arg-bin=cargo=/WX");
+ }
+}
diff --git a/src/tools/cargo/clippy.toml b/src/tools/cargo/clippy.toml
index f50e36588..dff120e95 100644
--- a/src/tools/cargo/clippy.toml
+++ b/src/tools/cargo/clippy.toml
@@ -6,3 +6,6 @@ disallowed-methods = [
{ path = "std::env::vars", reason = "not recommended to use in Cargo. See rust-lang/cargo#11588" },
{ path = "std::env::vars_os", reason = "not recommended to use in Cargo. See rust-lang/cargo#11588" },
]
+disallowed-types = [
+ { path = "std::sync::atomic::AtomicU64", reason = "not portable. See rust-lang/cargo#12988" },
+]
diff --git a/src/tools/cargo/crates/cargo-platform/Cargo.toml b/src/tools/cargo/crates/cargo-platform/Cargo.toml
index 786948ff3..baa179291 100644
--- a/src/tools/cargo/crates/cargo-platform/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-platform/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cargo-platform"
-version = "0.1.6"
+version = "0.1.7"
edition.workspace = true
license.workspace = true
rust-version = "1.70.0" # MSRV:3
@@ -11,3 +11,6 @@ description = "Cargo's representation of a target platform."
[dependencies]
serde.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/cargo-platform/examples/matches.rs b/src/tools/cargo/crates/cargo-platform/examples/matches.rs
index 1b438fb11..11318a7df 100644
--- a/src/tools/cargo/crates/cargo-platform/examples/matches.rs
+++ b/src/tools/cargo/crates/cargo-platform/examples/matches.rs
@@ -1,6 +1,8 @@
//! This example demonstrates how to filter a Platform based on the current
//! host target.
+#![allow(clippy::print_stdout)]
+
use cargo_platform::{Cfg, Platform};
use std::process::Command;
use std::str::FromStr;
diff --git a/src/tools/cargo/crates/cargo-test-macro/Cargo.toml b/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
index 1e81ab314..17ca326f6 100644
--- a/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
@@ -12,3 +12,6 @@ publish = false
[lib]
proc-macro = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/cargo-test-macro/src/lib.rs b/src/tools/cargo/crates/cargo-test-macro/src/lib.rs
index 937fbce6b..14672ab94 100644
--- a/src/tools/cargo/crates/cargo-test-macro/src/lib.rs
+++ b/src/tools/cargo/crates/cargo-test-macro/src/lib.rs
@@ -208,10 +208,11 @@ fn has_command(command: &str) -> bool {
let output = match Command::new(command).arg("--version").output() {
Ok(output) => output,
Err(e) => {
- // hg is not installed on GitHub macOS or certain constrained
- // environments like Docker. Consider installing it if Cargo gains
- // more hg support, but otherwise it isn't critical.
- if is_ci() && command != "hg" {
+ // * hg is not installed on GitHub macOS or certain constrained
+ // environments like Docker. Consider installing it if Cargo
+ // gains more hg support, but otherwise it isn't critical.
+ // * lldb is not pre-installed on Ubuntu and Windows, so skip.
+ if is_ci() && !["hg", "lldb"].contains(&command) {
panic!(
"expected command `{}` to be somewhere in PATH: {}",
command, e
diff --git a/src/tools/cargo/crates/cargo-test-support/Cargo.toml b/src/tools/cargo/crates/cargo-test-support/Cargo.toml
index fc32e1c9c..1098d598d 100644
--- a/src/tools/cargo/crates/cargo-test-support/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-test-support/Cargo.toml
@@ -29,6 +29,10 @@ tar.workspace = true
time.workspace = true
toml.workspace = true
url.workspace = true
+walkdir.workspace = true
[target.'cfg(windows)'.dependencies]
windows-sys = { workspace = true, features = ["Win32_Storage_FileSystem"] }
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/cargo-test-support/build.rs b/src/tools/cargo/crates/cargo-test-support/build.rs
index 478da7d99..8854f461a 100644
--- a/src/tools/cargo/crates/cargo-test-support/build.rs
+++ b/src/tools/cargo/crates/cargo-test-support/build.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::disallowed_methods)]
+
fn main() {
println!(
"cargo:rustc-env=NATIVE_ARCH={}",
diff --git a/src/tools/cargo/crates/cargo-test-support/src/compare.rs b/src/tools/cargo/crates/cargo-test-support/src/compare.rs
index d9e8d5454..fc1663d34 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/compare.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/compare.rs
@@ -591,15 +591,36 @@ fn find_json_mismatch_r<'a>(
.next()
}
(&Object(ref l), &Object(ref r)) => {
- let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
- if !same_keys {
- return Some((expected, actual));
+ let mut expected_entries = l.iter();
+ let mut actual_entries = r.iter();
+
+ // Compilers older than 1.76 do not produce $message_type.
+ // Treat it as optional for now.
+ let mut expected_entries_without_message_type;
+ let expected_entries: &mut dyn Iterator<Item = _> =
+ if l.contains_key("$message_type") && !r.contains_key("$message_type") {
+ expected_entries_without_message_type =
+ expected_entries.filter(|entry| entry.0 != "$message_type");
+ &mut expected_entries_without_message_type
+ } else {
+ &mut expected_entries
+ };
+
+ loop {
+ match (expected_entries.next(), actual_entries.next()) {
+ (None, None) => return None,
+ (Some((expected_key, expected_value)), Some((actual_key, actual_value)))
+ if expected_key == actual_key =>
+ {
+ if let mismatch @ Some(_) =
+ find_json_mismatch_r(expected_value, actual_value, cwd)
+ {
+ return mismatch;
+ }
+ }
+ _ => return Some((expected, actual)),
+ }
}
-
- l.values()
- .zip(r.values())
- .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
- .next()
}
(&Null, &Null) => None,
// Magic string literal `"{...}"` acts as wildcard for any sub-JSON.
diff --git a/src/tools/cargo/crates/cargo-test-support/src/lib.rs b/src/tools/cargo/crates/cargo-test-support/src/lib.rs
index ec74ce0b2..4e3ef6118 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/lib.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/lib.rs
@@ -2,7 +2,9 @@
//!
//! See <https://rust-lang.github.io/cargo/contrib/> for a guide on writing tests.
-#![allow(clippy::all)]
+#![allow(clippy::disallowed_methods)]
+#![allow(clippy::print_stderr)]
+#![allow(clippy::print_stdout)]
use std::env;
use std::ffi::OsStr;
@@ -521,29 +523,6 @@ pub fn cargo_exe() -> PathBuf {
snapbox::cmd::cargo_bin("cargo")
}
-/// A wrapper around `rustc` instead of calling `clippy`.
-pub fn wrapped_clippy_driver() -> PathBuf {
- let clippy_driver = project()
- .at(paths::global_root().join("clippy-driver"))
- .file("Cargo.toml", &basic_manifest("clippy-driver", "0.0.1"))
- .file(
- "src/main.rs",
- r#"
- fn main() {
- let mut args = std::env::args_os();
- let _me = args.next().unwrap();
- let rustc = args.next().unwrap();
- let status = std::process::Command::new(rustc).args(args).status().unwrap();
- std::process::exit(status.code().unwrap_or(1));
- }
- "#,
- )
- .build();
- clippy_driver.cargo("build").run();
-
- clippy_driver.bin("clippy-driver")
-}
-
/// This is the raw output from the process.
///
/// This is similar to `std::process::Output`, however the `status` is
diff --git a/src/tools/cargo/crates/cargo-test-support/src/paths.rs b/src/tools/cargo/crates/cargo-test-support/src/paths.rs
index 50040e1d4..a07491bcc 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/paths.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/paths.rs
@@ -114,6 +114,10 @@ pub trait CargoPathExt {
fn rm_rf(&self);
fn mkdir_p(&self);
+ /// Returns a list of all files and directories underneath the given
+ /// directory, recursively, including the starting path.
+ fn ls_r(&self) -> Vec<PathBuf>;
+
fn move_into_the_past(&self) {
self.move_in_time(|sec, nsec| (sec - 3600, nsec))
}
@@ -155,6 +159,14 @@ impl CargoPathExt for Path {
.unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
}
+ fn ls_r(&self) -> Vec<PathBuf> {
+ walkdir::WalkDir::new(self)
+ .sort_by_file_name()
+ .into_iter()
+ .filter_map(|e| e.map(|e| e.path().to_owned()).ok())
+ .collect()
+ }
+
fn move_in_time<F>(&self, travel_amount: F)
where
F: Fn(i64, u32) -> (i64, u32),
diff --git a/src/tools/cargo/crates/cargo-test-support/src/registry.rs b/src/tools/cargo/crates/cargo-test-support/src/registry.rs
index 853829c56..6f9d558a9 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/registry.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/registry.rs
@@ -557,6 +557,7 @@ pub struct Dependency {
registry: Option<String>,
package: Option<String>,
optional: bool,
+ default_features: bool,
}
/// Entry with data that corresponds to [`tar::EntryType`].
@@ -1161,12 +1162,15 @@ fn save_new_crate(
"name": name,
"req": dep.version_req,
"features": dep.features,
- "default_features": true,
+ "default_features": dep.default_features,
"target": dep.target,
"optional": dep.optional,
"kind": dep.kind,
"registry": dep.registry,
"package": package,
+ "artifact": dep.artifact,
+ "bindep_target": dep.bindep_target,
+ "lib": dep.lib,
})
})
.collect::<Vec<_>>();
@@ -1179,7 +1183,7 @@ fn save_new_crate(
new_crate.features,
false,
new_crate.links,
- None,
+ new_crate.rust_version.as_deref(),
None,
);
@@ -1415,7 +1419,7 @@ impl Package {
"name": dep.name,
"req": dep.vers,
"features": dep.features,
- "default_features": true,
+ "default_features": dep.default_features,
"target": dep.target,
"artifact": artifact,
"bindep_target": dep.bindep_target,
@@ -1523,6 +1527,30 @@ impl Package {
manifest.push_str(&format!("rust-version = \"{}\"", version));
}
+ if !self.features.is_empty() {
+ let features: Vec<String> = self
+ .features
+ .iter()
+ .map(|(feature, features)| {
+ if features.is_empty() {
+ format!("{} = []", feature)
+ } else {
+ format!(
+ "{} = [{}]",
+ feature,
+ features
+ .iter()
+ .map(|s| format!("\"{}\"", s))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ }
+ })
+ .collect();
+
+ manifest.push_str(&format!("\n[features]\n{}", features.join("\n")));
+ }
+
for dep in self.deps.iter() {
let target = match dep.target {
None => String::new(),
@@ -1540,6 +1568,9 @@ impl Package {
"#,
target, kind, dep.name, dep.vers
));
+ if dep.optional {
+ manifest.push_str("optional = true\n");
+ }
if let Some(artifact) = &dep.artifact {
manifest.push_str(&format!("artifact = \"{}\"\n", artifact));
}
@@ -1553,6 +1584,21 @@ impl Package {
assert_eq!(registry, "alternative");
manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url()));
}
+ if !dep.default_features {
+ manifest.push_str("default-features = false\n");
+ }
+ if !dep.features.is_empty() {
+ let mut features = String::new();
+ serde::Serialize::serialize(
+ &dep.features,
+ toml::ser::ValueSerializer::new(&mut features),
+ )
+ .unwrap();
+ manifest.push_str(&format!("features = {}\n", features));
+ }
+ if let Some(package) = &dep.package {
+ manifest.push_str(&format!("package = \"{}\"\n", package));
+ }
}
if self.proc_macro {
manifest.push_str("[lib]\nproc-macro = true\n");
@@ -1631,6 +1677,7 @@ impl Dependency {
package: None,
optional: false,
registry: None,
+ default_features: true,
}
}
@@ -1683,4 +1730,10 @@ impl Dependency {
self.optional = optional;
self
}
+
+ /// Adds `default-features = false` if the argument is `false`.
+ pub fn default_features(&mut self, default_features: bool) -> &mut Self {
+ self.default_features = default_features;
+ self
+ }
}
diff --git a/src/tools/cargo/crates/cargo-test-support/src/tools.rs b/src/tools/cargo/crates/cargo-test-support/src/tools.rs
index 2ce2849ae..b6fa4092f 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/tools.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/tools.rs
@@ -7,6 +7,7 @@ use std::sync::OnceLock;
static ECHO_WRAPPER: OnceLock<Mutex<Option<PathBuf>>> = OnceLock::new();
static ECHO: OnceLock<Mutex<Option<PathBuf>>> = OnceLock::new();
+static CLIPPY_DRIVER: OnceLock<Mutex<Option<PathBuf>>> = OnceLock::new();
/// Returns the path to an executable that works as a wrapper around rustc.
///
@@ -107,3 +108,34 @@ pub fn echo_subcommand() -> Project {
p.cargo("build").run();
p
}
+
+/// Returns the path to a `clippy-driver` wrapper that simply forwards its arguments to `rustc`.
+pub fn wrapped_clippy_driver() -> PathBuf {
+ let mut lock = CLIPPY_DRIVER
+ .get_or_init(|| Default::default())
+ .lock()
+ .unwrap();
+ if let Some(path) = &*lock {
+ return path.clone();
+ }
+ let clippy_driver = project()
+ .at(paths::global_root().join("clippy-driver"))
+ .file("Cargo.toml", &basic_manifest("clippy-driver", "0.0.1"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let mut args = std::env::args_os();
+ let _me = args.next().unwrap();
+ let rustc = args.next().unwrap();
+ let status = std::process::Command::new(rustc).args(args).status().unwrap();
+ std::process::exit(status.code().unwrap_or(1));
+ }
+ "#,
+ )
+ .build();
+ clippy_driver.cargo("build").run();
+ let path = clippy_driver.bin("clippy-driver");
+ *lock = Some(path.clone());
+ path
+}
diff --git a/src/tools/cargo/crates/cargo-util/Cargo.toml b/src/tools/cargo/crates/cargo-util/Cargo.toml
index 616a79c5e..3fd6bdeca 100644
--- a/src/tools/cargo/crates/cargo-util/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-util/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cargo-util"
-version = "0.2.8"
+version = "0.2.9"
rust-version.workspace = true
edition.workspace = true
license.workspace = true
@@ -12,6 +12,7 @@ description = "Miscellaneous support code used by Cargo."
anyhow.workspace = true
filetime.workspace = true
hex.workspace = true
+ignore.workspace = true
jobserver.workspace = true
libc.workspace = true
same-file.workspace = true
@@ -27,3 +28,6 @@ core-foundation.workspace = true
[target.'cfg(windows)'.dependencies]
miow.workspace = true
windows-sys = { workspace = true, features = ["Win32_Storage_FileSystem", "Win32_Foundation", "Win32_System_Console"] }
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/cargo-util/src/du.rs b/src/tools/cargo/crates/cargo-util/src/du.rs
new file mode 100644
index 000000000..14634c47b
--- /dev/null
+++ b/src/tools/cargo/crates/cargo-util/src/du.rs
@@ -0,0 +1,78 @@
+//! A simple disk usage estimator.
+
+use anyhow::{Context, Result};
+use ignore::overrides::OverrideBuilder;
+use ignore::{WalkBuilder, WalkState};
+use std::path::Path;
+use std::sync::{Arc, Mutex};
+
+/// Determines the disk usage of all files in the given directory.
+///
+/// The given patterns are gitignore style patterns relative to the given
+/// path. If there are patterns, it will only count things matching that
+/// pattern. `!` can be used to exclude things. See [`OverrideBuilder::add`]
+/// for more info.
+///
+/// This is a primitive implementation that doesn't handle hard links, and
+/// isn't particularly fast (for example, not using `getattrlistbulk` on
+/// macOS). It also only uses actual byte sizes instead of block counts (and
+/// thus vastly undercounts directories with lots of small files). It would be
+/// nice to improve this or replace it with something better.
+pub fn du(path: &Path, patterns: &[&str]) -> Result<u64> {
+ du_inner(path, patterns).with_context(|| format!("failed to walk `{}`", path.display()))
+}
+
+fn du_inner(path: &Path, patterns: &[&str]) -> Result<u64> {
+ let mut builder = OverrideBuilder::new(path);
+ for pattern in patterns {
+ builder.add(pattern)?;
+ }
+ let overrides = builder.build()?;
+
+ let mut builder = WalkBuilder::new(path);
+ builder
+ .overrides(overrides)
+ .hidden(false)
+ .parents(false)
+ .ignore(false)
+ .git_global(false)
+ .git_ignore(false)
+ .git_exclude(false);
+ let walker = builder.build_parallel();
+ let total = Arc::new(Mutex::new(0u64));
+ // A slot used to indicate there was an error while walking.
+ //
+ // It is possible that more than one error happens (such as in different
+ // threads). The error returned is arbitrary in that case.
+ let err = Arc::new(Mutex::new(None));
+ walker.run(|| {
+ Box::new(|entry| {
+ match entry {
+ Ok(entry) => match entry.metadata() {
+ Ok(meta) => {
+ if meta.is_file() {
+ let mut lock = total.lock().unwrap();
+ *lock += meta.len();
+ }
+ }
+ Err(e) => {
+ *err.lock().unwrap() = Some(e.into());
+ return WalkState::Quit;
+ }
+ },
+ Err(e) => {
+ *err.lock().unwrap() = Some(e.into());
+ return WalkState::Quit;
+ }
+ }
+ WalkState::Continue
+ })
+ });
+
+ if let Some(e) = err.lock().unwrap().take() {
+ return Err(e);
+ }
+
+ let total = *total.lock().unwrap();
+ Ok(total)
+}
diff --git a/src/tools/cargo/crates/cargo-util/src/lib.rs b/src/tools/cargo/crates/cargo-util/src/lib.rs
index 0cbc920ec..717e89ba4 100644
--- a/src/tools/cargo/crates/cargo-util/src/lib.rs
+++ b/src/tools/cargo/crates/cargo-util/src/lib.rs
@@ -1,10 +1,14 @@
//! Miscellaneous support code used by Cargo.
+#![allow(clippy::disallowed_methods)]
+
pub use self::read2::read2;
+pub use du::du;
pub use process_builder::ProcessBuilder;
pub use process_error::{exit_status_to_string, is_simple_exit_code, ProcessError};
pub use sha256::Sha256;
+mod du;
pub mod paths;
mod process_builder;
mod process_error;
diff --git a/src/tools/cargo/crates/cargo-util/src/paths.rs b/src/tools/cargo/crates/cargo-util/src/paths.rs
index f405c8f97..743e3f3a8 100644
--- a/src/tools/cargo/crates/cargo-util/src/paths.rs
+++ b/src/tools/cargo/crates/cargo-util/src/paths.rs
@@ -719,14 +719,17 @@ pub fn exclude_from_backups_and_indexing(p: impl AsRef<Path>) {
/// * CACHEDIR.TAG files supported by various tools in a platform-independent way
fn exclude_from_backups(path: &Path) {
exclude_from_time_machine(path);
- let _ = std::fs::write(
- path.join("CACHEDIR.TAG"),
- "Signature: 8a477f597d28d172789f06886806bc55
+ let file = path.join("CACHEDIR.TAG");
+ if !file.exists() {
+ let _ = std::fs::write(
+ file,
+ "Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/
",
- );
- // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature.
+ );
+ // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature.
+ }
}
/// Marks the directory as excluded from content indexing.
diff --git a/src/tools/cargo/crates/crates-io/Cargo.toml b/src/tools/cargo/crates/crates-io/Cargo.toml
index f1b92602e..bf2b20cf7 100644
--- a/src/tools/cargo/crates/crates-io/Cargo.toml
+++ b/src/tools/cargo/crates/crates-io/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "crates-io"
-version = "0.39.1"
+version = "0.39.2"
rust-version.workspace = true
edition.workspace = true
license.workspace = true
@@ -20,3 +20,6 @@ serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
thiserror.workspace = true
url.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/crates-io/lib.rs b/src/tools/cargo/crates/crates-io/lib.rs
index 1764ce527..468900e55 100644
--- a/src/tools/cargo/crates/crates-io/lib.rs
+++ b/src/tools/cargo/crates/crates-io/lib.rs
@@ -1,5 +1,3 @@
-#![allow(clippy::all)]
-
use std::collections::BTreeMap;
use std::fs::File;
use std::io::prelude::*;
@@ -438,7 +436,8 @@ impl Registry {
.map(|s| s.errors.into_iter().map(|s| s.detail).collect::<Vec<_>>());
match (self.handle.response_code()?, errors) {
- (0, None) | (200, None) => Ok(body),
+ (0, None) => Ok(body),
+ (code, None) if is_success(code) => Ok(body),
(code, Some(errors)) => Err(Error::Api {
code,
headers,
@@ -453,8 +452,12 @@ impl Registry {
}
}
+fn is_success(code: u32) -> bool {
+ code >= 200 && code < 300
+}
+
fn status(code: u32) -> String {
- if code == 200 {
+ if is_success(code) {
String::new()
} else {
let reason = reason(code);
diff --git a/src/tools/cargo/crates/home/CHANGELOG.md b/src/tools/cargo/crates/home/CHANGELOG.md
index 58f960cc3..5b1a2f8ea 100644
--- a/src/tools/cargo/crates/home/CHANGELOG.md
+++ b/src/tools/cargo/crates/home/CHANGELOG.md
@@ -4,7 +4,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-## 0.5.6
+## 0.5.9 - 2023-12-15
+
+- Replace SHGetFolderPathW with SHGetKnownFolderPath
+ [#13173](https://github.com/rust-lang/cargo/pull/13173)
+- Update windows-sys to 0.52
+ [#13089](https://github.com/rust-lang/cargo/pull/13089)
+- Set MSRV to 1.70.0
+ [#12654](https://github.com/rust-lang/cargo/pull/12654)
- Fixed & enhanced documentation.
[#12047](https://github.com/rust-lang/cargo/pull/12047)
diff --git a/src/tools/cargo/crates/home/Cargo.toml b/src/tools/cargo/crates/home/Cargo.toml
index 702a14e55..33cd6ba5a 100644
--- a/src/tools/cargo/crates/home/Cargo.toml
+++ b/src/tools/cargo/crates/home/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "home"
-version = "0.5.8"
+version = "0.5.9"
authors = ["Brian Anderson <andersrb@gmail.com>"]
rust-version = "1.70.0" # MSRV:3
documentation = "https://docs.rs/home"
@@ -17,4 +17,7 @@ repository = "https://github.com/rust-lang/cargo"
description = "Shared definitions of home directories."
[target.'cfg(windows)'.dependencies]
-windows-sys = { workspace = true, features = ["Win32_Foundation", "Win32_UI_Shell"] }
+windows-sys = { workspace = true, features = ["Win32_Foundation", "Win32_UI_Shell", "Win32_System_Com"] }
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/home/src/lib.rs b/src/tools/cargo/crates/home/src/lib.rs
index 4aee7383b..bbe7c32ca 100644
--- a/src/tools/cargo/crates/home/src/lib.rs
+++ b/src/tools/cargo/crates/home/src/lib.rs
@@ -18,7 +18,7 @@
//!
//! [discussion]: https://github.com/rust-lang/rust/pull/46799#issuecomment-361156935
-#![deny(rust_2018_idioms)]
+#![allow(clippy::disallowed_methods)]
pub mod env;
@@ -44,11 +44,11 @@ use std::path::{Path, PathBuf};
///
/// Returns the value of the `USERPROFILE` environment variable if it is set
/// **and** it is not an empty string. Otherwise, it tries to determine the
-/// home directory by invoking the [`SHGetFolderPathW`][shgfp] function with
-/// [`CSIDL_PROFILE`][csidl].
+/// home directory by invoking the [`SHGetKnownFolderPath`][shgkfp] function with
+/// [`FOLDERID_Profile`][knownfolderid].
///
-/// [shgfp]: https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetfolderpathw
-/// [csidl]: https://learn.microsoft.com/en-us/windows/win32/shell/csidl
+/// [shgkfp]: https://learn.microsoft.com/en-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetknownfolderpath
+/// [knownfolderid]: https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
///
/// # Examples
///
diff --git a/src/tools/cargo/crates/home/src/windows.rs b/src/tools/cargo/crates/home/src/windows.rs
index a35dc9c57..c9a63d97b 100644
--- a/src/tools/cargo/crates/home/src/windows.rs
+++ b/src/tools/cargo/crates/home/src/windows.rs
@@ -2,9 +2,12 @@ use std::env;
use std::ffi::OsString;
use std::os::windows::ffi::OsStringExt;
use std::path::PathBuf;
+use std::ptr;
+use std::slice;
-use windows_sys::Win32::Foundation::{MAX_PATH, S_OK};
-use windows_sys::Win32::UI::Shell::{SHGetFolderPathW, CSIDL_PROFILE};
+use windows_sys::Win32::Foundation::S_OK;
+use windows_sys::Win32::System::Com::CoTaskMemFree;
+use windows_sys::Win32::UI::Shell::{FOLDERID_Profile, SHGetKnownFolderPath, KF_FLAG_DONT_VERIFY};
pub fn home_dir_inner() -> Option<PathBuf> {
env::var_os("USERPROFILE")
@@ -16,15 +19,19 @@ pub fn home_dir_inner() -> Option<PathBuf> {
#[cfg(not(target_vendor = "uwp"))]
fn home_dir_crt() -> Option<PathBuf> {
unsafe {
- let mut path: Vec<u16> = Vec::with_capacity(MAX_PATH as usize);
- match SHGetFolderPathW(0, CSIDL_PROFILE as i32, 0, 0, path.as_mut_ptr()) {
+ let mut path = ptr::null_mut();
+ match SHGetKnownFolderPath(&FOLDERID_Profile, KF_FLAG_DONT_VERIFY as u32, 0, &mut path) {
S_OK => {
- let len = wcslen(path.as_ptr());
- path.set_len(len);
- let s = OsString::from_wide(&path);
+ let path_slice = slice::from_raw_parts(path, wcslen(path));
+ let s = OsString::from_wide(&path_slice);
+ CoTaskMemFree(path.cast());
Some(PathBuf::from(s))
}
- _ => None,
+ _ => {
+ // Free any allocated memory even on failure. A null ptr is a no-op for `CoTaskMemFree`.
+ CoTaskMemFree(path.cast());
+ None
+ }
}
}
}
diff --git a/src/tools/cargo/crates/mdman/Cargo.toml b/src/tools/cargo/crates/mdman/Cargo.toml
index fd33da3c2..4e86b8e1a 100644
--- a/src/tools/cargo/crates/mdman/Cargo.toml
+++ b/src/tools/cargo/crates/mdman/Cargo.toml
@@ -16,4 +16,7 @@ serde_json.workspace = true
url.workspace = true
[dev-dependencies]
-pretty_assertions.workspace = true
+snapbox.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/mdman/src/main.rs b/src/tools/cargo/crates/mdman/src/main.rs
index facaa5120..3c09bc4dd 100644
--- a/src/tools/cargo/crates/mdman/src/main.rs
+++ b/src/tools/cargo/crates/mdman/src/main.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::print_stderr)]
+
use anyhow::{bail, format_err, Context, Error};
use mdman::{Format, ManMap};
use std::collections::HashMap;
diff --git a/src/tools/cargo/crates/mdman/tests/compare.rs b/src/tools/cargo/crates/mdman/tests/compare.rs
index 3e679d127..fde2c235d 100644
--- a/src/tools/cargo/crates/mdman/tests/compare.rs
+++ b/src/tools/cargo/crates/mdman/tests/compare.rs
@@ -1,11 +1,8 @@
//! Compares input to expected output.
-//!
-//! Use the MDMAN_BLESS environment variable to automatically update the
-//! expected output.
-use mdman::{Format, ManMap};
-use pretty_assertions::assert_eq;
use std::path::PathBuf;
+
+use mdman::{Format, ManMap};
use url::Url;
fn run(name: &str) {
@@ -25,14 +22,7 @@ fn run(name: &str) {
name,
format.extension(section)
);
- if std::env::var("MDMAN_BLESS").is_ok() {
- std::fs::write(&expected_path, result).unwrap();
- } else {
- let expected = std::fs::read_to_string(&expected_path).unwrap();
- // Fix if Windows checked out with autocrlf.
- let expected = expected.replace("\r\n", "\n");
- assert_eq!(expected, result);
- }
+ snapbox::assert_eq_path(expected_path, result);
}
}
diff --git a/src/tools/cargo/crates/mdman/tests/invalid.rs b/src/tools/cargo/crates/mdman/tests/invalid.rs
index cc81d06c4..b8be1ed24 100644
--- a/src/tools/cargo/crates/mdman/tests/invalid.rs
+++ b/src/tools/cargo/crates/mdman/tests/invalid.rs
@@ -1,9 +1,9 @@
//! Tests for errors and invalid input.
-use mdman::{Format, ManMap};
-use pretty_assertions::assert_eq;
use std::path::PathBuf;
+use mdman::{Format, ManMap};
+
fn run(name: &str, expected_error: &str) {
let input = PathBuf::from(format!("tests/invalid/{}", name));
match mdman::convert(&input, Format::Man, None, ManMap::new()) {
@@ -11,7 +11,7 @@ fn run(name: &str, expected_error: &str) {
panic!("expected {} to fail", name);
}
Err(e) => {
- assert_eq!(expected_error, e.to_string());
+ snapbox::assert_eq(expected_error, e.to_string());
}
}
}
diff --git a/src/tools/cargo/crates/resolver-tests/Cargo.toml b/src/tools/cargo/crates/resolver-tests/Cargo.toml
index 8750a3d97..947c44569 100644
--- a/src/tools/cargo/crates/resolver-tests/Cargo.toml
+++ b/src/tools/cargo/crates/resolver-tests/Cargo.toml
@@ -10,3 +10,6 @@ cargo.workspace = true
cargo-util.workspace = true
proptest.workspace = true
varisat.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/resolver-tests/src/lib.rs b/src/tools/cargo/crates/resolver-tests/src/lib.rs
index e2cbcee62..2df7a36bb 100644
--- a/src/tools/cargo/crates/resolver-tests/src/lib.rs
+++ b/src/tools/cargo/crates/resolver-tests/src/lib.rs
@@ -1,9 +1,9 @@
-#![allow(clippy::all)]
+#![allow(clippy::print_stderr)]
use std::cell::RefCell;
use std::cmp::PartialEq;
use std::cmp::{max, min};
-use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt;
use std::fmt::Write;
use std::rc::Rc;
@@ -17,7 +17,9 @@ use cargo::core::Resolve;
use cargo::core::{Dependency, PackageId, Registry, Summary};
use cargo::core::{GitReference, SourceId};
use cargo::sources::source::QueryKind;
-use cargo::util::{CargoResult, Config, Graph, IntoUrl, RustVersion};
+use cargo::sources::IndexSummary;
+use cargo::util::{CargoResult, Config, IntoUrl};
+use cargo::util_schemas::manifest::RustVersion;
use proptest::collection::{btree_map, vec};
use proptest::prelude::*;
@@ -69,33 +71,6 @@ pub fn resolve_and_validated(
let out = resolve.sort();
assert_eq!(out.len(), used.len());
- let mut pub_deps: HashMap<PackageId, HashSet<_>> = HashMap::new();
- for &p in out.iter() {
- // make the list of `p` public dependencies
- let mut self_pub_dep = HashSet::new();
- self_pub_dep.insert(p);
- for (dp, deps) in resolve.deps(p) {
- if deps.iter().any(|d| d.is_public()) {
- self_pub_dep.extend(pub_deps[&dp].iter().cloned())
- }
- }
- pub_deps.insert(p, self_pub_dep);
-
- // check if `p` has a public dependencies conflicts
- let seen_dep: BTreeSet<_> = resolve
- .deps(p)
- .flat_map(|(dp, _)| pub_deps[&dp].iter().cloned())
- .collect();
- let seen_dep: Vec<_> = seen_dep.iter().collect();
- for a in seen_dep.windows(2) {
- if a[0].name() == a[1].name() {
- panic!(
- "the package {:?} can publicly see {:?} and {:?}",
- p, a[0], a[1]
- )
- }
- }
- }
let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
if !sat_resolve.sat_is_valid_solution(&out) {
panic!(
@@ -131,7 +106,7 @@ pub fn resolve_with_config_raw(
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
for summary in self.list.iter() {
let matched = match kind {
@@ -140,7 +115,7 @@ pub fn resolve_with_config_raw(
};
if matched {
self.used.insert(summary.package_id());
- f(summary.clone());
+ f(IndexSummary::Candidate(summary.clone()));
}
}
Poll::Ready(Ok(()))
@@ -200,7 +175,6 @@ pub fn resolve_with_config_raw(
&mut registry,
&version_prefs,
Some(config),
- true,
);
// The largest test in our suite takes less then 30 sec.
@@ -294,7 +268,7 @@ impl SatResolve {
);
// no two semver compatible versions of the same package
- let by_activations_keys = sat_at_most_one_by_key(
+ sat_at_most_one_by_key(
&mut cnf,
var_for_is_packages_used
.iter()
@@ -312,119 +286,22 @@ impl SatResolve {
let empty_vec = vec![];
- let mut graph: Graph<PackageId, ()> = Graph::new();
-
- let mut version_selected_for: HashMap<
- PackageId,
- HashMap<Dependency, HashMap<_, varisat::Var>>,
- > = HashMap::new();
// active packages need each of there `deps` to be satisfied
for p in registry.iter() {
- graph.add(p.package_id());
for dep in p.dependencies() {
- // This can more easily be written as:
- // !is_active(p) or one of the things that match dep is_active
- // All the complexity, from here to the end, is to support public and private dependencies!
- let mut by_key: HashMap<_, Vec<varisat::Lit>> = HashMap::new();
- for &m in by_name
+ let mut matches: Vec<varisat::Lit> = by_name
.get(dep.package_name().as_str())
.unwrap_or(&empty_vec)
.iter()
.filter(|&p| dep.matches_id(*p))
- {
- graph.link(p.package_id(), m);
- by_key
- .entry(m.as_activations_key())
- .or_default()
- .push(var_for_is_packages_used[&m].positive());
- }
- let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect();
-
- // if `p` is active then we need to select one of the keys
- let matches: Vec<_> = keys
- .values()
- .map(|v| v.positive())
- .chain(Some(var_for_is_packages_used[&p.package_id()].negative()))
+ .map(|p| var_for_is_packages_used[&p].positive())
.collect();
+ // ^ the `dep` is satisfied or `p` is not active
+ matches.push(var_for_is_packages_used[&p.package_id()].negative());
cnf.add_clause(&matches);
-
- // if a key is active then we need to select one of the versions
- for (key, vars) in by_key.iter() {
- let mut matches = vars.clone();
- matches.push(keys[key].negative());
- cnf.add_clause(&matches);
- }
-
- version_selected_for
- .entry(p.package_id())
- .or_default()
- .insert(dep.clone(), keys);
}
}
- let topological_order = graph.sort();
-
- // we already ensure there is only one version for each `activations_key` so we can think of
- // `publicly_exports` as being in terms of a set of `activations_key`s
- let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
-
- for &key in by_activations_keys.keys() {
- // everything publicly depends on itself
- let var = publicly_exports
- .entry(key)
- .or_default()
- .entry(key)
- .or_insert_with(|| cnf.new_var());
- cnf.add_clause(&[var.positive()]);
- }
-
- // if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
- for &p in topological_order.iter() {
- if let Some(deps) = version_selected_for.get(&p) {
- let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
- for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
- for (ver, sel) in versions {
- for (&export_pid, &export_var) in publicly_exports[ver].iter() {
- let our_var =
- p_exports.entry(export_pid).or_insert_with(|| cnf.new_var());
- cnf.add_clause(&[
- sel.negative(),
- export_var.negative(),
- our_var.positive(),
- ]);
- }
- }
- }
- publicly_exports.insert(p.as_activations_key(), p_exports);
- }
- }
-
- // we already ensure there is only one version for each `activations_key` so we can think of
- // `can_see` as being in terms of a set of `activations_key`s
- // and if `p` `publicly_exports` `export` then it `can_see` `export`
- let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
-
- // if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports`
- for (&p, deps) in version_selected_for.iter() {
- let p_can_see = can_see.entry(p).or_default();
- for (_, versions) in deps.iter() {
- for (&ver, sel) in versions {
- for (&export_pid, &export_var) in publicly_exports[&ver].iter() {
- let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var());
- cnf.add_clause(&[
- sel.negative(),
- export_var.negative(),
- our_var.positive(),
- ]);
- }
- }
- }
- }
-
- // a package `can_see` only one version by each name
- for (_, see) in can_see.iter() {
- sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v)));
- }
let mut solver = varisat::Solver::new();
solver.add_formula(&cnf);
@@ -543,14 +420,14 @@ impl ToPkgId for PackageId {
impl<'a> ToPkgId for &'a str {
fn to_pkgid(&self) -> PackageId {
- PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
+ PackageId::try_new(*self, "1.0.0", registry_loc()).unwrap()
}
}
impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
fn to_pkgid(&self) -> PackageId {
let (name, vers) = self;
- PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
+ PackageId::try_new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
}
}
@@ -596,7 +473,7 @@ pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
}
pub fn pkg_id(name: &str) -> PackageId {
- PackageId::new(name, "1.0.0", registry_loc()).unwrap()
+ PackageId::try_new(name, "1.0.0", registry_loc()).unwrap()
}
fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
@@ -604,7 +481,7 @@ fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
- PackageId::new(name, "1.0.0", source_id).unwrap()
+ PackageId::try_new(name, "1.0.0", source_id).unwrap()
}
pub fn pkg_loc(name: &str, loc: &str) -> Summary {
@@ -643,10 +520,9 @@ pub fn dep(name: &str) -> Dependency {
pub fn dep_req(name: &str, req: &str) -> Dependency {
Dependency::parse(name, Some(req), registry_loc()).unwrap()
}
-pub fn dep_req_kind(name: &str, req: &str, kind: DepKind, public: bool) -> Dependency {
+pub fn dep_req_kind(name: &str, req: &str, kind: DepKind) -> Dependency {
let mut dep = dep_req(name, req);
dep.set_kind(kind);
- dep.set_public(public);
dep
}
@@ -739,8 +615,8 @@ fn meta_test_deep_pretty_print_registry() {
pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
pkg!(("baz", "1.0.1")),
- pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build, false)]),
- pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development, false)]),
+ pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build)]),
+ pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development)]),
pkg!(("dep_req", "1.0.0")),
pkg!(("dep_req", "2.0.0")),
])
@@ -803,14 +679,7 @@ pub fn registry_strategy(
let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
let raw_version_range = (any::<Index>(), any::<Index>());
- let raw_dependency = (
- any::<Index>(),
- any::<Index>(),
- raw_version_range,
- 0..=1,
- Just(false),
- // TODO: ^ this needs to be set back to `any::<bool>()` and work before public & private dependencies can stabilize
- );
+ let raw_dependency = (any::<Index>(), any::<Index>(), raw_version_range, 0..=1);
fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
let (a, b) = (a.index(size), b.index(size));
@@ -837,7 +706,7 @@ pub fn registry_strategy(
.collect();
let len_all_pkgid = list_of_pkgid.len();
let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid];
- for (a, b, (c, d), k, p) in raw_dependencies {
+ for (a, b, (c, d), k) in raw_dependencies {
let (a, b) = order_index(a, b, len_all_pkgid);
let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
let ((dep_name, _), _) = list_of_pkgid[a];
@@ -867,7 +736,6 @@ pub fn registry_strategy(
// => DepKind::Development, // Development has no impact so don't gen
_ => panic!("bad index for DepKind"),
},
- p && k == 0,
))
}
diff --git a/src/tools/cargo/crates/resolver-tests/tests/resolve.rs b/src/tools/cargo/crates/resolver-tests/tests/resolve.rs
index dd21502d8..662bad90f 100644
--- a/src/tools/cargo/crates/resolver-tests/tests/resolve.rs
+++ b/src/tools/cargo/crates/resolver-tests/tests/resolve.rs
@@ -6,8 +6,8 @@ use cargo::util::Config;
use cargo_util::is_ci;
use resolver_tests::{
- assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names,
- pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
+ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, loc_names, names, pkg, pkg_id,
+ pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId,
};
@@ -288,192 +288,6 @@ proptest! {
}
#[test]
-#[should_panic(expected = "pub dep")] // The error handling is not yet implemented.
-fn pub_fail() {
- let input = vec![
- pkg!(("a", "0.0.4")),
- pkg!(("a", "0.0.5")),
- pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", DepKind::Normal, true),]),
- pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]),
- ];
- let reg = registry(input);
- assert!(resolve_and_validated(vec![dep("kB")], &reg, None).is_err());
-}
-
-#[test]
-fn basic_public_dependency() {
- let reg = registry(vec![
- pkg!(("A", "0.1.0")),
- pkg!(("A", "0.2.0")),
- pkg!("B" => [dep_req_kind("A", "0.1", DepKind::Normal, true)]),
- pkg!("C" => [dep("A"), dep("B")]),
- ]);
-
- let res = resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
- assert_same(
- &res,
- &names(&[
- ("root", "1.0.0"),
- ("C", "1.0.0"),
- ("B", "1.0.0"),
- ("A", "0.1.0"),
- ]),
- );
-}
-
-#[test]
-fn public_dependency_filling_in() {
- // The resolver has an optimization where if a candidate to resolve a dependency
- // has already bean activated then we skip looking at the candidates dependencies.
- // However, we have to be careful as the new path may make pub dependencies invalid.
-
- // Triggering this case requires dependencies to be resolved in a specific order.
- // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
- // 1. `d`'s dep on `c` is resolved
- // 2. `d`'s dep on `a` is resolved with `0.1.1`
- // 3. `c`'s dep on `b` is resolved with `0.0.2`
- // 4. `b`'s dep on `a` is resolved with `0.0.6` no pub dev conflict as `b` is private to `c`
- // 5. `d`'s dep on `b` is resolved with `0.0.2` triggering the optimization.
- // Do we notice that `d` has a pub dep conflict on `a`? Lets try it and see.
- let reg = registry(vec![
- pkg!(("a", "0.0.6")),
- pkg!(("a", "0.1.1")),
- pkg!(("b", "0.0.0") => [dep("bad")]),
- pkg!(("b", "0.0.1") => [dep("bad")]),
- pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", DepKind::Normal, true)]),
- pkg!("c" => [dep_req("b", ">=0.0.1")]),
- pkg!("d" => [dep("c"), dep("a"), dep("b")]),
- ]);
-
- let res = resolve_and_validated(vec![dep("d")], &reg, None).unwrap();
- assert_same(
- &res,
- &names(&[
- ("root", "1.0.0"),
- ("d", "1.0.0"),
- ("c", "1.0.0"),
- ("b", "0.0.2"),
- ("a", "0.0.6"),
- ]),
- );
-}
-
-#[test]
-fn public_dependency_filling_in_and_update() {
- // The resolver has an optimization where if a candidate to resolve a dependency
- // has already bean activated then we skip looking at the candidates dependencies.
- // However, we have to be careful as the new path may make pub dependencies invalid.
-
- // Triggering this case requires dependencies to be resolved in a specific order.
- // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
- // 1. `D`'s dep on `B` is resolved
- // 2. `D`'s dep on `C` is resolved
- // 3. `B`'s dep on `A` is resolved with `0.0.0`
- // 4. `C`'s dep on `B` triggering the optimization.
- // So did we add `A 0.0.0` to the deps `C` can see?
- // Or are we going to resolve `C`'s dep on `A` with `0.0.2`?
- // Lets try it and see.
- let reg = registry(vec![
- pkg!(("A", "0.0.0")),
- pkg!(("A", "0.0.2")),
- pkg!("B" => [dep_req_kind("A", "=0.0.0", DepKind::Normal, true),]),
- pkg!("C" => [dep("A"),dep("B")]),
- pkg!("D" => [dep("B"),dep("C")]),
- ]);
- let res = resolve_and_validated(vec![dep("D")], &reg, None).unwrap();
- assert_same(
- &res,
- &names(&[
- ("root", "1.0.0"),
- ("D", "1.0.0"),
- ("C", "1.0.0"),
- ("B", "1.0.0"),
- ("A", "0.0.0"),
- ]),
- );
-}
-
-#[test]
-fn public_dependency_skipping() {
- // When backtracking due to a failed dependency, if Cargo is
- // trying to be clever and skip irrelevant dependencies, care must
- // the effects of pub dep must be accounted for.
- let input = vec![
- pkg!(("a", "0.2.0")),
- pkg!(("a", "2.0.0")),
- pkg!(("b", "0.0.0") => [dep("bad")]),
- pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", DepKind::Normal, true)]),
- pkg!("c" => [dep("a"),dep("b")]),
- ];
- let reg = registry(input);
-
- resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
-}
-
-#[test]
-fn public_dependency_skipping_in_backtracking() {
- // When backtracking due to a failed dependency, if Cargo is
- // trying to be clever and skip irrelevant dependencies, care must
- // the effects of pub dep must be accounted for.
- let input = vec![
- pkg!(("A", "0.0.0") => [dep("bad")]),
- pkg!(("A", "0.0.1") => [dep("bad")]),
- pkg!(("A", "0.0.2") => [dep("bad")]),
- pkg!(("A", "0.0.3") => [dep("bad")]),
- pkg!(("A", "0.0.4")),
- pkg!(("A", "0.0.5")),
- pkg!("B" => [dep_req_kind("A", ">= 0.0.3", DepKind::Normal, true)]),
- pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]),
- ];
- let reg = registry(input);
-
- resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
-}
-
-#[test]
-fn public_sat_topological_order() {
- let input = vec![
- pkg!(("a", "0.0.1")),
- pkg!(("a", "0.0.0")),
- pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
- pkg!(("b", "0.0.0") => [dep("bad"),]),
- pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", DepKind::Normal, true)]),
- ];
-
- let reg = registry(input);
- assert!(resolve_and_validated(vec![dep("A")], &reg, None).is_err());
-}
-
-#[test]
-fn public_sat_unused_makes_things_pub() {
- let input = vec![
- pkg!(("a", "0.0.1")),
- pkg!(("a", "0.0.0")),
- pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
- pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]),
- pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]),
- ];
- let reg = registry(input);
-
- resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
-}
-
-#[test]
-fn public_sat_unused_makes_things_pub_2() {
- let input = vec![
- pkg!(("c", "0.0.2")),
- pkg!(("c", "0.0.1")),
- pkg!(("a-sys", "0.0.2")),
- pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", DepKind::Normal, true),]),
- pkg!("P" => [dep_req_kind("a-sys", "*", DepKind::Normal, true),dep_req("c", "= 0.0.1"),]),
- pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]),
- ];
- let reg = registry(input);
-
- resolve_and_validated(vec![dep("A")], &reg, None).unwrap();
-}
-
-#[test]
#[should_panic(expected = "assertion failed: !name.is_empty()")]
fn test_dependency_with_empty_name() {
// Bug 5229, dependency-names must not be empty
@@ -1116,41 +930,6 @@ fn resolving_with_constrained_sibling_backtrack_activation() {
}
#[test]
-fn resolving_with_public_constrained_sibling() {
- // It makes sense to resolve most-constrained deps first, but
- // with that logic the backtrack traps here come between the two
- // attempted resolutions of 'constrained'. When backtracking,
- // cargo should skip past them and resume resolution once the
- // number of activations for 'constrained' changes.
- let mut reglist = vec![
- pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
- dep_req("backtrack_trap1", "1.0"),
- dep_req("backtrack_trap2", "1.0"),
- dep_req("constrained", "<=60")]),
- pkg!(("bar", "1.0.0") => [dep_req_kind("constrained", ">=60", DepKind::Normal, true)]),
- ];
- // Bump these to make the test harder, but you'll also need to
- // change the version constraints on `constrained` above. To correctly
- // exercise Cargo, the relationship between the values is:
- // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
- // to make sure the traps are resolved between `constrained`.
- const NUM_TRAPS: usize = 45; // min 1
- const NUM_CONSTRAINED: usize = 100; // min 1
- for i in 0..NUM_TRAPS {
- let vsn = format!("1.0.{}", i);
- reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
- reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
- }
- for i in 0..NUM_CONSTRAINED {
- let vsn = format!("{}.0.0", i);
- reglist.push(pkg!(("constrained", vsn.clone())));
- }
- let reg = registry(reglist);
-
- let _ = resolve_and_validated(vec![dep_req("foo", "1")], &reg, None);
-}
-
-#[test]
fn resolving_with_constrained_sibling_transitive_dep_effects() {
// When backtracking due to a failed dependency, if Cargo is
// trying to be clever and skip irrelevant dependencies, care must
diff --git a/src/tools/cargo/crates/rustfix/Cargo.toml b/src/tools/cargo/crates/rustfix/Cargo.toml
new file mode 100644
index 000000000..7947f0268
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "rustfix"
+version = "0.7.0"
+authors = [
+ "Pascal Hertleif <killercup@gmail.com>",
+ "Oliver Schneider <oli-obk@users.noreply.github.com>",
+]
+rust-version = "1.70.0" # MSRV:3
+edition.workspace = true
+license.workspace = true
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+description = "Automatically apply the suggestions made by rustc"
+documentation = "https://docs.rs/rustfix"
+exclude = [
+ "examples/*",
+ "tests/*",
+]
+
+[dependencies]
+serde = { workspace = true, features = ["derive"] }
+serde_json.workspace = true
+thiserror.workspace = true
+tracing.workspace = true
+
+[dev-dependencies]
+anyhow.workspace = true
+proptest.workspace = true
+similar = "2.3.0"
+tempfile.workspace = true
+tracing-subscriber.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/rustfix/Changelog.md b/src/tools/cargo/crates/rustfix/Changelog.md
new file mode 100644
index 000000000..1b57320e6
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/Changelog.md
@@ -0,0 +1,79 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [Unreleased]
+
+## [0.4.6] - 2019-07-16
+
+### Changed
+
+Internal changes:
+
+- Change example to automatically determine filename
+- Migrate to Rust 2018
+- use `derive` feature over `serde_derive` crate
+
+## [0.4.5] - 2019-03-26
+
+### Added
+
+- Implement common traits for Diagnostic and related types
+
+### Fixed
+
+- Fix out of bounds access in parse_snippet
+
+## [0.4.4] - 2018-12-13
+
+### Added
+
+- Make Diagnostic::rendered public.
+
+### Changed
+
+- Revert faulty "Allow multiple solutions in a suggestion"
+
+## [0.4.3] - 2018-12-09 - *yanked!*
+
+### Added
+
+- Allow multiple solutions in a suggestion
+
+### Changed
+
+- use `RUSTC` environment var if present
+
+## [0.4.2] - 2018-07-31
+
+### Added
+
+- Expose an interface to apply fixes on-by-one
+
+### Changed
+
+- Handle invalid snippets instead of panicking
+
+## [0.4.1] - 2018-07-26
+
+### Changed
+
+- Ignore duplicate replacements
+
+## [0.4.0] - 2018-05-23
+
+### Changed
+
+- Filter by machine applicability by default
+
+[Unreleased]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.6...HEAD
+[0.4.6]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.5...rustfix-0.4.6
+[0.4.5]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.4...rustfix-0.4.5
+[0.4.4]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.3...rustfix-0.4.4
+[0.4.3]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.2...rustfix-0.4.3
+[0.4.2]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.1...rustfix-0.4.2
+[0.4.1]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.0...rustfix-0.4.1
+[0.4.0]: https://github.com/rust-lang-nursery/rustfix/compare/rustfix-0.4.0
diff --git a/src/tools/cargo/crates/rustfix/LICENSE-APACHE b/src/tools/cargo/crates/rustfix/LICENSE-APACHE
new file mode 120000
index 000000000..1cd601d0a
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/src/tools/cargo/crates/rustfix/LICENSE-MIT b/src/tools/cargo/crates/rustfix/LICENSE-MIT
new file mode 120000
index 000000000..b2cfbdc7b
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/src/tools/cargo/crates/rustfix/Readme.md b/src/tools/cargo/crates/rustfix/Readme.md
new file mode 100644
index 000000000..0546e6018
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/Readme.md
@@ -0,0 +1,29 @@
+# rustfix
+
+[![Latest Version](https://img.shields.io/crates/v/rustfix.svg)](https://crates.io/crates/rustfix)
+[![Rust Documentation](https://docs.rs/rustfix/badge.svg)](https://docs.rs/rustfix)
+
+Rustfix is a library defining useful structures that represent fix suggestions from rustc.
+
+This is a low-level library. You pass it the JSON output from `rustc`, and you can then use it to apply suggestions to in-memory strings. This library doesn't execute commands, or read or write from the filesystem.
+
+If you are looking for the [`cargo fix`] implementation, the core of it is located in [`cargo::ops::fix`].
+
+[`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html
+[`cargo::ops::fix`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/fix.rs
+
+## License
+
+Licensed under either of
+
+- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <http://www.apache.org/licenses/LICENSE-2.0>)
+- MIT license ([LICENSE-MIT](LICENSE-MIT) or <http://opensource.org/licenses/MIT>)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual licensed as above, without any additional terms or
+conditions.
diff --git a/src/tools/cargo/crates/rustfix/examples/fix-json.rs b/src/tools/cargo/crates/rustfix/examples/fix-json.rs
new file mode 100644
index 000000000..676171106
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/examples/fix-json.rs
@@ -0,0 +1,44 @@
+#![allow(clippy::disallowed_methods, clippy::print_stdout, clippy::print_stderr)]
+
+use anyhow::Error;
+use std::io::{stdin, BufReader, Read};
+use std::{collections::HashMap, collections::HashSet, env, fs};
+
+fn main() -> Result<(), Error> {
+ let suggestions_file = env::args().nth(1).expect("USAGE: fix-json <file or -->");
+ let suggestions = if suggestions_file == "--" {
+ let mut buffer = String::new();
+ BufReader::new(stdin()).read_to_string(&mut buffer)?;
+ buffer
+ } else {
+ fs::read_to_string(&suggestions_file)?
+ };
+ let suggestions = rustfix::get_suggestions_from_json(
+ &suggestions,
+ &HashSet::new(),
+ rustfix::Filter::Everything,
+ )?;
+
+ let mut files = HashMap::new();
+ for suggestion in suggestions {
+ let file = suggestion.solutions[0].replacements[0]
+ .snippet
+ .file_name
+ .clone();
+ files.entry(file).or_insert_with(Vec::new).push(suggestion);
+ }
+
+ for (source_file, suggestions) in &files {
+ let source = fs::read_to_string(source_file)?;
+ let mut fix = rustfix::CodeFix::new(&source);
+ for suggestion in suggestions.iter().rev() {
+ if let Err(e) = fix.apply(suggestion) {
+ eprintln!("Failed to apply suggestion to {}: {}", source_file, e);
+ }
+ }
+ let fixes = fix.finish()?;
+ fs::write(source_file, fixes)?;
+ }
+
+ Ok(())
+}
diff --git a/src/tools/cargo/crates/rustfix/proptest-regressions/replace.txt b/src/tools/cargo/crates/rustfix/proptest-regressions/replace.txt
new file mode 100644
index 000000000..fc5bd1a8b
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/proptest-regressions/replace.txt
@@ -0,0 +1,8 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+xs 358148376 3634975642 2528447681 3675516813 # shrinks to ref s = ""
+xs 3127423015 3362740891 2605681441 2390162043 # shrinks to ref data = "", ref replacements = [(0..0, [])]
diff --git a/src/tools/cargo/crates/rustfix/src/diagnostics.rs b/src/tools/cargo/crates/rustfix/src/diagnostics.rs
new file mode 100644
index 000000000..ad1899b2c
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/src/diagnostics.rs
@@ -0,0 +1,115 @@
+//! Rustc Diagnostic JSON Output.
+//!
+//! The following data types are copied from [rust-lang/rust](https://github.com/rust-lang/rust/blob/4fd68eb47bad1c121417ac4450b2f0456150db86/compiler/rustc_errors/src/json.rs).
+//!
+//! For examples of the JSON output, see JSON fixture files under `tests/` directory.
+
+use serde::Deserialize;
+
+/// The root diagnostic JSON output emitted by the compiler.
+#[derive(Clone, Deserialize, Debug, Hash, Eq, PartialEq)]
+pub struct Diagnostic {
+ /// The primary error message.
+ pub message: String,
+ pub code: Option<DiagnosticCode>,
+ /// "error: internal compiler error", "error", "warning", "note", "help".
+ level: String,
+ pub spans: Vec<DiagnosticSpan>,
+ /// Associated diagnostic messages.
+ pub children: Vec<Diagnostic>,
+ /// The message as rustc would render it.
+ pub rendered: Option<String>,
+}
+
+/// Span information of a diagnostic item.
+#[derive(Clone, Deserialize, Debug, Hash, Eq, PartialEq)]
+pub struct DiagnosticSpan {
+ pub file_name: String,
+ pub byte_start: u32,
+ pub byte_end: u32,
+ /// 1-based.
+ pub line_start: usize,
+ pub line_end: usize,
+ /// 1-based, character offset.
+ pub column_start: usize,
+ pub column_end: usize,
+ /// Is this a "primary" span -- meaning the point, or one of the points,
+ /// where the error occurred?
+ pub is_primary: bool,
+ /// Source text from the start of line_start to the end of line_end.
+ pub text: Vec<DiagnosticSpanLine>,
+ /// Label that should be placed at this location (if any)
+ label: Option<String>,
+ /// If we are suggesting a replacement, this will contain text
+ /// that should be sliced in atop this span.
+ pub suggested_replacement: Option<String>,
+ /// If the suggestion is approximate
+ pub suggestion_applicability: Option<Applicability>,
+ /// Macro invocations that created the code at this span, if any.
+ expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
+}
+
+/// Indicates the confidence in the correctness of a suggestion.
+///
+/// All suggestions are marked with an `Applicability`. Tools use the applicability of a suggestion
+/// to determine whether it should be automatically applied or if the user should be consulted
+/// before applying the suggestion.
+#[derive(Copy, Clone, Debug, PartialEq, Deserialize, Hash, Eq)]
+pub enum Applicability {
+ /// The suggestion is definitely what the user intended, or maintains the exact meaning of the code.
+ /// This suggestion should be automatically applied.
+ ///
+ /// In case of multiple `MachineApplicable` suggestions (whether as part of
+ /// the same `multipart_suggestion` or not), all of them should be
+ /// automatically applied.
+ MachineApplicable,
+
+ /// The suggestion may be what the user intended, but it is uncertain. The suggestion should
+ /// result in valid Rust code if it is applied.
+ MaybeIncorrect,
+
+ /// The suggestion contains placeholders like `(...)` or `{ /* fields */ }`. The suggestion
+ /// cannot be applied automatically because it will not result in valid Rust code. The user
+ /// will need to fill in the placeholders.
+ HasPlaceholders,
+
+ /// The applicability of the suggestion is unknown.
+ Unspecified,
+}
+
+/// Span information of a single line.
+#[derive(Clone, Deserialize, Debug, Eq, PartialEq, Hash)]
+pub struct DiagnosticSpanLine {
+ pub text: String,
+
+ /// 1-based, character offset in self.text.
+ pub highlight_start: usize,
+
+ pub highlight_end: usize,
+}
+
+/// Span information for macro expansions.
+#[derive(Clone, Deserialize, Debug, Eq, PartialEq, Hash)]
+struct DiagnosticSpanMacroExpansion {
+ /// span where macro was applied to generate this code; note that
+ /// this may itself derive from a macro (if
+ /// `span.expansion.is_some()`)
+ span: DiagnosticSpan,
+
+ /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
+ macro_decl_name: String,
+
+ /// span where macro was defined (if known)
+ def_site_span: Option<DiagnosticSpan>,
+}
+
+/// The error code emitted by the compiler. See [Rust error codes index].
+///
+/// [Rust error codes index]: https://doc.rust-lang.org/error_codes/error-index.html
+#[derive(Clone, Deserialize, Debug, Eq, PartialEq, Hash)]
+pub struct DiagnosticCode {
+ /// The code itself.
+ pub code: String,
+ /// An explanation for the code.
+ explanation: Option<String>,
+}
diff --git a/src/tools/cargo/crates/rustfix/src/error.rs b/src/tools/cargo/crates/rustfix/src/error.rs
new file mode 100644
index 000000000..171864504
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/src/error.rs
@@ -0,0 +1,21 @@
+//! Error types.
+
+use std::ops::Range;
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ #[error("invalid range {0:?}, start is larger than end")]
+ InvalidRange(Range<usize>),
+
+ #[error("invalid range {0:?}, original data is only {1} byte long")]
+ DataLengthExceeded(Range<usize>, usize),
+
+ #[error("could not replace range {0:?}, maybe parts of it were already replaced?")]
+ MaybeAlreadyReplaced(Range<usize>),
+
+ #[error("cannot replace slice of data that was already replaced")]
+ AlreadyReplaced,
+
+ #[error(transparent)]
+ Utf8(#[from] std::string::FromUtf8Error),
+}
diff --git a/src/tools/cargo/crates/rustfix/src/lib.rs b/src/tools/cargo/crates/rustfix/src/lib.rs
new file mode 100644
index 000000000..5fdb37b56
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/src/lib.rs
@@ -0,0 +1,306 @@
+//! Library for applying diagnostic suggestions to source code.
+//!
+//! This is a low-level library. You pass it the [JSON output] from `rustc`,
+//! and you can then use it to apply suggestions to in-memory strings.
+//! This library doesn't execute commands, or read or write from the filesystem.
+//!
+//! If you are looking for the [`cargo fix`] implementation, the core of it is
+//! located in [`cargo::ops::fix`].
+//!
+//! [`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html
+//! [`cargo::ops::fix`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/fix.rs
+//! [JSON output]: diagnostics
+//!
+//! The general outline of how to use this library is:
+//!
+//! 1. Call `rustc` and collect the JSON data.
+//! 2. Pass the json data to [`get_suggestions_from_json`].
+//! 3. Create a [`CodeFix`] with the source of a file to modify.
+//! 4. Call [`CodeFix::apply`] to apply a change.
+//! 5. Call [`CodeFix::finish`] to get the result and write it back to disk.
+
+use std::collections::HashSet;
+use std::ops::Range;
+
+pub mod diagnostics;
+mod error;
+mod replace;
+
+use diagnostics::Diagnostic;
+use diagnostics::DiagnosticSpan;
+pub use error::Error;
+
/// A filter to control which suggestion should be applied.
///
/// Passed to [`get_suggestions_from_json`] and [`collect_suggestions`] to
/// decide which spans' suggested replacements are collected.
#[derive(Debug, Clone, Copy)]
pub enum Filter {
    /// For [`diagnostics::Applicability::MachineApplicable`] only.
    MachineApplicableOnly,
    /// Everything is included. YOLO!
    Everything,
}
+
+/// Collects code [`Suggestion`]s from one or more compiler diagnostic lines.
+///
+/// Fails if any of diagnostic line `input` is not a valid [`Diagnostic`] JSON.
+///
+/// * `only` --- only diagnostics with code in a set of error codes would be collected.
+pub fn get_suggestions_from_json<S: ::std::hash::BuildHasher>(
+ input: &str,
+ only: &HashSet<String, S>,
+ filter: Filter,
+) -> serde_json::error::Result<Vec<Suggestion>> {
+ let mut result = Vec::new();
+ for cargo_msg in serde_json::Deserializer::from_str(input).into_iter::<Diagnostic>() {
+ // One diagnostic line might have multiple suggestions
+ result.extend(collect_suggestions(&cargo_msg?, only, filter));
+ }
+ Ok(result)
+}
+
/// A line/column pair identifying a position in a source file.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct LinePosition {
    pub line: usize,
    pub column: usize,
}

impl std::fmt::Display for LinePosition {
    /// Formats as `line:column`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let LinePosition { line, column } = self;
        write!(f, "{line}:{column}")
    }
}

/// A pair of [`LinePosition`]s delimiting a region in a source file.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct LineRange {
    pub start: LinePosition,
    pub end: LinePosition,
}

impl std::fmt::Display for LineRange {
    /// Formats as `start-end`, e.g. `1:2-3:4`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.start, f)?;
        f.write_str("-")?;
        std::fmt::Display::fmt(&self.end, f)
    }
}
+
/// An error/warning and possible solutions for fixing it
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Suggestion {
    /// Message of the diagnostic this suggestion was collected from.
    pub message: String,
    /// Snippets parsed from the diagnostic's own spans.
    pub snippets: Vec<Snippet>,
    /// Candidate fixes, one per child diagnostic that carried replacements.
    pub solutions: Vec<Solution>,
}

/// Solution to a diagnostic item.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Solution {
    /// The message of the child diagnostic this solution came from.
    pub message: String,
    /// Possible solutions to fix the error.
    pub replacements: Vec<Replacement>,
}

/// Represents code that will get replaced.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Snippet {
    /// Name of the file this snippet belongs to.
    pub file_name: String,
    /// Line/column range of the snippet within the file.
    pub line_range: LineRange,
    /// Byte offset range of the snippet within the original source
    /// (taken from the diagnostic span's `byte_start`/`byte_end`).
    pub range: Range<usize>,
    /// leading surrounding text, text to replace, trailing surrounding text
    ///
    /// This split is useful for highlighting the part that gets replaced
    pub text: (String, String, String),
}

/// Represents a replacement of a `snippet`.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Replacement {
    /// Code snippet that gets replaced.
    pub snippet: Snippet,
    /// The replacement of the snippet.
    pub replacement: String,
}
+
+/// Parses a [`Snippet`] from a diagnostic span item.
+fn parse_snippet(span: &DiagnosticSpan) -> Option<Snippet> {
+ // unindent the snippet
+ let indent = span
+ .text
+ .iter()
+ .map(|line| {
+ let indent = line
+ .text
+ .chars()
+ .take_while(|&c| char::is_whitespace(c))
+ .count();
+ std::cmp::min(indent, line.highlight_start - 1)
+ })
+ .min()?;
+
+ let text_slice = span.text[0].text.chars().collect::<Vec<char>>();
+
+ // We subtract `1` because these highlights are 1-based
+ // Check the `min` so that it doesn't attempt to index out-of-bounds when
+ // the span points to the "end" of the line. For example, a line of
+ // "foo\n" with a highlight_start of 5 is intended to highlight *after*
+ // the line. This needs to compensate since the newline has been removed
+ // from the text slice.
+ let start = (span.text[0].highlight_start - 1).min(text_slice.len());
+ let end = (span.text[0].highlight_end - 1).min(text_slice.len());
+ let lead = text_slice[indent..start].iter().collect();
+ let mut body: String = text_slice[start..end].iter().collect();
+
+ for line in span.text.iter().take(span.text.len() - 1).skip(1) {
+ body.push('\n');
+ body.push_str(&line.text[indent..]);
+ }
+ let mut tail = String::new();
+ let last = &span.text[span.text.len() - 1];
+
+ // If we get a DiagnosticSpanLine where highlight_end > text.len(), we prevent an 'out of
+ // bounds' access by making sure the index is within the array bounds.
+ // `saturating_sub` is used in case of an empty file
+ let last_tail_index = last.highlight_end.min(last.text.len()).saturating_sub(1);
+ let last_slice = last.text.chars().collect::<Vec<char>>();
+
+ if span.text.len() > 1 {
+ body.push('\n');
+ body.push_str(
+ &last_slice[indent..last_tail_index]
+ .iter()
+ .collect::<String>(),
+ );
+ }
+ tail.push_str(&last_slice[last_tail_index..].iter().collect::<String>());
+ Some(Snippet {
+ file_name: span.file_name.clone(),
+ line_range: LineRange {
+ start: LinePosition {
+ line: span.line_start,
+ column: span.column_start,
+ },
+ end: LinePosition {
+ line: span.line_end,
+ column: span.column_end,
+ },
+ },
+ range: (span.byte_start as usize)..(span.byte_end as usize),
+ text: (lead, body, tail),
+ })
+}
+
+/// Converts a [`DiagnosticSpan`] into a [`Replacement`].
+fn collect_span(span: &DiagnosticSpan) -> Option<Replacement> {
+ let snippet = parse_snippet(span)?;
+ let replacement = span.suggested_replacement.clone()?;
+ Some(Replacement {
+ snippet,
+ replacement,
+ })
+}
+
+/// Collects code [`Suggestion`]s from a single compiler diagnostic line.
+///
+/// * `only` --- only diagnostics with code in a set of error codes would be collected.
+pub fn collect_suggestions<S: ::std::hash::BuildHasher>(
+ diagnostic: &Diagnostic,
+ only: &HashSet<String, S>,
+ filter: Filter,
+) -> Option<Suggestion> {
+ if !only.is_empty() {
+ if let Some(ref code) = diagnostic.code {
+ if !only.contains(&code.code) {
+ // This is not the code we are looking for
+ return None;
+ }
+ } else {
+ // No code, probably a weird builtin warning/error
+ return None;
+ }
+ }
+
+ let snippets = diagnostic.spans.iter().filter_map(parse_snippet).collect();
+
+ let solutions: Vec<_> = diagnostic
+ .children
+ .iter()
+ .filter_map(|child| {
+ let replacements: Vec<_> = child
+ .spans
+ .iter()
+ .filter(|span| {
+ use crate::diagnostics::Applicability::*;
+ use crate::Filter::*;
+
+ match (filter, &span.suggestion_applicability) {
+ (MachineApplicableOnly, Some(MachineApplicable)) => true,
+ (MachineApplicableOnly, _) => false,
+ (Everything, _) => true,
+ }
+ })
+ .filter_map(collect_span)
+ .collect();
+ if !replacements.is_empty() {
+ Some(Solution {
+ message: child.message.clone(),
+ replacements,
+ })
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ if solutions.is_empty() {
+ None
+ } else {
+ Some(Suggestion {
+ message: diagnostic.message.clone(),
+ snippets,
+ solutions,
+ })
+ }
+}
+
+/// Represents a code fix. This doesn't write to disks but is only in memory.
+///
+/// The general way to use this is:
+///
+/// 1. Feeds the source of a file to [`CodeFix::new`].
+/// 2. Calls [`CodeFix::apply`] to apply suggestions to the source code.
+/// 3. Calls [`CodeFix::finish`] to get the "fixed" code.
+pub struct CodeFix {
+ data: replace::Data,
+}
+
+impl CodeFix {
+ /// Creates a `CodeFix` with the source of a file to modify.
+ pub fn new(s: &str) -> CodeFix {
+ CodeFix {
+ data: replace::Data::new(s.as_bytes()),
+ }
+ }
+
+ /// Applies a suggestion to the code.
+ pub fn apply(&mut self, suggestion: &Suggestion) -> Result<(), Error> {
+ for sol in &suggestion.solutions {
+ for r in &sol.replacements {
+ self.data
+ .replace_range(r.snippet.range.clone(), r.replacement.as_bytes())?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Gets the result of the "fixed" code.
+ pub fn finish(&self) -> Result<String, Error> {
+ Ok(String::from_utf8(self.data.to_vec())?)
+ }
+}
+
+/// Applies multiple `suggestions` to the given `code`.
+pub fn apply_suggestions(code: &str, suggestions: &[Suggestion]) -> Result<String, Error> {
+ let mut fix = CodeFix::new(code);
+ for suggestion in suggestions.iter().rev() {
+ fix.apply(suggestion)?;
+ }
+ fix.finish()
+}
diff --git a/src/tools/cargo/crates/rustfix/src/replace.rs b/src/tools/cargo/crates/rustfix/src/replace.rs
new file mode 100644
index 000000000..ed467dcba
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/src/replace.rs
@@ -0,0 +1,329 @@
+//! A small module giving you a simple container that allows easy and cheap
+//! replacement of parts of its content, with the ability to prevent changing
+//! the same parts multiple times.
+
+use std::rc::Rc;
+
+use crate::error::Error;
+
/// Indicates the change state of a [`Span`].
#[derive(Debug, Clone, PartialEq, Eq)]
enum State {
    /// The initial state. No change applied.
    Initial,
    /// Has been replaced.
    Replaced(Rc<[u8]>),
    /// Has been inserted.
    Inserted(Rc<[u8]>),
}

impl State {
    /// Returns `true` only for spans produced by a pure insertion.
    fn is_inserted(&self) -> bool {
        match self {
            State::Inserted(..) => true,
            State::Initial | State::Replaced(..) => false,
        }
    }
}
+
/// Span with a change [`State`].
#[derive(Debug, Clone, PartialEq, Eq)]
struct Span {
    /// Start of this span in parent data (byte offset into `Data::original`).
    start: usize,
    /// up to end excluding
    end: usize,
    /// Whether the span is inserted, replaced or still fresh.
    data: State,
}
+
/// A container that allows easily replacing chunks of its data
#[derive(Debug, Clone, Default)]
pub struct Data {
    /// Original data.
    original: Vec<u8>,
    /// [`Span`]s covering the full range of the original data, kept in
    /// order; rendering concatenates them front to back.
    parts: Vec<Span>,
}
+
+impl Data {
+ /// Create a new data container from a slice of bytes
+ pub fn new(data: &[u8]) -> Self {
+ Data {
+ original: data.into(),
+ parts: vec![Span {
+ data: State::Initial,
+ start: 0,
+ end: data.len(),
+ }],
+ }
+ }
+
+ /// Render this data as a vector of bytes
+ pub fn to_vec(&self) -> Vec<u8> {
+ if self.original.is_empty() {
+ return Vec::new();
+ }
+
+ self.parts.iter().fold(Vec::new(), |mut acc, d| {
+ match d.data {
+ State::Initial => acc.extend_from_slice(&self.original[d.start..d.end]),
+ State::Replaced(ref d) | State::Inserted(ref d) => acc.extend_from_slice(d),
+ };
+ acc
+ })
+ }
+
+ /// Replace a chunk of data with the given slice, erroring when this part
+ /// was already changed previously.
+ pub fn replace_range(
+ &mut self,
+ range: std::ops::Range<usize>,
+ data: &[u8],
+ ) -> Result<(), Error> {
+ if range.start > range.end {
+ return Err(Error::InvalidRange(range));
+ }
+
+ if range.end > self.original.len() {
+ return Err(Error::DataLengthExceeded(range, self.original.len()));
+ }
+
+ let insert_only = range.start == range.end;
+
+ // Since we error out when replacing an already replaced chunk of data,
+ // we can take some shortcuts here. For example, there can be no
+ // overlapping replacements -- we _always_ split a chunk of 'initial'
+ // data into three[^empty] parts, and there can't ever be two 'initial'
+ // parts touching.
+ //
+ // [^empty]: Leading and trailing ones might be empty if we replace
+ // the whole chunk. As an optimization and without loss of generality we
+ // don't add empty parts.
+ let new_parts = {
+ let Some(index_of_part_to_split) = self.parts.iter().position(|p| {
+ !p.data.is_inserted() && p.start <= range.start && p.end >= range.end
+ }) else {
+ if tracing::enabled!(tracing::Level::DEBUG) {
+ let slices = self
+ .parts
+ .iter()
+ .map(|p| {
+ (
+ p.start,
+ p.end,
+ match p.data {
+ State::Initial => "initial",
+ State::Replaced(..) => "replaced",
+ State::Inserted(..) => "inserted",
+ },
+ )
+ })
+ .collect::<Vec<_>>();
+ tracing::debug!(
+ "no single slice covering {}..{}, current slices: {:?}",
+ range.start,
+ range.end,
+ slices,
+ );
+ }
+
+ return Err(Error::MaybeAlreadyReplaced(range));
+ };
+
+ let part_to_split = &self.parts[index_of_part_to_split];
+
+ // If this replacement matches exactly the part that we would
+ // otherwise split then we ignore this for now. This means that you
+ // can replace the exact same range with the exact same content
+ // multiple times and we'll process and allow it.
+ //
+ // This is currently done to alleviate issues like
+ // rust-lang/rust#51211 although this clause likely wants to be
+ // removed if that's fixed deeper in the compiler.
+ if part_to_split.start == range.start && part_to_split.end == range.end {
+ if let State::Replaced(ref replacement) = part_to_split.data {
+ if &**replacement == data {
+ return Ok(());
+ }
+ }
+ }
+
+ if part_to_split.data != State::Initial {
+ return Err(Error::AlreadyReplaced);
+ }
+
+ let mut new_parts = Vec::with_capacity(self.parts.len() + 2);
+
+ // Previous parts
+ if let Some(ps) = self.parts.get(..index_of_part_to_split) {
+ new_parts.extend_from_slice(ps);
+ }
+
+ // Keep initial data on left side of part
+ if range.start > part_to_split.start {
+ new_parts.push(Span {
+ start: part_to_split.start,
+ end: range.start,
+ data: State::Initial,
+ });
+ }
+
+ // New part
+ new_parts.push(Span {
+ start: range.start,
+ end: range.end,
+ data: if insert_only {
+ State::Inserted(data.into())
+ } else {
+ State::Replaced(data.into())
+ },
+ });
+
+ // Keep initial data on right side of part
+ if range.end < part_to_split.end {
+ new_parts.push(Span {
+ start: range.end,
+ end: part_to_split.end,
+ data: State::Initial,
+ });
+ }
+
+ // Following parts
+ if let Some(ps) = self.parts.get(index_of_part_to_split + 1..) {
+ new_parts.extend_from_slice(ps);
+ }
+
+ new_parts
+ };
+
+ self.parts = new_parts;
+
+ Ok(())
+ }
+}
+
#[cfg(test)]
mod tests {
    use super::*;
    use proptest::prelude::*;

    /// Helper: view a byte slice as UTF-8, panicking on invalid data.
    fn str(i: &[u8]) -> &str {
        ::std::str::from_utf8(i).unwrap()
    }

    #[test]
    fn insert_at_beginning() {
        let mut d = Data::new(b"foo bar baz");
        d.replace_range(0..0, b"oh no ").unwrap();
        assert_eq!("oh no foo bar baz", str(&d.to_vec()));
    }

    #[test]
    fn insert_at_end() {
        let mut d = Data::new(b"foo bar baz");
        d.replace_range(11..11, b" oh no").unwrap();
        assert_eq!("foo bar baz oh no", str(&d.to_vec()));
    }

    #[test]
    fn replace_some_stuff() {
        let mut d = Data::new(b"foo bar baz");
        d.replace_range(4..7, b"lol").unwrap();
        assert_eq!("foo lol baz", str(&d.to_vec()));
    }

    #[test]
    fn replace_a_single_char() {
        let mut d = Data::new(b"let y = true;");
        d.replace_range(4..5, b"mut y").unwrap();
        assert_eq!("let mut y = true;", str(&d.to_vec()));
    }

    #[test]
    fn replace_multiple_lines() {
        let mut d = Data::new(b"lorem\nipsum\ndolor");

        d.replace_range(6..11, b"lol").unwrap();
        assert_eq!("lorem\nlol\ndolor", str(&d.to_vec()));

        d.replace_range(12..17, b"lol").unwrap();
        assert_eq!("lorem\nlol\nlol", str(&d.to_vec()));
    }

    // Mixing insertions (empty ranges) with replacements of adjacent ranges
    // must keep all parts intact.
    #[test]
    fn replace_multiple_lines_with_insert_only() {
        let mut d = Data::new(b"foo!");

        d.replace_range(3..3, b"bar").unwrap();
        assert_eq!("foobar!", str(&d.to_vec()));

        d.replace_range(0..3, b"baz").unwrap();
        assert_eq!("bazbar!", str(&d.to_vec()));

        d.replace_range(3..4, b"?").unwrap();
        assert_eq!("bazbar?", str(&d.to_vec()));
    }

    #[test]
    fn replace_invalid_range() {
        let mut d = Data::new(b"foo!");

        assert!(d.replace_range(2..1, b"bar").is_err());
        assert!(d.replace_range(0..3, b"bar").is_ok());
    }

    #[test]
    fn empty_to_vec_roundtrip() {
        let s = "";
        assert_eq!(s.as_bytes(), Data::new(s.as_bytes()).to_vec().as_slice());
    }

    #[test]
    fn replace_overlapping_stuff_errs() {
        let mut d = Data::new(b"foo bar baz");

        d.replace_range(4..7, b"lol").unwrap();
        assert_eq!("foo lol baz", str(&d.to_vec()));

        assert!(matches!(
            d.replace_range(4..7, b"lol2").unwrap_err(),
            Error::AlreadyReplaced,
        ));
    }

    #[test]
    fn broken_replacements() {
        let mut d = Data::new(b"foo");
        assert!(matches!(
            d.replace_range(4..8, b"lol").unwrap_err(),
            Error::DataLengthExceeded(std::ops::Range { start: 4, end: 8 }, 3),
        ));
    }

    // Replaying the exact same replacement is tolerated; see the
    // rust-lang/rust#51211 workaround in `replace_range`.
    #[test]
    fn replace_same_twice() {
        let mut d = Data::new(b"foo");
        d.replace_range(0..1, b"b").unwrap();
        d.replace_range(0..1, b"b").unwrap();
        assert_eq!("boo", str(&d.to_vec()));
    }

    // Property-based checks: `to_vec` must round-trip arbitrary input, and
    // arbitrary (possibly invalid) replacements must never panic.
    proptest! {
        #[test]
        fn new_to_vec_roundtrip(ref s in "\\PC*") {
            assert_eq!(s.as_bytes(), Data::new(s.as_bytes()).to_vec().as_slice());
        }

        #[test]
        fn replace_random_chunks(
            ref data in "\\PC*",
            ref replacements in prop::collection::vec(
                (any::<::std::ops::Range<usize>>(), any::<Vec<u8>>()),
                1..100,
            )
        ) {
            let mut d = Data::new(data.as_bytes());
            for &(ref range, ref bytes) in replacements {
                let _ = d.replace_range(range.clone(), bytes);
            }
        }
    }
}
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/empty.json b/src/tools/cargo/crates/rustfix/tests/edge-cases/empty.json
new file mode 100644
index 000000000..62df0b936
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/empty.json
@@ -0,0 +1,42 @@
+{
+ "message": "`main` function not found in crate `empty`",
+ "code": {
+ "code": "E0601",
+ "explanation": "No `main` function was found in a binary crate. To fix this error, add a\n`main` function. For example:\n\n```\nfn main() {\n // Your program will start here.\n println!(\"Hello world!\");\n}\n```\n\nIf you don't know the basics of Rust, you can go look to the Rust Book to get\nstarted: https://doc.rust-lang.org/book/\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "empty.rs",
+ "byte_start": 0,
+ "byte_end": 0,
+ "line_start": 0,
+ "line_end": 0,
+ "column_start": 1,
+ "column_end": 1,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "",
+ "highlight_start": 1,
+ "highlight_end": 1
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "consider adding a `main` function to `empty.rs`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0601]: `main` function not found in crate `empty`\n |\n = note: consider adding a `main` function to `empty.rs`\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/empty.rs b/src/tools/cargo/crates/rustfix/tests/edge-cases/empty.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/empty.rs
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/indented_whitespace.json b/src/tools/cargo/crates/rustfix/tests/edge-cases/indented_whitespace.json
new file mode 100644
index 000000000..b25189aaf
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/indented_whitespace.json
@@ -0,0 +1,60 @@
+{
+ "message": "non-ASCII whitespace symbol '\\u{a0}' is not skipped",
+ "code": null,
+ "level": "warning",
+ "spans":
+ [
+ {
+ "file_name": "lib.rs",
+ "byte_start": 26,
+ "byte_end": 28,
+ "line_start": 2,
+ "line_end": 2,
+ "column_start": 1,
+ "column_end": 2,
+ "is_primary": false,
+ "text":
+ [
+ {
+ "text": " indented\";",
+ "highlight_start": 1,
+ "highlight_end": 2
+ }
+ ],
+ "label": "non-ASCII whitespace symbol '\\u{a0}' is not skipped",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "lib.rs",
+ "byte_start": 24,
+ "byte_end": 28,
+ "line_start": 1,
+ "line_end": 2,
+ "column_start": 25,
+ "column_end": 2,
+ "is_primary": true,
+ "text":
+ [
+ {
+ "text": "pub static FOO: &str = \"\\",
+ "highlight_start": 25,
+ "highlight_end": 26
+ },
+ {
+ "text": " indented\";",
+ "highlight_start": 1,
+ "highlight_end": 2
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children":
+ [],
+ "rendered": "warning: non-ASCII whitespace symbol '\\u{a0}' is not skipped\n --> lib.rs:1:25\n |\n1 | pub static FOO: &str = \"\\\n | _________________________^\n2 | |  indented\";\n | | ^ non-ASCII whitespace symbol '\\u{a0}' is not skipped\n | |_|\n | \n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.json b/src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.json
new file mode 100644
index 000000000..e4b1c8f97
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.json
@@ -0,0 +1,33 @@
+{
+ "message": "`main` function not found in crate `no_main`",
+ "code": {
+ "code": "E0601",
+ "explanation": "No `main` function was found in a binary crate. To fix this error, add a\n`main` function. For example:\n\n```\nfn main() {\n // Your program will start here.\n println!(\"Hello world!\");\n}\n```\n\nIf you don't know the basics of Rust, you can go look to the Rust Book to get\nstarted: https://doc.rust-lang.org/book/\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "no_main.rs",
+ "byte_start": 26,
+ "byte_end": 26,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 27,
+ "column_end": 27,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "// This file has no main.",
+ "highlight_start": 27,
+ "highlight_end": 27
+ }
+ ],
+ "label": "consider adding a `main` function to `no_main.rs`",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0601]: `main` function not found in crate `no_main`\n --> no_main.rs:1:27\n |\n1 | // This file has no main.\n | ^ consider adding a `main` function to `no_main.rs`\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.rs b/src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.rs
new file mode 100644
index 000000000..0147ba726
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/no_main.rs
@@ -0,0 +1 @@
+// This file has no main.
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/out_of_bounds.recorded.json b/src/tools/cargo/crates/rustfix/tests/edge-cases/out_of_bounds.recorded.json
new file mode 100644
index 000000000..147debb6c
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/out_of_bounds.recorded.json
@@ -0,0 +1,43 @@
+{
+ "message": "unterminated double quote string",
+ "code": null,
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/tab_2.rs",
+ "byte_start": 485,
+ "byte_end": 526,
+ "line_start": 12,
+ "line_end": 13,
+ "column_start": 7,
+ "column_end": 3,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " \"\"\"; //~ ERROR unterminated double quote",
+ "highlight_start": 7,
+ "highlight_end": 45
+ },
+ {
+ "text": "}",
+ "highlight_start": 1,
+ "highlight_end": 3
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error: unterminated double quote string\n --> ./tests/everything/tab_2.rs:12:7\n |\n12 | \"\"\"; //~ ERROR unterminated double quote\n | _______^\n13 | | }\n | |__^\n\n"
+}
+{
+ "message": "aborting due to previous error",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to previous error\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/edge-cases/utf8_idents.recorded.json b/src/tools/cargo/crates/rustfix/tests/edge-cases/utf8_idents.recorded.json
new file mode 100644
index 000000000..28950d694
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge-cases/utf8_idents.recorded.json
@@ -0,0 +1,59 @@
+{
+ "message": "expected one of `,`, `:`, `=`, or `>`, found `'β`",
+ "code": null,
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/utf8_idents.rs",
+ "byte_start": 14,
+ "byte_end": 14,
+ "line_start": 2,
+ "line_end": 2,
+ "column_start": 6,
+ "column_end": 6,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " γ //~ ERROR non-ascii idents are not fully supported",
+ "highlight_start": 6,
+ "highlight_end": 6
+ }
+ ],
+ "label": "expected one of `,`, `:`, `=`, or `>` here",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "./tests/everything/utf8_idents.rs",
+ "byte_start": 145,
+ "byte_end": 148,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 5,
+ "column_end": 7,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " 'β, //~ ERROR non-ascii idents are not fully supported",
+ "highlight_start": 5,
+ "highlight_end": 7
+ }
+ ],
+ "label": "unexpected token",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error: expected one of `,`, `:`, `=`, or `>`, found `'β`\n --> ./tests/everything/utf8_idents.rs:4:5\n |\n2 | γ //~ ERROR non-ascii idents are not fully supported\n | - expected one of `,`, `:`, `=`, or `>` here\n3 | //~^ WARN type parameter `γ` should have an upper camel case name\n4 | 'β, //~ ERROR non-ascii idents are not fully supported\n | ^^ unexpected token\n\n"
+}
+{
+ "message": "aborting due to previous error",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to previous error\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/edge_cases.rs b/src/tools/cargo/crates/rustfix/tests/edge_cases.rs
new file mode 100644
index 000000000..42d1e405a
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/edge_cases.rs
@@ -0,0 +1,25 @@
+use rustfix;
+use std::collections::HashSet;
+use std::fs;
+
+macro_rules! expect_empty_json_test {
+ ($name:ident, $file:expr) => {
+ #[test]
+ fn $name() {
+ let json = fs::read_to_string(concat!("./tests/edge-cases/", $file)).unwrap();
+ let expected_suggestions = rustfix::get_suggestions_from_json(
+ &json,
+ &HashSet::new(),
+ rustfix::Filter::Everything,
+ )
+ .unwrap();
+ assert!(expected_suggestions.is_empty());
+ }
+ };
+}
+
+expect_empty_json_test! {out_of_bounds_test, "out_of_bounds.recorded.json"}
+expect_empty_json_test! {utf8_identifiers_test, "utf8_idents.recorded.json"}
+expect_empty_json_test! {empty, "empty.json"}
+expect_empty_json_test! {no_main, "no_main.json"}
+expect_empty_json_test! {indented_whitespace, "indented_whitespace.json"}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/E0178.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/E0178.fixed.rs
new file mode 100644
index 000000000..07e611774
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/E0178.fixed.rs
@@ -0,0 +1,10 @@
+#![allow(dead_code)]
+
+trait Foo {}
+
+struct Bar<'a> {
+ w: &'a (dyn Foo + Send),
+}
+
+fn main() {
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/E0178.json b/src/tools/cargo/crates/rustfix/tests/everything/E0178.json
new file mode 100644
index 000000000..89f15b528
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/E0178.json
@@ -0,0 +1,70 @@
+{
+ "message": "expected a path on the left-hand side of `+`, not `&'a Foo`",
+ "code": {
+ "code": "E0178",
+ "explanation": "\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, // error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, // error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, // error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, // error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https://github.com/rust-lang/rfcs/pull/438\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/E0178.rs",
+ "byte_start": 60,
+ "byte_end": 74,
+ "line_start": 6,
+ "line_end": 6,
+ "column_start": 8,
+ "column_end": 22,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " w: &'a Foo + Send,",
+ "highlight_start": 8,
+ "highlight_end": 22
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "try adding parentheses",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/E0178.rs",
+ "byte_start": 60,
+ "byte_end": 74,
+ "line_start": 6,
+ "line_end": 6,
+ "column_start": 8,
+ "column_end": 22,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " w: &'a Foo + Send,",
+ "highlight_start": 8,
+ "highlight_end": 22
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "&'a (Foo + Send)",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0178]: expected a path on the left-hand side of `+`, not `&'a Foo`\n --> ./tests/everything/E0178.rs:6:8\n |\n6 | w: &'a Foo + Send,\n | ^^^^^^^^^^^^^^ help: try adding parentheses: `&'a (Foo + Send)`\n\nIf you want more information on this error, try using \"rustc --explain E0178\"\n"
+}
+{
+ "message": "aborting due to previous error",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to previous error\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/E0178.rs b/src/tools/cargo/crates/rustfix/tests/everything/E0178.rs
new file mode 100644
index 000000000..24226fe0e
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/E0178.rs
@@ -0,0 +1,10 @@
+#![allow(dead_code)]
+
+trait Foo {}
+
+struct Bar<'a> {
+ w: &'a dyn Foo + Send,
+}
+
+fn main() {
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.fixed.rs
new file mode 100644
index 000000000..e443e024b
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.fixed.rs
@@ -0,0 +1,10 @@
+// Point at the captured immutable outer variable
+
+fn foo(mut f: Box<dyn FnMut()>) {
+ f();
+}
+
+fn main() {
+ let mut y = true;
+ foo(Box::new(move || y = false) as Box<_>); //~ ERROR cannot assign to captured outer variable
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.json b/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.json
new file mode 100644
index 000000000..f7afa491d
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.json
@@ -0,0 +1,70 @@
+{
+ "message": "cannot assign to captured outer variable in an `FnMut` closure",
+ "code": {
+ "code": "E0594",
+ "explanation": null
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/closure-immutable-outer-variable.rs",
+ "byte_start": 615,
+ "byte_end": 624,
+ "line_start": 19,
+ "line_end": 19,
+ "column_start": 26,
+ "column_end": 35,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " foo(Box::new(move || y = false) as Box<_>); //~ ERROR cannot assign to captured outer variable",
+ "highlight_start": 26,
+ "highlight_end": 35
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "consider making `y` mutable",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/closure-immutable-outer-variable.rs",
+ "byte_start": 580,
+ "byte_end": 581,
+ "line_start": 18,
+ "line_end": 18,
+ "column_start": 9,
+ "column_end": 10,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let y = true;",
+ "highlight_start": 9,
+ "highlight_end": 10
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "mut y",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0594]: cannot assign to captured outer variable in an `FnMut` closure\n --> ./tests/everything/closure-immutable-outer-variable.rs:19:26\n |\n18 | let y = true;\n | - help: consider making `y` mutable: `mut y`\n19 | foo(Box::new(move || y = false) as Box<_>); //~ ERROR cannot assign to captured outer variable\n | ^^^^^^^^^\n\nIf you want more information on this error, try using \"rustc --explain E0594\"\n"
+}
+{
+ "message": "aborting due to previous error",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to previous error\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.rs b/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.rs
new file mode 100644
index 000000000..c97ec3589
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/closure-immutable-outer-variable.rs
@@ -0,0 +1,10 @@
+// Point at the captured immutable outer variable
+
+fn foo(mut f: Box<FnMut()>) {
+ f();
+}
+
+fn main() {
+ let y = true;
+ foo(Box::new(move || y = false) as Box<_>); //~ ERROR cannot assign to captured outer variable
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.fixed.rs
new file mode 100644
index 000000000..604bc8503
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.fixed.rs
@@ -0,0 +1,8 @@
+fn main() {
+ // insert only fix, adds `,` to first match arm
+ // why doesnt this replace 1 with 1,?
+ match &Some(3) {
+ &None => 1,
+ &Some(x) => x,
+ };
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.json b/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.json
new file mode 100644
index 000000000..c32a71290
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.json
@@ -0,0 +1,68 @@
+{
+ "message": "expected one of `,`, `.`, `?`, `}`, or an operator, found `=>`",
+ "code": null,
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/handle-insert-only.rs",
+ "byte_start": 163,
+ "byte_end": 165,
+ "line_start": 6,
+ "line_end": 6,
+ "column_start": 18,
+ "column_end": 20,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " &Some(x) => x,",
+ "highlight_start": 18,
+ "highlight_end": 20
+ }
+ ],
+ "label": "expected one of `,`, `.`, `?`, `}`, or an operator here",
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "missing a comma here to end this `match` arm",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/handle-insert-only.rs",
+ "byte_start": 145,
+ "byte_end": 145,
+ "line_start": 5,
+ "line_end": 5,
+ "column_start": 19,
+ "column_end": 19,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " &None => 1",
+ "highlight_start": 19,
+ "highlight_end": 19
+ }
+ ],
+ "label": null,
+ "suggested_replacement": ",",
+ "suggestion_applicability": "Unspecified",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error: expected one of `,`, `.`, `?`, `}`, or an operator, found `=>`\n --> ./tests/everything/handle-insert-only.rs:6:18\n |\n5 | &None => 1\n | - help: missing a comma here to end this `match` arm\n6 | &Some(x) => x,\n | ^^ expected one of `,`, `.`, `?`, `}`, or an operator here\n\n"
+}
+{
+ "message": "aborting due to previous error",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to previous error\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.rs b/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.rs
new file mode 100644
index 000000000..d42a4caa8
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/handle-insert-only.rs
@@ -0,0 +1,8 @@
+fn main() {
+ // insert only fix, adds `,` to first match arm
+ // why doesnt this replace 1 with 1,?
+ match &Some(3) {
+ &None => 1
+ &Some(x) => x,
+ };
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.fixed.rs
new file mode 100644
index 000000000..533c91734
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.fixed.rs
@@ -0,0 +1,7 @@
+fn main() {
+ let x = 5i64;
+
+ if (x as u32) < 4 {
+ println!("yay");
+ }
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.json b/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.json
new file mode 100644
index 000000000..0634762e3
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.json
@@ -0,0 +1,87 @@
+{
+ "message": "`<` is interpreted as a start of generic arguments for `u32`, not a comparison",
+ "code": null,
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/lt-generic-comp.rs",
+ "byte_start": 49,
+ "byte_end": 50,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 19,
+ "column_end": 20,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " if x as u32 < 4 {",
+ "highlight_start": 19,
+ "highlight_end": 20
+ }
+ ],
+ "label": "interpreted as generic arguments",
+ "suggested_replacement": null,
+ "expansion": null
+ },
+ {
+ "file_name": "./tests/everything/lt-generic-comp.rs",
+ "byte_start": 47,
+ "byte_end": 48,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 17,
+ "column_end": 18,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " if x as u32 < 4 {",
+ "highlight_start": 17,
+ "highlight_end": 18
+ }
+ ],
+ "label": "not interpreted as comparison",
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "try comparing the cast value",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/lt-generic-comp.rs",
+ "byte_start": 38,
+ "byte_end": 46,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 8,
+ "column_end": 16,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " if x as u32 < 4 {",
+ "highlight_start": 8,
+ "highlight_end": 16
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "(x as u32)",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error: `<` is interpreted as a start of generic arguments for `u32`, not a comparison\n --> ./tests/everything/lt-generic-comp.rs:4:17\n |\n4 | if x as u32 < 4 {\n | -------- ^ - interpreted as generic arguments\n | | |\n | | not interpreted as comparison\n | help: try comparing the cast value: `(x as u32)`\n\n"
+}
+{
+ "message": "aborting due to previous error",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to previous error\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.rs b/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.rs
new file mode 100644
index 000000000..c279b261f
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/lt-generic-comp.rs
@@ -0,0 +1,7 @@
+fn main() {
+ let x = 5i64;
+
+ if x as u32 < 4 {
+ println!("yay");
+ }
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.fixed.rs
new file mode 100644
index 000000000..1a261785d
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.fixed.rs
@@ -0,0 +1,5 @@
+use std::collections::{HashSet};
+
+fn main() {
+ let _: HashSet<()>;
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.json b/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.json
new file mode 100644
index 000000000..89b14ccc8
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.json
@@ -0,0 +1,114 @@
+{
+ "message": "unused imports: `HashMap`, `VecDeque`",
+ "code": {
+ "code": "unused_imports",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "src/main.rs",
+ "byte_start": 23,
+ "byte_end": 30,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 24,
+ "column_end": 31,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "use std::collections::{HashMap, HashSet, VecDeque};",
+ "highlight_start": 24,
+ "highlight_end": 31
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "src/main.rs",
+ "byte_start": 41,
+ "byte_end": 49,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 42,
+ "column_end": 50,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "use std::collections::{HashMap, HashSet, VecDeque};",
+ "highlight_start": 42,
+ "highlight_end": 50
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_imports)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "remove the unused imports",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "src/main.rs",
+ "byte_start": 23,
+ "byte_end": 32,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 24,
+ "column_end": 33,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "use std::collections::{HashMap, HashSet, VecDeque};",
+ "highlight_start": 24,
+ "highlight_end": 33
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ },
+ {
+ "file_name": "src/main.rs",
+ "byte_start": 39,
+ "byte_end": 49,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 40,
+ "column_end": 50,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "use std::collections::{HashMap, HashSet, VecDeque};",
+ "highlight_start": 40,
+ "highlight_end": 50
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused imports: `HashMap`, `VecDeque`\n --> src/main.rs:1:24\n |\n1 | use std::collections::{HashMap, HashSet, VecDeque};\n | ^^^^^^^ ^^^^^^^^\n |\n = note: #[warn(unused_imports)] on by default\nhelp: remove the unused imports\n |\n1 | use std::collections::{HashSet};\n | -- --\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.rs b/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.rs
new file mode 100644
index 000000000..401198f7e
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/multiple-solutions.rs
@@ -0,0 +1,5 @@
+use std::collections::{HashMap, HashSet, VecDeque};
+
+fn main() {
+ let _: HashSet<()>;
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.fixed.rs
new file mode 100644
index 000000000..1a9298d5f
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.fixed.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let _x = 42;
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.json b/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.json
new file mode 100644
index 000000000..9a70c0880
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.json
@@ -0,0 +1,70 @@
+{
+ "message": "unused variable: `x`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "replace-only-one-char.rs",
+ "byte_start": 20,
+ "byte_end": 21,
+ "line_start": 2,
+ "line_end": 2,
+ "column_start": 9,
+ "column_end": 10,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x = 42;",
+ "highlight_start": 9,
+ "highlight_end": 10
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider using `_x` instead",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "replace-only-one-char.rs",
+ "byte_start": 20,
+ "byte_end": 21,
+ "line_start": 2,
+ "line_end": 2,
+ "column_start": 9,
+ "column_end": 10,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x = 42;",
+ "highlight_start": 9,
+ "highlight_end": 10
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_x",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `x`\n --> replace-only-one-char.rs:2:9\n |\n2 | let x = 42;\n | ^ help: consider using `_x` instead\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.rs b/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.rs
new file mode 100644
index 000000000..36e44936f
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/replace-only-one-char.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let x = 42;
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.fixed.rs b/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.fixed.rs
new file mode 100644
index 000000000..d5a81a8a8
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.fixed.rs
@@ -0,0 +1,5 @@
+fn main() {
+ let x: &[u8] = b"foo"; //~ ERROR mismatched types
+ let y: &[u8; 4] = b"baaa"; //~ ERROR mismatched types
+ let z: &str = "foo"; //~ ERROR mismatched types
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.json b/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.json
new file mode 100644
index 000000000..1c852513a
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.json
@@ -0,0 +1,218 @@
+{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "\nThis error occurs when the compiler was unable to infer the concrete type of a\nvariable. It can occur for several cases, the most common of which is a\nmismatch in the expected type that the compiler inferred for a variable's\ninitializing expression, and the actual type explicitly assigned to the\nvariable.\n\nFor example:\n\n```compile_fail,E0308\nlet x: i32 = \"I am not a number!\";\n// ~~~ ~~~~~~~~~~~~~~~~~~~~\n// | |\n// | initializing expression;\n// | compiler infers type `&str`\n// |\n// type `i32` assigned to variable `x`\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/str-lit-type-mismatch.rs",
+ "byte_start": 499,
+ "byte_end": 504,
+ "line_start": 13,
+ "line_end": 13,
+ "column_start": 20,
+ "column_end": 25,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x: &[u8] = \"foo\"; //~ ERROR mismatched types",
+ "highlight_start": 20,
+ "highlight_end": 25
+ }
+ ],
+ "label": "expected slice, found str",
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "expected type `&[u8]`\n found type `&'static str`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider adding a leading `b`",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/str-lit-type-mismatch.rs",
+ "byte_start": 499,
+ "byte_end": 504,
+ "line_start": 13,
+ "line_end": 13,
+ "column_start": 20,
+ "column_end": 25,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x: &[u8] = \"foo\"; //~ ERROR mismatched types",
+ "highlight_start": 20,
+ "highlight_end": 25
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "b\"foo\"",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0308]: mismatched types\n --> ./tests/everything/str-lit-type-mismatch.rs:13:20\n |\n13 | let x: &[u8] = \"foo\"; //~ ERROR mismatched types\n | ^^^^^\n | |\n | expected slice, found str\n | help: consider adding a leading `b`: `b\"foo\"`\n |\n = note: expected type `&[u8]`\n found type `&'static str`\n\nIf you want more information on this error, try using \"rustc --explain E0308\"\n"
+}
+{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "\nThis error occurs when the compiler was unable to infer the concrete type of a\nvariable. It can occur for several cases, the most common of which is a\nmismatch in the expected type that the compiler inferred for a variable's\ninitializing expression, and the actual type explicitly assigned to the\nvariable.\n\nFor example:\n\n```compile_fail,E0308\nlet x: i32 = \"I am not a number!\";\n// ~~~ ~~~~~~~~~~~~~~~~~~~~\n// | |\n// | initializing expression;\n// | compiler infers type `&str`\n// |\n// type `i32` assigned to variable `x`\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/str-lit-type-mismatch.rs",
+ "byte_start": 555,
+ "byte_end": 561,
+ "line_start": 14,
+ "line_end": 14,
+ "column_start": 23,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let y: &[u8; 4] = \"baaa\"; //~ ERROR mismatched types",
+ "highlight_start": 23,
+ "highlight_end": 29
+ }
+ ],
+ "label": "expected array of 4 elements, found str",
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "expected type `&[u8; 4]`\n found type `&'static str`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider adding a leading `b`",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/str-lit-type-mismatch.rs",
+ "byte_start": 555,
+ "byte_end": 561,
+ "line_start": 14,
+ "line_end": 14,
+ "column_start": 23,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let y: &[u8; 4] = \"baaa\"; //~ ERROR mismatched types",
+ "highlight_start": 23,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "b\"baaa\"",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0308]: mismatched types\n --> ./tests/everything/str-lit-type-mismatch.rs:14:23\n |\n14 | let y: &[u8; 4] = \"baaa\"; //~ ERROR mismatched types\n | ^^^^^^\n | |\n | expected array of 4 elements, found str\n | help: consider adding a leading `b`: `b\"baaa\"`\n |\n = note: expected type `&[u8; 4]`\n found type `&'static str`\n\nIf you want more information on this error, try using \"rustc --explain E0308\"\n"
+}
+{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "\nThis error occurs when the compiler was unable to infer the concrete type of a\nvariable. It can occur for several cases, the most common of which is a\nmismatch in the expected type that the compiler inferred for a variable's\ninitializing expression, and the actual type explicitly assigned to the\nvariable.\n\nFor example:\n\n```compile_fail,E0308\nlet x: i32 = \"I am not a number!\";\n// ~~~ ~~~~~~~~~~~~~~~~~~~~\n// | |\n// | initializing expression;\n// | compiler infers type `&str`\n// |\n// type `i32` assigned to variable `x`\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "./tests/everything/str-lit-type-mismatch.rs",
+ "byte_start": 608,
+ "byte_end": 614,
+ "line_start": 15,
+ "line_end": 15,
+ "column_start": 19,
+ "column_end": 25,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let z: &str = b\"foo\"; //~ ERROR mismatched types",
+ "highlight_start": 19,
+ "highlight_end": 25
+ }
+ ],
+ "label": "expected str, found array of 3 elements",
+ "suggested_replacement": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "expected type `&str`\n found type `&'static [u8; 3]`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider removing the leading `b`",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "./tests/everything/str-lit-type-mismatch.rs",
+ "byte_start": 608,
+ "byte_end": 614,
+ "line_start": 15,
+ "line_end": 15,
+ "column_start": 19,
+ "column_end": 25,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let z: &str = b\"foo\"; //~ ERROR mismatched types",
+ "highlight_start": 19,
+ "highlight_end": 25
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "\"foo\"",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0308]: mismatched types\n --> ./tests/everything/str-lit-type-mismatch.rs:15:19\n |\n15 | let z: &str = b\"foo\"; //~ ERROR mismatched types\n | ^^^^^^\n | |\n | expected str, found array of 3 elements\n | help: consider removing the leading `b`: `\"foo\"`\n |\n = note: expected type `&str`\n found type `&'static [u8; 3]`\n\nIf you want more information on this error, try using \"rustc --explain E0308\"\n"
+}
+{
+ "message": "aborting due to 3 previous errors",
+ "code": null,
+ "level": "error",
+ "spans": [],
+ "children": [],
+ "rendered": "error: aborting due to 3 previous errors\n\n"
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.rs b/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.rs
new file mode 100644
index 000000000..12637c7b9
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/everything/str-lit-type-mismatch.rs
@@ -0,0 +1,5 @@
+fn main() {
+ let x: &[u8] = "foo"; //~ ERROR mismatched types
+ let y: &[u8; 4] = "baaa"; //~ ERROR mismatched types
+ let z: &str = b"foo"; //~ ERROR mismatched types
+}
diff --git a/src/tools/cargo/crates/rustfix/tests/parse_and_replace.rs b/src/tools/cargo/crates/rustfix/tests/parse_and_replace.rs
new file mode 100644
index 000000000..902275b64
--- /dev/null
+++ b/src/tools/cargo/crates/rustfix/tests/parse_and_replace.rs
@@ -0,0 +1,234 @@
+#![allow(clippy::disallowed_methods, clippy::print_stdout, clippy::print_stderr)]
+
+use anyhow::{anyhow, ensure, Context, Error};
+use rustfix::apply_suggestions;
+use std::collections::HashSet;
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+use tempfile::tempdir;
+use tracing::{debug, info, warn};
+
+mod fixmode {
+ pub const EVERYTHING: &str = "yolo";
+}
+
+mod settings {
+ // can be set as env var to debug
+ pub const CHECK_JSON: &str = "RUSTFIX_TEST_CHECK_JSON";
+ pub const RECORD_JSON: &str = "RUSTFIX_TEST_RECORD_JSON";
+ pub const RECORD_FIXED_RUST: &str = "RUSTFIX_TEST_RECORD_FIXED_RUST";
+}
+
+fn compile(file: &Path) -> Result<Output, Error> {
+ let tmp = tempdir()?;
+
+ let args: Vec<OsString> = vec![
+ file.into(),
+ "--error-format=json".into(),
+ "--emit=metadata".into(),
+ "--crate-name=rustfix_test".into(),
+ "--out-dir".into(),
+ tmp.path().into(),
+ ];
+
+ let res = Command::new(env::var_os("RUSTC").unwrap_or("rustc".into()))
+ .args(&args)
+ .env("CLIPPY_DISABLE_DOCS_LINKS", "true")
+ .env_remove("RUST_LOG")
+ .output()?;
+
+ Ok(res)
+}
+
+fn compile_and_get_json_errors(file: &Path) -> Result<String, Error> {
+ let res = compile(file)?;
+ let stderr = String::from_utf8(res.stderr)?;
+ if stderr.contains("is only accepted on the nightly compiler") {
+ panic!("rustfix tests require a nightly compiler");
+ }
+
+ match res.status.code() {
+ Some(0) | Some(1) | Some(101) => Ok(stderr),
+ _ => Err(anyhow!(
+ "failed with status {:?}: {}",
+ res.status.code(),
+ stderr
+ )),
+ }
+}
+
+fn compiles_without_errors(file: &Path) -> Result<(), Error> {
+ let res = compile(file)?;
+
+ match res.status.code() {
+ Some(0) => Ok(()),
+ _ => {
+ info!(
+ "file {:?} failed to compile:\n{}",
+ file,
+ String::from_utf8(res.stderr)?
+ );
+ Err(anyhow!(
+ "failed with status {:?} (`env RUST_LOG=parse_and_replace=info` for more info)",
+ res.status.code(),
+ ))
+ }
+ }
+}
+
+fn read_file(path: &Path) -> Result<String, Error> {
+ use std::io::Read;
+
+ let mut buffer = String::new();
+ let mut file = fs::File::open(path)?;
+ file.read_to_string(&mut buffer)?;
+ Ok(buffer)
+}
+
+fn diff(expected: &str, actual: &str) -> String {
+ use similar::{ChangeTag, TextDiff};
+ use std::fmt::Write;
+
+ let mut res = String::new();
+ let diff = TextDiff::from_lines(expected.trim(), actual.trim());
+
+ let mut different = false;
+ for op in diff.ops() {
+ for change in diff.iter_changes(op) {
+ let prefix = match change.tag() {
+ ChangeTag::Equal => continue,
+ ChangeTag::Insert => "+",
+ ChangeTag::Delete => "-",
+ };
+ if !different {
+ write!(
+ &mut res,
+ "differences found (+ == actual, - == expected):\n"
+ )
+ .unwrap();
+ different = true;
+ }
+ write!(&mut res, "{} {}", prefix, change.value()).unwrap();
+ }
+ }
+ if different {
+ write!(&mut res, "").unwrap();
+ }
+
+ res
+}
+
+fn test_rustfix_with_file<P: AsRef<Path>>(file: P, mode: &str) -> Result<(), Error> {
+ let file: &Path = file.as_ref();
+ let json_file = file.with_extension("json");
+ let fixed_file = file.with_extension("fixed.rs");
+
+ let filter_suggestions = if mode == fixmode::EVERYTHING {
+ rustfix::Filter::Everything
+ } else {
+ rustfix::Filter::MachineApplicableOnly
+ };
+
+ debug!("next up: {:?}", file);
+ let code = read_file(file).context(format!("could not read {}", file.display()))?;
+ let errors =
+        compile_and_get_json_errors(file).context(format!("could not compile {}", file.display()))?;
+ let suggestions =
+ rustfix::get_suggestions_from_json(&errors, &HashSet::new(), filter_suggestions)
+ .context("could not load suggestions")?;
+
+ if std::env::var(settings::RECORD_JSON).is_ok() {
+ use std::io::Write;
+ let mut recorded_json = fs::File::create(&file.with_extension("recorded.json")).context(
+ format!("could not create recorded.json for {}", file.display()),
+ )?;
+ recorded_json.write_all(errors.as_bytes())?;
+ }
+
+ if std::env::var(settings::CHECK_JSON).is_ok() {
+ let expected_json = read_file(&json_file).context(format!(
+ "could not load json fixtures for {}",
+ file.display()
+ ))?;
+ let expected_suggestions =
+ rustfix::get_suggestions_from_json(&expected_json, &HashSet::new(), filter_suggestions)
+ .context("could not load expected suggestions")?;
+
+ ensure!(
+ expected_suggestions == suggestions,
+            "got unexpected suggestions from the compiler:\n{}",
+ diff(
+ &format!("{:?}", expected_suggestions),
+ &format!("{:?}", suggestions)
+ )
+ );
+ }
+
+ let fixed = apply_suggestions(&code, &suggestions)
+ .context(format!("could not apply suggestions to {}", file.display()))?;
+
+ if std::env::var(settings::RECORD_FIXED_RUST).is_ok() {
+ use std::io::Write;
+ let mut recorded_rust = fs::File::create(&file.with_extension("recorded.rs"))?;
+ recorded_rust.write_all(fixed.as_bytes())?;
+ }
+
+ let expected_fixed =
+        read_file(&fixed_file).context(format!("could not read fixed file for {}", file.display()))?;
+ ensure!(
+ fixed.trim() == expected_fixed.trim(),
+ "file {} doesn't look fixed:\n{}",
+ file.display(),
+ diff(fixed.trim(), expected_fixed.trim())
+ );
+
+ compiles_without_errors(&fixed_file)?;
+
+ Ok(())
+}
+
+fn get_fixture_files(p: &str) -> Result<Vec<PathBuf>, Error> {
+ Ok(fs::read_dir(&p)?
+ .into_iter()
+ .map(|e| e.unwrap().path())
+ .filter(|p| p.is_file())
+ .filter(|p| {
+ let x = p.to_string_lossy();
+ x.ends_with(".rs") && !x.ends_with(".fixed.rs") && !x.ends_with(".recorded.rs")
+ })
+ .collect())
+}
+
+fn assert_fixtures(dir: &str, mode: &str) {
+ let files = get_fixture_files(&dir)
+ .context(format!("couldn't load dir `{}`", dir))
+ .unwrap();
+ let mut failures = 0;
+
+ for file in &files {
+ if let Err(err) = test_rustfix_with_file(file, mode) {
+ println!("failed: {}", file.display());
+ warn!("{:?}", err);
+ failures += 1;
+ }
+ info!("passed: {:?}", file);
+ }
+
+ if failures > 0 {
+ panic!(
+ "{} out of {} fixture asserts failed\n\
+ (run with `env RUST_LOG=parse_and_replace=info` to get more details)",
+ failures,
+ files.len(),
+ );
+ }
+}
+
+#[test]
+fn everything() {
+ tracing_subscriber::fmt::init();
+ assert_fixtures("./tests/everything", fixmode::EVERYTHING);
+}
diff --git a/src/tools/cargo/crates/semver-check/Cargo.toml b/src/tools/cargo/crates/semver-check/Cargo.toml
index 7387c3091..ab13b5730 100644
--- a/src/tools/cargo/crates/semver-check/Cargo.toml
+++ b/src/tools/cargo/crates/semver-check/Cargo.toml
@@ -10,3 +10,6 @@ publish = false
[dependencies]
tempfile.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/semver-check/src/main.rs b/src/tools/cargo/crates/semver-check/src/main.rs
index 9ea0d1244..edcf59957 100644
--- a/src/tools/cargo/crates/semver-check/src/main.rs
+++ b/src/tools/cargo/crates/semver-check/src/main.rs
@@ -13,6 +13,8 @@
//! - `dont-deny`: By default tests have a `#![deny(warnings)]`. This option
//! avoids this attribute. Note that `#![allow(unused)]` is always added.
+#![allow(clippy::print_stderr)]
+
use std::error::Error;
use std::fs;
use std::path::Path;
diff --git a/src/tools/cargo/crates/xtask-build-man/Cargo.toml b/src/tools/cargo/crates/xtask-build-man/Cargo.toml
index 9e92125a1..87ae8c670 100644
--- a/src/tools/cargo/crates/xtask-build-man/Cargo.toml
+++ b/src/tools/cargo/crates/xtask-build-man/Cargo.toml
@@ -6,3 +6,6 @@ edition.workspace = true
publish = false
[dependencies]
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/xtask-build-man/src/main.rs b/src/tools/cargo/crates/xtask-build-man/src/main.rs
index 6680c3783..2ab3f098a 100644
--- a/src/tools/cargo/crates/xtask-build-man/src/main.rs
+++ b/src/tools/cargo/crates/xtask-build-man/src/main.rs
@@ -10,6 +10,8 @@
//! For more, read their doc comments.
//! ```
+#![allow(clippy::print_stderr)]
+
use std::fs;
use std::io;
use std::path::PathBuf;
diff --git a/src/tools/cargo/crates/xtask-bump-check/Cargo.toml b/src/tools/cargo/crates/xtask-bump-check/Cargo.toml
index c8a472adc..989ece4b7 100644
--- a/src/tools/cargo/crates/xtask-bump-check/Cargo.toml
+++ b/src/tools/cargo/crates/xtask-bump-check/Cargo.toml
@@ -14,3 +14,6 @@ git2.workspace = true
semver.workspace = true
tracing-subscriber.workspace = true
tracing.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs b/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs
index b99ac8b32..db82fff63 100644
--- a/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs
+++ b/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs
@@ -41,7 +41,11 @@ pub fn cli() -> clap::Command {
.action(ArgAction::Count)
.global(true),
)
- .arg_quiet()
+ .arg(
+ flag("quiet", "Do not print cargo log messages")
+ .short('q')
+ .global(true),
+ )
.arg(
opt("color", "Coloring: auto, always, never")
.value_name("WHEN")
@@ -114,6 +118,11 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes
let changed_members = changed(&ws, &repo, &base_commit, &head_commit)?;
let status = |msg: &str| config.shell().status(STATUS, msg);
+ // Don't check against beta and stable branches,
+ // as the publish of these crates are not tied with Rust release process.
+ // See `TO_PUBLISH` in publish.py.
+ let crates_not_check_against_channels = ["home"];
+
status(&format!("base commit `{}`", base_commit.id()))?;
status(&format!("head commit `{}`", head_commit.id()))?;
@@ -125,6 +134,11 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes
status(&format!("compare against `{}`", referenced_commit.id()))?;
for referenced_member in checkout_ws(&ws, &repo, referenced_commit)?.members() {
let pkg_name = referenced_member.name().as_str();
+
+ if crates_not_check_against_channels.contains(&pkg_name) {
+ continue;
+ }
+
let Some(changed_member) = changed_members.get(pkg_name) else {
tracing::trace!("skipping {pkg_name}, may be removed or not published");
continue;
@@ -162,8 +176,12 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes
let mut cmd = ProcessBuilder::new("cargo");
cmd.arg("semver-checks")
.arg("--workspace")
+ .args(&["--exclude", "rustfix"]) // FIXME: Remove once 1.76 is stable
.arg("--baseline-rev")
.arg(referenced_commit.id().to_string());
+ for krate in crates_not_check_against_channels {
+ cmd.args(&["--exclude", krate]);
+ }
config.shell().status("Running", &cmd)?;
cmd.exec()?;
}
@@ -373,6 +391,7 @@ fn check_crates_io<'a>(
"`{name}@{current}` needs a bump because its should have a version newer than crates.io: {:?}`",
possibilities
.iter()
+ .map(|s| s.as_summary())
.map(|s| format!("{}@{}", s.name(), s.version()))
.collect::<Vec<_>>(),
);
diff --git a/src/tools/cargo/crates/xtask-stale-label/Cargo.toml b/src/tools/cargo/crates/xtask-stale-label/Cargo.toml
index 8d68536d2..aff6194b7 100644
--- a/src/tools/cargo/crates/xtask-stale-label/Cargo.toml
+++ b/src/tools/cargo/crates/xtask-stale-label/Cargo.toml
@@ -7,3 +7,6 @@ publish = false
[dependencies]
toml_edit.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/crates/xtask-stale-label/src/main.rs b/src/tools/cargo/crates/xtask-stale-label/src/main.rs
index 88c044b5b..efcb52d01 100644
--- a/src/tools/cargo/crates/xtask-stale-label/src/main.rs
+++ b/src/tools/cargo/crates/xtask-stale-label/src/main.rs
@@ -10,6 +10,8 @@
//! Probably autofix them in the future.
//! ```
+#![allow(clippy::print_stderr)]
+
use std::fmt::Write as _;
use std::path::PathBuf;
use std::process;
diff --git a/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml b/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml
index 9e5b1e635..3024bd497 100644
--- a/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml
@@ -11,3 +11,6 @@ description = "A Cargo credential process that stores tokens in a 1password vaul
cargo-credential.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/credential/cargo-credential-1password/src/main.rs b/src/tools/cargo/credential/cargo-credential-1password/src/main.rs
index 321a99c51..38b567bf2 100644
--- a/src/tools/cargo/credential/cargo-credential-1password/src/main.rs
+++ b/src/tools/cargo/credential/cargo-credential-1password/src/main.rs
@@ -1,5 +1,8 @@
//! Cargo registry 1password credential process.
+#![allow(clippy::disallowed_methods)]
+#![allow(clippy::print_stderr)]
+
use cargo_credential::{
Action, CacheControl, Credential, CredentialResponse, Error, RegistryInfo, Secret,
};
diff --git a/src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml b/src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml
index 19ef33a34..e68c9b83a 100644
--- a/src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cargo-credential-libsecret"
-version = "0.4.1"
+version = "0.4.2"
edition.workspace = true
license.workspace = true
rust-version.workspace = true
@@ -11,3 +11,6 @@ description = "A Cargo credential process that stores tokens with GNOME libsecre
anyhow.workspace = true
cargo-credential.workspace = true
libloading.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml b/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml
index 4dec8def6..95ffa231f 100644
--- a/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cargo-credential-macos-keychain"
-version = "0.4.1"
+version = "0.4.2"
edition.workspace = true
license.workspace = true
rust-version.workspace = true
@@ -12,3 +12,6 @@ cargo-credential.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
security-framework.workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs b/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs
index 9e6d55472..8a702a362 100644
--- a/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs
+++ b/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs
@@ -1,5 +1,7 @@
//! Cargo registry macos keychain credential process.
+#![allow(clippy::print_stderr)]
+
#[cfg(target_os = "macos")]
mod macos {
use cargo_credential::{
diff --git a/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml b/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml
index c904075bb..1ecb2a7be 100644
--- a/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cargo-credential-wincred"
-version = "0.4.1"
+version = "0.4.2"
edition.workspace = true
license.workspace = true
rust-version.workspace = true
@@ -13,3 +13,6 @@ cargo-credential.workspace = true
[target.'cfg(windows)'.dependencies.windows-sys]
features = ["Win32_Foundation", "Win32_Security_Credentials"]
workspace = true
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/credential/cargo-credential/Cargo.toml b/src/tools/cargo/credential/cargo-credential/Cargo.toml
index 8ba65b8b9..7dc37ff82 100644
--- a/src/tools/cargo/credential/cargo-credential/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cargo-credential"
-version = "0.4.1"
+version = "0.4.2"
edition.workspace = true
license.workspace = true
rust-version = "1.70.0" # MSRV:3
@@ -20,3 +20,6 @@ windows-sys = { workspace = true, features = ["Win32_System_Console", "Win32_Fou
[dev-dependencies]
snapbox = { workspace = true, features = ["examples"] }
+
+[lints]
+workspace = true
diff --git a/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs b/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs
index 75a2d16d1..fb7c0446c 100644
--- a/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs
+++ b/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs
@@ -1,5 +1,8 @@
//! Provider used for testing redirection of stdout.
+#![allow(clippy::print_stderr)]
+#![allow(clippy::print_stdout)]
+
use cargo_credential::{Action, Credential, CredentialResponse, Error, RegistryInfo};
struct MyCredential;
diff --git a/src/tools/cargo/credential/cargo-credential/src/lib.rs b/src/tools/cargo/credential/cargo-credential/src/lib.rs
index 60bce65be..0888fb402 100644
--- a/src/tools/cargo/credential/cargo-credential/src/lib.rs
+++ b/src/tools/cargo/credential/cargo-credential/src/lib.rs
@@ -37,6 +37,9 @@
#![doc = include_str!("../examples/file-provider.rs")]
//! ```
+#![allow(clippy::print_stderr)]
+#![allow(clippy::print_stdout)]
+
use serde::{Deserialize, Serialize};
use std::{fmt::Display, io};
use time::OffsetDateTime;
diff --git a/src/tools/cargo/publish.py b/src/tools/cargo/publish.py
index 87ea0e896..114d7dbbd 100755
--- a/src/tools/cargo/publish.py
+++ b/src/tools/cargo/publish.py
@@ -21,6 +21,7 @@ TO_PUBLISH = [
'credential/cargo-credential-wincred',
'credential/cargo-credential-1password',
'credential/cargo-credential-macos-keychain',
+ 'crates/rustfix',
'crates/cargo-platform',
'crates/cargo-util',
'crates/crates-io',
diff --git a/src/tools/cargo/src/bin/cargo/cli.rs b/src/tools/cargo/src/bin/cargo/cli.rs
index a21030f01..a9f5fd308 100644
--- a/src/tools/cargo/src/bin/cargo/cli.rs
+++ b/src/tools/cargo/src/bin/cargo/cli.rs
@@ -134,9 +134,14 @@ Run with 'cargo -Z [FLAG] [COMMAND]'",
"Formats all bin and lib files of the current crate using rustfmt.",
),
]);
- drop_println!(config, "Installed Commands:");
+ drop_println!(
+ config,
+ color_print::cstr!("<green,bold>Installed Commands:</>")
+ );
for (name, command) in list_commands(config) {
let known_external_desc = known_external_command_descriptions.get(name.as_str());
+ let literal = style::LITERAL.render();
+ let reset = anstyle::Reset.render();
match command {
CommandInfo::BuiltIn { about } => {
assert!(
@@ -145,22 +150,21 @@ Run with 'cargo -Z [FLAG] [COMMAND]'",
);
let summary = about.unwrap_or_default();
let summary = summary.lines().next().unwrap_or(&summary); // display only the first line
- drop_println!(config, " {:<20} {}", name, summary);
+ drop_println!(config, " {literal}{name:<20}{reset} {summary}");
}
CommandInfo::External { path } => {
if let Some(desc) = known_external_desc {
- drop_println!(config, " {:<20} {}", name, desc);
+ drop_println!(config, " {literal}{name:<20}{reset} {desc}");
} else if is_verbose {
- drop_println!(config, " {:<20} {}", name, path.display());
+ drop_println!(config, " {literal}{name:<20}{reset} {}", path.display());
} else {
- drop_println!(config, " {}", name);
+ drop_println!(config, " {literal}{name}{reset}");
}
}
CommandInfo::Alias { target } => {
drop_println!(
config,
- " {:<20} alias: {}",
- name,
+ " {literal}{name:<20}{reset} alias: {}",
target.iter().join(" ")
);
}
@@ -589,7 +593,7 @@ See '<cyan,bold>cargo help</> <cyan><<command>></>' for more information on a sp
.action(ArgAction::Count)
.global(true),
)
- .arg_quiet()
+ .arg(flag("quiet", "Do not print cargo log messages").short('q').global(true))
.arg(
opt("color", "Coloring: auto, always, never")
.value_name("WHEN")
diff --git a/src/tools/cargo/src/bin/cargo/commands/add.rs b/src/tools/cargo/src/bin/cargo/commands/add.rs
index e1ece14b8..55a074282 100644
--- a/src/tools/cargo/src/bin/cargo/commands/add.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/add.rs
@@ -62,6 +62,19 @@ The package name will be exposed as feature of your crate.")
The package will be removed from your features.")
.conflicts_with("dev")
.overrides_with("optional"),
+ flag("public", "Mark the dependency as public")
+ .conflicts_with("dev")
+ .conflicts_with("build")
+ .long_help("Mark the dependency as public
+
+The dependency can be referenced in your library's public API."),
+ flag("no-public", "Mark the dependency as private")
+ .conflicts_with("dev")
+ .conflicts_with("build")
+ .overrides_with("public")
+ .long_help("Mark the dependency as private
+
+While you can use the crate in your implementation, it cannot be referenced in your public API."),
clap::Arg::new("rename")
.long("rename")
.action(ArgAction::Set)
@@ -80,7 +93,7 @@ Example uses:
.arg_manifest_path_without_unsupported_path_tip()
.arg_package("Package to modify")
.arg_dry_run("Don't actually write the manifest")
- .arg_quiet()
+ .arg_silent_suggestion()
.next_help_heading("Source")
.args([
clap::Arg::new("path")
@@ -235,6 +248,7 @@ fn parse_dependencies(config: &Config, matches: &ArgMatches) -> CargoResult<Vec<
};
let default_features = default_features(matches);
let optional = optional(matches);
+ let public = public(matches);
let mut crates = matches
.get_many::<String>("crates")
@@ -325,6 +339,7 @@ fn parse_dependencies(config: &Config, matches: &ArgMatches) -> CargoResult<Vec<
features,
default_features,
optional,
+ public,
registry: registry.clone(),
path: path.map(String::from),
git: git.map(String::from),
@@ -353,6 +368,10 @@ fn optional(matches: &ArgMatches) -> Option<bool> {
resolve_bool_arg(matches.flag("optional"), matches.flag("no-optional"))
}
+fn public(matches: &ArgMatches) -> Option<bool> {
+ resolve_bool_arg(matches.flag("public"), matches.flag("no-public"))
+}
+
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
match (yes, no) {
(true, false) => Some(true),
diff --git a/src/tools/cargo/src/bin/cargo/commands/bench.rs b/src/tools/cargo/src/bin/cargo/commands/bench.rs
index 85c975a6c..11bcf2eb9 100644
--- a/src/tools/cargo/src/bin/cargo/commands/bench.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/bench.rs
@@ -24,7 +24,7 @@ pub fn cli() -> Command {
))
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec(
"Package to run benchmarks for",
"Benchmark all packages in the workspace",
diff --git a/src/tools/cargo/src/bin/cargo/commands/build.rs b/src/tools/cargo/src/bin/cargo/commands/build.rs
index e2ed87d1b..0dde7bde9 100644
--- a/src/tools/cargo/src/bin/cargo/commands/build.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/build.rs
@@ -10,7 +10,7 @@ pub fn cli() -> Command {
.arg_ignore_rust_version()
.arg_future_incompat_report()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec(
"Package to build (see `cargo help pkgid`)",
"Build all packages in the workspace",
diff --git a/src/tools/cargo/src/bin/cargo/commands/check.rs b/src/tools/cargo/src/bin/cargo/commands/check.rs
index 77e2b9280..199cbf3fe 100644
--- a/src/tools/cargo/src/bin/cargo/commands/check.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/check.rs
@@ -10,7 +10,7 @@ pub fn cli() -> Command {
.arg_ignore_rust_version()
.arg_future_incompat_report()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec(
"Package(s) to check",
"Check all packages in the workspace",
diff --git a/src/tools/cargo/src/bin/cargo/commands/clean.rs b/src/tools/cargo/src/bin/cargo/commands/clean.rs
index 8596561c9..c7b7f98c3 100644
--- a/src/tools/cargo/src/bin/cargo/commands/clean.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/clean.rs
@@ -1,13 +1,18 @@
use crate::command_prelude::*;
-
+use crate::util::cache_lock::CacheLockMode;
+use cargo::core::gc::Gc;
+use cargo::core::gc::{parse_human_size, parse_time_span, GcOpts};
+use cargo::core::global_cache_tracker::GlobalCacheTracker;
+use cargo::ops::CleanContext;
use cargo::ops::{self, CleanOptions};
use cargo::util::print_available_packages;
+use std::time::Duration;
pub fn cli() -> Command {
subcommand("clean")
.about("Remove artifacts that cargo has generated in the past")
.arg_doc("Whether or not to clean just the documentation directory")
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec_simple("Package to clean artifacts for")
.arg_release("Whether or not to clean release artifacts")
.arg_profile("Clean artifacts of the specified profile")
@@ -15,12 +20,122 @@ pub fn cli() -> Command {
.arg_target_dir()
.arg_manifest_path()
.arg_dry_run("Display what would be deleted without deleting anything")
+ .args_conflicts_with_subcommands(true)
+ .subcommand(
+ subcommand("gc")
+ .about("Clean global caches")
+ .hide(true)
+ .arg_silent_suggestion()
+ .arg_dry_run("Display what would be deleted without deleting anything")
+ // NOTE: Not all of these options may get stabilized. Some of them are
+ // very low-level details, and may not be something typical users need.
+ .arg(
+ opt(
+ "max-src-age",
+ "Deletes source cache files that have not been used \
+ since the given age (unstable)",
+ )
+ .value_name("DURATION")
+ .value_parser(parse_time_span),
+ )
+ .arg(
+ opt(
+ "max-crate-age",
+ "Deletes crate cache files that have not been used \
+ since the given age (unstable)",
+ )
+ .value_name("DURATION")
+ .value_parser(parse_time_span),
+ )
+ .arg(
+ opt(
+ "max-index-age",
+ "Deletes registry indexes that have not been used \
+ since the given age (unstable)",
+ )
+ .value_name("DURATION")
+ .value_parser(parse_time_span),
+ )
+ .arg(
+ opt(
+ "max-git-co-age",
+ "Deletes git dependency checkouts that have not been used \
+ since the given age (unstable)",
+ )
+ .value_name("DURATION")
+ .value_parser(parse_time_span),
+ )
+ .arg(
+ opt(
+ "max-git-db-age",
+ "Deletes git dependency clones that have not been used \
+ since the given age (unstable)",
+ )
+ .value_name("DURATION")
+ .value_parser(parse_time_span),
+ )
+ .arg(
+ opt(
+ "max-download-age",
+ "Deletes any downloaded cache data that has not been used \
+ since the given age (unstable)",
+ )
+ .value_name("DURATION")
+ .value_parser(parse_time_span),
+ )
+ .arg(
+ opt(
+ "max-src-size",
+ "Deletes source cache files until the cache is under the \
+ given size (unstable)",
+ )
+ .value_name("SIZE")
+ .value_parser(parse_human_size),
+ )
+ .arg(
+ opt(
+ "max-crate-size",
+ "Deletes crate cache files until the cache is under the \
+ given size (unstable)",
+ )
+ .value_name("SIZE")
+ .value_parser(parse_human_size),
+ )
+ .arg(
+ opt(
+ "max-git-size",
+ "Deletes git dependency caches until the cache is under \
+ the given size (unstable)",
+ )
+ .value_name("SIZE")
+ .value_parser(parse_human_size),
+ )
+ .arg(
+ opt(
+ "max-download-size",
+ "Deletes downloaded cache data until the cache is under \
+ the given size (unstable)",
+ )
+ .value_name("SIZE")
+ .value_parser(parse_human_size),
+ ),
+ )
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help clean</>` for more detailed information.\n"
))
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+ match args.subcommand() {
+ Some(("gc", args)) => {
+ return gc(config, args);
+ }
+ Some((cmd, _)) => {
+ unreachable!("unexpected command {}", cmd)
+ }
+ None => {}
+ }
+
let ws = args.workspace(config)?;
if args.is_present_with_zero_values("package") {
@@ -39,3 +154,44 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
ops::clean(&ws, &opts)?;
Ok(())
}
+
+fn gc(config: &Config, args: &ArgMatches) -> CliResult {
+ config.cli_unstable().fail_if_stable_command(
+ config,
+ "clean gc",
+ 12633,
+ "gc",
+ config.cli_unstable().gc,
+ )?;
+
+ let size_opt = |opt| -> Option<u64> { args.get_one::<u64>(opt).copied() };
+ let duration_opt = |opt| -> Option<Duration> { args.get_one::<Duration>(opt).copied() };
+ let mut gc_opts = GcOpts {
+ max_src_age: duration_opt("max-src-age"),
+ max_crate_age: duration_opt("max-crate-age"),
+ max_index_age: duration_opt("max-index-age"),
+ max_git_co_age: duration_opt("max-git-co-age"),
+ max_git_db_age: duration_opt("max-git-db-age"),
+ max_src_size: size_opt("max-src-size"),
+ max_crate_size: size_opt("max-crate-size"),
+ max_git_size: size_opt("max-git-size"),
+ max_download_size: size_opt("max-download-size"),
+ };
+ if let Some(age) = duration_opt("max-download-age") {
+ gc_opts.set_max_download_age(age);
+ }
+ // If the user sets any options, then only perform the options requested.
+ // If no options are set, do the default behavior.
+ if !gc_opts.is_download_cache_opt_set() {
+ gc_opts.update_for_auto_gc(config)?;
+ }
+
+ let _lock = config.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
+ let mut cache_track = GlobalCacheTracker::new(&config)?;
+ let mut gc = Gc::new(config, &mut cache_track)?;
+ let mut clean_ctx = CleanContext::new(config);
+ clean_ctx.dry_run = args.dry_run();
+ gc.gc(&mut clean_ctx, &gc_opts)?;
+ clean_ctx.display_summary()?;
+ Ok(())
+}
diff --git a/src/tools/cargo/src/bin/cargo/commands/config.rs b/src/tools/cargo/src/bin/cargo/commands/config.rs
index 84c5e9209..feea9ed28 100644
--- a/src/tools/cargo/src/bin/cargo/commands/config.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/config.rs
@@ -31,9 +31,13 @@ pub fn cli() -> Command {
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
- config
- .cli_unstable()
- .fail_if_stable_command(config, "config", 9301)?;
+ config.cli_unstable().fail_if_stable_command(
+ config,
+ "config",
+ 9301,
+ "unstable-options",
+ config.cli_unstable().unstable_options,
+ )?;
match args.subcommand() {
Some(("get", args)) => {
let opts = cargo_config::GetOptions {
diff --git a/src/tools/cargo/src/bin/cargo/commands/doc.rs b/src/tools/cargo/src/bin/cargo/commands/doc.rs
index 43a6ee950..bf2e0c7ba 100644
--- a/src/tools/cargo/src/bin/cargo/commands/doc.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/doc.rs
@@ -18,7 +18,7 @@ pub fn cli() -> Command {
.arg(flag("document-private-items", "Document private items"))
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec(
"Package to document",
"Document all packages in the workspace",
diff --git a/src/tools/cargo/src/bin/cargo/commands/fetch.rs b/src/tools/cargo/src/bin/cargo/commands/fetch.rs
index 794dbf9b0..1c25204e3 100644
--- a/src/tools/cargo/src/bin/cargo/commands/fetch.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/fetch.rs
@@ -6,7 +6,7 @@ use cargo::ops::FetchOptions;
pub fn cli() -> Command {
subcommand("fetch")
.about("Fetch dependencies of a package from the network")
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_target_triple("Fetch dependencies for the target triple")
.arg_manifest_path()
.after_help(color_print::cstr!(
diff --git a/src/tools/cargo/src/bin/cargo/commands/fix.rs b/src/tools/cargo/src/bin/cargo/commands/fix.rs
index bd938dbc7..93df738e1 100644
--- a/src/tools/cargo/src/bin/cargo/commands/fix.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/fix.rs
@@ -28,7 +28,7 @@ pub fn cli() -> Command {
))
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec(
"Package(s) to fix",
"Fix all packages in the workspace",
diff --git a/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs b/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs
index 4f1382ee5..3617e38f4 100644
--- a/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/generate_lockfile.rs
@@ -5,7 +5,7 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("generate-lockfile")
.about("Generate the lockfile for a package")
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_manifest_path()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help generate-lockfile</>` for more detailed information.\n"
diff --git a/src/tools/cargo/src/bin/cargo/commands/init.rs b/src/tools/cargo/src/bin/cargo/commands/init.rs
index 04dd7ae45..b58a6a26d 100644
--- a/src/tools/cargo/src/bin/cargo/commands/init.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/init.rs
@@ -13,7 +13,7 @@ pub fn cli() -> Command {
)
.arg_new_opts()
.arg_registry("Registry to use")
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help init</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/install.rs b/src/tools/cargo/src/bin/cargo/commands/install.rs
index cb66ba100..b3d379c41 100644
--- a/src/tools/cargo/src/bin/cargo/commands/install.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/install.rs
@@ -76,7 +76,7 @@ pub fn cli() -> Command {
))
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_targets_bins_examples(
"Install only the specified binary",
"Install all binaries",
diff --git a/src/tools/cargo/src/bin/cargo/commands/locate_project.rs b/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
index 217bdcac9..1f1b87e2e 100644
--- a/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
@@ -14,7 +14,7 @@ pub fn cli() -> Command {
)
.value_name("FMT"),
)
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_manifest_path()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help locate-project</>` for more detailed information.\n"
diff --git a/src/tools/cargo/src/bin/cargo/commands/login.rs b/src/tools/cargo/src/bin/cargo/commands/login.rs
index 877ec6aeb..d6fc6d55d 100644
--- a/src/tools/cargo/src/bin/cargo/commands/login.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/login.rs
@@ -14,7 +14,7 @@ pub fn cli() -> Command {
.num_args(0..)
.last(true),
)
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help login</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/logout.rs b/src/tools/cargo/src/bin/cargo/commands/logout.rs
index e7bacca86..cef9311a8 100644
--- a/src/tools/cargo/src/bin/cargo/commands/logout.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/logout.rs
@@ -7,7 +7,7 @@ pub fn cli() -> Command {
subcommand("logout")
.about("Remove an API token from the registry locally")
.arg_registry("Registry to use")
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help logout</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/metadata.rs b/src/tools/cargo/src/bin/cargo/commands/metadata.rs
index 09064de7d..664211420 100644
--- a/src/tools/cargo/src/bin/cargo/commands/metadata.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/metadata.rs
@@ -23,7 +23,7 @@ pub fn cli() -> Command {
.value_name("VERSION")
.value_parser(["1"]),
)
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_features()
.arg_manifest_path()
.after_help(color_print::cstr!(
diff --git a/src/tools/cargo/src/bin/cargo/commands/new.rs b/src/tools/cargo/src/bin/cargo/commands/new.rs
index 0ab093012..f2cc73621 100644
--- a/src/tools/cargo/src/bin/cargo/commands/new.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/new.rs
@@ -13,7 +13,7 @@ pub fn cli() -> Command {
)
.arg_new_opts()
.arg_registry("Registry to use")
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help new</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/owner.rs b/src/tools/cargo/src/bin/cargo/commands/owner.rs
index b787d094c..45f34bc8e 100644
--- a/src/tools/cargo/src/bin/cargo/commands/owner.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/owner.rs
@@ -27,7 +27,7 @@ pub fn cli() -> Command {
.arg_index("Registry index URL to modify owners for")
.arg_registry("Registry to modify owners for")
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help owner</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/package.rs b/src/tools/cargo/src/bin/cargo/commands/package.rs
index 0020e365e..59a3c8f66 100644
--- a/src/tools/cargo/src/bin/cargo/commands/package.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/package.rs
@@ -24,7 +24,7 @@ pub fn cli() -> Command {
"allow-dirty",
"Allow dirty working directories to be packaged",
))
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec_no_all(
"Package(s) to assemble",
"Assemble all packages in the workspace",
diff --git a/src/tools/cargo/src/bin/cargo/commands/pkgid.rs b/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
index 2d1d41325..f1494af00 100644
--- a/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
@@ -7,7 +7,7 @@ pub fn cli() -> Command {
subcommand("pkgid")
.about("Print a fully qualified package specification")
.arg(Arg::new("spec").value_name("SPEC").action(ArgAction::Set))
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package("Argument to get the package ID specifier for")
.arg_manifest_path()
.after_help(color_print::cstr!(
diff --git a/src/tools/cargo/src/bin/cargo/commands/publish.rs b/src/tools/cargo/src/bin/cargo/commands/publish.rs
index 8ce2ffc5b..af5bf7447 100644
--- a/src/tools/cargo/src/bin/cargo/commands/publish.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/publish.rs
@@ -17,7 +17,7 @@ pub fn cli() -> Command {
"allow-dirty",
"Allow dirty working directories to be packaged",
))
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package("Package to publish")
.arg_features()
.arg_parallel()
diff --git a/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs b/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs
index 6625d60f5..8cfd9b34e 100644
--- a/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/read_manifest.rs
@@ -9,7 +9,7 @@ Print a JSON representation of a Cargo.toml manifest.
Deprecated, use `<cyan,bold>cargo metadata --no-deps</>` instead.\
"
))
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_manifest_path()
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/remove.rs b/src/tools/cargo/src/bin/cargo/commands/remove.rs
index c115291cb..b7abb1715 100644
--- a/src/tools/cargo/src/bin/cargo/commands/remove.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/remove.rs
@@ -1,5 +1,6 @@
use cargo::core::dependency::DepKind;
use cargo::core::PackageIdSpec;
+use cargo::core::PackageIdSpecQuery;
use cargo::core::Resolve;
use cargo::core::Workspace;
use cargo::ops::cargo_remove::remove;
@@ -26,7 +27,7 @@ pub fn cli() -> clap::Command {
.value_name("DEP_ID")
.help("Dependencies to be removed")])
.arg_dry_run("Don't actually write the manifest")
- .arg_quiet()
+ .arg_silent_suggestion()
.next_help_heading("Section")
.args([
clap::Arg::new("dev")
diff --git a/src/tools/cargo/src/bin/cargo/commands/run.rs b/src/tools/cargo/src/bin/cargo/commands/run.rs
index 94396e63f..170c7ddf1 100644
--- a/src/tools/cargo/src/bin/cargo/commands/run.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/run.rs
@@ -24,7 +24,7 @@ pub fn cli() -> Command {
)
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package("Package with the target to run")
.arg_targets_bin_example(
"Name of the bin target to run",
diff --git a/src/tools/cargo/src/bin/cargo/commands/rustc.rs b/src/tools/cargo/src/bin/cargo/commands/rustc.rs
index 9b6a57577..7e5370be3 100644
--- a/src/tools/cargo/src/bin/cargo/commands/rustc.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/rustc.rs
@@ -30,7 +30,7 @@ pub fn cli() -> Command {
.arg_future_incompat_report()
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package("Package to build")
.arg_targets_all(
"Build only this package's library",
diff --git a/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs b/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
index 72de57ad0..ec4e52c6d 100644
--- a/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
@@ -18,7 +18,7 @@ pub fn cli() -> Command {
))
.arg_ignore_rust_version()
.arg_message_format()
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package("Package to document")
.arg_targets_all(
"Build only this package's library",
diff --git a/src/tools/cargo/src/bin/cargo/commands/search.rs b/src/tools/cargo/src/bin/cargo/commands/search.rs
index 377aa84e1..77394242b 100644
--- a/src/tools/cargo/src/bin/cargo/commands/search.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/search.rs
@@ -6,7 +6,7 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("search")
- .about("Search packages in crates.io")
+ .about("Search packages in the registry. Default registry is crates.io")
.arg(Arg::new("query").value_name("QUERY").num_args(0..))
.arg(
opt(
@@ -17,7 +17,7 @@ pub fn cli() -> Command {
)
.arg_index("Registry index URL to search packages in")
.arg_registry("Registry to search packages in")
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help search</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/tree.rs b/src/tools/cargo/src/bin/cargo/commands/tree.rs
index 1fe6a3a14..30cf4fe3a 100644
--- a/src/tools/cargo/src/bin/cargo/commands/tree.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/tree.rs
@@ -17,7 +17,7 @@ pub fn cli() -> Command {
.short('a')
.hide(true),
)
- .arg_quiet()
+ .arg_silent_suggestion()
.arg(flag("no-dev-dependencies", "Deprecated, use -e=no-dev instead").hide(true))
.arg(
multi_opt(
diff --git a/src/tools/cargo/src/bin/cargo/commands/uninstall.rs b/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
index 30833f292..217f22ef3 100644
--- a/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
@@ -7,7 +7,7 @@ pub fn cli() -> Command {
.about("Remove a Rust binary")
.arg(Arg::new("spec").value_name("SPEC").num_args(0..))
.arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_package_spec_simple("Package to uninstall")
.arg(
multi_opt("bin", "NAME", "Only uninstall the binary NAME")
diff --git a/src/tools/cargo/src/bin/cargo/commands/update.rs b/src/tools/cargo/src/bin/cargo/commands/update.rs
index e06e8e51e..e11ac45c7 100644
--- a/src/tools/cargo/src/bin/cargo/commands/update.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/update.rs
@@ -35,7 +35,7 @@ pub fn cli() -> Command {
.value_name("PRECISE")
.requires("package-group"),
)
- .arg_quiet()
+ .arg_silent_suggestion()
.arg(
flag("workspace", "Only update the workspace packages")
.short('w')
diff --git a/src/tools/cargo/src/bin/cargo/commands/vendor.rs b/src/tools/cargo/src/bin/cargo/commands/vendor.rs
index 3f9c2dcaf..a15878484 100644
--- a/src/tools/cargo/src/bin/cargo/commands/vendor.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/vendor.rs
@@ -36,7 +36,6 @@ pub fn cli() -> Command {
.arg(unsupported("relative-path"))
.arg(unsupported("only-git-deps"))
.arg(unsupported("disallow-duplicates"))
- .arg_quiet_without_unknown_silent_arg_tip()
.arg_manifest_path()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help vendor</>` for more detailed information.\n"
diff --git a/src/tools/cargo/src/bin/cargo/commands/verify_project.rs b/src/tools/cargo/src/bin/cargo/commands/verify_project.rs
index 35bb747a4..14a5df07d 100644
--- a/src/tools/cargo/src/bin/cargo/commands/verify_project.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/verify_project.rs
@@ -6,7 +6,7 @@ use std::process;
pub fn cli() -> Command {
subcommand("verify-project")
.about("Check correctness of crate manifest")
- .arg_quiet()
+ .arg_silent_suggestion()
.arg_manifest_path()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help verify-project</>` for more detailed information.\n"
diff --git a/src/tools/cargo/src/bin/cargo/commands/version.rs b/src/tools/cargo/src/bin/cargo/commands/version.rs
index 65e1c6c47..5a6d710c3 100644
--- a/src/tools/cargo/src/bin/cargo/commands/version.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/version.rs
@@ -4,7 +4,7 @@ use crate::command_prelude::*;
pub fn cli() -> Command {
subcommand("version")
.about("Show version information")
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help version</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/commands/yank.rs b/src/tools/cargo/src/bin/cargo/commands/yank.rs
index 75a1772ca..8a69d3eb7 100644
--- a/src/tools/cargo/src/bin/cargo/commands/yank.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/yank.rs
@@ -19,7 +19,7 @@ pub fn cli() -> Command {
.arg_index("Registry index URL to yank from")
.arg_registry("Registry to yank from")
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
- .arg_quiet()
+ .arg_silent_suggestion()
.after_help(color_print::cstr!(
"Run `<cyan,bold>cargo help yank</>` for more detailed information.\n"
))
diff --git a/src/tools/cargo/src/bin/cargo/main.rs b/src/tools/cargo/src/bin/cargo/main.rs
index 245622b6c..14a4206d6 100644
--- a/src/tools/cargo/src/bin/cargo/main.rs
+++ b/src/tools/cargo/src/bin/cargo/main.rs
@@ -1,12 +1,10 @@
-#![warn(rust_2018_idioms)] // while we're getting used to 2018
-#![allow(clippy::all)]
-#![warn(clippy::disallowed_methods)]
+#![allow(clippy::self_named_module_files)] // false positive in `commands/build.rs`
use cargo::util::network::http::http_handle;
use cargo::util::network::http::needs_custom_http_transport;
-use cargo::util::toml::schema::StringOrVec;
use cargo::util::CliError;
use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config};
+use cargo::util_schemas::manifest::StringOrVec;
use cargo_util::{ProcessBuilder, ProcessError};
use std::collections::BTreeMap;
use std::env;
diff --git a/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs b/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs
index 363aab7e6..38b3fc441 100644
--- a/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/fingerprint/dirty_reason.rs
@@ -15,6 +15,10 @@ pub enum DirtyReason {
old: String,
new: String,
},
+ DeclaredFeaturesChanged {
+ old: String,
+ new: String,
+ },
TargetConfigurationChanged,
PathToSourceChanged,
ProfileConfigurationChanged,
@@ -141,6 +145,9 @@ impl DirtyReason {
DirtyReason::FeaturesChanged { .. } => {
s.dirty_because(unit, "the list of features changed")
}
+ DirtyReason::DeclaredFeaturesChanged { .. } => {
+ s.dirty_because(unit, "the list of declared features changed")
+ }
DirtyReason::TargetConfigurationChanged => {
s.dirty_because(unit, "the target configuration changed")
}
diff --git a/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs b/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
index b1040be41..e1737a8b6 100644
--- a/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
@@ -65,6 +65,7 @@
//! Target Name | ✓ | ✓
//! TargetKind (bin/lib/etc.) | ✓ | ✓
//! Enabled Features | ✓ | ✓
+//! Declared Features | ✓ |
//! Immediate dependency’s hashes | ✓[^1] | ✓
//! [`CompileKind`] (host/target) | ✓ | ✓
//! __CARGO_DEFAULT_LIB_METADATA[^4] | | ✓
@@ -572,6 +573,8 @@ pub struct Fingerprint {
rustc: u64,
/// Sorted list of cfg features enabled.
features: String,
+ /// Sorted list of all the declared cfg features.
+ declared_features: String,
/// Hash of the `Target` struct, including the target name,
/// package-relative source path, edition, etc.
target: u64,
@@ -876,6 +879,7 @@ impl Fingerprint {
profile: 0,
path: 0,
features: String::new(),
+ declared_features: String::new(),
deps: Vec::new(),
local: Mutex::new(Vec::new()),
memoized_hash: Mutex::new(None),
@@ -922,6 +926,12 @@ impl Fingerprint {
new: self.features.clone(),
};
}
+ if self.declared_features != old.declared_features {
+ return DirtyReason::DeclaredFeaturesChanged {
+ old: old.declared_features.clone(),
+ new: self.declared_features.clone(),
+ };
+ }
if self.target != old.target {
return DirtyReason::TargetConfigurationChanged;
}
@@ -1200,6 +1210,7 @@ impl hash::Hash for Fingerprint {
let Fingerprint {
rustc,
ref features,
+ ref declared_features,
target,
path,
profile,
@@ -1215,6 +1226,7 @@ impl hash::Hash for Fingerprint {
(
rustc,
features,
+ declared_features,
target,
path,
profile,
@@ -1431,6 +1443,9 @@ fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Finger
allow_features.hash(&mut config);
}
let compile_kind = unit.kind.fingerprint_hash();
+ let mut declared_features = unit.pkg.summary().features().keys().collect::<Vec<_>>();
+ declared_features.sort(); // to avoid useless rebuild if the user orders its features
+ // differently
Ok(Fingerprint {
rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
target: util::hash_u64(&unit.target),
@@ -1439,6 +1454,14 @@ fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Finger
// actually affect the output artifact so there's no need to hash it.
path: util::hash_u64(path_args(cx.bcx.ws, unit).0),
features: format!("{:?}", unit.features),
+ // Note we currently only populate `declared_features` when `-Zcheck-cfg`
+ // is passed since it's the only user-facing toggle that will make this
+ // fingerprint relevant.
+ declared_features: if cx.bcx.config.cli_unstable().check_cfg {
+ format!("{declared_features:?}")
+ } else {
+ "".to_string()
+ },
deps,
local: Mutex::new(local),
memoized_hash: Mutex::new(None),
diff --git a/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs b/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
index af44940bd..6f59d5989 100644
--- a/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
@@ -348,7 +348,7 @@ fn get_updates(ws: &Workspace<'_>, package_ids: &BTreeSet<PackageId>) -> Option<
for (pkg_id, summaries) in summaries {
let mut updated_versions: Vec<_> = summaries
.iter()
- .map(|summary| summary.version())
+ .map(|summary| summary.as_summary().version())
.filter(|version| *version > pkg_id.version())
.collect();
updated_versions.sort();
diff --git a/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs b/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
index e39fe184d..7c4c89e4f 100644
--- a/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
@@ -3,7 +3,7 @@
//! ## Overview
//!
//! This module implements a job queue. A job here represents a unit of work,
-//! which is roughly a rusc invocation, a build script run, or just a no-op.
+//! which is roughly a rustc invocation, a build script run, or just a no-op.
//! The job queue primarily handles the following things:
//!
//! * Spawns concurrent jobs. Depending on its [`Freshness`], a job could be
diff --git a/src/tools/cargo/src/cargo/core/compiler/mod.rs b/src/tools/cargo/src/cargo/core/compiler/mod.rs
index ab43e9979..170b59b26 100644
--- a/src/tools/cargo/src/cargo/core/compiler/mod.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/mod.rs
@@ -93,9 +93,9 @@ use crate::core::{Feature, PackageId, Target, Verbosity};
use crate::util::errors::{CargoResult, VerboseError};
use crate::util::interning::InternedString;
use crate::util::machine_message::{self, Message};
-use crate::util::toml::schema::TomlDebugInfo;
-use crate::util::toml::schema::TomlTrimPaths;
use crate::util::{add_path_args, internal, iter_join_onto, profile};
+use crate::util_schemas::manifest::TomlDebugInfo;
+use crate::util_schemas::manifest::TomlTrimPaths;
use cargo_util::{paths, ProcessBuilder, ProcessError};
use rustfix::diagnostics::Applicability;
@@ -422,7 +422,7 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc<dyn Executor>) -> Car
};
let errors = match output_options.errors_seen {
0 => String::new(),
- 1 => " due to previous error".to_string(),
+ 1 => " due to 1 previous error".to_string(),
count => format!(" due to {} previous errors", count),
};
let name = descriptive_pkg_name(&name, &target, &mode);
@@ -662,6 +662,15 @@ fn prepare_rustc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuilde
let mut base = cx
.compilation
.rustc_process(unit, is_primary, is_workspace)?;
+ build_base_args(cx, &mut base, unit)?;
+
+ base.inherit_jobserver(&cx.jobserver);
+ build_deps_args(&mut base, cx, unit)?;
+ add_cap_lints(cx.bcx, unit, &mut base);
+ base.args(cx.bcx.rustflags_args(unit));
+ if cx.bcx.config.cli_unstable().binary_dep_depinfo {
+ base.arg("-Z").arg("binary-dep-depinfo");
+ }
if is_primary {
base.env("CARGO_PRIMARY_PACKAGE", "1");
@@ -671,15 +680,17 @@ fn prepare_rustc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuilde
let tmp = cx.files().layout(unit.kind).prepare_tmp()?;
base.env("CARGO_TARGET_TMPDIR", tmp.display().to_string());
}
-
- base.inherit_jobserver(&cx.jobserver);
- build_base_args(cx, &mut base, unit)?;
- build_deps_args(&mut base, cx, unit)?;
- add_cap_lints(cx.bcx, unit, &mut base);
- base.args(cx.bcx.rustflags_args(unit));
- if cx.bcx.config.cli_unstable().binary_dep_depinfo {
- base.arg("-Z").arg("binary-dep-depinfo");
+ if cx.bcx.config.nightly_features_allowed {
+ // This must come after `build_base_args` (which calls `add_path_args`) so that the `cwd`
+ // is set correctly.
+ base.env(
+ "CARGO_RUSTC_CURRENT_DIR",
+ base.get_cwd()
+ .map(|c| c.display().to_string())
+ .unwrap_or(String::new()),
+ );
}
+
Ok(base)
}
@@ -732,7 +743,7 @@ fn prepare_rustdoc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuil
.arg(scrape_output_path(cx, unit)?);
// Only scrape example for items from crates in the workspace, to reduce generated file size
- for pkg in cx.bcx.ws.members() {
+ for pkg in cx.bcx.packages.packages() {
let names = pkg
.targets()
.iter()
@@ -1203,8 +1214,6 @@ fn trim_paths_args(
}
remap
};
- cmd.arg(sysroot_remap);
-
let package_remap = {
let pkg_root = unit.pkg.root();
let ws_root = cx.bcx.ws.root();
@@ -1221,7 +1230,7 @@ fn trim_paths_args(
// * path dependencies outside workspace root directory
if is_local && pkg_root.strip_prefix(ws_root).is_ok() {
remap.push(ws_root);
- remap.push("="); // empty to remap to relative paths.
+ remap.push("=."); // remap to relative rustc work dir explicitly
} else {
remap.push(pkg_root);
remap.push("=");
@@ -1231,7 +1240,11 @@ fn trim_paths_args(
}
remap
};
+
+ // Order of `--remap-path-prefix` flags is important for `-Zbuild-std`.
+ // We want to show `/rustc/<hash>/library/std` instead of `std-0.0.0`.
cmd.arg(package_remap);
+ cmd.arg(sysroot_remap);
Ok(())
}
@@ -1243,24 +1256,31 @@ fn trim_paths_args(
fn check_cfg_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec<OsString> {
if cx.bcx.config.cli_unstable().check_cfg {
// This generate something like this:
- // - cfg(feature, values())
+ // - cfg()
// - cfg(feature, values("foo", "bar"))
//
// NOTE: Despite only explicitly specifying `feature`, well known names and values
// are implicitly enabled when one or more `--check-cfg` argument is passed.
+ // NOTE: Never generate an empty `values()` since it would mean that it's possible
+ // to have `cfg(feature)` without a feature name which is impossible.
let gross_cap_estimation = unit.pkg.summary().features().len() * 7 + 25;
let mut arg_feature = OsString::with_capacity(gross_cap_estimation);
- arg_feature.push("cfg(feature, values(");
- for (i, feature) in unit.pkg.summary().features().keys().enumerate() {
- if i != 0 {
- arg_feature.push(", ");
+
+ arg_feature.push("cfg(");
+ if !unit.pkg.summary().features().is_empty() {
+ arg_feature.push("feature, values(");
+ for (i, feature) in unit.pkg.summary().features().keys().enumerate() {
+ if i != 0 {
+ arg_feature.push(", ");
+ }
+ arg_feature.push("\"");
+ arg_feature.push(feature);
+ arg_feature.push("\"");
}
- arg_feature.push("\"");
- arg_feature.push(feature);
- arg_feature.push("\"");
+ arg_feature.push(")");
}
- arg_feature.push("))");
+ arg_feature.push(")");
vec![
OsString::from("-Zunstable-options"),
@@ -1414,6 +1434,7 @@ pub fn extern_args(
.require(Feature::public_dependency())
.is_ok()
&& !dep.public
+ && unit.target.is_lib()
{
opts.push("priv");
*unstable_opts = true;
diff --git a/src/tools/cargo/src/cargo/core/features.rs b/src/tools/cargo/src/cargo/core/features.rs
index 72a267f04..4f5b069ff 100644
--- a/src/tools/cargo/src/cargo/core/features.rs
+++ b/src/tools/cargo/src/cargo/core/features.rs
@@ -168,7 +168,7 @@ pub const SEE_CHANNELS: &str =
/// - Update [`CLI_VALUES`] to include the new edition.
/// - Set [`LATEST_UNSTABLE`] to Some with the new edition.
/// - Add an unstable feature to the [`features!`] macro invocation below for the new edition.
-/// - Gate on that new feature in [`TomlManifest::to_real_manifest`].
+/// - Gate on that new feature in [`toml::to_real_manifest`].
/// - Update the shell completion files.
/// - Update any failing tests (hopefully there are very few).
/// - Update unstable.md to add a new section for this new edition (see [this example]).
@@ -195,7 +195,7 @@ pub const SEE_CHANNELS: &str =
/// [`LATEST_STABLE`]: Edition::LATEST_STABLE
/// [this example]: https://github.com/rust-lang/cargo/blob/3ebb5f15a940810f250b68821149387af583a79e/src/doc/src/reference/unstable.md?plain=1#L1238-L1264
/// [`is_stable`]: Edition::is_stable
-/// [`TomlManifest::to_real_manifest`]: crate::util::toml::schema::TomlManifest::to_real_manifest
+/// [`toml::to_real_manifest`]: crate::util::toml::to_real_manifest
/// [`features!`]: macro.features.html
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)]
pub enum Edition {
@@ -741,6 +741,7 @@ unstable_cli_options!(
doctest_xcompile: bool = ("Compile and run doctests for non-host target using runner config"),
dual_proc_macros: bool = ("Build proc-macros for both the host and the target"),
features: Option<Vec<String>> = (HIDDEN),
+ gc: bool = ("Track cache usage and \"garbage collect\" unused files"),
gitoxide: Option<GitoxideFeatures> = ("Use gitoxide for the given git interactions, or all of them if no argument is given"),
host_config: bool = ("Enable the [host] section in the .cargo/config.toml file"),
lints: bool = ("Pass `[lints]` to the linting tools"),
@@ -1077,6 +1078,7 @@ impl CliUnstable {
"direct-minimal-versions" => self.direct_minimal_versions = parse_empty(k, v)?,
"doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?,
"dual-proc-macros" => self.dual_proc_macros = parse_empty(k, v)?,
+ "gc" => self.gc = parse_empty(k, v)?,
"gitoxide" => {
self.gitoxide = v.map_or_else(
|| Ok(Some(GitoxideFeatures::all())),
@@ -1114,7 +1116,17 @@ impl CliUnstable {
/// Generates an error if `-Z unstable-options` was not used for a new,
/// unstable command-line flag.
pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> {
- if !self.unstable_options {
+ self.fail_if_stable_opt_custom_z(flag, issue, "unstable-options", self.unstable_options)
+ }
+
+ pub fn fail_if_stable_opt_custom_z(
+ &self,
+ flag: &str,
+ issue: u32,
+ z_name: &str,
+ enabled: bool,
+ ) -> CargoResult<()> {
+ if !enabled {
let see = format!(
"See https://github.com/rust-lang/cargo/issues/{issue} for more \
information about the `{flag}` flag."
@@ -1123,7 +1135,7 @@ impl CliUnstable {
let channel = channel();
if channel == "nightly" || channel == "dev" {
bail!(
- "the `{flag}` flag is unstable, pass `-Z unstable-options` to enable it\n\
+ "the `{flag}` flag is unstable, pass `-Z {z_name}` to enable it\n\
{see}"
);
} else {
@@ -1145,8 +1157,10 @@ impl CliUnstable {
config: &Config,
command: &str,
issue: u32,
+ z_name: &str,
+ enabled: bool,
) -> CargoResult<()> {
- if self.unstable_options {
+ if enabled {
return Ok(());
}
let see = format!(
@@ -1156,10 +1170,9 @@ impl CliUnstable {
);
if config.nightly_features_allowed {
bail!(
- "the `cargo {}` command is unstable, pass `-Z unstable-options` to enable it\n\
- {}",
- command,
- see
+ "the `cargo {command}` command is unstable, pass `-Z {z_name}` \
+ to enable it\n\
+ {see}",
);
} else {
bail!(
diff --git a/src/tools/cargo/src/cargo/core/gc.rs b/src/tools/cargo/src/cargo/core/gc.rs
new file mode 100644
index 000000000..565078ff0
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/gc.rs
@@ -0,0 +1,509 @@
+//! Support for garbage collecting unused files from downloaded files or
+//! artifacts from the target directory.
+//!
+//! The [`Gc`] type provides the high-level interface for the
+//! garbage-collection system.
+//!
+//! Garbage collection can be done "automatically" by cargo, which it does by
+//! default once a day when running any command that does a lot of work (like
+//! `cargo build`). The entry point for this is the [`auto_gc`] function,
+//! which handles some basic setup, creating the [`Gc`], and calling
+//! [`Gc::auto`].
+//!
+//! Garbage collection can also be done manually via the `cargo clean` command
+//! by passing any option that requests deleting unused files. That is
+//! implemented by calling the [`Gc::gc`] method.
+//!
+//! Garbage collection for the global cache is guided by the last-use tracking
+//! implemented in the [`crate::core::global_cache_tracker`] module. See that
+//! module documentation for an in-depth explanation of how global cache
+//! tracking works.
+
+use crate::core::global_cache_tracker::{self, GlobalCacheTracker};
+use crate::ops::CleanContext;
+use crate::util::cache_lock::{CacheLock, CacheLockMode};
+use crate::{CargoResult, Config};
+use anyhow::{format_err, Context};
+use serde::Deserialize;
+use std::time::Duration;
+
+/// Default max age to auto-clean extracted sources, which can be recovered
+/// without downloading anything.
+const DEFAULT_MAX_AGE_EXTRACTED: &str = "1 month";
+/// Default max age to auto-clean cache data, which must be downloaded to
+/// recover.
+const DEFAULT_MAX_AGE_DOWNLOADED: &str = "3 months";
+/// How often auto-gc will run by default unless overridden in the config.
+const DEFAULT_AUTO_FREQUENCY: &str = "1 day";
+
+/// Performs automatic garbage collection.
+///
+/// This is called in various places in Cargo where garbage collection should
+/// be performed automatically based on the config settings. The default
+/// behavior is to only clean once a day.
+///
+/// This should only be called in code paths for commands that are already
+/// doing a lot of work. It should only be called *after* crates are
+/// downloaded so that the last-use data is updated first.
+///
+/// It should be cheap to call this multiple times (subsequent calls are
+/// ignored), but try not to abuse that.
+pub fn auto_gc(config: &Config) {
+ if !config.cli_unstable().gc {
+ return;
+ }
+ if !config.network_allowed() {
+ // As a conservative choice, auto-gc is disabled when offline. If the
+ // user is indefinitely offline, we don't want to delete things they
+ // may later depend on.
+ tracing::trace!(target: "gc", "running offline, auto gc disabled");
+ return;
+ }
+
+ if let Err(e) = auto_gc_inner(config) {
+ if global_cache_tracker::is_silent_error(&e) && !config.extra_verbose() {
+ tracing::warn!(target: "gc", "failed to auto-clean cache data: {e:?}");
+ } else {
+ crate::display_warning_with_error(
+ "failed to auto-clean cache data",
+ &e,
+ &mut config.shell(),
+ );
+ }
+ }
+}
+
+fn auto_gc_inner(config: &Config) -> CargoResult<()> {
+ let _lock = match config.try_acquire_package_cache_lock(CacheLockMode::MutateExclusive)? {
+ Some(lock) => lock,
+ None => {
+ tracing::debug!(target: "gc", "unable to acquire mutate lock, auto gc disabled");
+ return Ok(());
+ }
+ };
+ // This should not be called when there are pending deferred entries, so check that.
+ let deferred = config.deferred_global_last_use()?;
+ debug_assert!(deferred.is_empty());
+ let mut global_cache_tracker = config.global_cache_tracker()?;
+ let mut gc = Gc::new(config, &mut global_cache_tracker)?;
+ let mut clean_ctx = CleanContext::new(config);
+ gc.auto(&mut clean_ctx)?;
+ Ok(())
+}
+
+/// Automatic garbage collection settings from the `gc.auto` config table.
+///
+/// NOTE: Not all of these options may get stabilized. Some of them are very
+/// low-level details, and may not be something typical users need.
+///
+/// If any of these options are `None`, the built-in default is used.
+#[derive(Deserialize, Default)]
+#[serde(rename_all = "kebab-case")]
+struct AutoConfig {
+ /// The maximum frequency that automatic garbage collection happens.
+ frequency: Option<String>,
+ /// Anything older than this duration will be deleted in the source cache.
+ max_src_age: Option<String>,
+ /// Anything older than this duration will be deleted in the compressed crate cache.
+ max_crate_age: Option<String>,
+ /// Any index older than this duration will be deleted from the index cache.
+ max_index_age: Option<String>,
+ /// Any git checkout older than this duration will be deleted from the checkout cache.
+ max_git_co_age: Option<String>,
+ /// Any git clone older than this duration will be deleted from the git cache.
+ max_git_db_age: Option<String>,
+}
+
+/// Options to use for garbage collection.
+#[derive(Clone, Debug, Default)]
+pub struct GcOpts {
+ /// The `--max-src-age` CLI option.
+ pub max_src_age: Option<Duration>,
+ /// The `--max-crate-age` CLI option.
+ pub max_crate_age: Option<Duration>,
+ /// The `--max-index-age` CLI option.
+ pub max_index_age: Option<Duration>,
+ /// The `--max-git-co-age` CLI option.
+ pub max_git_co_age: Option<Duration>,
+ /// The `--max-git-db-age` CLI option.
+ pub max_git_db_age: Option<Duration>,
+ /// The `--max-src-size` CLI option.
+ pub max_src_size: Option<u64>,
+ /// The `--max-crate-size` CLI option.
+ pub max_crate_size: Option<u64>,
+ /// The `--max-git-size` CLI option.
+ pub max_git_size: Option<u64>,
+ /// The `--max-download-size` CLI option.
+ pub max_download_size: Option<u64>,
+}
+
+impl GcOpts {
+ /// Returns whether any download cache cleaning options are set.
+ pub fn is_download_cache_opt_set(&self) -> bool {
+ self.max_src_age.is_some()
+ || self.max_crate_age.is_some()
+ || self.max_index_age.is_some()
+ || self.max_git_co_age.is_some()
+ || self.max_git_db_age.is_some()
+ || self.max_src_size.is_some()
+ || self.max_crate_size.is_some()
+ || self.max_git_size.is_some()
+ || self.max_download_size.is_some()
+ }
+
+ /// Returns whether any download cache cleaning options based on size are set.
+ pub fn is_download_cache_size_set(&self) -> bool {
+ self.max_src_size.is_some()
+ || self.max_crate_size.is_some()
+ || self.max_git_size.is_some()
+ || self.max_download_size.is_some()
+ }
+
+ /// Updates the `GcOpts` to incorporate the specified max download age.
+ ///
+ /// "Download" means any cached data that can be re-downloaded.
+ pub fn set_max_download_age(&mut self, max_download_age: Duration) {
+ self.max_src_age = Some(maybe_newer_span(max_download_age, self.max_src_age));
+ self.max_crate_age = Some(maybe_newer_span(max_download_age, self.max_crate_age));
+ self.max_index_age = Some(maybe_newer_span(max_download_age, self.max_index_age));
+ self.max_git_co_age = Some(maybe_newer_span(max_download_age, self.max_git_co_age));
+ self.max_git_db_age = Some(maybe_newer_span(max_download_age, self.max_git_db_age));
+ }
+
+ /// Updates the configuration of this [`GcOpts`] to incorporate the
+ /// settings from config.
+ pub fn update_for_auto_gc(&mut self, config: &Config) -> CargoResult<()> {
+ let auto_config = config
+ .get::<Option<AutoConfig>>("gc.auto")?
+ .unwrap_or_default();
+ self.update_for_auto_gc_config(&auto_config)
+ }
+
+ fn update_for_auto_gc_config(&mut self, auto_config: &AutoConfig) -> CargoResult<()> {
+ self.max_src_age = newer_time_span_for_config(
+ self.max_src_age,
+ "gc.auto.max-src-age",
+ auto_config
+ .max_src_age
+ .as_deref()
+ .unwrap_or(DEFAULT_MAX_AGE_EXTRACTED),
+ )?;
+ self.max_crate_age = newer_time_span_for_config(
+ self.max_crate_age,
+ "gc.auto.max-crate-age",
+ auto_config
+ .max_crate_age
+ .as_deref()
+ .unwrap_or(DEFAULT_MAX_AGE_DOWNLOADED),
+ )?;
+ self.max_index_age = newer_time_span_for_config(
+ self.max_index_age,
+ "gc.auto.max-index-age",
+ auto_config
+ .max_index_age
+ .as_deref()
+ .unwrap_or(DEFAULT_MAX_AGE_DOWNLOADED),
+ )?;
+ self.max_git_co_age = newer_time_span_for_config(
+ self.max_git_co_age,
+ "gc.auto.max-git-co-age",
+ auto_config
+ .max_git_co_age
+ .as_deref()
+ .unwrap_or(DEFAULT_MAX_AGE_EXTRACTED),
+ )?;
+ self.max_git_db_age = newer_time_span_for_config(
+ self.max_git_db_age,
+ "gc.auto.max-git-db-age",
+ auto_config
+ .max_git_db_age
+ .as_deref()
+ .unwrap_or(DEFAULT_MAX_AGE_DOWNLOADED),
+ )?;
+ Ok(())
+ }
+}
+
+/// Garbage collector.
+///
+/// See the module docs at [`crate::core::gc`] for more information on GC.
+pub struct Gc<'a, 'config> {
+ config: &'config Config,
+ global_cache_tracker: &'a mut GlobalCacheTracker,
+ /// A lock on the package cache.
+ ///
+ /// This is important to be held, since we don't want multiple cargos to
+ /// be allowed to write to the cache at the same time, or for others to
+ /// read while we are modifying the cache.
+ #[allow(dead_code)] // Held for drop.
+ lock: CacheLock<'config>,
+}
+
+impl<'a, 'config> Gc<'a, 'config> {
+ pub fn new(
+ config: &'config Config,
+ global_cache_tracker: &'a mut GlobalCacheTracker,
+ ) -> CargoResult<Gc<'a, 'config>> {
+ let lock = config.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
+ Ok(Gc {
+ config,
+ global_cache_tracker,
+ lock,
+ })
+ }
+
+ /// Performs automatic garbage cleaning.
+ ///
+ /// This returns immediately without doing work if garbage collection has
+ /// been performed recently (since `gc.auto.frequency`).
+ fn auto(&mut self, clean_ctx: &mut CleanContext<'config>) -> CargoResult<()> {
+ if !self.config.cli_unstable().gc {
+ return Ok(());
+ }
+ let auto_config = self
+ .config
+ .get::<Option<AutoConfig>>("gc.auto")?
+ .unwrap_or_default();
+ let Some(freq) = parse_frequency(
+ auto_config
+ .frequency
+ .as_deref()
+ .unwrap_or(DEFAULT_AUTO_FREQUENCY),
+ )?
+ else {
+ tracing::trace!(target: "gc", "auto gc disabled");
+ return Ok(());
+ };
+ if !self.global_cache_tracker.should_run_auto_gc(freq)? {
+ return Ok(());
+ }
+ let mut gc_opts = GcOpts::default();
+ gc_opts.update_for_auto_gc_config(&auto_config)?;
+ self.gc(clean_ctx, &gc_opts)?;
+ if !clean_ctx.dry_run {
+ self.global_cache_tracker.set_last_auto_gc()?;
+ }
+ Ok(())
+ }
+
+ /// Performs garbage collection based on the given options.
+ pub fn gc(
+ &mut self,
+ clean_ctx: &mut CleanContext<'config>,
+ gc_opts: &GcOpts,
+ ) -> CargoResult<()> {
+ self.global_cache_tracker.clean(clean_ctx, gc_opts)?;
+ // In the future, other gc operations go here, such as target cleaning.
+ Ok(())
+ }
+}
+
+/// Returns the shorter duration from `cur_span` versus `config_span`.
+///
+/// This is used because the user may specify multiple options which overlap,
+/// and this will pick whichever one is shorter.
+///
+/// * `cur_span` is the span we are comparing against (the value from the CLI
+/// option). If None, just returns the config duration.
+/// * `config_name` is the name of the config option the span is loaded from.
+/// * `config_span` is the span value loaded from config.
+fn newer_time_span_for_config(
+ cur_span: Option<Duration>,
+ config_name: &str,
+ config_span: &str,
+) -> CargoResult<Option<Duration>> {
+ let config_span = parse_time_span_for_config(config_name, config_span)?;
+ Ok(Some(maybe_newer_span(config_span, cur_span)))
+}
+
+/// Returns whichever [`Duration`] is shorter.
+fn maybe_newer_span(a: Duration, b: Option<Duration>) -> Duration {
+ match b {
+ Some(b) => {
+ if b < a {
+ b
+ } else {
+ a
+ }
+ }
+ None => a,
+ }
+}
+
+/// Parses a frequency string.
+///
+/// Returns `Ok(None)` if the frequency is "never".
+fn parse_frequency(frequency: &str) -> CargoResult<Option<Duration>> {
+ if frequency == "always" {
+ return Ok(Some(Duration::new(0, 0)));
+ } else if frequency == "never" {
+ return Ok(None);
+ }
+ let duration = maybe_parse_time_span(frequency).ok_or_else(|| {
+ format_err!(
+ "config option `gc.auto.frequency` expected a value of \"always\", \"never\", \
+ or \"N seconds/minutes/days/weeks/months\", got: {frequency:?}"
+ )
+ })?;
+ Ok(Some(duration))
+}
+
+/// Parses a time span value fetched from config.
+///
+/// This is here to provide better error messages specific to reading from
+/// config.
+fn parse_time_span_for_config(config_name: &str, span: &str) -> CargoResult<Duration> {
+ maybe_parse_time_span(span).ok_or_else(|| {
+ format_err!(
+ "config option `{config_name}` expected a value of the form \
+ \"N seconds/minutes/days/weeks/months\", got: {span:?}"
+ )
+ })
+}
+
+/// Parses a time span string.
+///
+/// Returns None if the value is not valid. See [`parse_time_span`] if you
+/// need a variant that generates an error message.
+fn maybe_parse_time_span(span: &str) -> Option<Duration> {
+ let Some(right_i) = span.find(|c: char| !c.is_ascii_digit()) else {
+ return None;
+ };
+ let (left, mut right) = span.split_at(right_i);
+ if right.starts_with(' ') {
+ right = &right[1..];
+ }
+ let count: u64 = left.parse().ok()?;
+ let factor = match right {
+ "second" | "seconds" => 1,
+ "minute" | "minutes" => 60,
+ "hour" | "hours" => 60 * 60,
+ "day" | "days" => 24 * 60 * 60,
+ "week" | "weeks" => 7 * 24 * 60 * 60,
+ "month" | "months" => 2_629_746, // average is 30.436875 days
+ _ => return None,
+ };
+ Some(Duration::from_secs(factor * count))
+}
+
+/// Parses a time span string.
+pub fn parse_time_span(span: &str) -> CargoResult<Duration> {
+ maybe_parse_time_span(span).ok_or_else(|| {
+ format_err!(
+ "expected a value of the form \
+ \"N seconds/minutes/days/weeks/months\", got: {span:?}"
+ )
+ })
+}
+
+/// Parses a file size using metric or IEC units.
+pub fn parse_human_size(input: &str) -> CargoResult<u64> {
+ let re = regex::Regex::new(r"(?i)^([0-9]+(\.[0-9])?) ?(b|kb|mb|gb|kib|mib|gib)?$").unwrap();
+ let cap = re.captures(input).ok_or_else(|| {
+ format_err!(
+ "invalid size `{input}`, \
+ expected a number with an optional B, kB, MB, GB, kiB, MiB, or GiB suffix"
+ )
+ })?;
+ let factor = match cap.get(3) {
+ Some(suffix) => match suffix.as_str().to_lowercase().as_str() {
+ "b" => 1.0,
+ "kb" => 1_000.0,
+ "mb" => 1_000_000.0,
+ "gb" => 1_000_000_000.0,
+ "kib" => 1024.0,
+ "mib" => 1024.0 * 1024.0,
+ "gib" => 1024.0 * 1024.0 * 1024.0,
+ s => unreachable!("suffix `{s}` out of sync with regex"),
+ },
+ None => {
+ return cap[1]
+ .parse()
+ .with_context(|| format!("expected an integer size, got `{}`", &cap[1]))
+ }
+ };
+ let num = cap[1]
+ .parse::<f64>()
+ .with_context(|| format!("expected an integer or float, found `{}`", &cap[1]))?;
+ Ok((num * factor) as u64)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ #[test]
+ fn time_spans() {
+ let d = |x| Some(Duration::from_secs(x));
+ assert_eq!(maybe_parse_time_span("0 seconds"), d(0));
+ assert_eq!(maybe_parse_time_span("1second"), d(1));
+ assert_eq!(maybe_parse_time_span("23 seconds"), d(23));
+ assert_eq!(maybe_parse_time_span("5 minutes"), d(60 * 5));
+ assert_eq!(maybe_parse_time_span("2 hours"), d(60 * 60 * 2));
+ assert_eq!(maybe_parse_time_span("1 day"), d(60 * 60 * 24));
+ assert_eq!(maybe_parse_time_span("2 weeks"), d(60 * 60 * 24 * 14));
+ assert_eq!(maybe_parse_time_span("6 months"), d(2_629_746 * 6));
+
+ assert_eq!(parse_frequency("5 seconds").unwrap(), d(5));
+ assert_eq!(parse_frequency("always").unwrap(), d(0));
+ assert_eq!(parse_frequency("never").unwrap(), None);
+ }
+
+ #[test]
+ fn time_span_errors() {
+ assert_eq!(maybe_parse_time_span(""), None);
+ assert_eq!(maybe_parse_time_span("1"), None);
+ assert_eq!(maybe_parse_time_span("second"), None);
+ assert_eq!(maybe_parse_time_span("+2 seconds"), None);
+ assert_eq!(maybe_parse_time_span("day"), None);
+ assert_eq!(maybe_parse_time_span("-1 days"), None);
+ assert_eq!(maybe_parse_time_span("1.5 days"), None);
+ assert_eq!(maybe_parse_time_span("1 dayz"), None);
+ assert_eq!(maybe_parse_time_span("always"), None);
+ assert_eq!(maybe_parse_time_span("never"), None);
+ assert_eq!(maybe_parse_time_span("1 day "), None);
+ assert_eq!(maybe_parse_time_span(" 1 day"), None);
+ assert_eq!(maybe_parse_time_span("1 second"), None);
+
+ let e = parse_time_span_for_config("gc.auto.max-src-age", "-1 days").unwrap_err();
+ assert_eq!(
+ e.to_string(),
+ "config option `gc.auto.max-src-age` \
+ expected a value of the form \"N seconds/minutes/days/weeks/months\", \
+ got: \"-1 days\""
+ );
+ let e = parse_frequency("abc").unwrap_err();
+ assert_eq!(
+ e.to_string(),
+ "config option `gc.auto.frequency` \
+ expected a value of \"always\", \"never\", or \"N seconds/minutes/days/weeks/months\", \
+ got: \"abc\""
+ );
+ }
+
+ #[test]
+ fn human_sizes() {
+ assert_eq!(parse_human_size("0").unwrap(), 0);
+ assert_eq!(parse_human_size("123").unwrap(), 123);
+ assert_eq!(parse_human_size("123b").unwrap(), 123);
+ assert_eq!(parse_human_size("123B").unwrap(), 123);
+ assert_eq!(parse_human_size("123 b").unwrap(), 123);
+ assert_eq!(parse_human_size("123 B").unwrap(), 123);
+ assert_eq!(parse_human_size("1kb").unwrap(), 1_000);
+ assert_eq!(parse_human_size("5kb").unwrap(), 5_000);
+ assert_eq!(parse_human_size("1mb").unwrap(), 1_000_000);
+ assert_eq!(parse_human_size("1gb").unwrap(), 1_000_000_000);
+ assert_eq!(parse_human_size("1kib").unwrap(), 1_024);
+ assert_eq!(parse_human_size("1mib").unwrap(), 1_048_576);
+ assert_eq!(parse_human_size("1gib").unwrap(), 1_073_741_824);
+ assert_eq!(parse_human_size("1.5kb").unwrap(), 1_500);
+ assert_eq!(parse_human_size("1.7b").unwrap(), 1);
+
+ assert!(parse_human_size("").is_err());
+ assert!(parse_human_size("x").is_err());
+ assert!(parse_human_size("1x").is_err());
+ assert!(parse_human_size("1 2").is_err());
+ assert!(parse_human_size("1.5").is_err());
+ assert!(parse_human_size("+1").is_err());
+ assert!(parse_human_size("123 b").is_err());
+ }
+}
diff --git a/src/tools/cargo/src/cargo/core/global_cache_tracker.rs b/src/tools/cargo/src/cargo/core/global_cache_tracker.rs
new file mode 100644
index 000000000..79ae252d9
--- /dev/null
+++ b/src/tools/cargo/src/cargo/core/global_cache_tracker.rs
@@ -0,0 +1,1827 @@
+//! Support for tracking the last time files were used to assist with cleaning
+//! up those files if they haven't been used in a while.
+//!
+//! Tracking of cache files is stored in a sqlite database which contains a
+//! timestamp of the last time the file was used, as well as the size of the
+//! file.
+//!
+//! While cargo is running, when it detects a use of a cache file, it adds a
+//! timestamp to [`DeferredGlobalLastUse`]. This batches up a set of changes
+//! that are then flushed to the database all at once (via
+//! [`DeferredGlobalLastUse::save`]). Ideally saving would only be done once
+//! for performance reasons, but that is not really possible due to the way
+//! cargo works, since there are different ways cargo can be used (like `cargo
+//! generate-lockfile`, `cargo fetch`, and `cargo build` are all very
+//! different ways the code is used).
+//!
+//! All of the database interaction is done through the [`GlobalCacheTracker`]
+//! type.
+//!
+//! There is a single global [`GlobalCacheTracker`] and
+//! [`DeferredGlobalLastUse`] stored in [`Config`].
+//!
+//! The high-level interface for performing garbage collection is defined in
+//! the [`crate::core::gc`] module. The functions there are responsible for
+//! interacting with the [`GlobalCacheTracker`] to handle cleaning of global
+//! cache data.
+//!
+//! ## Automatic gc
+//!
+//! Some commands (primarily the build commands) will trigger an automatic
+//! deletion of files that haven't been used in a while. The high-level
+//! interface for this is the [`crate::core::gc::auto_gc`] function.
+//!
+//! The [`GlobalCacheTracker`] database tracks the last time an automatic gc
+//! was performed so that it is only done once per day for performance
+//! reasons.
+//!
+//! ## Manual gc
+//!
+//! The user can perform a manual garbage collection with the `cargo clean`
+//! command. That command has a variety of options to specify what to delete.
+//! Manual gc supports deleting based on age or size or both. From a
+//! high-level, this is done by the [`crate::core::gc::Gc::gc`] method, which
+//! calls into [`GlobalCacheTracker`] to handle all the cleaning.
+//!
+//! ## Locking
+//!
+//! Usage of the database requires that the package cache is locked to prevent
+//! concurrent access. Although sqlite has built-in locking support, we want
+//! to use cargo's locking so that the "Blocking" message gets displayed, and
+//! so that locks can block indefinitely for long-running build commands.
+//! [`rusqlite`] has a default timeout of 5 seconds, though that is
+//! configurable.
+//!
+//! When garbage collection is being performed, the package cache lock must be
+//! in [`CacheLockMode::MutateExclusive`] to ensure no other cargo process is
+//! running. See [`crate::util::cache_lock`] for more detail on locking.
+//!
+//! When performing automatic gc, [`crate::core::gc::auto_gc`] will skip the
+//! GC if the package cache lock is already held by anything else. Automatic
+//! GC is intended to be opportunistic, and should impose as little disruption
+//! to the user as possible.
+//!
+//! ## Compatibility
+//!
+//! The database must retain both forwards and backwards compatibility between
+//! different versions of cargo. For the most part, this shouldn't be too
+//! difficult to maintain. Generally sqlite doesn't change on-disk formats
+//! between versions (the introduction of WAL is one of the few examples where
+//! version 3 had a format change, but we wouldn't use it anyway since it has
+//! shared-memory requirements cargo can't depend on due to things like
+//! network mounts).
+//!
+//! Schema changes must be managed through [`migrations`] by adding new
+//! entries that make a change to the database. Changes must not break older
+//! versions of cargo. Generally, adding columns should be fine (either with a
+//! default value, or NULL). Adding tables should also be fine. Just don't do
+//! destructive things like removing a column, or changing the semantics of an
+//! existing column.
+//!
+//! Since users may run older versions of cargo that do not do cache tracking,
+//! the [`GlobalCacheTracker::sync_db_with_files`] method helps dealing with
+//! keeping the database in sync in the presence of older versions of cargo
+//! touching the cache directories.
+//!
+//! ## Performance
+//!
+//! A lot of focus on the design of this system is to minimize the performance
+//! impact. Every build command needs to save updates, and we try to keep
+//! those saves from noticeably impacting build times.
+//! particularly with a magnetic hard disk, can experience a fairly large
+//! impact of cargo's overhead. Cargo's benchsuite has some benchmarks to help
+//! compare different environments, or changes to the code here. Please try to
+//! keep performance in mind if making any major changes.
+//!
+//! Performance of `cargo clean` is not quite as important since it is not
+//! expected to be run often. However, it is still courteous to the user to
+//! try to not impact it too much. One part that has a performance concern is
+//! that the clean command will synchronize the database with whatever is on
+//! disk if needed (in case files were added by older versions of cargo that
+//! don't do cache tracking, or if the user manually deleted some files). This
+//! can potentially be very slow, especially if the two are very out of sync.
+//!
+//! ## Filesystems
+//!
+//! Everything here is sensitive to the kind of filesystem it is running on.
+//! People tend to run cargo in all sorts of strange environments that have
+//! limited capabilities, or on things like read-only mounts. The code here
+//! needs to gracefully handle as many situations as possible.
+//!
+//! See also the information in the [Performance](#performance) and
+//! [Locking](#locking) sections when considering different filesystems and
+//! their impact on performance and locking.
+//!
+//! There are checks for read-only filesystems; errors caused by a read-only filesystem are generally ignored.
+
+use crate::core::gc::GcOpts;
+use crate::core::Verbosity;
+use crate::ops::CleanContext;
+use crate::util::cache_lock::CacheLockMode;
+use crate::util::interning::InternedString;
+use crate::util::sqlite::{self, basic_migration, Migration};
+use crate::util::{Filesystem, Progress, ProgressStyle};
+use crate::{CargoResult, Config};
+use anyhow::{bail, Context};
+use cargo_util::paths;
+use rusqlite::{params, Connection, ErrorCode};
+use std::collections::{hash_map, HashMap};
+use std::path::{Path, PathBuf};
+use std::time::{Duration, SystemTime};
+use tracing::{debug, trace};
+
+/// The filename of the database.
+const GLOBAL_CACHE_FILENAME: &str = ".global-cache";
+
+const REGISTRY_INDEX_TABLE: &str = "registry_index";
+const REGISTRY_CRATE_TABLE: &str = "registry_crate";
+const REGISTRY_SRC_TABLE: &str = "registry_src";
+const GIT_DB_TABLE: &str = "git_db";
+const GIT_CO_TABLE: &str = "git_checkout";
+
+/// How often timestamps will be updated.
+///
+/// As an optimization timestamps are not updated unless they are older than
+/// the given number of seconds. This helps reduce the amount of disk I/O when
+/// running cargo multiple times within a short window.
+const UPDATE_RESOLUTION: u64 = 60 * 5;
+
+/// Type for timestamps as stored in the database.
+///
+/// These are seconds since the Unix epoch.
+type Timestamp = u64;
+
+/// The key for a registry index entry stored in the database.
+#[derive(Clone, Debug, Hash, Eq, PartialEq)]
+pub struct RegistryIndex {
+ /// A unique name of the registry source.
+ pub encoded_registry_name: InternedString,
+}
+
+/// The key for a registry `.crate` entry stored in the database.
+#[derive(Clone, Debug, Hash, Eq, PartialEq)]
+pub struct RegistryCrate {
+ /// A unique name of the registry source.
+ pub encoded_registry_name: InternedString,
+ /// The filename of the compressed crate, like `foo-1.2.3.crate`.
+ pub crate_filename: InternedString,
+ /// The size of the `.crate` file.
+ pub size: u64,
+}
+
+/// The key for a registry src directory entry stored in the database.
+#[derive(Clone, Debug, Hash, Eq, PartialEq)]
+pub struct RegistrySrc {
+ /// A unique name of the registry source.
+ pub encoded_registry_name: InternedString,
+ /// The directory name of the extracted source, like `foo-1.2.3`.
+ pub package_dir: InternedString,
+ /// Total size of the src directory in bytes.
+ ///
+ /// This can be None when the size is unknown. For example, when the src
+ /// directory already exists on disk, and we just want to update the
+ /// last-use timestamp. We don't want to take the expense of computing disk
+ /// usage unless necessary. [`GlobalCacheTracker::populate_untracked`]
+ /// will handle any actual NULL values in the database, which can happen
+ /// when the src directory is created by an older version of cargo that
+ /// did not track sizes.
+ pub size: Option<u64>,
+}
+
+/// The key for a git db entry stored in the database.
+#[derive(Clone, Debug, Hash, Eq, PartialEq)]
+pub struct GitDb {
+ /// A unique name of the git database.
+ pub encoded_git_name: InternedString,
+}
+
+/// The key for a git checkout entry stored in the database.
+#[derive(Clone, Debug, Hash, Eq, PartialEq)]
+pub struct GitCheckout {
+ /// A unique name of the git database.
+ pub encoded_git_name: InternedString,
+ /// A unique name of the checkout without the database.
+ pub short_name: InternedString,
+ /// Total size of the checkout directory.
+ ///
+ /// This can be None when the size is unknown. See [`RegistrySrc::size`]
+ /// for an explanation.
+ pub size: Option<u64>,
+}
+
+/// Filesystem paths in the global cache.
+///
+/// Accessing these assumes a lock has already been acquired.
+struct BasePaths {
+ /// Root path to the index caches.
+ index: PathBuf,
+ /// Root path to the git DBs.
+ git_db: PathBuf,
+ /// Root path to the git checkouts.
+ git_co: PathBuf,
+ /// Root path to the `.crate` files.
+ crate_dir: PathBuf,
+ /// Root path to the `src` directories.
+ src: PathBuf,
+}
+
+/// Migrations which initialize the database, and can be used to evolve it over time.
+///
+/// See [`Migration`] for more detail.
+///
+/// **Be sure to not change the order or entries here!**
+fn migrations() -> Vec<Migration> {
+ vec![
+ // registry_index tracks the overall usage of an index cache, and tracks a
+ // numeric ID to refer to that index that is used in other tables.
+ basic_migration(
+ "CREATE TABLE registry_index (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT UNIQUE NOT NULL,
+ timestamp INTEGER NOT NULL
+ )",
+ ),
+ // .crate files
+ basic_migration(
+ "CREATE TABLE registry_crate (
+ registry_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ size INTEGER NOT NULL,
+ timestamp INTEGER NOT NULL,
+ PRIMARY KEY (registry_id, name),
+ FOREIGN KEY (registry_id) REFERENCES registry_index (id) ON DELETE CASCADE
+ )",
+ ),
+ // Extracted src directories
+ //
+ // Note that `size` can be NULL. This will happen when marking a src
+ // directory as used that was created by an older version of cargo
+ // that didn't do size tracking.
+ basic_migration(
+ "CREATE TABLE registry_src (
+ registry_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ size INTEGER,
+ timestamp INTEGER NOT NULL,
+ PRIMARY KEY (registry_id, name),
+ FOREIGN KEY (registry_id) REFERENCES registry_index (id) ON DELETE CASCADE
+ )",
+ ),
+ // Git db directories
+ basic_migration(
+ "CREATE TABLE git_db (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT UNIQUE NOT NULL,
+ timestamp INTEGER NOT NULL
+ )",
+ ),
+ // Git checkout directories
+ basic_migration(
+ "CREATE TABLE git_checkout (
+ git_id INTEGER NOT NULL,
+ name TEXT UNIQUE NOT NULL,
+ size INTEGER,
+ timestamp INTEGER NOT NULL,
+ PRIMARY KEY (git_id, name),
+ FOREIGN KEY (git_id) REFERENCES git_db (id) ON DELETE CASCADE
+ )",
+ ),
+ // This is a general-purpose single-row table that can store arbitrary
+ // data. Feel free to add columns (with ALTER TABLE) if necessary.
+ basic_migration(
+ "CREATE TABLE global_data (
+ last_auto_gc INTEGER NOT NULL
+ )",
+ ),
+ // last_auto_gc tracks the last time auto-gc was run (so that it only
+ // runs roughly once a day for performance reasons). Prime it with the
+ // current time to establish a baseline.
+ Box::new(|conn| {
+ conn.execute(
+ "INSERT INTO global_data (last_auto_gc) VALUES (?1)",
+ [now()],
+ )?;
+ Ok(())
+ }),
+ ]
+}
+
+/// Type for SQL columns that refer to the primary key of their parent table.
+///
+/// For example, `registry_crate.registry_id` refers to its parent `registry_index.id`.
+#[derive(Copy, Clone, Debug, PartialEq)]
+struct ParentId(i64);
+
+impl rusqlite::types::FromSql for ParentId {
+ fn column_result(value: rusqlite::types::ValueRef<'_>) -> rusqlite::types::FromSqlResult<Self> {
+ let i = i64::column_result(value)?;
+ Ok(ParentId(i))
+ }
+}
+
+impl rusqlite::types::ToSql for ParentId {
+ fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+ Ok(rusqlite::types::ToSqlOutput::from(self.0))
+ }
+}
+
+/// Tracking for the global shared cache (registry files, etc.).
+///
+/// This is the interface to the global cache database, used for tracking and
+/// cleaning. See the [`crate::core::global_cache_tracker`] module docs for
+/// details.
+#[derive(Debug)]
+pub struct GlobalCacheTracker {
+ /// Connection to the SQLite database.
+ conn: Connection,
+ /// This is an optimization used to make sure cargo only checks if gc
+ /// needs to run once per session. This starts as `false`, and then the
+ /// first time it checks if automatic gc needs to run, it will be set to
+ /// `true`.
+ auto_gc_checked_this_session: bool,
+}
+
+impl GlobalCacheTracker {
+ /// Creates a new [`GlobalCacheTracker`].
+ ///
+ /// The caller is responsible for locking the package cache with
+ /// [`CacheLockMode::DownloadExclusive`] before calling this.
+ pub fn new(config: &Config) -> CargoResult<GlobalCacheTracker> {
+ let db_path = Self::db_path(config);
+ // A package cache lock is required to ensure only one cargo is
+ // accessing at the same time. If there is concurrent access, we
+ // want to rely on cargo's own "Blocking" system (which can
+ // provide user feedback) rather than blocking inside sqlite
+ // (which by default has a short timeout).
+ let db_path =
+ config.assert_package_cache_locked(CacheLockMode::DownloadExclusive, &db_path);
+ let mut conn = if config.cli_unstable().gc {
+ Connection::open(db_path)?
+ } else {
+ // To simplify things (so there aren't checks everywhere for being
+ // enabled), just process everything in memory.
+ Connection::open_in_memory()?
+ };
+ conn.pragma_update(None, "foreign_keys", true)?;
+ sqlite::migrate(&mut conn, &migrations())?;
+ Ok(GlobalCacheTracker {
+ conn,
+ auto_gc_checked_this_session: false,
+ })
+ }
+
+ /// The path to the database.
+ pub fn db_path(config: &Config) -> Filesystem {
+ config.home().join(GLOBAL_CACHE_FILENAME)
+ }
+
+ /// Given an encoded registry name, returns its ID.
+ ///
+ /// Returns None if the given name isn't in the database.
+ fn id_from_name(
+ conn: &Connection,
+ table_name: &str,
+ encoded_name: &str,
+ ) -> CargoResult<Option<ParentId>> {
+ let mut stmt =
+ conn.prepare_cached(&format!("SELECT id FROM {table_name} WHERE name = ?"))?;
+ match stmt.query_row([encoded_name], |row| row.get(0)) {
+ Ok(id) => Ok(Some(id)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(e.into()),
+ }
+ }
+
+ /// Returns a map of ID to path for the given ids in the given table.
+ ///
+ /// For example, given `registry_index` IDs, it returns filenames of the
+ /// form "index.crates.io-6f17d22bba15001f".
+ fn get_id_map(
+ conn: &Connection,
+ table_name: &str,
+ ids: &[i64],
+ ) -> CargoResult<HashMap<i64, PathBuf>> {
+ let mut stmt =
+ conn.prepare_cached(&format!("SELECT name FROM {table_name} WHERE id = ?1"))?;
+ ids.iter()
+ .map(|id| {
+ let name = stmt.query_row(params![id], |row| {
+ Ok(PathBuf::from(row.get::<_, String>(0)?))
+ })?;
+ Ok((*id, name))
+ })
+ .collect()
+ }
+
+ /// Returns all index cache timestamps.
+ pub fn registry_index_all(&self) -> CargoResult<Vec<(RegistryIndex, Timestamp)>> {
+ let mut stmt = self
+ .conn
+ .prepare_cached("SELECT name, timestamp FROM registry_index")?;
+ let rows = stmt
+ .query_map([], |row| {
+ let encoded_registry_name = row.get_unwrap(0);
+ let timestamp = row.get_unwrap(1);
+ let kind = RegistryIndex {
+ encoded_registry_name,
+ };
+ Ok((kind, timestamp))
+ })?
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(rows)
+ }
+
+ /// Returns all registry crate cache timestamps.
+ pub fn registry_crate_all(&self) -> CargoResult<Vec<(RegistryCrate, Timestamp)>> {
+ let mut stmt = self.conn.prepare_cached(
+ "SELECT registry_index.name, registry_crate.name, registry_crate.size, registry_crate.timestamp
+ FROM registry_index, registry_crate
+ WHERE registry_crate.registry_id = registry_index.id",
+ )?;
+ let rows = stmt
+ .query_map([], |row| {
+ let encoded_registry_name = row.get_unwrap(0);
+ let crate_filename = row.get_unwrap(1);
+ let size = row.get_unwrap(2);
+ let timestamp = row.get_unwrap(3);
+ let kind = RegistryCrate {
+ encoded_registry_name,
+ crate_filename,
+ size,
+ };
+ Ok((kind, timestamp))
+ })?
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(rows)
+ }
+
+ /// Returns all registry source cache timestamps.
+ pub fn registry_src_all(&self) -> CargoResult<Vec<(RegistrySrc, Timestamp)>> {
+ let mut stmt = self.conn.prepare_cached(
+ "SELECT registry_index.name, registry_src.name, registry_src.size, registry_src.timestamp
+ FROM registry_index, registry_src
+ WHERE registry_src.registry_id = registry_index.id",
+ )?;
+ let rows = stmt
+ .query_map([], |row| {
+ let encoded_registry_name = row.get_unwrap(0);
+ let package_dir = row.get_unwrap(1);
+ let size = row.get_unwrap(2);
+ let timestamp = row.get_unwrap(3);
+ let kind = RegistrySrc {
+ encoded_registry_name,
+ package_dir,
+ size,
+ };
+ Ok((kind, timestamp))
+ })?
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(rows)
+ }
+
+ /// Returns all git db timestamps.
+ pub fn git_db_all(&self) -> CargoResult<Vec<(GitDb, Timestamp)>> {
+ let mut stmt = self
+ .conn
+ .prepare_cached("SELECT name, timestamp FROM git_db")?;
+ let rows = stmt
+ .query_map([], |row| {
+ let encoded_git_name = row.get_unwrap(0);
+ let timestamp = row.get_unwrap(1);
+ let kind = GitDb { encoded_git_name };
+ Ok((kind, timestamp))
+ })?
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(rows)
+ }
+
+    /// Returns all git checkout timestamps.
+    pub fn git_checkout_all(&self) -> CargoResult<Vec<(GitCheckout, Timestamp)>> {
+        // Join each checkout row with its parent git_db row to recover the
+        // encoded database name. Note: the foreign-key column on
+        // git_checkout is `git_id` (see the git_checkout migration), not
+        // `registry_id` as on the registry_* tables.
+        let mut stmt = self.conn.prepare_cached(
+            "SELECT git_db.name, git_checkout.name, git_checkout.size, git_checkout.timestamp
+             FROM git_db, git_checkout
+             WHERE git_checkout.git_id = git_db.id",
+        )?;
+        let rows = stmt
+            .query_map([], |row| {
+                let encoded_git_name = row.get_unwrap(0);
+                let short_name = row.get_unwrap(1);
+                let size = row.get_unwrap(2);
+                let timestamp = row.get_unwrap(3);
+                let kind = GitCheckout {
+                    encoded_git_name,
+                    short_name,
+                    size,
+                };
+                Ok((kind, timestamp))
+            })?
+            .collect::<Result<Vec<_>, _>>()?;
+        Ok(rows)
+    }
+
+ /// Returns whether or not an auto GC should be performed, compared to the
+ /// last time it was recorded in the database.
+ pub fn should_run_auto_gc(&mut self, frequency: Duration) -> CargoResult<bool> {
+ trace!(target: "gc", "should_run_auto_gc");
+ if self.auto_gc_checked_this_session {
+ return Ok(false);
+ }
+ let last_auto_gc: Timestamp =
+ self.conn
+ .query_row("SELECT last_auto_gc FROM global_data", [], |row| row.get(0))?;
+ let should_run = last_auto_gc + frequency.as_secs() < now();
+ trace!(target: "gc",
+ "last auto gc was {}, {}",
+ last_auto_gc,
+ if should_run { "running" } else { "skipping" }
+ );
+ self.auto_gc_checked_this_session = true;
+ Ok(should_run)
+ }
+
+ /// Writes to the database to indicate that an automatic GC has just been
+ /// completed.
+ pub fn set_last_auto_gc(&self) -> CargoResult<()> {
+ self.conn
+ .execute("UPDATE global_data SET last_auto_gc = ?1", [now()])?;
+ Ok(())
+ }
+
+ /// Deletes files from the global cache based on the given options.
+ pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> {
+ self.clean_inner(clean_ctx, gc_opts)
+ .with_context(|| "failed to clean entries from the global cache")
+ }
+
+ fn clean_inner(
+ &mut self,
+ clean_ctx: &mut CleanContext<'_>,
+ gc_opts: &GcOpts,
+ ) -> CargoResult<()> {
+ let _p = crate::util::profile::start("cleaning global cache files");
+ let config = clean_ctx.config;
+ let base_git_path = config.git_path().into_path_unlocked();
+ let base = BasePaths {
+ index: config.registry_index_path().into_path_unlocked(),
+ git_db: base_git_path.join("db"),
+ git_co: base_git_path.join("checkouts"),
+ crate_dir: config.registry_cache_path().into_path_unlocked(),
+ src: config.registry_source_path().into_path_unlocked(),
+ };
+ let now = now();
+ trace!(target: "gc", "cleaning {gc_opts:?}");
+ let tx = self.conn.transaction()?;
+ let mut delete_paths = Vec::new();
+ // This can be an expensive operation, so only perform it if necessary.
+ if gc_opts.is_download_cache_opt_set() {
+ // TODO: Investigate how slow this might be.
+ Self::sync_db_with_files(
+ &tx,
+ now,
+ config,
+ &base,
+ gc_opts.is_download_cache_size_set(),
+ &mut delete_paths,
+ )
+ .with_context(|| "failed to sync tracking database")?
+ }
+ if let Some(max_age) = gc_opts.max_index_age {
+ let max_age = now - max_age.as_secs();
+ Self::get_registry_index_to_clean(&tx, max_age, &base, &mut delete_paths)?;
+ }
+ if let Some(max_age) = gc_opts.max_src_age {
+ let max_age = now - max_age.as_secs();
+ Self::get_registry_items_to_clean_age(
+ &tx,
+ max_age,
+ REGISTRY_SRC_TABLE,
+ &base.src,
+ &mut delete_paths,
+ )?;
+ }
+ if let Some(max_age) = gc_opts.max_crate_age {
+ let max_age = now - max_age.as_secs();
+ Self::get_registry_items_to_clean_age(
+ &tx,
+ max_age,
+ REGISTRY_CRATE_TABLE,
+ &base.crate_dir,
+ &mut delete_paths,
+ )?;
+ }
+ if let Some(max_age) = gc_opts.max_git_db_age {
+ let max_age = now - max_age.as_secs();
+ Self::get_git_db_items_to_clean(&tx, max_age, &base, &mut delete_paths)?;
+ }
+ if let Some(max_age) = gc_opts.max_git_co_age {
+ let max_age = now - max_age.as_secs();
+ Self::get_git_co_items_to_clean(&tx, max_age, &base.git_co, &mut delete_paths)?;
+ }
+ // Size collection must happen after date collection so that dates
+ // have precedence, since size constraints are a more blunt
+ // instrument.
+ //
+ // These are also complicated by the `--max-download-size` option
+ // overlapping with `--max-crate-size` and `--max-src-size`, which
+ // requires some coordination between those options which isn't
+ // necessary with the age-based options. An item's age is either older
+ // or it isn't, but contrast that with size which is based on the sum
+ // of all tracked items. Also, `--max-download-size` is summed against
+ // both the crate and src tracking, which requires combining them to
+ // compute the size, and then separating them to calculate the correct
+ // paths.
+ if let Some(max_size) = gc_opts.max_crate_size {
+ Self::get_registry_items_to_clean_size(
+ &tx,
+ max_size,
+ REGISTRY_CRATE_TABLE,
+ &base.crate_dir,
+ &mut delete_paths,
+ )?;
+ }
+ if let Some(max_size) = gc_opts.max_src_size {
+ Self::get_registry_items_to_clean_size(
+ &tx,
+ max_size,
+ REGISTRY_SRC_TABLE,
+ &base.src,
+ &mut delete_paths,
+ )?;
+ }
+ if let Some(max_size) = gc_opts.max_git_size {
+ Self::get_git_items_to_clean_size(&tx, max_size, &base, &mut delete_paths)?;
+ }
+ if let Some(max_size) = gc_opts.max_download_size {
+ Self::get_registry_items_to_clean_size_both(&tx, max_size, &base, &mut delete_paths)?;
+ }
+
+ clean_ctx.remove_paths(&delete_paths)?;
+
+ if clean_ctx.dry_run {
+ tx.rollback()?;
+ } else {
+ tx.commit()?;
+ }
+ Ok(())
+ }
+
+ /// Returns a list of directory entries in the given path.
+ fn names_from(path: &Path) -> CargoResult<Vec<String>> {
+ let entries = match path.read_dir() {
+ Ok(e) => e,
+ Err(e) => {
+ if e.kind() == std::io::ErrorKind::NotFound {
+ return Ok(Vec::new());
+ } else {
+ return Err(
+ anyhow::Error::new(e).context(format!("failed to read path `{path:?}`"))
+ );
+ }
+ }
+ };
+ let names = entries
+ .filter_map(|entry| entry.ok()?.file_name().into_string().ok())
+ .collect();
+ Ok(names)
+ }
+
+ /// Synchronizes the database to match the files on disk.
+ ///
+ /// This performs the following cleanups:
+ ///
+ /// 1. Remove entries from the database that are missing on disk.
+ /// 2. Adds missing entries to the database that are on disk (such as when
+ /// files are added by older versions of cargo).
+ /// 3. Fills in the `size` column where it is NULL (such as when something
+ /// is added to disk by an older version of cargo, and one of the mark
+ /// functions marked it without knowing the size).
+ ///
+ /// Size computations are only done if `sync_size` is set since it can
+ /// be a very expensive operation. This should only be set if the user
+ /// requested to clean based on the cache size.
+ /// 4. Checks for orphaned files. For example, if there are `.crate` files
+ /// associated with an index that does not exist.
+ ///
+ /// These orphaned files will be added to `delete_paths` so that the
+ /// caller can delete them.
+ fn sync_db_with_files(
+ conn: &Connection,
+ now: Timestamp,
+ config: &Config,
+ base: &BasePaths,
+ sync_size: bool,
+ delete_paths: &mut Vec<PathBuf>,
+ ) -> CargoResult<()> {
+ let _p = crate::util::profile::start("global cache db sync");
+ debug!(target: "gc", "starting db sync");
+ // For registry_index and git_db, add anything that is missing in the db.
+ Self::update_parent_for_missing_from_db(conn, now, REGISTRY_INDEX_TABLE, &base.index)?;
+ Self::update_parent_for_missing_from_db(conn, now, GIT_DB_TABLE, &base.git_db)?;
+
+ // For registry_crate, registry_src, and git_checkout, remove anything
+ // from the db that isn't on disk.
+ Self::update_db_for_removed(
+ conn,
+ REGISTRY_INDEX_TABLE,
+ "registry_id",
+ REGISTRY_CRATE_TABLE,
+ &base.crate_dir,
+ )?;
+ Self::update_db_for_removed(
+ conn,
+ REGISTRY_INDEX_TABLE,
+ "registry_id",
+ REGISTRY_SRC_TABLE,
+ &base.src,
+ )?;
+ Self::update_db_for_removed(conn, GIT_DB_TABLE, "git_id", GIT_CO_TABLE, &base.git_co)?;
+
+ // For registry_index and git_db, remove anything from the db that
+ // isn't on disk.
+ //
+ // This also collects paths for any child files that don't have their
+ // respective parent on disk.
+ Self::update_db_parent_for_removed_from_disk(
+ conn,
+ REGISTRY_INDEX_TABLE,
+ &base.index,
+ &[&base.crate_dir, &base.src],
+ delete_paths,
+ )?;
+ Self::update_db_parent_for_removed_from_disk(
+ conn,
+ GIT_DB_TABLE,
+ &base.git_db,
+ &[&base.git_co],
+ delete_paths,
+ )?;
+
+ // For registry_crate, registry_src, and git_checkout, add anything
+ // that is missing in the db.
+ Self::populate_untracked_crate(conn, now, &base.crate_dir)?;
+ Self::populate_untracked(
+ conn,
+ now,
+ config,
+ REGISTRY_INDEX_TABLE,
+ "registry_id",
+ REGISTRY_SRC_TABLE,
+ &base.src,
+ sync_size,
+ )?;
+ Self::populate_untracked(
+ conn,
+ now,
+ config,
+ GIT_DB_TABLE,
+ "git_id",
+ GIT_CO_TABLE,
+ &base.git_co,
+ sync_size,
+ )?;
+
+ // Update any NULL sizes if needed.
+ if sync_size {
+ Self::update_null_sizes(
+ conn,
+ config,
+ REGISTRY_INDEX_TABLE,
+ "registry_id",
+ REGISTRY_SRC_TABLE,
+ &base.src,
+ )?;
+ Self::update_null_sizes(
+ conn,
+ config,
+ GIT_DB_TABLE,
+ "git_id",
+ GIT_CO_TABLE,
+ &base.git_co,
+ )?;
+ }
+ Ok(())
+ }
+
+ /// For parent tables, add any entries that are on disk but aren't tracked in the db.
+ fn update_parent_for_missing_from_db(
+ conn: &Connection,
+ now: Timestamp,
+ parent_table_name: &str,
+ base_path: &Path,
+ ) -> CargoResult<()> {
+ let _p = crate::util::profile::start(format!(
+ "update parent db for missing from db {parent_table_name}"
+ ));
+ trace!(target: "gc", "checking for untracked parent to add to {parent_table_name}");
+ let names = Self::names_from(base_path)?;
+
+ let mut stmt = conn.prepare_cached(&format!(
+ "INSERT INTO {parent_table_name} (name, timestamp)
+ VALUES (?1, ?2)
+ ON CONFLICT DO NOTHING",
+ ))?;
+ for name in names {
+ stmt.execute(params![name, now])?;
+ }
+ Ok(())
+ }
+
+ /// Removes database entries for any files that are not on disk for the child tables.
+ ///
+ /// This could happen for example if the user manually deleted the file or
+ /// any such scenario where the filesystem and db are out of sync.
+ fn update_db_for_removed(
+ conn: &Connection,
+ parent_table_name: &str,
+ id_column_name: &str,
+ table_name: &str,
+ base_path: &Path,
+ ) -> CargoResult<()> {
+ let _p = crate::util::profile::start(format!("update db for removed {table_name}"));
+ trace!(target: "gc", "checking for db entries to remove from {table_name}");
+ let mut select_stmt = conn.prepare_cached(&format!(
+ "SELECT {table_name}.rowid, {parent_table_name}.name, {table_name}.name
+ FROM {parent_table_name}, {table_name}
+ WHERE {table_name}.{id_column_name} = {parent_table_name}.id",
+ ))?;
+ let mut delete_stmt =
+ conn.prepare_cached(&format!("DELETE FROM {table_name} WHERE rowid = ?1"))?;
+ let mut rows = select_stmt.query([])?;
+ while let Some(row) = rows.next()? {
+ let rowid: i64 = row.get_unwrap(0);
+ let id_name: String = row.get_unwrap(1);
+ let name: String = row.get_unwrap(2);
+ if !base_path.join(id_name).join(name).exists() {
+ delete_stmt.execute([rowid])?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Removes database entries for any files that are not on disk for the parent tables.
+ fn update_db_parent_for_removed_from_disk(
+ conn: &Connection,
+ parent_table_name: &str,
+ base_path: &Path,
+ child_base_paths: &[&Path],
+ delete_paths: &mut Vec<PathBuf>,
+ ) -> CargoResult<()> {
+ let _p = crate::util::profile::start(format!(
+ "update db parent for removed from disk {parent_table_name}"
+ ));
+ trace!(target: "gc", "checking for db entries to remove from {parent_table_name}");
+ let mut select_stmt =
+ conn.prepare_cached(&format!("SELECT rowid, name FROM {parent_table_name}"))?;
+ let mut delete_stmt =
+ conn.prepare_cached(&format!("DELETE FROM {parent_table_name} WHERE rowid = ?1"))?;
+ let mut rows = select_stmt.query([])?;
+ while let Some(row) = rows.next()? {
+ let rowid: i64 = row.get_unwrap(0);
+ let id_name: String = row.get_unwrap(1);
+ if !base_path.join(&id_name).exists() {
+ delete_stmt.execute([rowid])?;
+ // Make sure any child data is also cleaned up.
+ for child_base in child_base_paths {
+ let child_path = child_base.join(&id_name);
+ if child_path.exists() {
+ debug!(target: "gc", "removing orphaned path {child_path:?}");
+ delete_paths.push(child_path);
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Updates the database to add any `.crate` files that are currently
+ /// not tracked (such as when they are downloaded by an older version of
+ /// cargo).
+ fn populate_untracked_crate(
+ conn: &Connection,
+ now: Timestamp,
+ base_path: &Path,
+ ) -> CargoResult<()> {
+ let _p = crate::util::profile::start("populate untracked crate");
+ trace!(target: "gc", "populating untracked crate files");
+ let mut insert_stmt = conn.prepare_cached(
+ "INSERT INTO registry_crate (registry_id, name, size, timestamp)
+ VALUES (?1, ?2, ?3, ?4)
+ ON CONFLICT DO NOTHING",
+ )?;
+ let index_names = Self::names_from(&base_path)?;
+ for index_name in index_names {
+ let Some(id) = Self::id_from_name(conn, REGISTRY_INDEX_TABLE, &index_name)? else {
+ // The id is missing from the database. This should be resolved
+ // via update_db_parent_for_removed_from_disk.
+ continue;
+ };
+ let index_path = base_path.join(index_name);
+ for crate_name in Self::names_from(&index_path)? {
+ if crate_name.ends_with(".crate") {
+ // Missing files should have already been taken care of by
+ // update_db_for_removed.
+ let size = paths::metadata(index_path.join(&crate_name))?.len();
+ insert_stmt.execute(params![id, crate_name, size, now])?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+    /// Updates the database to add any files that are currently not tracked
+    /// (such as when they are downloaded by an older version of cargo).
+    ///
+    /// `id_table_name`/`id_column_name` identify the parent table and the
+    /// column in `table_name` that refers to it. When `populate_size` is
+    /// false, the `size` column is left NULL to be filled in later (see
+    /// `update_null_sizes`), avoiding the expensive disk scan here.
+    fn populate_untracked(
+        conn: &Connection,
+        now: Timestamp,
+        config: &Config,
+        id_table_name: &str,
+        id_column_name: &str,
+        table_name: &str,
+        base_path: &Path,
+        populate_size: bool,
+    ) -> CargoResult<()> {
+        let _p = crate::util::profile::start(format!("populate untracked {table_name}"));
+        trace!(target: "gc", "populating untracked files for {table_name}");
+        // Gather names (and make sure they are in the database).
+        let id_names = Self::names_from(&base_path)?;
+
+        // This SELECT is used to determine if the directory is already
+        // tracked. We don't want to do the expensive size computation unless
+        // necessary.
+        let mut select_stmt = conn.prepare_cached(&format!(
+            "SELECT 1 FROM {table_name}
+            WHERE {id_column_name} = ?1 AND name = ?2",
+        ))?;
+        let mut insert_stmt = conn.prepare_cached(&format!(
+            "INSERT INTO {table_name} ({id_column_name}, name, size, timestamp)
+            VALUES (?1, ?2, ?3, ?4)
+            ON CONFLICT DO NOTHING",
+        ))?;
+        let mut progress = Progress::with_style("Scanning", ProgressStyle::Ratio, config);
+        // Compute the size of any directory not in the database.
+        for id_name in id_names {
+            let Some(id) = Self::id_from_name(conn, id_table_name, &id_name)? else {
+                // The id is missing from the database. This should be resolved
+                // via update_db_parent_for_removed_from_disk.
+                continue;
+            };
+            let index_path = base_path.join(id_name);
+            let names = Self::names_from(&index_path)?;
+            let max = names.len();
+            for (i, name) in names.iter().enumerate() {
+                if select_stmt.exists(params![id, name])? {
+                    // Already tracked; skip the size computation.
+                    continue;
+                }
+                let dir_path = index_path.join(name);
+                if !dir_path.is_dir() {
+                    continue;
+                }
+                progress.tick(i, max, "")?;
+                let size = if populate_size {
+                    Some(du(&dir_path, table_name)?)
+                } else {
+                    None
+                };
+                insert_stmt.execute(params![id, name, size, now])?;
+            }
+        }
+        Ok(())
+    }
+
+    /// Fills in the `size` column where it is NULL.
+    ///
+    /// This can happen when something is added to disk by an older version of
+    /// cargo, and one of the mark functions marked it without knowing the
+    /// size.
+    ///
+    /// `update_db_for_removed` should be called before this is called.
+    fn update_null_sizes(
+        conn: &Connection,
+        config: &Config,
+        parent_table_name: &str,
+        id_column_name: &str,
+        table_name: &str,
+        base_path: &Path,
+    ) -> CargoResult<()> {
+        let _p = crate::util::profile::start(format!("update NULL sizes {table_name}"));
+        trace!(target: "gc", "updating NULL size information in {table_name}");
+        // Join against the parent table to recover the directory name each
+        // NULL-sized entry lives under.
+        let mut null_stmt = conn.prepare_cached(&format!(
+            "SELECT {table_name}.rowid, {table_name}.name, {parent_table_name}.name
+             FROM {table_name}, {parent_table_name}
+             WHERE {table_name}.size IS NULL AND {table_name}.{id_column_name} = {parent_table_name}.id",
+        ))?;
+        let mut update_stmt = conn.prepare_cached(&format!(
+            "UPDATE {table_name} SET size = ?1 WHERE rowid = ?2"
+        ))?;
+        let mut progress = Progress::with_style("Scanning", ProgressStyle::Ratio, config);
+        // Collect the SELECT results first so the statement is finished
+        // before the UPDATEs below run against the same table.
+        let rows: Vec<_> = null_stmt
+            .query_map([], |row| {
+                Ok((row.get_unwrap(0), row.get_unwrap(1), row.get_unwrap(2)))
+            })?
+            .collect();
+        let max = rows.len();
+        for (i, row) in rows.into_iter().enumerate() {
+            let (rowid, name, id_name): (i64, String, String) = row?;
+            let path = base_path.join(id_name).join(name);
+            progress.tick(i, max, "")?;
+            // Missing files should have already been taken care of by
+            // update_db_for_removed.
+            let size = du(&path, table_name)?;
+            update_stmt.execute(params![size, rowid])?;
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from either `registry_crate` or `registry_src` whose
+    /// last use is older than the given timestamp.
+    fn get_registry_items_to_clean_age(
+        conn: &Connection,
+        max_age: Timestamp,
+        table_name: &str,
+        base_path: &Path,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning {table_name} since {max_age:?}");
+        // DELETE ... RETURNING both removes the rows and reports what was
+        // removed, so the matching on-disk paths can be scheduled below.
+        let mut stmt = conn.prepare_cached(&format!(
+            "DELETE FROM {table_name} WHERE timestamp < ?1
+                RETURNING registry_id, name"
+        ))?;
+        let rows = stmt
+            .query_map(params![max_age], |row| {
+                let registry_id = row.get_unwrap(0);
+                let name: String = row.get_unwrap(1);
+                Ok((registry_id, name))
+            })?
+            .collect::<Result<Vec<_>, _>>()?;
+        // Convert registry_id to the encoded registry directory name.
+        let ids: Vec<_> = rows.iter().map(|r| r.0).collect();
+        let id_map = Self::get_id_map(conn, REGISTRY_INDEX_TABLE, &ids)?;
+        for (id, name) in rows {
+            let encoded_registry_name = &id_map[&id];
+            delete_paths.push(base_path.join(encoded_registry_name).join(name));
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from either `registry_crate` or `registry_src` in
+    /// order to keep the total size under the given max size.
+    fn get_registry_items_to_clean_size(
+        conn: &Connection,
+        max_size: u64,
+        table_name: &str,
+        base_path: &Path,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning {table_name} till under {max_size:?}");
+        // Fast path: nothing to do if the cache is already within budget.
+        let total_size: u64 = conn.query_row(
+            &format!("SELECT coalesce(SUM(size), 0) FROM {table_name}"),
+            [],
+            |row| row.get(0),
+        )?;
+        if total_size <= max_size {
+            return Ok(());
+        }
+        // This SQL statement selects all of the rows ordered by timestamp,
+        // and then uses a window function to keep a running total of the
+        // size. It selects all rows until the running total exceeds the
+        // threshold of the total number of bytes that we want to delete.
+        //
+        // The window function essentially computes an aggregate over all
+        // previous rows as it goes along. As long as the running size is
+        // below the total amount that we need to delete, it keeps picking
+        // more rows.
+        //
+        // The ORDER BY includes `name` mainly for test purposes so that
+        // entries with the same timestamp have deterministic behavior.
+        //
+        // The coalesce helps convert NULL to 0.
+        let mut stmt = conn.prepare(&format!(
+            "DELETE FROM {table_name} WHERE rowid IN \
+                (SELECT x.rowid FROM \
+                    (SELECT rowid, size, SUM(size) OVER \
+                        (ORDER BY timestamp, name ROWS UNBOUNDED PRECEDING) AS running_amount \
+                        FROM {table_name}) x \
+                    WHERE coalesce(x.running_amount, 0) - x.size < ?1) \
+                RETURNING registry_id, name;"
+        ))?;
+        let rows = stmt
+            .query_map(params![total_size - max_size], |row| {
+                let id = row.get_unwrap(0);
+                let name: String = row.get_unwrap(1);
+                Ok((id, name))
+            })?
+            .collect::<Result<Vec<_>, _>>()?;
+        // Convert registry_id to the encoded registry name, and join those.
+        let ids: Vec<_> = rows.iter().map(|r| r.0).collect();
+        let id_map = Self::get_id_map(conn, REGISTRY_INDEX_TABLE, &ids)?;
+        for (id, name) in rows {
+            let encoded_name = &id_map[&id];
+            delete_paths.push(base_path.join(encoded_name).join(name));
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from both `registry_crate` and `registry_src` in
+    /// order to keep the total size under the given max size.
+    fn get_registry_items_to_clean_size_both(
+        conn: &Connection,
+        max_size: u64,
+        base: &BasePaths,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning download till under {max_size:?}");
+
+        // This SQL statement selects from both registry_src and
+        // registry_crate so that sorting of timestamps incorporates both of
+        // them at the same time. It uses a const value of 1 or 2 as the first
+        // column so that the code below can determine which table the value
+        // came from.
+        let mut stmt = conn.prepare_cached(
+            "SELECT 1, registry_src.rowid, registry_src.name AS name, registry_index.name,
+                registry_src.size, registry_src.timestamp AS timestamp
+             FROM registry_src, registry_index
+             WHERE registry_src.registry_id = registry_index.id AND registry_src.size NOT NULL
+
+             UNION
+
+             SELECT 2, registry_crate.rowid, registry_crate.name AS name, registry_index.name,
+                registry_crate.size, registry_crate.timestamp AS timestamp
+             FROM registry_crate, registry_index
+             WHERE registry_crate.registry_id = registry_index.id
+
+             ORDER BY timestamp, name",
+        )?;
+        let mut delete_src_stmt =
+            conn.prepare_cached("DELETE FROM registry_src WHERE rowid = ?1")?;
+        let mut delete_crate_stmt =
+            conn.prepare_cached("DELETE FROM registry_crate WHERE rowid = ?1")?;
+        let rows = stmt
+            .query_map([], |row| {
+                Ok((
+                    row.get_unwrap(0),
+                    row.get_unwrap(1),
+                    row.get_unwrap(2),
+                    row.get_unwrap(3),
+                    row.get_unwrap(4),
+                ))
+            })?
+            .collect::<Result<Vec<(i64, i64, String, String, u64)>, _>>()?;
+        let mut total_size: u64 = rows.iter().map(|r| r.4).sum();
+        debug!(target: "gc", "total download cache size appears to be {total_size}");
+        // Rows are ordered oldest-first, so delete from the front until the
+        // remaining total fits within the budget.
+        for (table, rowid, name, index_name, size) in rows {
+            if total_size <= max_size {
+                break;
+            }
+            if table == 1 {
+                delete_paths.push(base.src.join(index_name).join(name));
+                delete_src_stmt.execute([rowid])?;
+            } else {
+                delete_paths.push(base.crate_dir.join(index_name).join(name));
+                delete_crate_stmt.execute([rowid])?;
+            }
+            // TODO: If delete crate, ensure src is also deleted.
+            total_size -= size;
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from the git cache, keeping the total size under
+    /// the given value.
+    ///
+    /// Paths are relative to the `git` directory in the cache directory.
+    fn get_git_items_to_clean_size(
+        conn: &Connection,
+        max_size: u64,
+        base: &BasePaths,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning git till under {max_size:?}");
+
+        // Collect all the sizes from git_db and git_checkouts, and then sort them by timestamp.
+        let mut stmt = conn.prepare_cached("SELECT rowid, name, timestamp FROM git_db")?;
+        let mut git_info = stmt
+            .query_map([], |row| {
+                let rowid: i64 = row.get_unwrap(0);
+                let name: String = row.get_unwrap(1);
+                let timestamp: Timestamp = row.get_unwrap(2);
+                // Size is added below so that the error doesn't need to be
+                // converted to a rusqlite error.
+                Ok((timestamp, rowid, None, name, 0))
+            })?
+            .collect::<Result<Vec<_>, _>>()?;
+        // git_db does not track sizes, so compute them from disk here.
+        for info in &mut git_info {
+            let size = cargo_util::du(&base.git_db.join(&info.3), &[])?;
+            info.4 = size;
+        }
+
+        let mut stmt = conn.prepare_cached(
+            "SELECT git_checkout.rowid, git_db.name, git_checkout.name,
+                git_checkout.size, git_checkout.timestamp
+             FROM git_checkout, git_db
+             WHERE git_checkout.git_id = git_db.id AND git_checkout.size NOT NULL",
+        )?;
+        let git_co_rows = stmt
+            .query_map([], |row| {
+                let rowid = row.get_unwrap(0);
+                let db_name: String = row.get_unwrap(1);
+                let name = row.get_unwrap(2);
+                let size = row.get_unwrap(3);
+                let timestamp = row.get_unwrap(4);
+                // `Some(db_name)` marks this entry as a checkout (dbs use `None`).
+                Ok((timestamp, rowid, Some(db_name), name, size))
+            })?
+            .collect::<Result<Vec<_>, _>>()?;
+        git_info.extend(git_co_rows);
+
+        // Sort by timestamp, and name. The name is included mostly for test
+        // purposes so that entries with the same timestamp have deterministic
+        // behavior. The order is descending (newest first) so that `pop()`
+        // below removes the oldest entry first.
+        git_info.sort_by(|a, b| (b.0, &b.3).cmp(&(a.0, &a.3)));
+
+        // Collect paths to delete.
+        let mut delete_db_stmt = conn.prepare_cached("DELETE FROM git_db WHERE rowid = ?1")?;
+        let mut delete_co_stmt =
+            conn.prepare_cached("DELETE FROM git_checkout WHERE rowid = ?1")?;
+        let mut total_size: u64 = git_info.iter().map(|r| r.4).sum();
+        debug!(target: "gc", "total git cache size appears to be {total_size}");
+        while let Some((_timestamp, rowid, db_name, name, size)) = git_info.pop() {
+            if total_size <= max_size {
+                break;
+            }
+            if let Some(db_name) = db_name {
+                delete_paths.push(base.git_co.join(db_name).join(name));
+                delete_co_stmt.execute([rowid])?;
+                total_size -= size;
+            } else {
+                total_size -= size;
+                delete_paths.push(base.git_db.join(&name));
+                delete_db_stmt.execute([rowid])?;
+                // If the db is deleted, then all the checkouts must be deleted.
+                let mut i = 0;
+                while i < git_info.len() {
+                    if git_info[i].2.as_deref() == Some(name.as_ref()) {
+                        let (_, rowid, db_name, name, size) = git_info.remove(i);
+                        delete_paths.push(base.git_co.join(db_name.unwrap()).join(name));
+                        delete_co_stmt.execute([rowid])?;
+                        total_size -= size;
+                    } else {
+                        i += 1;
+                    }
+                }
+            }
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from `registry_index` whose last use is older
+    /// than the given timestamp.
+    fn get_registry_index_to_clean(
+        conn: &Connection,
+        max_age: Timestamp,
+        base: &BasePaths,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning index since {max_age:?}");
+        // DELETE ... RETURNING removes the rows and reports the directory
+        // names so the on-disk paths can be scheduled below.
+        let mut stmt = conn.prepare_cached(
+            "DELETE FROM registry_index WHERE timestamp < ?1
+                RETURNING name",
+        )?;
+        let mut rows = stmt.query([max_age])?;
+        while let Some(row) = rows.next()? {
+            let name: String = row.get_unwrap(0);
+            delete_paths.push(base.index.join(&name));
+            // Also delete .crate and src directories, since by definition
+            // they cannot be used without their index.
+            delete_paths.push(base.src.join(&name));
+            delete_paths.push(base.crate_dir.join(&name));
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from `git_checkout` whose last use is
+    /// older than the given timestamp.
+    fn get_git_co_items_to_clean(
+        conn: &Connection,
+        max_age: Timestamp,
+        base_path: &Path,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning git co since {max_age:?}");
+        // DELETE ... RETURNING removes the rows and reports what was removed.
+        let mut stmt = conn.prepare_cached(
+            "DELETE FROM git_checkout WHERE timestamp < ?1
+                RETURNING git_id, name",
+        )?;
+        let rows = stmt
+            .query_map(params![max_age], |row| {
+                let git_id = row.get_unwrap(0);
+                let name: String = row.get_unwrap(1);
+                Ok((git_id, name))
+            })?
+            .collect::<Result<Vec<_>, _>>()?;
+        // Convert git_id back to the encoded git db directory name.
+        let ids: Vec<_> = rows.iter().map(|r| r.0).collect();
+        let id_map = Self::get_id_map(conn, GIT_DB_TABLE, &ids)?;
+        for (id, name) in rows {
+            let encoded_git_name = &id_map[&id];
+            delete_paths.push(base_path.join(encoded_git_name).join(name));
+        }
+        Ok(())
+    }
+
+    /// Adds paths to delete from `git_db` whose last use is older than the
+    /// given timestamp.
+    ///
+    /// (The previous doc comment claimed this enforced a max size; the
+    /// function actually deletes by age via `max_age`.)
+    fn get_git_db_items_to_clean(
+        conn: &Connection,
+        max_age: Timestamp,
+        base: &BasePaths,
+        delete_paths: &mut Vec<PathBuf>,
+    ) -> CargoResult<()> {
+        debug!(target: "gc", "cleaning git db since {max_age:?}");
+        let mut stmt = conn.prepare_cached(
+            "DELETE FROM git_db WHERE timestamp < ?1
+                RETURNING name",
+        )?;
+        let mut rows = stmt.query([max_age])?;
+        while let Some(row) = rows.next()? {
+            let name: String = row.get_unwrap(0);
+            delete_paths.push(base.git_db.join(&name));
+            // Also delete checkout directories, since by definition they
+            // cannot be used without their db.
+            delete_paths.push(base.git_co.join(&name));
+        }
+        Ok(())
+    }
+}
+
+/// Helper to generate the upsert for the parent tables.
+///
+/// This handles checking if the row already exists, and only updates the
+/// timestamp if it hasn't been updated recently. This also handles keeping
+/// a cached map of the `id` value.
+///
+/// Note: the expansion ends with `return Ok(());`, so this macro must be
+/// invoked as the entire body of a function returning `CargoResult<()>`.
+///
+/// Unfortunately it is a bit tricky to share this code without a macro.
+macro_rules! insert_or_update_parent {
+    ($self:expr, $conn:expr, $table_name:expr, $timestamps_field:ident, $keys_field:ident, $encoded_name:ident) => {
+        let mut select_stmt = $conn.prepare_cached(concat!(
+            "SELECT id, timestamp FROM ",
+            $table_name,
+            " WHERE name = ?1"
+        ))?;
+        let mut insert_stmt = $conn.prepare_cached(concat!(
+            "INSERT INTO ",
+            $table_name,
+            " (name, timestamp)
+            VALUES (?1, ?2)
+            ON CONFLICT DO UPDATE SET timestamp=excluded.timestamp
+            RETURNING id",
+        ))?;
+        let mut update_stmt = $conn.prepare_cached(concat!(
+            "UPDATE ",
+            $table_name,
+            " SET timestamp = ?1 WHERE id = ?2"
+        ))?;
+        // Drain the deferred timestamps so the cache is left empty.
+        for (parent, new_timestamp) in std::mem::take(&mut $self.$timestamps_field) {
+            trace!(target: "gc",
+                concat!("insert ", $table_name, " {:?} {}"),
+                parent,
+                new_timestamp
+            );
+            let mut rows = select_stmt.query([parent.$encoded_name])?;
+            let id = if let Some(row) = rows.next()? {
+                let id: ParentId = row.get_unwrap(0);
+                let timestamp: Timestamp = row.get_unwrap(1);
+                // Skip the write if the stored timestamp is already recent
+                // enough; this keeps database churn down.
+                if timestamp < new_timestamp - UPDATE_RESOLUTION {
+                    update_stmt.execute(params![new_timestamp, id])?;
+                }
+                id
+            } else {
+                insert_stmt.query_row(params![parent.$encoded_name, new_timestamp], |row| {
+                    row.get(0)
+                })?
+            };
+            // Cache the id; if it was already cached it must agree.
+            match $self.$keys_field.entry(parent.$encoded_name) {
+                hash_map::Entry::Occupied(o) => {
+                    assert_eq!(*o.get(), id);
+                }
+                hash_map::Entry::Vacant(v) => {
+                    v.insert(id);
+                }
+            }
+        }
+        return Ok(());
+    };
+}
+
+/// This is a cache of modifications that will be saved to disk all at once
+/// via the [`DeferredGlobalLastUse::save`] method.
+///
+/// This is here to improve performance.
+#[derive(Debug)]
+pub struct DeferredGlobalLastUse {
+    /// Cache of registry keys, used for faster fetching.
+    ///
+    /// The key is the registry name (which is its directory name) and the
+    /// value is the `id` in the `registry_index` table.
+    registry_keys: HashMap<InternedString, ParentId>,
+    /// Cache of git keys, used for faster fetching.
+    ///
+    /// The key is the git db name (which is its directory name) and the value
+    /// is the `id` in the `git_db` table.
+    git_keys: HashMap<InternedString, ParentId>,
+
+    /// New registry index entries to insert.
+    registry_index_timestamps: HashMap<RegistryIndex, Timestamp>,
+    /// New registry `.crate` entries to insert.
+    registry_crate_timestamps: HashMap<RegistryCrate, Timestamp>,
+    /// New registry src directory entries to insert.
+    registry_src_timestamps: HashMap<RegistrySrc, Timestamp>,
+    /// New git db entries to insert.
+    git_db_timestamps: HashMap<GitDb, Timestamp>,
+    /// New git checkout entries to insert.
+    git_checkout_timestamps: HashMap<GitCheckout, Timestamp>,
+    /// This is used so that a warning about failing to update the database is
+    /// only displayed once (per `DeferredGlobalLastUse` instance).
+    save_err_has_warned: bool,
+    /// The current time, used to improve performance to avoid accessing the
+    /// clock hundreds of times.
+    now: Timestamp,
+}
+
+impl DeferredGlobalLastUse {
+    /// Creates an empty deferred tracker, capturing the current time once.
+    pub fn new() -> DeferredGlobalLastUse {
+        DeferredGlobalLastUse {
+            registry_keys: HashMap::new(),
+            git_keys: HashMap::new(),
+            registry_index_timestamps: HashMap::new(),
+            registry_crate_timestamps: HashMap::new(),
+            registry_src_timestamps: HashMap::new(),
+            git_db_timestamps: HashMap::new(),
+            git_checkout_timestamps: HashMap::new(),
+            save_err_has_warned: false,
+            now: now(),
+        }
+    }
+
+    /// Returns whether there are any pending entries waiting to be flushed
+    /// to the database.
+    pub fn is_empty(&self) -> bool {
+        self.registry_index_timestamps.is_empty()
+            && self.registry_crate_timestamps.is_empty()
+            && self.registry_src_timestamps.is_empty()
+            && self.git_db_timestamps.is_empty()
+            && self.git_checkout_timestamps.is_empty()
+    }
+
+    /// Discards all pending entries without saving them.
+    fn clear(&mut self) {
+        self.registry_index_timestamps.clear();
+        self.registry_crate_timestamps.clear();
+        self.registry_src_timestamps.clear();
+        self.git_db_timestamps.clear();
+        self.git_checkout_timestamps.clear();
+    }
+
+    /// Indicates the given [`RegistryIndex`] has been used right now.
+    pub fn mark_registry_index_used(&mut self, registry_index: RegistryIndex) {
+        self.mark_registry_index_used_stamp(registry_index, None);
+    }
+
+    /// Indicates the given [`RegistryCrate`] has been used right now.
+    ///
+    /// Also implicitly marks the index used, too.
+    pub fn mark_registry_crate_used(&mut self, registry_crate: RegistryCrate) {
+        self.mark_registry_crate_used_stamp(registry_crate, None);
+    }
+
+    /// Indicates the given [`RegistrySrc`] has been used right now.
+    ///
+    /// Also implicitly marks the index used, too.
+    pub fn mark_registry_src_used(&mut self, registry_src: RegistrySrc) {
+        self.mark_registry_src_used_stamp(registry_src, None);
+    }
+
+    /// Indicates the given [`GitCheckout`] has been used right now.
+    ///
+    /// Also implicitly marks the git db used, too.
+    pub fn mark_git_checkout_used(&mut self, git_checkout: GitCheckout) {
+        self.mark_git_checkout_used_stamp(git_checkout, None);
+    }
+
+    /// Indicates the given [`RegistryIndex`] has been used with the given
+    /// time (or "now" if `None`).
+    pub fn mark_registry_index_used_stamp(
+        &mut self,
+        registry_index: RegistryIndex,
+        timestamp: Option<&SystemTime>,
+    ) {
+        let timestamp = timestamp.map_or(self.now, to_timestamp);
+        self.registry_index_timestamps
+            .insert(registry_index, timestamp);
+    }
+
+    /// Indicates the given [`RegistryCrate`] has been used with the given
+    /// time (or "now" if `None`).
+    ///
+    /// Also implicitly marks the index used, too.
+    pub fn mark_registry_crate_used_stamp(
+        &mut self,
+        registry_crate: RegistryCrate,
+        timestamp: Option<&SystemTime>,
+    ) {
+        let timestamp = timestamp.map_or(self.now, to_timestamp);
+        let index = RegistryIndex {
+            encoded_registry_name: registry_crate.encoded_registry_name,
+        };
+        self.registry_index_timestamps.insert(index, timestamp);
+        self.registry_crate_timestamps
+            .insert(registry_crate, timestamp);
+    }
+
+    /// Indicates the given [`RegistrySrc`] has been used with the given
+    /// time (or "now" if `None`).
+    ///
+    /// Also implicitly marks the index used, too.
+    pub fn mark_registry_src_used_stamp(
+        &mut self,
+        registry_src: RegistrySrc,
+        timestamp: Option<&SystemTime>,
+    ) {
+        let timestamp = timestamp.map_or(self.now, to_timestamp);
+        let index = RegistryIndex {
+            encoded_registry_name: registry_src.encoded_registry_name,
+        };
+        self.registry_index_timestamps.insert(index, timestamp);
+        self.registry_src_timestamps.insert(registry_src, timestamp);
+    }
+
+    /// Indicates the given [`GitCheckout`] has been used with the given
+    /// time (or "now" if `None`).
+    ///
+    /// Also implicitly marks the git db used, too.
+    pub fn mark_git_checkout_used_stamp(
+        &mut self,
+        git_checkout: GitCheckout,
+        timestamp: Option<&SystemTime>,
+    ) {
+        let timestamp = timestamp.map_or(self.now, to_timestamp);
+        let db = GitDb {
+            encoded_git_name: git_checkout.encoded_git_name,
+        };
+        self.git_db_timestamps.insert(db, timestamp);
+        self.git_checkout_timestamps.insert(git_checkout, timestamp);
+    }
+
+    /// Saves all of the deferred information to the database.
+    ///
+    /// This will also clear the state of `self`.
+    pub fn save(&mut self, tracker: &mut GlobalCacheTracker) -> CargoResult<()> {
+        let _p = crate::util::profile::start("saving last-use data");
+        trace!(target: "gc", "saving last-use data");
+        if self.is_empty() {
+            return Ok(());
+        }
+        let tx = tracker.conn.transaction()?;
+        // These must run before the ones that refer to their IDs.
+        self.insert_registry_index_from_cache(&tx)?;
+        self.insert_git_db_from_cache(&tx)?;
+        self.insert_registry_crate_from_cache(&tx)?;
+        self.insert_registry_src_from_cache(&tx)?;
+        self.insert_git_checkout_from_cache(&tx)?;
+        tx.commit()?;
+        trace!(target: "gc", "last-use save complete");
+        Ok(())
+    }
+
+    /// Variant of [`DeferredGlobalLastUse::save`] that does not return an
+    /// error.
+    ///
+    /// This will log or display a warning to the user.
+    pub fn save_no_error(&mut self, config: &Config) {
+        if let Err(e) = self.save_with_config(config) {
+            // Because there is an assertion in auto-gc that checks if this is
+            // empty, be sure to clear it so that assertion doesn't fail.
+            self.clear();
+            if !self.save_err_has_warned {
+                if is_silent_error(&e) && config.shell().verbosity() != Verbosity::Verbose {
+                    tracing::warn!("failed to save last-use data: {e:?}");
+                } else {
+                    crate::display_warning_with_error(
+                        "failed to save last-use data\n\
+                        This may prevent cargo from accurately tracking what is being \
+                        used in its global cache. This information is used for \
+                        automatically removing unused data in the cache.",
+                        &e,
+                        &mut config.shell(),
+                    );
+                    self.save_err_has_warned = true;
+                }
+            }
+        }
+    }
+
+    /// Fetches the global cache tracker from `config` and saves into it.
+    fn save_with_config(&mut self, config: &Config) -> CargoResult<()> {
+        let mut tracker = config.global_cache_tracker()?;
+        self.save(&mut tracker)
+    }
+
+    /// Flushes all of the `registry_index_timestamps` to the database,
+    /// clearing `registry_index_timestamps`.
+    fn insert_registry_index_from_cache(&mut self, conn: &Connection) -> CargoResult<()> {
+        insert_or_update_parent!(
+            self,
+            conn,
+            "registry_index",
+            registry_index_timestamps,
+            registry_keys,
+            encoded_registry_name
+        );
+    }
+
+    /// Flushes all of the `git_db_timestamps` to the database,
+    /// clearing `git_db_timestamps`.
+    fn insert_git_db_from_cache(&mut self, conn: &Connection) -> CargoResult<()> {
+        insert_or_update_parent!(
+            self,
+            conn,
+            "git_db",
+            git_db_timestamps,
+            git_keys,
+            encoded_git_name
+        );
+    }
+
+    /// Flushes all of the `registry_crate_timestamps` to the database,
+    /// clearing `registry_crate_timestamps`.
+    fn insert_registry_crate_from_cache(&mut self, conn: &Connection) -> CargoResult<()> {
+        let registry_crate_timestamps = std::mem::take(&mut self.registry_crate_timestamps);
+        for (registry_crate, timestamp) in registry_crate_timestamps {
+            trace!(target: "gc", "insert registry crate {registry_crate:?} {timestamp}");
+            let registry_id = self.registry_id(conn, registry_crate.encoded_registry_name)?;
+            let mut stmt = conn.prepare_cached(
+                "INSERT INTO registry_crate (registry_id, name, size, timestamp)
+                VALUES (?1, ?2, ?3, ?4)
+                ON CONFLICT DO UPDATE SET timestamp=excluded.timestamp
+                    WHERE timestamp < ?5
+                ",
+            )?;
+            stmt.execute(params![
+                registry_id,
+                registry_crate.crate_filename,
+                registry_crate.size,
+                timestamp,
+                timestamp - UPDATE_RESOLUTION
+            ])?;
+        }
+        Ok(())
+    }
+
+    /// Flushes all of the `registry_src_timestamps` to the database,
+    /// clearing `registry_src_timestamps`.
+    fn insert_registry_src_from_cache(&mut self, conn: &Connection) -> CargoResult<()> {
+        let registry_src_timestamps = std::mem::take(&mut self.registry_src_timestamps);
+        for (registry_src, timestamp) in registry_src_timestamps {
+            trace!(target: "gc", "insert registry src {registry_src:?} {timestamp}");
+            let registry_id = self.registry_id(conn, registry_src.encoded_registry_name)?;
+            let mut stmt = conn.prepare_cached(
+                "INSERT INTO registry_src (registry_id, name, size, timestamp)
+                VALUES (?1, ?2, ?3, ?4)
+                ON CONFLICT DO UPDATE SET timestamp=excluded.timestamp
+                    WHERE timestamp < ?5
+                ",
+            )?;
+            stmt.execute(params![
+                registry_id,
+                registry_src.package_dir,
+                registry_src.size,
+                timestamp,
+                timestamp - UPDATE_RESOLUTION
+            ])?;
+        }
+
+        Ok(())
+    }
+
+    /// Flushes all of the `git_checkout_timestamps` to the database,
+    /// clearing `git_checkout_timestamps`.
+    fn insert_git_checkout_from_cache(&mut self, conn: &Connection) -> CargoResult<()> {
+        let git_checkout_timestamps = std::mem::take(&mut self.git_checkout_timestamps);
+        for (git_checkout, timestamp) in git_checkout_timestamps {
+            let git_id = self.git_id(conn, git_checkout.encoded_git_name)?;
+            let mut stmt = conn.prepare_cached(
+                "INSERT INTO git_checkout (git_id, name, size, timestamp)
+                VALUES (?1, ?2, ?3, ?4)
+                ON CONFLICT DO UPDATE SET timestamp=excluded.timestamp
+                    WHERE timestamp < ?5",
+            )?;
+            stmt.execute(params![
+                git_id,
+                git_checkout.short_name,
+                git_checkout.size,
+                timestamp,
+                timestamp - UPDATE_RESOLUTION
+            ])?;
+        }
+
+        Ok(())
+    }
+
+    /// Returns the numeric ID of the registry, either fetching from the local
+    /// cache, or getting it from the database.
+    ///
+    /// It is an error if the registry does not exist.
+    fn registry_id(
+        &mut self,
+        conn: &Connection,
+        encoded_registry_name: InternedString,
+    ) -> CargoResult<ParentId> {
+        match self.registry_keys.get(&encoded_registry_name) {
+            Some(i) => Ok(*i),
+            None => {
+                let Some(id) = GlobalCacheTracker::id_from_name(
+                    conn,
+                    REGISTRY_INDEX_TABLE,
+                    &encoded_registry_name,
+                )?
+                else {
+                    bail!("expected registry_index {encoded_registry_name} to exist, but wasn't found");
+                };
+                self.registry_keys.insert(encoded_registry_name, id);
+                Ok(id)
+            }
+        }
+    }
+
+    /// Returns the numeric ID of the git db, either fetching from the local
+    /// cache, or getting it from the database.
+    ///
+    /// It is an error if the git db does not exist.
+    fn git_id(
+        &mut self,
+        conn: &Connection,
+        encoded_git_name: InternedString,
+    ) -> CargoResult<ParentId> {
+        match self.git_keys.get(&encoded_git_name) {
+            Some(i) => Ok(*i),
+            None => {
+                let Some(id) =
+                    GlobalCacheTracker::id_from_name(conn, GIT_DB_TABLE, &encoded_git_name)?
+                else {
+                    bail!("expected git_db {encoded_git_name} to exist, but wasn't found")
+                };
+                self.git_keys.insert(encoded_git_name, id);
+                Ok(id)
+            }
+        }
+    }
+}
+
+/// Converts a [`SystemTime`] to a [`Timestamp`] which can be stored in the database.
+///
+/// Panics if `t` is earlier than the Unix epoch, which would indicate a
+/// broken system clock.
+fn to_timestamp(t: &SystemTime) -> Timestamp {
+    let since_epoch = t
+        .duration_since(SystemTime::UNIX_EPOCH)
+        .expect("invalid clock");
+    since_epoch.as_secs()
+}
+
+/// Returns the current time.
+///
+/// This supports pretending that the time is different for testing using an
+/// environment variable.
+///
+/// If possible, try to avoid calling this too often since accessing clocks
+/// can be a little slow on some systems.
+#[allow(clippy::disallowed_methods)]
+fn now() -> Timestamp {
+    if let Ok(fake_now) = std::env::var("__CARGO_TEST_LAST_USE_NOW") {
+        // Test override: the variable holds a decimal timestamp; a malformed
+        // value is a test-harness bug, so panicking is acceptable.
+        fake_now.parse().unwrap()
+    } else {
+        to_timestamp(&SystemTime::now())
+    }
+}
+
+/// Returns whether or not the given error should cause a warning to be
+/// displayed to the user.
+///
+/// In some situations, like a read-only global cache, we don't want to spam
+/// the user with a warning. I think once cargo has controllable lints, I
+/// think we should consider changing this to always warn, but give the user
+/// an option to silence the warning.
+pub fn is_silent_error(e: &anyhow::Error) -> bool {
+    match e.downcast_ref::<rusqlite::Error>() {
+        // A cache we cannot open or cannot write to is treated as
+        // best-effort rather than warn-worthy.
+        Some(db_err) => matches!(
+            db_err.sqlite_error_code(),
+            Some(ErrorCode::CannotOpen | ErrorCode::ReadOnly)
+        ),
+        None => false,
+    }
+}
+
+/// Returns the disk usage for a git checkout directory.
+///
+/// The `.git` directory is excluded from the computation.
+pub fn du_git_checkout(path: &Path) -> CargoResult<u64> {
+    // !.git is used because clones typically use hardlinks for the git
+    // contents. TODO: Verify behavior on Windows.
+    // TODO: Or even better, switch to worktrees, and remove this.
+    // Note: `path` is already a `&Path`; borrowing it again (`&path`)
+    // trips clippy::needless_borrow, so pass it directly.
+    cargo_util::du(path, &["!.git"])
+}
+
+/// Returns the disk usage of `path`, applying the exclusion rules
+/// appropriate for the table the entry belongs to.
+///
+/// Git checkouts exclude `.git` (see [`du_git_checkout`]); everything else
+/// is measured in full.
+fn du(path: &Path, table_name: &str) -> CargoResult<u64> {
+    if table_name == GIT_CO_TABLE {
+        du_git_checkout(path)
+    } else {
+        // `path` is already a reference; pass it directly (clippy::needless_borrow).
+        cargo_util::du(path, &[])
+    }
+}
diff --git a/src/tools/cargo/src/cargo/core/manifest.rs b/src/tools/cargo/src/cargo/core/manifest.rs
index 66af40c10..0468caa96 100644
--- a/src/tools/cargo/src/cargo/core/manifest.rs
+++ b/src/tools/cargo/src/cargo/core/manifest.rs
@@ -18,8 +18,9 @@ use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
use crate::core::{Edition, Feature, Features, WorkspaceConfig};
use crate::util::errors::*;
use crate::util::interning::InternedString;
-use crate::util::toml::schema::{TomlManifest, TomlProfiles};
-use crate::util::{short_hash, Config, Filesystem, RustVersion};
+use crate::util::{short_hash, Config, Filesystem};
+use crate::util_schemas::manifest::RustVersion;
+use crate::util_schemas::manifest::{TomlManifest, TomlProfiles};
pub enum EitherManifest {
Real(Manifest),
diff --git a/src/tools/cargo/src/cargo/core/mod.rs b/src/tools/cargo/src/cargo/core/mod.rs
index 2add52d5c..f3b3142fa 100644
--- a/src/tools/cargo/src/cargo/core/mod.rs
+++ b/src/tools/cargo/src/cargo/core/mod.rs
@@ -4,21 +4,23 @@ pub use self::manifest::{EitherManifest, VirtualManifest};
pub use self::manifest::{Manifest, Target, TargetKind};
pub use self::package::{Package, PackageSet};
pub use self::package_id::PackageId;
-pub use self::package_id_spec::PackageIdSpec;
+pub use self::package_id_spec::PackageIdSpecQuery;
pub use self::registry::Registry;
pub use self::resolver::{Resolve, ResolveVersion};
pub use self::shell::{Shell, Verbosity};
-pub use self::source_id::{GitReference, SourceId};
+pub use self::source_id::SourceId;
pub use self::summary::{FeatureMap, FeatureValue, Summary};
pub use self::workspace::{
find_workspace_root, resolve_relative_path, MaybePackage, Workspace, WorkspaceConfig,
WorkspaceRootConfig,
};
-pub use crate::util::toml::schema::InheritableFields;
+pub use crate::util_schemas::core::{GitReference, PackageIdSpec, SourceKind};
pub mod compiler;
pub mod dependency;
pub mod features;
+pub mod gc;
+pub mod global_cache_tracker;
pub mod manifest;
pub mod package;
pub mod package_id;
diff --git a/src/tools/cargo/src/cargo/core/package.rs b/src/tools/cargo/src/cargo/core/package.rs
index 274798474..082544882 100644
--- a/src/tools/cargo/src/cargo/core/package.rs
+++ b/src/tools/cargo/src/cargo/core/package.rs
@@ -31,8 +31,9 @@ use crate::util::network::http::http_handle_and_timeout;
use crate::util::network::http::HttpTimeout;
use crate::util::network::retry::{Retry, RetryResult};
use crate::util::network::sleep::SleepTracker;
-use crate::util::RustVersion;
+use crate::util::toml::prepare_for_publish;
use crate::util::{self, internal, Config, Progress, ProgressStyle};
+use crate::util_schemas::manifest::RustVersion;
pub const MANIFEST_PREAMBLE: &str = "\
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
@@ -197,10 +198,7 @@ impl Package {
}
pub fn to_registry_toml(&self, ws: &Workspace<'_>) -> CargoResult<String> {
- let manifest = self
- .manifest()
- .original()
- .prepare_for_publish(ws, self.root())?;
+ let manifest = prepare_for_publish(self.manifest().original(), ws, self.root())?;
let toml = toml::to_string_pretty(&manifest)?;
Ok(format!("{}\n{}", MANIFEST_PREAMBLE, toml))
}
@@ -491,6 +489,10 @@ impl<'cfg> PackageSet<'cfg> {
pkgs.push(downloads.wait()?);
}
downloads.success = true;
+ drop(downloads);
+
+ let mut deferred = self.config.deferred_global_last_use()?;
+ deferred.save_no_error(self.config);
Ok(pkgs)
}
diff --git a/src/tools/cargo/src/cargo/core/package_id.rs b/src/tools/cargo/src/cargo/core/package_id.rs
index 3e9c03a47..37b367218 100644
--- a/src/tools/cargo/src/cargo/core/package_id.rs
+++ b/src/tools/cargo/src/cargo/core/package_id.rs
@@ -10,6 +10,7 @@ use std::sync::OnceLock;
use serde::de;
use serde::ser;
+use crate::core::PackageIdSpec;
use crate::core::SourceId;
use crate::util::interning::InternedString;
use crate::util::CargoResult;
@@ -88,7 +89,7 @@ impl<'de> de::Deserialize<'de> for PackageId {
strip_parens(rest).ok_or_else(|| de::Error::custom("invalid serialized PackageId"))?;
let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;
- Ok(PackageId::pure(name, version, source_id))
+ Ok(PackageId::new(name, version, source_id))
}
}
@@ -123,16 +124,16 @@ impl Hash for PackageId {
}
impl PackageId {
- pub fn new(
+ pub fn try_new(
name: impl Into<InternedString>,
version: &str,
sid: SourceId,
) -> CargoResult<PackageId> {
let v = version.parse()?;
- Ok(PackageId::pure(name.into(), v, sid))
+ Ok(PackageId::new(name.into(), v, sid))
}
- pub fn pure(name: InternedString, version: semver::Version, source_id: SourceId) -> PackageId {
+ pub fn new(name: InternedString, version: semver::Version, source_id: SourceId) -> PackageId {
let inner = PackageIdInner {
name,
version,
@@ -161,7 +162,7 @@ impl PackageId {
}
pub fn with_source_id(self, source: SourceId) -> PackageId {
- PackageId::pure(self.inner.name, self.inner.version.clone(), source)
+ PackageId::new(self.inner.name, self.inner.version.clone(), source)
}
pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Self {
@@ -186,6 +187,15 @@ impl PackageId {
pub fn tarball_name(&self) -> String {
format!("{}-{}.crate", self.name(), self.version())
}
+
+ /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `PartialVersion` and `Url`
+ /// fields filled in.
+ pub fn to_spec(&self) -> PackageIdSpec {
+ PackageIdSpec::new(String::from(self.name().as_str()))
+ .with_version(self.version().clone().into())
+ .with_url(self.source_id().url().clone())
+ .with_kind(self.source_id().kind().clone())
+ }
}
pub struct PackageIdStableHash<'a>(PackageId, &'a Path);
@@ -242,16 +252,17 @@ mod tests {
let loc = CRATES_IO_INDEX.into_url().unwrap();
let repo = SourceId::for_registry(&loc).unwrap();
- assert!(PackageId::new("foo", "1.0", repo).is_err());
- assert!(PackageId::new("foo", "1", repo).is_err());
- assert!(PackageId::new("foo", "bar", repo).is_err());
- assert!(PackageId::new("foo", "", repo).is_err());
+ assert!(PackageId::try_new("foo", "1.0", repo).is_err());
+ assert!(PackageId::try_new("foo", "1", repo).is_err());
+ assert!(PackageId::try_new("foo", "bar", repo).is_err());
+ assert!(PackageId::try_new("foo", "", repo).is_err());
}
#[test]
fn display() {
let loc = CRATES_IO_INDEX.into_url().unwrap();
- let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap();
+ let pkg_id =
+ PackageId::try_new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap();
assert_eq!("foo v1.0.0", pkg_id.to_string());
}
@@ -259,8 +270,8 @@ mod tests {
fn unequal_build_metadata() {
let loc = CRATES_IO_INDEX.into_url().unwrap();
let repo = SourceId::for_registry(&loc).unwrap();
- let first = PackageId::new("foo", "0.0.1+first", repo).unwrap();
- let second = PackageId::new("foo", "0.0.1+second", repo).unwrap();
+ let first = PackageId::try_new("foo", "0.0.1+first", repo).unwrap();
+ let second = PackageId::try_new("foo", "0.0.1+second", repo).unwrap();
assert_ne!(first, second);
assert_ne!(first.inner, second.inner);
}
diff --git a/src/tools/cargo/src/cargo/core/package_id_spec.rs b/src/tools/cargo/src/cargo/core/package_id_spec.rs
index c617c1f7a..35c5437ae 100644
--- a/src/tools/cargo/src/cargo/core/package_id_spec.rs
+++ b/src/tools/cargo/src/cargo/core/package_id_spec.rs
@@ -1,88 +1,30 @@
use std::collections::HashMap;
-use std::fmt;
use anyhow::{bail, Context as _};
-use semver::Version;
-use serde::{de, ser};
-use url::Url;
use crate::core::PackageId;
+use crate::core::PackageIdSpec;
use crate::util::edit_distance;
use crate::util::errors::CargoResult;
-use crate::util::{validate_package_name, IntoUrl};
-use crate::util_semver::PartialVersion;
-/// Some or all of the data required to identify a package:
-///
-/// 1. the package name (a `String`, required)
-/// 2. the package version (a `Version`, optional)
-/// 3. the package source (a `Url`, optional)
-///
-/// If any of the optional fields are omitted, then the package ID may be ambiguous, there may be
-/// more than one package/version/url combo that will match. However, often just the name is
-/// sufficient to uniquely define a package ID.
-#[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
-pub struct PackageIdSpec {
- name: String,
- version: Option<PartialVersion>,
- url: Option<Url>,
-}
+pub trait PackageIdSpecQuery {
+ /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)`
+ fn query_str<I>(spec: &str, i: I) -> CargoResult<PackageId>
+ where
+ I: IntoIterator<Item = PackageId>;
-impl PackageIdSpec {
- /// Parses a spec string and returns a `PackageIdSpec` if the string was valid.
- ///
- /// # Examples
- /// Some examples of valid strings
- ///
- /// ```
- /// use cargo::core::PackageIdSpec;
- ///
- /// let specs = vec![
- /// "https://crates.io/foo",
- /// "https://crates.io/foo#1.2.3",
- /// "https://crates.io/foo#bar:1.2.3",
- /// "https://crates.io/foo#bar@1.2.3",
- /// "foo",
- /// "foo:1.2.3",
- /// "foo@1.2.3",
- /// ];
- /// for spec in specs {
- /// assert!(PackageIdSpec::parse(spec).is_ok());
- /// }
- pub fn parse(spec: &str) -> CargoResult<PackageIdSpec> {
- if spec.contains("://") {
- if let Ok(url) = spec.into_url() {
- return PackageIdSpec::from_url(url);
- }
- } else if spec.contains('/') || spec.contains('\\') {
- let abs = std::env::current_dir().unwrap_or_default().join(spec);
- if abs.exists() {
- let maybe_url = Url::from_file_path(abs)
- .map_or_else(|_| "a file:// URL".to_string(), |url| url.to_string());
- bail!(
- "package ID specification `{}` looks like a file path, \
- maybe try {}",
- spec,
- maybe_url
- );
- }
- }
- let mut parts = spec.splitn(2, [':', '@']);
- let name = parts.next().unwrap();
- let version = match parts.next() {
- Some(version) => Some(version.parse::<PartialVersion>()?),
- None => None,
- };
- validate_package_name(name, "pkgid", "")?;
- Ok(PackageIdSpec {
- name: String::from(name),
- version,
- url: None,
- })
- }
+ /// Checks whether the given `PackageId` matches the `PackageIdSpec`.
+ fn matches(&self, package_id: PackageId) -> bool;
- /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)`
- pub fn query_str<I>(spec: &str, i: I) -> CargoResult<PackageId>
+ /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or
+ /// more are found, then this returns an error.
+ fn query<I>(&self, i: I) -> CargoResult<PackageId>
+ where
+ I: IntoIterator<Item = PackageId>;
+}
+
+impl PackageIdSpecQuery for PackageIdSpec {
+ fn query_str<I>(spec: &str, i: I) -> CargoResult<PackageId>
where
I: IntoIterator<Item = PackageId>,
{
@@ -94,94 +36,25 @@ impl PackageIdSpec {
spec.query(i)
}
- /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `PartialVersion` and `Url`
- /// fields filled in.
- pub fn from_package_id(package_id: PackageId) -> PackageIdSpec {
- PackageIdSpec {
- name: String::from(package_id.name().as_str()),
- version: Some(package_id.version().clone().into()),
- url: Some(package_id.source_id().url().clone()),
- }
- }
-
- /// Tries to convert a valid `Url` to a `PackageIdSpec`.
- fn from_url(mut url: Url) -> CargoResult<PackageIdSpec> {
- if url.query().is_some() {
- bail!("cannot have a query string in a pkgid: {}", url)
- }
- let frag = url.fragment().map(|s| s.to_owned());
- url.set_fragment(None);
- let (name, version) = {
- let mut path = url
- .path_segments()
- .ok_or_else(|| anyhow::format_err!("pkgid urls must have a path: {}", url))?;
- let path_name = path.next_back().ok_or_else(|| {
- anyhow::format_err!(
- "pkgid urls must have at least one path \
- component: {}",
- url
- )
- })?;
- match frag {
- Some(fragment) => match fragment.split_once([':', '@']) {
- Some((name, part)) => {
- let version = part.parse::<PartialVersion>()?;
- (String::from(name), Some(version))
- }
- None => {
- if fragment.chars().next().unwrap().is_alphabetic() {
- (String::from(fragment.as_str()), None)
- } else {
- let version = fragment.parse::<PartialVersion>()?;
- (String::from(path_name), Some(version))
- }
- }
- },
- None => (String::from(path_name), None),
- }
- };
- Ok(PackageIdSpec {
- name,
- version,
- url: Some(url),
- })
- }
-
- pub fn name(&self) -> &str {
- self.name.as_str()
- }
-
- /// Full `semver::Version`, if present
- pub fn version(&self) -> Option<Version> {
- self.version.as_ref().and_then(|v| v.to_version())
- }
-
- pub fn partial_version(&self) -> Option<&PartialVersion> {
- self.version.as_ref()
- }
-
- pub fn url(&self) -> Option<&Url> {
- self.url.as_ref()
- }
-
- pub fn set_url(&mut self, url: Url) {
- self.url = Some(url);
- }
-
- /// Checks whether the given `PackageId` matches the `PackageIdSpec`.
- pub fn matches(&self, package_id: PackageId) -> bool {
+ fn matches(&self, package_id: PackageId) -> bool {
if self.name() != package_id.name().as_str() {
return false;
}
- if let Some(ref v) = self.version {
+ if let Some(ref v) = self.partial_version() {
if !v.matches(package_id.version()) {
return false;
}
}
- if let Some(u) = &self.url {
- if u != package_id.source_id().url() {
+ if let Some(u) = &self.url() {
+ if *u != package_id.source_id().url() {
+ return false;
+ }
+ }
+
+ if let Some(k) = &self.kind() {
+ if *k != package_id.source_id().kind() {
return false;
}
}
@@ -189,9 +62,7 @@ impl PackageIdSpec {
true
}
- /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or
- /// more are found, then this returns an error.
- pub fn query<I>(&self, i: I) -> CargoResult<PackageId>
+ fn query<I>(&self, i: I) -> CargoResult<PackageId>
where
I: IntoIterator<Item = PackageId>,
{
@@ -210,29 +81,21 @@ impl PackageIdSpec {
minimize(suggestion, &try_matches, self);
}
};
- if self.url.is_some() {
- try_spec(
- PackageIdSpec {
- name: self.name.clone(),
- version: self.version.clone(),
- url: None,
- },
- &mut suggestion,
- );
+ if self.url().is_some() {
+ let spec = PackageIdSpec::new(self.name().to_owned());
+ let spec = if let Some(version) = self.partial_version().cloned() {
+ spec.with_version(version)
+ } else {
+ spec
+ };
+ try_spec(spec, &mut suggestion);
}
- if suggestion.is_empty() && self.version.is_some() {
- try_spec(
- PackageIdSpec {
- name: self.name.clone(),
- version: None,
- url: None,
- },
- &mut suggestion,
- );
+ if suggestion.is_empty() && self.version().is_some() {
+ try_spec(PackageIdSpec::new(self.name().to_owned()), &mut suggestion);
}
if suggestion.is_empty() {
suggestion.push_str(&edit_distance::closest_msg(
- &self.name,
+ self.name(),
all_ids.iter(),
|id| id.name().as_str(),
));
@@ -273,274 +136,26 @@ impl PackageIdSpec {
if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}@{}", spec.name(), id.version()));
} else {
- msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id)));
+ msg.push_str(&format!("\n {}", id.to_spec()));
}
}
}
}
}
-impl fmt::Display for PackageIdSpec {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut printed_name = false;
- match self.url {
- Some(ref url) => {
- write!(f, "{}", url)?;
- if url.path_segments().unwrap().next_back().unwrap() != &*self.name {
- printed_name = true;
- write!(f, "#{}", self.name)?;
- }
- }
- None => {
- printed_name = true;
- write!(f, "{}", self.name)?;
- }
- }
- if let Some(ref v) = self.version {
- write!(f, "{}{}", if printed_name { "@" } else { "#" }, v)?;
- }
- Ok(())
- }
-}
-
-impl ser::Serialize for PackageIdSpec {
- fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where
- S: ser::Serializer,
- {
- self.to_string().serialize(s)
- }
-}
-
-impl<'de> de::Deserialize<'de> for PackageIdSpec {
- fn deserialize<D>(d: D) -> Result<PackageIdSpec, D::Error>
- where
- D: de::Deserializer<'de>,
- {
- let string = String::deserialize(d)?;
- PackageIdSpec::parse(&string).map_err(de::Error::custom)
- }
-}
-
#[cfg(test)]
mod tests {
use super::PackageIdSpec;
+ use super::PackageIdSpecQuery;
use crate::core::{PackageId, SourceId};
use url::Url;
#[test]
- fn good_parsing() {
- #[track_caller]
- fn ok(spec: &str, expected: PackageIdSpec, expected_rendered: &str) {
- let parsed = PackageIdSpec::parse(spec).unwrap();
- assert_eq!(parsed, expected);
- let rendered = parsed.to_string();
- assert_eq!(rendered, expected_rendered);
- let reparsed = PackageIdSpec::parse(&rendered).unwrap();
- assert_eq!(reparsed, expected);
- }
-
- ok(
- "https://crates.io/foo",
- PackageIdSpec {
- name: String::from("foo"),
- version: None,
- url: Some(Url::parse("https://crates.io/foo").unwrap()),
- },
- "https://crates.io/foo",
- );
- ok(
- "https://crates.io/foo#1.2.3",
- PackageIdSpec {
- name: String::from("foo"),
- version: Some("1.2.3".parse().unwrap()),
- url: Some(Url::parse("https://crates.io/foo").unwrap()),
- },
- "https://crates.io/foo#1.2.3",
- );
- ok(
- "https://crates.io/foo#1.2",
- PackageIdSpec {
- name: String::from("foo"),
- version: Some("1.2".parse().unwrap()),
- url: Some(Url::parse("https://crates.io/foo").unwrap()),
- },
- "https://crates.io/foo#1.2",
- );
- ok(
- "https://crates.io/foo#bar:1.2.3",
- PackageIdSpec {
- name: String::from("bar"),
- version: Some("1.2.3".parse().unwrap()),
- url: Some(Url::parse("https://crates.io/foo").unwrap()),
- },
- "https://crates.io/foo#bar@1.2.3",
- );
- ok(
- "https://crates.io/foo#bar@1.2.3",
- PackageIdSpec {
- name: String::from("bar"),
- version: Some("1.2.3".parse().unwrap()),
- url: Some(Url::parse("https://crates.io/foo").unwrap()),
- },
- "https://crates.io/foo#bar@1.2.3",
- );
- ok(
- "https://crates.io/foo#bar@1.2",
- PackageIdSpec {
- name: String::from("bar"),
- version: Some("1.2".parse().unwrap()),
- url: Some(Url::parse("https://crates.io/foo").unwrap()),
- },
- "https://crates.io/foo#bar@1.2",
- );
- ok(
- "foo",
- PackageIdSpec {
- name: String::from("foo"),
- version: None,
- url: None,
- },
- "foo",
- );
- ok(
- "foo:1.2.3",
- PackageIdSpec {
- name: String::from("foo"),
- version: Some("1.2.3".parse().unwrap()),
- url: None,
- },
- "foo@1.2.3",
- );
- ok(
- "foo@1.2.3",
- PackageIdSpec {
- name: String::from("foo"),
- version: Some("1.2.3".parse().unwrap()),
- url: None,
- },
- "foo@1.2.3",
- );
- ok(
- "foo@1.2",
- PackageIdSpec {
- name: String::from("foo"),
- version: Some("1.2".parse().unwrap()),
- url: None,
- },
- "foo@1.2",
- );
-
- // pkgid-spec.md
- ok(
- "regex",
- PackageIdSpec {
- name: String::from("regex"),
- version: None,
- url: None,
- },
- "regex",
- );
- ok(
- "regex@1.4",
- PackageIdSpec {
- name: String::from("regex"),
- version: Some("1.4".parse().unwrap()),
- url: None,
- },
- "regex@1.4",
- );
- ok(
- "regex@1.4.3",
- PackageIdSpec {
- name: String::from("regex"),
- version: Some("1.4.3".parse().unwrap()),
- url: None,
- },
- "regex@1.4.3",
- );
- ok(
- "https://github.com/rust-lang/crates.io-index#regex",
- PackageIdSpec {
- name: String::from("regex"),
- version: None,
- url: Some(Url::parse("https://github.com/rust-lang/crates.io-index").unwrap()),
- },
- "https://github.com/rust-lang/crates.io-index#regex",
- );
- ok(
- "https://github.com/rust-lang/crates.io-index#regex@1.4.3",
- PackageIdSpec {
- name: String::from("regex"),
- version: Some("1.4.3".parse().unwrap()),
- url: Some(Url::parse("https://github.com/rust-lang/crates.io-index").unwrap()),
- },
- "https://github.com/rust-lang/crates.io-index#regex@1.4.3",
- );
- ok(
- "https://github.com/rust-lang/cargo#0.52.0",
- PackageIdSpec {
- name: String::from("cargo"),
- version: Some("0.52.0".parse().unwrap()),
- url: Some(Url::parse("https://github.com/rust-lang/cargo").unwrap()),
- },
- "https://github.com/rust-lang/cargo#0.52.0",
- );
- ok(
- "https://github.com/rust-lang/cargo#cargo-platform@0.1.2",
- PackageIdSpec {
- name: String::from("cargo-platform"),
- version: Some("0.1.2".parse().unwrap()),
- url: Some(Url::parse("https://github.com/rust-lang/cargo").unwrap()),
- },
- "https://github.com/rust-lang/cargo#cargo-platform@0.1.2",
- );
- ok(
- "ssh://git@github.com/rust-lang/regex.git#regex@1.4.3",
- PackageIdSpec {
- name: String::from("regex"),
- version: Some("1.4.3".parse().unwrap()),
- url: Some(Url::parse("ssh://git@github.com/rust-lang/regex.git").unwrap()),
- },
- "ssh://git@github.com/rust-lang/regex.git#regex@1.4.3",
- );
- ok(
- "file:///path/to/my/project/foo",
- PackageIdSpec {
- name: String::from("foo"),
- version: None,
- url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()),
- },
- "file:///path/to/my/project/foo",
- );
- ok(
- "file:///path/to/my/project/foo#1.1.8",
- PackageIdSpec {
- name: String::from("foo"),
- version: Some("1.1.8".parse().unwrap()),
- url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()),
- },
- "file:///path/to/my/project/foo#1.1.8",
- );
- }
-
- #[test]
- fn bad_parsing() {
- assert!(PackageIdSpec::parse("baz:").is_err());
- assert!(PackageIdSpec::parse("baz:*").is_err());
- assert!(PackageIdSpec::parse("baz@").is_err());
- assert!(PackageIdSpec::parse("baz@*").is_err());
- assert!(PackageIdSpec::parse("baz@^1.0").is_err());
- assert!(PackageIdSpec::parse("https://baz:1.0").is_err());
- assert!(PackageIdSpec::parse("https://#baz:1.0").is_err());
- }
-
- #[test]
fn matching() {
let url = Url::parse("https://example.com").unwrap();
let sid = SourceId::for_registry(&url).unwrap();
- let foo = PackageId::new("foo", "1.2.3", sid).unwrap();
+ let foo = PackageId::try_new("foo", "1.2.3", sid).unwrap();
assert!(PackageIdSpec::parse("foo").unwrap().matches(foo));
assert!(!PackageIdSpec::parse("bar").unwrap().matches(foo));
assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(foo));
@@ -554,8 +169,14 @@ mod tests {
assert!(!PackageIdSpec::parse("https://bob.com#foo@1.2")
.unwrap()
.matches(foo));
+ assert!(PackageIdSpec::parse("registry+https://example.com#foo@1.2")
+ .unwrap()
+ .matches(foo));
+ assert!(!PackageIdSpec::parse("git+https://example.com#foo@1.2")
+ .unwrap()
+ .matches(foo));
- let meta = PackageId::new("meta", "1.2.3+hello", sid).unwrap();
+ let meta = PackageId::try_new("meta", "1.2.3+hello", sid).unwrap();
assert!(PackageIdSpec::parse("meta").unwrap().matches(meta));
assert!(PackageIdSpec::parse("meta@1").unwrap().matches(meta));
assert!(PackageIdSpec::parse("meta@1.2").unwrap().matches(meta));
@@ -570,7 +191,7 @@ mod tests {
.unwrap()
.matches(meta));
- let pre = PackageId::new("pre", "1.2.3-alpha.0", sid).unwrap();
+ let pre = PackageId::try_new("pre", "1.2.3-alpha.0", sid).unwrap();
assert!(PackageIdSpec::parse("pre").unwrap().matches(pre));
assert!(!PackageIdSpec::parse("pre@1").unwrap().matches(pre));
assert!(!PackageIdSpec::parse("pre@1.2").unwrap().matches(pre));
diff --git a/src/tools/cargo/src/cargo/core/profiles.rs b/src/tools/cargo/src/cargo/core/profiles.rs
index ec53dbae5..4d2a23f50 100644
--- a/src/tools/cargo/src/cargo/core/profiles.rs
+++ b/src/tools/cargo/src/cargo/core/profiles.rs
@@ -25,14 +25,17 @@ use crate::core::compiler::{CompileKind, CompileTarget, Unit};
use crate::core::dependency::Artifact;
use crate::core::resolver::features::FeaturesFor;
use crate::core::Feature;
-use crate::core::{PackageId, PackageIdSpec, Resolve, Shell, Target, Workspace};
+use crate::core::{
+ PackageId, PackageIdSpec, PackageIdSpecQuery, Resolve, Shell, Target, Workspace,
+};
use crate::util::interning::InternedString;
-use crate::util::toml::schema::TomlTrimPaths;
-use crate::util::toml::schema::TomlTrimPathsValue;
-use crate::util::toml::schema::{
+use crate::util::toml::validate_profile;
+use crate::util::{closest_msg, config, CargoResult, Config};
+use crate::util_schemas::manifest::TomlTrimPaths;
+use crate::util_schemas::manifest::TomlTrimPathsValue;
+use crate::util_schemas::manifest::{
ProfilePackageSpec, StringOrBool, TomlDebugInfo, TomlProfile, TomlProfiles,
};
-use crate::util::{closest_msg, config, CargoResult, Config};
use anyhow::{bail, Context as _};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::hash::Hash;
@@ -1235,20 +1238,19 @@ fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult<Option<Toml
return Ok(None);
};
let mut warnings = Vec::new();
- profile
- .val
- .validate(
- name,
- ws.config().cli_unstable(),
- ws.unstable_features(),
- &mut warnings,
+ validate_profile(
+ &profile.val,
+ name,
+ ws.config().cli_unstable(),
+ ws.unstable_features(),
+ &mut warnings,
+ )
+ .with_context(|| {
+ format!(
+ "config profile `{}` is not valid (defined in `{}`)",
+ name, profile.definition
)
- .with_context(|| {
- format!(
- "config profile `{}` is not valid (defined in `{}`)",
- name, profile.definition
- )
- })?;
+ })?;
for warning in warnings {
ws.config().shell().warn(warning)?;
}
diff --git a/src/tools/cargo/src/cargo/core/registry.rs b/src/tools/cargo/src/cargo/core/registry.rs
index a91f2986a..a1fb1054d 100644
--- a/src/tools/cargo/src/cargo/core/registry.rs
+++ b/src/tools/cargo/src/cargo/core/registry.rs
@@ -7,6 +7,7 @@ use crate::sources::config::SourceConfigMap;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
use crate::sources::source::SourceMap;
+use crate::sources::IndexSummary;
use crate::util::errors::CargoResult;
use crate::util::interning::InternedString;
use crate::util::{CanonicalUrl, Config};
@@ -23,10 +24,14 @@ pub trait Registry {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>>;
- fn query_vec(&mut self, dep: &Dependency, kind: QueryKind) -> Poll<CargoResult<Vec<Summary>>> {
+ fn query_vec(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ ) -> Poll<CargoResult<Vec<IndexSummary>>> {
let mut ret = Vec::new();
self.query(dep, kind, &mut |s| ret.push(s)).map_ok(|()| ret)
}
@@ -337,6 +342,8 @@ impl<'cfg> PackageRegistry<'cfg> {
}
};
+ let summaries = summaries.into_iter().map(|s| s.into_summary()).collect();
+
let (summary, should_unlock) =
match summary_for_patch(orig_patch, &locked, summaries, source) {
Poll::Ready(x) => x,
@@ -481,13 +488,15 @@ impl<'cfg> PackageRegistry<'cfg> {
Ok(())
}
- fn query_overrides(&mut self, dep: &Dependency) -> Poll<CargoResult<Option<Summary>>> {
+ fn query_overrides(&mut self, dep: &Dependency) -> Poll<CargoResult<Option<IndexSummary>>> {
for &s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap();
let dep = Dependency::new_override(dep.package_name(), s);
- let mut results = ready!(src.query_vec(&dep, QueryKind::Exact))?;
- if !results.is_empty() {
- return Poll::Ready(Ok(Some(results.remove(0))));
+
+ let mut results = None;
+ ready!(src.query(&dep, QueryKind::Exact, &mut |s| results = Some(s)))?;
+ if results.is_some() {
+ return Poll::Ready(Ok(results));
}
}
Poll::Ready(Ok(None))
@@ -575,7 +584,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
assert!(self.patches_locked);
let (override_summary, n, to_warn) = {
@@ -607,9 +616,9 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
if patches.len() == 1 && dep.is_locked() {
let patch = patches.remove(0);
match override_summary {
- Some(summary) => (summary, 1, Some(patch)),
+ Some(summary) => (summary, 1, Some(IndexSummary::Candidate(patch))),
None => {
- f(patch);
+ f(IndexSummary::Candidate(patch));
return Poll::Ready(Ok(()));
}
}
@@ -646,7 +655,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
// everything upstairs after locking the summary
(None, Some(source)) => {
for patch in patches.iter() {
- f(patch.clone());
+ f(IndexSummary::Candidate(patch.clone()));
}
// Our sources shouldn't ever come back to us with two
@@ -658,14 +667,18 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
// already selected, then we skip this `summary`.
let locked = &self.locked;
let all_patches = &self.patches_available;
- let callback = &mut |summary: Summary| {
+ let callback = &mut |summary: IndexSummary| {
for patch in patches.iter() {
let patch = patch.package_id().version();
if summary.package_id().version() == patch {
return;
}
}
- f(lock(locked, all_patches, summary))
+ f(IndexSummary::Candidate(lock(
+ locked,
+ all_patches,
+ summary.into_summary(),
+ )))
};
return source.query(dep, kind, callback);
}
@@ -702,9 +715,12 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
"found an override with a non-locked list"
)));
} else if let Some(summary) = to_warn {
- self.warn_bad_override(&override_summary, &summary)?;
+ self.warn_bad_override(override_summary.as_summary(), summary.as_summary())?;
}
- f(self.lock(override_summary));
+ f(IndexSummary::Candidate(
+ self.lock(override_summary.into_summary()),
+ ));
+
Poll::Ready(Ok(()))
}
@@ -887,6 +903,8 @@ fn summary_for_patch(
Vec::new()
});
+ let orig_matches = orig_matches.into_iter().map(|s| s.into_summary()).collect();
+
let summary = ready!(summary_for_patch(orig_patch, &None, orig_matches, source))?;
// The unlocked version found a match. This returns a value to
@@ -907,7 +925,7 @@ fn summary_for_patch(
});
let mut vers = name_summaries
.iter()
- .map(|summary| summary.version())
+ .map(|summary| summary.as_summary().version())
.collect::<Vec<_>>();
let found = match vers.len() {
0 => format!(""),
diff --git a/src/tools/cargo/src/cargo/core/resolver/context.rs b/src/tools/cargo/src/cargo/core/resolver/context.rs
index 09b16b39c..cfeea209a 100644
--- a/src/tools/cargo/src/cargo/core/resolver/context.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/context.rs
@@ -22,9 +22,6 @@ pub struct Context {
pub resolve_features: im_rc::HashMap<PackageId, FeaturesSet>,
/// get the package that will be linking to a native library by its links attribute
pub links: im_rc::HashMap<InternedString, PackageId>,
- /// for each package the list of names it can see,
- /// then for each name the exact version that name represents and whether the name is public.
- pub public_dependency: Option<PublicDependency>,
/// a way to look up for a package in activations what packages required it
/// and all of the exact deps that it fulfilled.
@@ -74,16 +71,11 @@ impl PackageId {
}
impl Context {
- pub fn new(check_public_visible_dependencies: bool) -> Context {
+ pub fn new() -> Context {
Context {
age: 0,
resolve_features: im_rc::HashMap::new(),
links: im_rc::HashMap::new(),
- public_dependency: if check_public_visible_dependencies {
- Some(PublicDependency::new())
- } else {
- None
- },
parents: Graph::new(),
activations: im_rc::HashMap::new(),
}
@@ -192,42 +184,6 @@ impl Context {
.and_then(|(s, l)| if s.package_id() == id { Some(*l) } else { None })
}
- /// If the conflict reason on the package still applies returns the `ContextAge` when it was added
- pub fn still_applies(&self, id: PackageId, reason: &ConflictReason) -> Option<ContextAge> {
- self.is_active(id).and_then(|mut max| {
- match reason {
- ConflictReason::PublicDependency(name) => {
- if &id == name {
- return Some(max);
- }
- max = std::cmp::max(max, self.is_active(*name)?);
- max = std::cmp::max(
- max,
- self.public_dependency
- .as_ref()
- .unwrap()
- .can_see_item(*name, id)?,
- );
- }
- ConflictReason::PubliclyExports(name) => {
- if &id == name {
- return Some(max);
- }
- max = std::cmp::max(max, self.is_active(*name)?);
- max = std::cmp::max(
- max,
- self.public_dependency
- .as_ref()
- .unwrap()
- .publicly_exports_item(*name, id)?,
- );
- }
- _ => {}
- }
- Some(max)
- })
- }
-
/// Checks whether all of `parent` and the keys of `conflicting activations`
/// are still active.
/// If so returns the `ContextAge` when the newest one was added.
@@ -241,8 +197,8 @@ impl Context {
max = std::cmp::max(max, self.is_active(parent)?);
}
- for (id, reason) in conflicting_activations.iter() {
- max = std::cmp::max(max, self.still_applies(*id, reason)?);
+ for id in conflicting_activations.keys() {
+ max = std::cmp::max(max, self.is_active(*id)?);
}
Some(max)
}
@@ -280,158 +236,3 @@ impl Graph<PackageId, im_rc::HashSet<Dependency>> {
.map(|(grand, d)| (*grand, d.iter().any(|x| x.is_public())))
}
}
-
-#[derive(Clone, Debug, Default)]
-pub struct PublicDependency {
- /// For each active package the set of all the names it can see,
- /// for each name the exact package that name resolves to,
- /// the `ContextAge` when it was first visible,
- /// and the `ContextAge` when it was first exported.
- inner: im_rc::HashMap<
- PackageId,
- im_rc::HashMap<InternedString, (PackageId, ContextAge, Option<ContextAge>)>,
- >,
-}
-
-impl PublicDependency {
- fn new() -> Self {
- PublicDependency {
- inner: im_rc::HashMap::new(),
- }
- }
- fn publicly_exports(&self, candidate_pid: PackageId) -> Vec<PackageId> {
- self.inner
- .get(&candidate_pid) // if we have seen it before
- .iter()
- .flat_map(|x| x.values()) // all the things we have stored
- .filter(|x| x.2.is_some()) // as publicly exported
- .map(|x| x.0)
- .chain(Some(candidate_pid)) // but even if not we know that everything exports itself
- .collect()
- }
- fn publicly_exports_item(
- &self,
- candidate_pid: PackageId,
- target: PackageId,
- ) -> Option<ContextAge> {
- debug_assert_ne!(candidate_pid, target);
- let out = self
- .inner
- .get(&candidate_pid)
- .and_then(|names| names.get(&target.name()))
- .filter(|(p, _, _)| *p == target)
- .and_then(|(_, _, age)| *age);
- debug_assert_eq!(
- out.is_some(),
- self.publicly_exports(candidate_pid).contains(&target)
- );
- out
- }
- pub fn can_see_item(&self, candidate_pid: PackageId, target: PackageId) -> Option<ContextAge> {
- self.inner
- .get(&candidate_pid)
- .and_then(|names| names.get(&target.name()))
- .filter(|(p, _, _)| *p == target)
- .map(|(_, age, _)| *age)
- }
- pub fn add_edge(
- &mut self,
- candidate_pid: PackageId,
- parent_pid: PackageId,
- is_public: bool,
- age: ContextAge,
- parents: &Graph<PackageId, im_rc::HashSet<Dependency>>,
- ) {
- // one tricky part is that `candidate_pid` may already be active and
- // have public dependencies of its own. So we not only need to mark
- // `candidate_pid` as visible to its parents but also all of its existing
- // publicly exported dependencies.
- for c in self.publicly_exports(candidate_pid) {
- // for each (transitive) parent that can newly see `t`
- let mut stack = vec![(parent_pid, is_public)];
- while let Some((p, public)) = stack.pop() {
- match self.inner.entry(p).or_default().entry(c.name()) {
- im_rc::hashmap::Entry::Occupied(mut o) => {
- // the (transitive) parent can already see something by `c`s name, it had better be `c`.
- assert_eq!(o.get().0, c);
- if o.get().2.is_some() {
- // The previous time the parent saw `c`, it was a public dependency.
- // So all of its parents already know about `c`
- // and we can save some time by stopping now.
- continue;
- }
- if public {
- // Mark that `c` has now bean seen publicly
- let old_age = o.get().1;
- o.insert((c, old_age, if public { Some(age) } else { None }));
- }
- }
- im_rc::hashmap::Entry::Vacant(v) => {
- // The (transitive) parent does not have anything by `c`s name,
- // so we add `c`.
- v.insert((c, age, if public { Some(age) } else { None }));
- }
- }
- // if `candidate_pid` was a private dependency of `p` then `p` parents can't see `c` thru `p`
- if public {
- // if it was public, then we add all of `p`s parents to be checked
- stack.extend(parents.parents_of(p));
- }
- }
- }
- }
- pub fn can_add_edge(
- &self,
- b_id: PackageId,
- parent: PackageId,
- is_public: bool,
- parents: &Graph<PackageId, im_rc::HashSet<Dependency>>,
- ) -> Result<
- (),
- (
- ((PackageId, ConflictReason), (PackageId, ConflictReason)),
- Option<(PackageId, ConflictReason)>,
- ),
- > {
- // one tricky part is that `candidate_pid` may already be active and
- // have public dependencies of its own. So we not only need to check
- // `b_id` as visible to its parents but also all of its existing
- // publicly exported dependencies.
- for t in self.publicly_exports(b_id) {
- // for each (transitive) parent that can newly see `t`
- let mut stack = vec![(parent, is_public)];
- while let Some((p, public)) = stack.pop() {
- // TODO: don't look at the same thing more than once
- if let Some(o) = self.inner.get(&p).and_then(|x| x.get(&t.name())) {
- if o.0 != t {
- // the (transitive) parent can already see a different version by `t`s name.
- // So, adding `b` will cause `p` to have a public dependency conflict on `t`.
- return Err((
- (o.0, ConflictReason::PublicDependency(p)), // p can see the other version and
- (parent, ConflictReason::PublicDependency(p)), // p can see us
- ))
- .map_err(|e| {
- if t == b_id {
- (e, None)
- } else {
- (e, Some((t, ConflictReason::PubliclyExports(b_id))))
- }
- });
- }
- if o.2.is_some() {
- // The previous time the parent saw `t`, it was a public dependency.
- // So all of its parents already know about `t`
- // and we can save some time by stopping now.
- continue;
- }
- }
- // if `b` was a private dependency of `p` then `p` parents can't see `t` thru `p`
- if public {
- // if it was public, then we add all of `p`s parents to be checked
- stack.extend(parents.parents_of(p));
- }
- }
- }
- Ok(())
- }
-}
diff --git a/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs b/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
index 6c904c148..9e8ffd351 100644
--- a/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
@@ -16,7 +16,9 @@ use crate::core::resolver::{
ActivateError, ActivateResult, CliFeatures, RequestedFeatures, ResolveOpts, VersionOrdering,
VersionPreferences,
};
-use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary};
+use crate::core::{
+ Dependency, FeatureValue, PackageId, PackageIdSpec, PackageIdSpecQuery, Registry, Summary,
+};
use crate::sources::source::QueryKind;
use crate::util::errors::CargoResult;
use crate::util::interning::InternedString;
@@ -105,7 +107,7 @@ impl<'a> RegistryQueryer<'a> {
let mut ret = Vec::new();
let ready = self.registry.query(dep, QueryKind::Exact, &mut |s| {
- ret.push(s);
+ ret.push(s.into_summary());
})?;
if ready.is_pending() {
self.registry_cache
@@ -135,16 +137,19 @@ impl<'a> RegistryQueryer<'a> {
return Poll::Pending;
}
};
- let s = summaries.next().ok_or_else(|| {
- anyhow::format_err!(
- "no matching package for override `{}` found\n\
+ let s = summaries
+ .next()
+ .ok_or_else(|| {
+ anyhow::format_err!(
+ "no matching package for override `{}` found\n\
location searched: {}\n\
version required: {}",
- spec,
- dep.source_id(),
- dep.version_req()
- )
- })?;
+ spec,
+ dep.source_id(),
+ dep.version_req()
+ )
+ })?
+ .into_summary();
let summaries = summaries.collect::<Vec<_>>();
if !summaries.is_empty() {
let bullets = summaries
diff --git a/src/tools/cargo/src/cargo/core/resolver/encode.rs b/src/tools/cargo/src/cargo/core/resolver/encode.rs
index fcef1578a..34bfbe820 100644
--- a/src/tools/cargo/src/cargo/core/resolver/encode.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/encode.rs
@@ -209,7 +209,7 @@ impl EncodableResolve {
debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
continue;
}
- Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?,
+ Some(&source) => PackageId::try_new(&pkg.name, &pkg.version, source)?,
};
// If a package has a checksum listed directly on it then record
@@ -365,7 +365,7 @@ impl EncodableResolve {
let mut unused_patches = Vec::new();
for pkg in self.patch.unused {
let id = match pkg.source.as_deref().or_else(|| path_deps.get(&pkg.name)) {
- Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?,
+ Some(&src) => PackageId::try_new(&pkg.name, &pkg.version, src)?,
None => continue,
};
unused_patches.push(id);
diff --git a/src/tools/cargo/src/cargo/core/resolver/errors.rs b/src/tools/cargo/src/cargo/core/resolver/errors.rs
index 15a006ffb..f3e72c51b 100644
--- a/src/tools/cargo/src/cargo/core/resolver/errors.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/errors.rs
@@ -140,9 +140,9 @@ pub(super) fn activation_error(
msg.push_str("` as well:\n");
msg.push_str(&describe_path_in_context(cx, p));
msg.push_str("\nOnly one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. ");
- msg.push_str("Try to adjust your dependencies so that only one package uses the links ='");
- msg.push_str(&*dep.package_name());
- msg.push_str("' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.");
+ msg.push_str("Try to adjust your dependencies so that only one package uses the `links = \"");
+ msg.push_str(link);
+ msg.push_str("\"` value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.");
}
ConflictReason::MissingFeatures(features) => {
msg.push_str("\n\nthe package `");
@@ -228,7 +228,7 @@ pub(super) fn activation_error(
let mut new_dep = dep.clone();
new_dep.set_version_req(OptVersionReq::Any);
- let mut candidates = loop {
+ let candidates = loop {
match registry.query_vec(&new_dep, QueryKind::Exact) {
Poll::Ready(Ok(candidates)) => break candidates,
Poll::Ready(Err(e)) => return to_resolve_err(e),
@@ -239,6 +239,8 @@ pub(super) fn activation_error(
}
};
+ let mut candidates: Vec<_> = candidates.into_iter().map(|s| s.into_summary()).collect();
+
candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
let mut msg = if !candidates.is_empty() {
@@ -303,7 +305,7 @@ pub(super) fn activation_error(
} else {
// Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
// was meant. So we try asking the registry for a `fuzzy` search for suggestions.
- let mut candidates = loop {
+ let candidates = loop {
match registry.query_vec(&new_dep, QueryKind::Fuzzy) {
Poll::Ready(Ok(candidates)) => break candidates,
Poll::Ready(Err(e)) => return to_resolve_err(e),
@@ -314,6 +316,8 @@ pub(super) fn activation_error(
}
};
+ let mut candidates: Vec<_> = candidates.into_iter().map(|s| s.into_summary()).collect();
+
candidates.sort_unstable_by_key(|a| a.name());
candidates.dedup_by(|a, b| a.name() == b.name());
let mut candidates: Vec<_> = candidates
diff --git a/src/tools/cargo/src/cargo/core/resolver/mod.rs b/src/tools/cargo/src/cargo/core/resolver/mod.rs
index ecb6f36e6..4b12f2cf3 100644
--- a/src/tools/cargo/src/cargo/core/resolver/mod.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/mod.rs
@@ -120,24 +120,12 @@ mod version_prefs;
///
/// * `config` - a location to print warnings and such, or `None` if no warnings
/// should be printed
-///
-/// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions
-/// introduced in the "public & private dependencies" RFC (1977). The current implementation
-/// makes sure that there is only one version of each name visible to each package.
-///
-/// But there are 2 stable ways to directly depend on different versions of the same name.
-/// 1. Use the renamed dependencies functionality
-/// 2. Use 'cfg({})' dependencies functionality
-///
-/// When we have a decision for how to implement is without breaking existing functionality
-/// this flag can be removed.
pub fn resolve(
summaries: &[(Summary, ResolveOpts)],
replacements: &[(PackageIdSpec, Dependency)],
registry: &mut dyn Registry,
version_prefs: &VersionPreferences,
config: Option<&Config>,
- check_public_visible_dependencies: bool,
) -> CargoResult<Resolve> {
let _p = profile::start("resolving");
let first_version = match config {
@@ -148,7 +136,7 @@ pub fn resolve(
};
let mut registry = RegistryQueryer::new(registry, replacements, version_prefs);
let cx = loop {
- let cx = Context::new(check_public_visible_dependencies);
+ let cx = Context::new();
let cx = activate_deps_loop(cx, &mut registry, summaries, first_version, config)?;
if registry.reset_pending() {
break cx;
@@ -286,12 +274,7 @@ fn activate_deps_loop(
let mut backtracked = false;
loop {
- let next = remaining_candidates.next(
- &mut conflicting_activations,
- &cx,
- &dep,
- parent.package_id(),
- );
+ let next = remaining_candidates.next(&mut conflicting_activations, &cx);
let (candidate, has_another) = next.ok_or(()).or_else(|_| {
// If we get here then our `remaining_candidates` was just
@@ -649,15 +632,6 @@ fn activate(
.link(candidate_pid, parent_pid)
// and associate dep with that edge
.insert(dep.clone());
- if let Some(public_dependency) = cx.public_dependency.as_mut() {
- public_dependency.add_edge(
- candidate_pid,
- parent_pid,
- dep.is_public(),
- cx.age,
- &cx.parents,
- );
- }
}
let activated = cx.flag_activated(&candidate, opts, parent)?;
@@ -772,8 +746,6 @@ impl RemainingCandidates {
&mut self,
conflicting_prev_active: &mut ConflictMap,
cx: &Context,
- dep: &Dependency,
- parent: PackageId,
) -> Option<(Summary, bool)> {
for b in self.remaining.by_ref() {
let b_id = b.package_id();
@@ -808,23 +780,6 @@ impl RemainingCandidates {
continue;
}
}
- // We may still have to reject do to a public dependency conflict. If one of any of our
- // ancestors that can see us already knows about a different crate with this name then
- // we have to reject this candidate. Additionally this candidate may already have been
- // activated and have public dependants of its own,
- // all of witch also need to be checked the same way.
- if let Some(public_dependency) = cx.public_dependency.as_ref() {
- if let Err(((c1, c2), c3)) =
- public_dependency.can_add_edge(b_id, parent, dep.is_public(), &cx.parents)
- {
- conflicting_prev_active.insert(c1.0, c1.1);
- conflicting_prev_active.insert(c2.0, c2.1);
- if let Some(c3) = c3 {
- conflicting_prev_active.insert(c3.0, c3.1);
- }
- continue;
- }
- }
// Well if we made it this far then we've got a valid dependency. We
// want this iterator to be inherently "peekable" so we don't
@@ -1001,12 +956,9 @@ fn find_candidate(
};
while let Some(mut frame) = backtrack_stack.pop() {
- let next = frame.remaining_candidates.next(
- &mut frame.conflicting_activations,
- &frame.context,
- &frame.dep,
- frame.parent.package_id(),
- );
+ let next = frame
+ .remaining_candidates
+ .next(&mut frame.conflicting_activations, &frame.context);
let Some((candidate, has_another)) = next else {
continue;
};
diff --git a/src/tools/cargo/src/cargo/core/resolver/resolve.rs b/src/tools/cargo/src/cargo/core/resolver/resolve.rs
index b401e9232..02f112166 100644
--- a/src/tools/cargo/src/cargo/core/resolver/resolve.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/resolve.rs
@@ -1,6 +1,6 @@
use super::encode::Metadata;
use crate::core::dependency::DepKind;
-use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
+use crate::core::{Dependency, PackageId, PackageIdSpec, PackageIdSpecQuery, Summary, Target};
use crate::util::errors::CargoResult;
use crate::util::interning::InternedString;
use crate::util::Graph;
diff --git a/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs b/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs
index 0deef5565..1e4eacb59 100644
--- a/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/version_prefs.rs
@@ -6,7 +6,7 @@ use std::collections::{HashMap, HashSet};
use crate::core::{Dependency, PackageId, Summary};
use crate::util::interning::InternedString;
-use crate::util::RustVersion;
+use crate::util_schemas::manifest::RustVersion;
/// A collection of preferences for particular package versions.
///
@@ -58,10 +58,10 @@ impl VersionPreferences {
///
/// Sort order:
/// 1. Preferred packages
- /// 2. `first_version`, falling back to [`VersionPreferences::version_ordering`] when `None`
+ /// 2. [`VersionPreferences::max_rust_version`]
+ /// 3. `first_version`, falling back to [`VersionPreferences::version_ordering`] when `None`
///
/// Filtering:
- /// - [`VersionPreferences::max_rust_version`]
/// - `first_version`
pub fn sort_summaries(
&self,
@@ -76,9 +76,6 @@ impl VersionPreferences {
.map(|deps| deps.iter().any(|d| d.matches_id(*pkg_id)))
.unwrap_or(false)
};
- if self.max_rust_version.is_some() {
- summaries.retain(|s| s.rust_version() <= self.max_rust_version.as_ref());
- }
summaries.sort_unstable_by(|a, b| {
let prefer_a = should_prefer(&a.package_id());
let prefer_b = should_prefer(&b.package_id());
@@ -87,6 +84,40 @@ impl VersionPreferences {
return previous_cmp;
}
+ if let Some(max_rust_version) = &self.max_rust_version {
+ match (a.rust_version(), b.rust_version()) {
+ // Fallback
+ (None, None) => {}
+ (Some(a), Some(b)) if a == b => {}
+ // Primary comparison
+ (Some(a), Some(b)) => {
+ let a_is_compat = a <= max_rust_version;
+ let b_is_compat = b <= max_rust_version;
+ match (a_is_compat, b_is_compat) {
+ (true, true) => {} // fallback
+ (false, false) => {} // fallback
+ (true, false) => return Ordering::Less,
+ (false, true) => return Ordering::Greater,
+ }
+ }
+ // Prioritize `None` over incompatible
+ (None, Some(b)) => {
+ if b <= max_rust_version {
+ return Ordering::Greater;
+ } else {
+ return Ordering::Less;
+ }
+ }
+ (Some(a), None) => {
+ if a <= max_rust_version {
+ return Ordering::Less;
+ } else {
+ return Ordering::Greater;
+ }
+ }
+ }
+ }
+
let cmp = a.version().cmp(b.version());
match first_version.unwrap_or(self.version_ordering) {
VersionOrdering::MaximumVersionsFirst => cmp.reverse(),
@@ -108,7 +139,7 @@ mod test {
fn pkgid(name: &str, version: &str) -> PackageId {
let src_id =
SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap();
- PackageId::new(name, version, src_id).unwrap()
+ PackageId::try_new(name, version, src_id).unwrap()
}
fn dep(name: &str, version: &str) -> Dependency {
@@ -226,8 +257,11 @@ mod test {
vp.max_rust_version(Some("1.50".parse().unwrap()));
let mut summaries = vec![
- summ("foo", "1.2.4", Some("1.60")),
- summ("foo", "1.2.3", Some("1.50")),
+ summ("foo", "1.2.4", None),
+ summ("foo", "1.2.3", Some("1.60")),
+ summ("foo", "1.2.2", None),
+ summ("foo", "1.2.1", Some("1.50")),
+ summ("foo", "1.2.0", None),
summ("foo", "1.1.0", Some("1.40")),
summ("foo", "1.0.9", None),
];
@@ -236,14 +270,16 @@ mod test {
vp.sort_summaries(&mut summaries, None);
assert_eq!(
describe(&summaries),
- "foo/1.2.3, foo/1.1.0, foo/1.0.9".to_string()
+ "foo/1.2.1, foo/1.1.0, foo/1.2.4, foo/1.2.2, foo/1.2.0, foo/1.0.9, foo/1.2.3"
+ .to_string()
);
vp.version_ordering(VersionOrdering::MinimumVersionsFirst);
vp.sort_summaries(&mut summaries, None);
assert_eq!(
describe(&summaries),
- "foo/1.0.9, foo/1.1.0, foo/1.2.3".to_string()
+ "foo/1.1.0, foo/1.2.1, foo/1.0.9, foo/1.2.0, foo/1.2.2, foo/1.2.4, foo/1.2.3"
+ .to_string()
);
}
}
diff --git a/src/tools/cargo/src/cargo/core/source_id.rs b/src/tools/cargo/src/cargo/core/source_id.rs
index e53b1704d..3b1cad942 100644
--- a/src/tools/cargo/src/cargo/core/source_id.rs
+++ b/src/tools/cargo/src/cargo/core/source_id.rs
@@ -1,4 +1,6 @@
+use crate::core::GitReference;
use crate::core::PackageId;
+use crate::core::SourceKind;
use crate::sources::registry::CRATES_IO_HTTP_INDEX;
use crate::sources::source::Source;
use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
@@ -82,38 +84,6 @@ impl fmt::Display for Precise {
}
}
-/// The possible kinds of code source.
-/// Along with [`SourceIdInner`], this fully defines the source.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-enum SourceKind {
- /// A git repository.
- Git(GitReference),
- /// A local path.
- Path,
- /// A remote registry.
- Registry,
- /// A sparse registry.
- SparseRegistry,
- /// A local filesystem-based registry.
- LocalRegistry,
- /// A directory-based registry.
- Directory,
-}
-
-/// Information to find a specific commit in a Git repository.
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum GitReference {
- /// From a tag.
- Tag(String),
- /// From a branch.
- Branch(String),
- /// From a specific revision. Can be a commit hash (either short or full),
- /// or a named reference like `refs/pull/493/head`.
- Rev(String),
- /// The default branch of the repository, the reference named `HEAD`.
- DefaultBranch,
-}
-
/// Where the remote source key is defined.
///
/// The purpose of this is to provide better diagnostics for different sources of keys.
@@ -373,6 +343,10 @@ impl SourceId {
Some(self.inner.url.to_file_path().unwrap())
}
+ pub fn kind(&self) -> &SourceKind {
+ &self.inner.kind
+ }
+
/// Returns `true` if this source is from a registry (either local or not).
pub fn is_registry(self) -> bool {
matches!(
@@ -742,108 +716,6 @@ impl PartialEq for SourceIdInner {
}
}
-impl SourceKind {
- pub(crate) fn protocol(&self) -> Option<&str> {
- match self {
- SourceKind::Path => Some("path"),
- SourceKind::Git(_) => Some("git"),
- SourceKind::Registry => Some("registry"),
- // Sparse registry URL already includes the `sparse+` prefix
- SourceKind::SparseRegistry => None,
- SourceKind::LocalRegistry => Some("local-registry"),
- SourceKind::Directory => Some("directory"),
- }
- }
-}
-
-/// Forwards to `Ord`
-impl PartialOrd for SourceKind {
- fn partial_cmp(&self, other: &SourceKind) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-/// Note that this is specifically not derived on `SourceKind` although the
-/// implementation here is very similar to what it might look like if it were
-/// otherwise derived.
-///
-/// The reason for this is somewhat obtuse. First of all the hash value of
-/// `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX`
-/// which means that changes to the hash means that all Rust users need to
-/// redownload the crates.io index and all their crates. If possible we strive
-/// to not change this to make this redownloading behavior happen as little as
-/// possible. How is this connected to `Ord` you might ask? That's a good
-/// question!
-///
-/// Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for
-/// the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522,
-/// however, the implementation of `Ord` changed. This handwritten implementation
-/// forgot to sync itself with the originally derived implementation, namely
-/// placing git dependencies as sorted after all other dependencies instead of
-/// first as before.
-///
-/// This regression in #8522 (Rust 1.47) went unnoticed. When we switched back
-/// to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically
-/// saw an issue (#9334). In #9334 it was observed that stable Rust at the time
-/// (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort
-/// git dependencies first. This is because the `PartialOrd` implementation in
-/// 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52
-/// it was (unknowingly) restored to the pre-1.47 behavior with git dependencies
-/// first.
-///
-/// Because the breakage was only witnessed after the original breakage, this
-/// trait implementation is preserving the "broken" behavior. Put a different way:
-///
-/// * Rust pre-1.47 sorted git deps first.
-/// * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that
-/// was never noticed.
-/// * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did
-/// so), and breakage was witnessed by actual users due to difference with
-/// 1.51.
-/// * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51
-/// behavior (#9383), which is now considered intentionally breaking from the
-/// pre-1.47 behavior.
-///
-/// Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was
-/// in beta. #9133 was in both beta and nightly at the time of discovery. For
-/// 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly
-/// (1.53) #9397 was created to fix the regression introduced by #9133 relative
-/// to the current stable (1.51).
-///
-/// That's all a long winded way of saying "it's weird that git deps hash first
-/// and are sorted last, but it's the way it is right now". The author of this
-/// comment chose to handwrite the `Ord` implementation instead of the `Hash`
-/// implementation, but it's only required that at most one of them is
-/// hand-written because the other can be derived. Perhaps one day in
-/// the future someone can figure out how to remove this behavior.
-impl Ord for SourceKind {
- fn cmp(&self, other: &SourceKind) -> Ordering {
- match (self, other) {
- (SourceKind::Path, SourceKind::Path) => Ordering::Equal,
- (SourceKind::Path, _) => Ordering::Less,
- (_, SourceKind::Path) => Ordering::Greater,
-
- (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal,
- (SourceKind::Registry, _) => Ordering::Less,
- (_, SourceKind::Registry) => Ordering::Greater,
-
- (SourceKind::SparseRegistry, SourceKind::SparseRegistry) => Ordering::Equal,
- (SourceKind::SparseRegistry, _) => Ordering::Less,
- (_, SourceKind::SparseRegistry) => Ordering::Greater,
-
- (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal,
- (SourceKind::LocalRegistry, _) => Ordering::Less,
- (_, SourceKind::LocalRegistry) => Ordering::Greater,
-
- (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal,
- (SourceKind::Directory, _) => Ordering::Less,
- (_, SourceKind::Directory) => Ordering::Greater,
-
- (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b),
- }
- }
-}
-
/// A `Display`able view into a `SourceId` that will write it as a url
pub struct SourceIdAsUrl<'a> {
inner: &'a SourceIdInner,
@@ -873,73 +745,6 @@ impl<'a> fmt::Display for SourceIdAsUrl<'a> {
}
}
-impl GitReference {
- pub fn from_query(
- query_pairs: impl Iterator<Item = (impl AsRef<str>, impl AsRef<str>)>,
- ) -> Self {
- let mut reference = GitReference::DefaultBranch;
- for (k, v) in query_pairs {
- let v = v.as_ref();
- match k.as_ref() {
- // Map older 'ref' to branch.
- "branch" | "ref" => reference = GitReference::Branch(v.to_owned()),
-
- "rev" => reference = GitReference::Rev(v.to_owned()),
- "tag" => reference = GitReference::Tag(v.to_owned()),
- _ => {}
- }
- }
- reference
- }
-
- /// Returns a `Display`able view of this git reference, or None if using
- /// the head of the default branch
- pub fn pretty_ref(&self, url_encoded: bool) -> Option<PrettyRef<'_>> {
- match self {
- GitReference::DefaultBranch => None,
- _ => Some(PrettyRef {
- inner: self,
- url_encoded,
- }),
- }
- }
-}
-
-/// A git reference that can be `Display`ed
-pub struct PrettyRef<'a> {
- inner: &'a GitReference,
- url_encoded: bool,
-}
-
-impl<'a> fmt::Display for PrettyRef<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let value: &str;
- match self.inner {
- GitReference::Branch(s) => {
- write!(f, "branch=")?;
- value = s;
- }
- GitReference::Tag(s) => {
- write!(f, "tag=")?;
- value = s;
- }
- GitReference::Rev(s) => {
- write!(f, "rev=")?;
- value = s;
- }
- GitReference::DefaultBranch => unreachable!(),
- }
- if self.url_encoded {
- for value in url::form_urlencoded::byte_serialize(value.as_bytes()) {
- write!(f, "{value}")?;
- }
- } else {
- write!(f, "{value}")?;
- }
- Ok(())
- }
-}
-
impl KeyOf {
/// Gets the underlying key.
fn key(&self) -> &str {
diff --git a/src/tools/cargo/src/cargo/core/summary.rs b/src/tools/cargo/src/cargo/core/summary.rs
index 243f6b398..2137d6332 100644
--- a/src/tools/cargo/src/cargo/core/summary.rs
+++ b/src/tools/cargo/src/cargo/core/summary.rs
@@ -1,7 +1,8 @@
use crate::core::{Dependency, PackageId, SourceId};
use crate::util::interning::InternedString;
use crate::util::CargoResult;
-use crate::util::RustVersion;
+use crate::util_schemas::manifest::FeatureName;
+use crate::util_schemas::manifest::RustVersion;
use anyhow::bail;
use semver::Version;
use std::collections::{BTreeMap, HashMap, HashSet};
@@ -49,7 +50,7 @@ impl Summary {
)
}
}
- let feature_map = build_feature_map(pkg_id, features, &dependencies)?;
+ let feature_map = build_feature_map(features, &dependencies)?;
Ok(Summary {
inner: Rc::new(Inner {
package_id: pkg_id,
@@ -140,7 +141,6 @@ impl Hash for Summary {
/// Checks features for errors, bailing out a CargoResult:Err if invalid,
/// and creates FeatureValues for each feature.
fn build_feature_map(
- pkg_id: PackageId,
features: &BTreeMap<InternedString, Vec<InternedString>>,
dependencies: &[Dependency],
) -> CargoResult<FeatureMap> {
@@ -191,19 +191,7 @@ fn build_feature_map(
// Validate features are listed properly.
for (feature, fvs) in &map {
- if feature.starts_with("dep:") {
- bail!(
- "feature named `{}` is not allowed to start with `dep:`",
- feature
- );
- }
- if feature.contains('/') {
- bail!(
- "feature named `{}` is not allowed to contain slashes",
- feature
- );
- }
- validate_feature_name(pkg_id, feature)?;
+ FeatureName::new(feature)?;
for fv in fvs {
// Find data for the referenced dependency...
let dep_data = {
@@ -429,68 +417,3 @@ impl fmt::Display for FeatureValue {
}
pub type FeatureMap = BTreeMap<InternedString, Vec<FeatureValue>>;
-
-fn validate_feature_name(pkg_id: PackageId, name: &str) -> CargoResult<()> {
- if name.is_empty() {
- bail!("feature name cannot be empty");
- }
- let mut chars = name.chars();
- if let Some(ch) = chars.next() {
- if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' || ch.is_digit(10)) {
- bail!(
- "invalid character `{}` in feature `{}` in package {}, \
- the first character must be a Unicode XID start character or digit \
- (most letters or `_` or `0` to `9`)",
- ch,
- name,
- pkg_id
- );
- }
- }
- for ch in chars {
- if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' || ch == '+' || ch == '.') {
- bail!(
- "invalid character `{}` in feature `{}` in package {}, \
- characters must be Unicode XID characters, '-', `+`, or `.` \
- (numbers, `+`, `-`, `_`, `.`, or most letters)",
- ch,
- name,
- pkg_id
- );
- }
- }
- Ok(())
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::sources::CRATES_IO_INDEX;
- use crate::util::into_url::IntoUrl;
-
- use crate::core::SourceId;
-
- #[test]
- fn valid_feature_names() {
- let loc = CRATES_IO_INDEX.into_url().unwrap();
- let source_id = SourceId::for_registry(&loc).unwrap();
- let pkg_id = PackageId::new("foo", "1.0.0", source_id).unwrap();
-
- assert!(validate_feature_name(pkg_id, "c++17").is_ok());
- assert!(validate_feature_name(pkg_id, "128bit").is_ok());
- assert!(validate_feature_name(pkg_id, "_foo").is_ok());
- assert!(validate_feature_name(pkg_id, "feat-name").is_ok());
- assert!(validate_feature_name(pkg_id, "feat_name").is_ok());
- assert!(validate_feature_name(pkg_id, "foo.bar").is_ok());
-
- assert!(validate_feature_name(pkg_id, "+foo").is_err());
- assert!(validate_feature_name(pkg_id, "-foo").is_err());
- assert!(validate_feature_name(pkg_id, ".foo").is_err());
- assert!(validate_feature_name(pkg_id, "foo:bar").is_err());
- assert!(validate_feature_name(pkg_id, "foo?").is_err());
- assert!(validate_feature_name(pkg_id, "?foo").is_err());
- assert!(validate_feature_name(pkg_id, "ⒶⒷⒸ").is_err());
- assert!(validate_feature_name(pkg_id, "a¼").is_err());
- assert!(validate_feature_name(pkg_id, "").is_err());
- }
-}
diff --git a/src/tools/cargo/src/cargo/core/workspace.rs b/src/tools/cargo/src/cargo/core/workspace.rs
index 4667c8029..b933c6173 100644
--- a/src/tools/cargo/src/cargo/core/workspace.rs
+++ b/src/tools/cargo/src/cargo/core/workspace.rs
@@ -15,18 +15,19 @@ use crate::core::features::Features;
use crate::core::registry::PackageRegistry;
use crate::core::resolver::features::CliFeatures;
use crate::core::resolver::ResolveBehavior;
-use crate::core::{Dependency, Edition, FeatureValue, PackageId, PackageIdSpec};
+use crate::core::{
+ Dependency, Edition, FeatureValue, PackageId, PackageIdSpec, PackageIdSpecQuery,
+};
use crate::core::{EitherManifest, Package, SourceId, VirtualManifest};
use crate::ops;
use crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
use crate::util::edit_distance;
use crate::util::errors::{CargoResult, ManifestError};
use crate::util::interning::InternedString;
-use crate::util::toml::{
- read_manifest, schema::InheritableFields, schema::TomlDependency, schema::TomlProfiles,
-};
-use crate::util::RustVersion;
+use crate::util::toml::{read_manifest, InheritableFields};
use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl};
+use crate::util_schemas::manifest::RustVersion;
+use crate::util_schemas::manifest::{TomlDependency, TomlProfiles};
use cargo_util::paths;
use cargo_util::paths::normalize_path;
use pathdiff::diff_paths;
@@ -437,7 +438,8 @@ impl<'cfg> Workspace<'cfg> {
url,
deps.iter()
.map(|(name, dep)| {
- dep.to_dependency_split(
+ crate::util::toml::to_dependency(
+ dep,
name,
source,
&mut nested_paths,
diff --git a/src/tools/cargo/src/cargo/lib.rs b/src/tools/cargo/src/cargo/lib.rs
index 6947642c9..6d7468ca3 100644
--- a/src/tools/cargo/src/cargo/lib.rs
+++ b/src/tools/cargo/src/cargo/lib.rs
@@ -70,6 +70,11 @@
//! This is not directly depended upon with a `path` dependency; cargo uses the version from crates.io.
//! It is intended to be versioned and published independently of Rust's release system.
//! Whenever a change needs to be made, bump the version in Cargo.toml and `cargo publish` it manually, and then update cargo's `Cargo.toml` to depend on the new version.
+//! - [`rustfix`](https://crates.io/crates/rustfix)
+//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/rustfix)):
+//! This defines structures that represent fix suggestions from rustc,
+//! as well as generates "fixed" code from suggestions.
+//! Operations in `rustfix` are all in memory and won't write to disks.
//! - [`cargo-test-support`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-support)
//! ([nightly docs](https://doc.rust-lang.org/nightly/nightly-rustc/cargo_test_support/index.html)):
//! This contains a variety of code to support writing tests
@@ -93,7 +98,7 @@
//! Files that interact with cargo include
//!
//! - Package
-//! - `Cargo.toml`: User-written project manifest, loaded with [`util::toml::schema::TomlManifest`] and then
+//! - `Cargo.toml`: User-written project manifest, loaded with [`util_schemas::manifest::TomlManifest`] and then
//! translated to [`core::manifest::Manifest`] which maybe stored in a [`core::Package`].
//! - This is editable with [`util::toml_mut::manifest::LocalManifest`]
//! - `Cargo.lock`: Generally loaded with [`ops::resolve_ws`] or a variant of it into a [`core::resolver::Resolve`]
@@ -129,20 +134,6 @@
//! [The Cargo Book]: https://doc.rust-lang.org/cargo/
//! [Cargo Contributor Guide]: https://doc.crates.io/contrib/
-// TODO: consider removing these lint attributes when `-Zlints` hits stable.
-// For various reasons, some idioms are still allow'ed, but we would like to
-// test and enforce them.
-#![warn(rust_2018_idioms)]
-// Due to some of the default clippy lints being somewhat subjective and not
-// necessarily an improvement, we prefer to not use them at this time.
-#![allow(clippy::all)]
-#![warn(clippy::disallowed_methods)]
-#![warn(clippy::self_named_module_files)]
-#![warn(clippy::print_stdout)]
-#![warn(clippy::print_stderr)]
-#![warn(clippy::dbg_macro)]
-#![allow(rustdoc::private_intra_doc_links)]
-
use crate::core::shell::Verbosity::Verbose;
use crate::core::Shell;
use anyhow::Error;
@@ -161,6 +152,7 @@ pub mod core;
pub mod ops;
pub mod sources;
pub mod util;
+pub mod util_schemas;
pub mod util_semver;
mod version;
diff --git a/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs b/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs
index f07e2fae5..65c58314f 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_add/crate_spec.rs
@@ -4,7 +4,7 @@ use anyhow::Context as _;
use super::Dependency;
use crate::util::toml_mut::dependency::RegistrySource;
-use crate::util::validate_package_name;
+use crate::util_schemas::manifest::PackageName;
use crate::CargoResult;
/// User-specified crate
@@ -12,7 +12,6 @@ use crate::CargoResult;
/// This can be a
/// - Name (e.g. `docopt`)
/// - Name and a version req (e.g. `docopt@^0.8`)
-/// - Path
#[derive(Debug)]
pub struct CrateSpec {
/// Crate name
@@ -29,7 +28,7 @@ impl CrateSpec {
.map(|(n, v)| (n, Some(v)))
.unwrap_or((pkg_id, None));
- validate_package_name(name, "dependency name", "")?;
+ PackageName::new(name)?;
if let Some(version) = version {
semver::VersionReq::parse(version)
diff --git a/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs b/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs
index 39e37b156..9be290b48 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_add/mod.rs
@@ -7,6 +7,7 @@ use std::collections::BTreeSet;
use std::collections::VecDeque;
use std::fmt::Write;
use std::path::Path;
+use std::str::FromStr;
use anyhow::Context as _;
use cargo_util::paths;
@@ -34,7 +35,7 @@ use crate::util::toml_mut::dependency::WorkspaceSource;
use crate::util::toml_mut::is_sorted;
use crate::util::toml_mut::manifest::DepTable;
use crate::util::toml_mut::manifest::LocalManifest;
-use crate::util::RustVersion;
+use crate::util_schemas::manifest::RustVersion;
use crate::CargoResult;
use crate::Config;
use crate_spec::CrateSpec;
@@ -196,6 +197,20 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<(
print_dep_table_msg(&mut options.config.shell(), &dep)?;
manifest.insert_into_table(&dep_table, &dep)?;
+ if dep.optional == Some(true) {
+ let is_namespaced_features_supported =
+ check_rust_version_for_optional_dependency(options.spec.rust_version())?;
+ if is_namespaced_features_supported {
+ let dep_key = dep.toml_key();
+ if !manifest.is_explicit_dep_activation(dep_key) {
+ let table = manifest.get_table_mut(&[String::from("features")])?;
+ let dep_name = dep.rename.as_deref().unwrap_or(&dep.name);
+ let new_feature: toml_edit::Value =
+ [format!("dep:{dep_name}")].iter().collect();
+ table[dep_key] = toml_edit::value(new_feature);
+ }
+ }
+ }
manifest.gc_dep(dep.toml_key());
}
@@ -244,6 +259,9 @@ pub struct DepOp {
/// Whether dependency is optional
pub optional: Option<bool>,
+ /// Whether dependency is public
+ pub public: Option<bool>,
+
/// Registry for looking up dependency version
pub registry: Option<String>,
@@ -469,6 +487,26 @@ fn check_invalid_ws_keys(toml_key: &str, arg: &DepOp) -> CargoResult<()> {
Ok(())
}
+/// When the `--optional` option is added using `cargo add`, we need to
+/// check the current rust-version. As the `dep:` syntax is only available
+/// starting with Rust 1.60.0
+///
+/// `true` means that the rust-version is None or the rust-version is higher
+/// than the version needed.
+///
+/// Note: Previous versions can only use the implicit feature name.
+fn check_rust_version_for_optional_dependency(
+ rust_version: Option<&RustVersion>,
+) -> CargoResult<bool> {
+ match rust_version {
+ Some(version) => {
+ let syntax_support_version = RustVersion::from_str("1.60.0")?;
+ Ok(&syntax_support_version <= version)
+ }
+ None => Ok(true),
+ }
+}
+
/// Provide the existing dependency for the target table
///
/// If it doesn't exist but exists in another table, let's use that as most likely users
@@ -545,7 +583,7 @@ fn get_latest_dependency(
unreachable!("registry dependencies required, found a workspace dependency");
}
MaybeWorkspace::Other(query) => {
- let mut possibilities = loop {
+ let possibilities = loop {
match registry.query_vec(&query, QueryKind::Fuzzy) {
std::task::Poll::Ready(res) => {
break res?;
@@ -554,6 +592,11 @@ fn get_latest_dependency(
}
};
+ let mut possibilities: Vec<_> = possibilities
+ .into_iter()
+ .map(|s| s.into_summary())
+ .collect();
+
possibilities.sort_by_key(|s| {
// Fallback to a pre-release if no official release is available by sorting them as
// less.
@@ -671,6 +714,12 @@ fn select_package(
std::task::Poll::Pending => registry.block_until_ready()?,
}
};
+
+ let possibilities: Vec<_> = possibilities
+ .into_iter()
+ .map(|s| s.into_summary())
+ .collect();
+
match possibilities.len() {
0 => {
let source = dependency
@@ -747,6 +796,13 @@ fn populate_dependency(mut dependency: Dependency, arg: &DepOp) -> Dependency {
dependency.optional = None;
}
}
+ if let Some(value) = arg.public {
+ if value {
+ dependency.public = Some(true);
+ } else {
+ dependency.public = None;
+ }
+ }
if let Some(value) = arg.default_features {
if value {
dependency.default_features = None;
@@ -889,6 +945,7 @@ fn populate_available_features(
// in the lock file for a given version requirement.
let lowest_common_denominator = possibilities
.iter()
+ .map(|s| s.as_summary())
.min_by_key(|s| {
// Fallback to a pre-release if no official release is available by sorting them as
// more.
@@ -933,6 +990,9 @@ fn print_action_msg(shell: &mut Shell, dep: &DependencyUI, section: &[String]) -
if dep.optional().unwrap_or(false) {
write!(message, " optional")?;
}
+ if dep.public().unwrap_or(false) {
+ write!(message, " public")?;
+ }
let section = if section.len() == 1 {
section[0].clone()
} else {
diff --git a/src/tools/cargo/src/cargo/ops/cargo_clean.rs b/src/tools/cargo/src/cargo/ops/cargo_clean.rs
index 6f58b8bdc..4add5d863 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_clean.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_clean.rs
@@ -1,6 +1,6 @@
use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData};
use crate::core::profiles::Profiles;
-use crate::core::{PackageIdSpec, TargetKind, Workspace};
+use crate::core::{PackageIdSpec, PackageIdSpecQuery, TargetKind, Workspace};
use crate::ops;
use crate::util::edit_distance;
use crate::util::errors::CargoResult;
@@ -389,7 +389,7 @@ impl<'cfg> CleanContext<'cfg> {
Ok(())
}
- fn display_summary(&self) -> CargoResult<()> {
+ pub fn display_summary(&self) -> CargoResult<()> {
let status = if self.dry_run { "Summary" } else { "Removed" };
let byte_count = if self.total_bytes_removed == 0 {
String::new()
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
index 94c6cf9de..3522ef9d3 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
@@ -153,6 +153,7 @@ pub fn compile_ws<'a>(
unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?;
return Compilation::new(&bcx);
}
+ crate::core::gc::auto_gc(bcx.config);
let _p = profile::start("compiling");
let cx = Context::new(&bcx)?;
cx.compile(exec)
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs
index 2d14d60a6..439b87111 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/packages.rs
@@ -47,7 +47,7 @@ impl Packages {
Packages::All => ws
.members()
.map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
+ .map(|id| id.to_spec())
.collect(),
Packages::OptOut(opt_out) => {
let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
@@ -57,7 +57,7 @@ impl Packages {
!names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
})
.map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
+ .map(|id| id.to_spec())
.collect();
let warn = |e| ws.config().shell().warn(e);
emit_package_not_found(ws, names, true).or_else(warn)?;
@@ -65,7 +65,7 @@ impl Packages {
specs
}
Packages::Packages(packages) if packages.is_empty() => {
- vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
+ vec![ws.current()?.package_id().to_spec()]
}
Packages::Packages(opt_in) => {
let (mut patterns, packages) = opt_patterns_and_names(opt_in)?;
@@ -78,7 +78,7 @@ impl Packages {
.members()
.filter(|pkg| match_patterns(pkg, &mut patterns))
.map(Package::package_id)
- .map(PackageIdSpec::from_package_id);
+ .map(|id| id.to_spec());
specs.extend(matched_pkgs);
}
emit_pattern_not_found(ws, patterns, false)?;
@@ -87,7 +87,7 @@ impl Packages {
Packages::Default => ws
.default_members()
.map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
+ .map(|id| id.to_spec())
.collect(),
};
if specs.is_empty() {
@@ -195,7 +195,7 @@ fn opt_patterns_and_names(
let mut opt_patterns = Vec::new();
let mut opt_names = BTreeSet::new();
for x in opt.iter() {
- if is_glob_pattern(x) {
+ if PackageIdSpec::parse(x).is_err() && is_glob_pattern(x) {
opt_patterns.push((build_glob(x)?, false));
} else {
opt_names.insert(String::as_str(x));
diff --git a/src/tools/cargo/src/cargo/ops/cargo_fetch.rs b/src/tools/cargo/src/cargo/ops/cargo_fetch.rs
index 6acdbddef..ac2b60aab 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_fetch.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_fetch.rs
@@ -76,6 +76,7 @@ pub fn fetch<'a>(
}
packages.get_many(to_download)?;
+ crate::core::gc::auto_gc(config);
Ok((resolve, packages))
}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs b/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
index a16d6d403..1bba64925 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
@@ -1,6 +1,6 @@
use crate::core::registry::PackageRegistry;
use crate::core::resolver::features::{CliFeatures, HasDevUnits};
-use crate::core::{PackageId, PackageIdSpec};
+use crate::core::{PackageId, PackageIdSpec, PackageIdSpecQuery};
use crate::core::{Resolve, SourceId, Workspace};
use crate::ops;
use crate::util::cache_lock::CacheLockMode;
@@ -122,6 +122,26 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
}
}
+ // Mirror `--workspace` and never avoid workspace members.
+ // Filtering them out here so the above processes them normally
+ // so their dependencies can be updated as requested
+ to_avoid = to_avoid
+ .into_iter()
+ .filter(|id| {
+ for package in ws.members() {
+ let member_id = package.package_id();
+ // Skip checking the `version` because `previous_resolve` might have a stale
+ // value.
+ // When dealing with workspace members, the other fields should be a
+ // sufficiently unique match.
+ if id.name() == member_id.name() && id.source_id() == member_id.source_id() {
+ return false;
+ }
+ }
+ true
+ })
+ .collect();
+
registry.add_sources(sources)?;
}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_install.rs b/src/tools/cargo/src/cargo/ops/cargo_install.rs
index 16027233e..0d7836bc2 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_install.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_install.rs
@@ -4,9 +4,7 @@ use std::sync::Arc;
use std::{env, fs};
use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, UnitOutput};
-use crate::core::{
- Dependency, Edition, Package, PackageId, PackageIdSpec, SourceId, Target, Workspace,
-};
+use crate::core::{Dependency, Edition, Package, PackageId, SourceId, Target, Workspace};
use crate::ops::{common_for_install_and_uninstall::*, FilterRule};
use crate::ops::{CompileFilter, Packages};
use crate::sources::source::Source;
@@ -206,7 +204,7 @@ impl<'cfg> InstallablePackage<'cfg> {
// For cargo install tracking, we retain the source git url in `pkg`, but for the build spec
// we need to unconditionally use `ws.current()` to correctly address the path where we
// locally cloned that repo.
- let pkgidspec = PackageIdSpec::from_package_id(ws.current()?.package_id());
+ let pkgidspec = ws.current()?.package_id().to_spec();
opts.spec = Packages::Packages(vec![pkgidspec.to_string()]);
if from_cwd {
diff --git a/src/tools/cargo/src/cargo/ops/cargo_new.rs b/src/tools/cargo/src/cargo/ops/cargo_new.rs
index 1c06b5f82..57c7e268e 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_new.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_new.rs
@@ -4,6 +4,7 @@ use crate::util::important_paths::find_root_manifest_for_wd;
use crate::util::toml_mut::is_sorted;
use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo};
use crate::util::{restricted_names, Config};
+use crate::util_schemas::manifest::PackageName;
use anyhow::{anyhow, Context};
use cargo_util::paths::{self, write_atomic};
use serde::de;
@@ -180,7 +181,7 @@ fn check_name(
};
let bin_help = || {
let mut help = String::from(name_help);
- if has_bin {
+ if has_bin && !name.is_empty() {
help.push_str(&format!(
"\n\
If you need a binary with the name \"{name}\", use a valid package \
@@ -197,7 +198,10 @@ fn check_name(
}
help
};
- restricted_names::validate_package_name(name, "package name", &bin_help())?;
+ PackageName::new(name).map_err(|err| {
+ let help = bin_help();
+ anyhow::anyhow!("{err}{help}")
+ })?;
if restricted_names::is_keyword(name) {
anyhow::bail!(
diff --git a/src/tools/cargo/src/cargo/ops/cargo_package.rs b/src/tools/cargo/src/cargo/ops/cargo_package.rs
index 6ac09dc77..2ff0187fe 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_package.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_package.rs
@@ -7,6 +7,7 @@ use std::sync::Arc;
use std::task::Poll;
use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
+use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::{registry::PackageRegistry, resolver::HasDevUnits};
use crate::core::{Feature, Shell, Verbosity, Workspace};
@@ -15,7 +16,7 @@ use crate::sources::PathSource;
use crate::util::cache_lock::CacheLockMode;
use crate::util::config::JobsConfig;
use crate::util::errors::CargoResult;
-use crate::util::toml::schema::TomlManifest;
+use crate::util::toml::{prepare_for_publish, to_real_manifest};
use crate::util::{self, human_readable_bytes, restricted_names, Config, FileLock};
use crate::{drop_println, ops};
use anyhow::Context as _;
@@ -331,6 +332,23 @@ fn build_ar_list(
warn_on_nonexistent_file(&pkg, &readme_path, "readme", &ws)?;
}
}
+
+ for t in pkg
+ .manifest()
+ .targets()
+ .iter()
+ .filter(|t| t.is_custom_build())
+ {
+ if let Some(custome_build_path) = t.src_path().path() {
+ let abs_custome_build_path =
+ paths::normalize_path(&pkg.root().join(custome_build_path));
+ if !abs_custome_build_path.is_file() || !abs_custome_build_path.starts_with(pkg.root())
+ {
+ error_custom_build_file_not_in_package(pkg, &abs_custome_build_path, t)?;
+ }
+ }
+ }
+
result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));
Ok(result)
@@ -405,20 +423,42 @@ fn warn_on_nonexistent_file(
))
}
+fn error_custom_build_file_not_in_package(
+ pkg: &Package,
+ path: &Path,
+ target: &Target,
+) -> CargoResult<Vec<ArchiveFile>> {
+ let tip = {
+ let description_name = target.description_named();
+ if path.is_file() {
+ format!("the source file of {description_name} doesn't appear to be a path inside of the package.\n\
+ It is at `{}`, whereas the root the package is `{}`.\n",
+ path.display(), pkg.root().display()
+ )
+ } else {
+ format!("the source file of {description_name} doesn't appear to exist.\n",)
+ }
+ };
+ let msg = format!(
+ "{}\
+ This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
+ Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
+ tip, pkg.manifest_path().display()
+ );
+ anyhow::bail!(msg)
+}
+
/// Construct `Cargo.lock` for the package to be published.
fn build_lock(ws: &Workspace<'_>, orig_pkg: &Package) -> CargoResult<String> {
let config = ws.config();
let orig_resolve = ops::load_pkg_lockfile(ws)?;
// Convert Package -> TomlManifest -> Manifest -> Package
- let toml_manifest = orig_pkg
- .manifest()
- .original()
- .prepare_for_publish(ws, orig_pkg.root())?;
+ let toml_manifest = prepare_for_publish(orig_pkg.manifest().original(), ws, orig_pkg.root())?;
let package_root = orig_pkg.root();
let source_id = orig_pkg.package_id().source_id();
let (manifest, _nested_paths) =
- TomlManifest::to_real_manifest(toml_manifest, false, source_id, package_root, config)?;
+ to_real_manifest(toml_manifest, false, source_id, package_root, config)?;
let new_pkg = Package::new(manifest, orig_pkg.manifest_path());
let max_rust_version = new_pkg.rust_version().cloned();
diff --git a/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs b/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs
index bbae154a7..4e81e741f 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_pkgid.rs
@@ -1,4 +1,4 @@
-use crate::core::{PackageIdSpec, Workspace};
+use crate::core::{PackageIdSpec, PackageIdSpecQuery, Workspace};
use crate::ops;
use crate::util::CargoResult;
@@ -11,5 +11,5 @@ pub fn pkgid(ws: &Workspace<'_>, spec: Option<&str>) -> CargoResult<PackageIdSpe
Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,
None => ws.current()?.package_id(),
};
- Ok(PackageIdSpec::from_package_id(pkgid))
+ Ok(pkgid.to_spec())
}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs b/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs
index 1f22e191e..7b45a69b4 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_uninstall.rs
@@ -1,12 +1,11 @@
use crate::core::PackageId;
-use crate::core::{PackageIdSpec, SourceId};
+use crate::core::{PackageIdSpec, PackageIdSpecQuery, SourceId};
use crate::ops::common_for_install_and_uninstall::*;
use crate::sources::PathSource;
use crate::util::errors::CargoResult;
use crate::util::Config;
use crate::util::Filesystem;
use anyhow::bail;
-use cargo_util::paths;
use std::collections::BTreeSet;
use std::env;
@@ -103,7 +102,6 @@ fn uninstall_pkgid(
bins: &[String],
config: &Config,
) -> CargoResult<()> {
- let mut to_remove = Vec::new();
let installed = match tracker.installed_bins(pkgid) {
Some(bins) => bins.clone(),
None => bail!("package `{}` is not installed", pkgid),
@@ -137,19 +135,18 @@ fn uninstall_pkgid(
}
}
- if bins.is_empty() {
- to_remove.extend(installed.iter().map(|b| dst.join(b)));
- tracker.remove(pkgid, &installed);
- } else {
- for bin in bins.iter() {
- to_remove.push(dst.join(bin));
+ let to_remove = {
+ if bins.is_empty() {
+ installed
+ } else {
+ bins
}
- tracker.remove(pkgid, &bins);
- }
- tracker.save()?;
+ };
+
for bin in to_remove {
- config.shell().status("Removing", bin.display())?;
- paths::remove_file(bin)?;
+ let bin_path = dst.join(&bin);
+ config.shell().status("Removing", bin_path.display())?;
+ tracker.remove_bin_then_save(pkgid, &bin, &bin_path)?;
}
Ok(())
diff --git a/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs b/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs
index d1f9152be..e678a64df 100644
--- a/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs
+++ b/src/tools/cargo/src/cargo/ops/common_for_install_and_uninstall.rs
@@ -7,6 +7,7 @@ use std::rc::Rc;
use std::task::Poll;
use anyhow::{bail, format_err, Context as _};
+use cargo_util::paths;
use ops::FilterRule;
use serde::{Deserialize, Serialize};
@@ -319,6 +320,20 @@ impl InstallTracker {
self.v1.remove(pkg_id, bins);
self.v2.remove(pkg_id, bins);
}
+
+    /// Remove a bin after it has been successfully removed on disk, then save the tracker.
+ pub fn remove_bin_then_save(
+ &mut self,
+ pkg_id: PackageId,
+ bin: &str,
+ bin_path: &PathBuf,
+ ) -> CargoResult<()> {
+ paths::remove_file(bin_path)?;
+ self.v1.remove_bin(pkg_id, bin);
+ self.v2.remove_bin(pkg_id, bin);
+ self.save()?;
+ Ok(())
+ }
}
impl CrateListingV1 {
@@ -359,6 +374,17 @@ impl CrateListingV1 {
}
}
+ fn remove_bin(&mut self, pkg_id: PackageId, bin: &str) {
+ let mut installed = match self.v1.entry(pkg_id) {
+ btree_map::Entry::Occupied(e) => e,
+ btree_map::Entry::Vacant(..) => panic!("v1 unexpected missing `{}`", pkg_id),
+ };
+ installed.get_mut().remove(bin);
+ if installed.get().is_empty() {
+ installed.remove();
+ }
+ }
+
fn save(&self, lock: &FileLock) -> CargoResult<()> {
let mut file = lock.file();
file.seek(SeekFrom::Start(0))?;
@@ -468,6 +494,17 @@ impl CrateListingV2 {
}
}
+ fn remove_bin(&mut self, pkg_id: PackageId, bin: &str) {
+ let mut info_entry = match self.installs.entry(pkg_id) {
+ btree_map::Entry::Occupied(e) => e,
+ btree_map::Entry::Vacant(..) => panic!("v1 unexpected missing `{}`", pkg_id),
+ };
+ info_entry.get_mut().bins.remove(bin);
+ if info_entry.get().bins.is_empty() {
+ info_entry.remove();
+ }
+ }
+
fn save(&self, lock: &FileLock) -> CargoResult<()> {
let mut file = lock.file();
file.seek(SeekFrom::Start(0))?;
@@ -552,7 +589,11 @@ where
Poll::Pending => source.block_until_ready()?,
}
};
- match deps.iter().max_by_key(|p| p.package_id()) {
+ match deps
+ .iter()
+ .map(|s| s.as_summary())
+ .max_by_key(|p| p.package_id())
+ {
Some(summary) => {
if let (Some(current), Some(msrv)) = (current_rust_version, summary.rust_version()) {
let msrv_req = msrv.to_caret_req();
@@ -571,6 +612,7 @@ where
};
if let Some(alt) = msrv_deps
.iter()
+ .map(|s| s.as_summary())
.filter(|summary| {
summary
.rust_version()
@@ -608,7 +650,7 @@ cannot install package `{name} {ver}`, it requires rustc {msrv} or newer, while
let is_yanked: bool = if dep.version_req().is_exact() {
let version: String = dep.version_req().to_string();
if let Ok(pkg_id) =
- PackageId::new(dep.package_name(), &version[1..], source.source_id())
+ PackageId::try_new(dep.package_name(), &version[1..], source.source_id())
{
source.invalidate_cache();
loop {
diff --git a/src/tools/cargo/src/cargo/ops/mod.rs b/src/tools/cargo/src/cargo/ops/mod.rs
index 13613eaf6..76fa91d25 100644
--- a/src/tools/cargo/src/cargo/ops/mod.rs
+++ b/src/tools/cargo/src/cargo/ops/mod.rs
@@ -1,6 +1,6 @@
use crate::sources::CRATES_IO_DOMAIN;
-pub use self::cargo_clean::{clean, CleanOptions};
+pub use self::cargo_clean::{clean, CleanContext, CleanOptions};
pub use self::cargo_compile::{
compile, compile_with_exec, compile_ws, create_bcx, print, resolve_all_features, CompileOptions,
};
diff --git a/src/tools/cargo/src/cargo/ops/registry/publish.rs b/src/tools/cargo/src/cargo/ops/registry/publish.rs
index 201907bb2..2313792c8 100644
--- a/src/tools/cargo/src/cargo/ops/registry/publish.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/publish.rs
@@ -21,6 +21,7 @@ use crate::core::manifest::ManifestMetadata;
use crate::core::resolver::CliFeatures;
use crate::core::Dependency;
use crate::core::Package;
+use crate::core::PackageIdSpecQuery;
use crate::core::SourceId;
use crate::core::Workspace;
use crate::ops;
diff --git a/src/tools/cargo/src/cargo/ops/resolve.rs b/src/tools/cargo/src/cargo/ops/resolve.rs
index 8ca72f77c..5421d0572 100644
--- a/src/tools/cargo/src/cargo/ops/resolve.rs
+++ b/src/tools/cargo/src/cargo/ops/resolve.rs
@@ -64,14 +64,15 @@ use crate::core::resolver::{
self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion, VersionOrdering, VersionPreferences,
};
use crate::core::summary::Summary;
-use crate::core::Feature;
-use crate::core::{GitReference, PackageId, PackageIdSpec, PackageSet, SourceId, Workspace};
+use crate::core::{
+ GitReference, PackageId, PackageIdSpec, PackageIdSpecQuery, PackageSet, SourceId, Workspace,
+};
use crate::ops;
use crate::sources::PathSource;
use crate::util::cache_lock::CacheLockMode;
use crate::util::errors::CargoResult;
-use crate::util::RustVersion;
use crate::util::{profile, CanonicalUrl};
+use crate::util_schemas::manifest::RustVersion;
use anyhow::Context as _;
use std::collections::{HashMap, HashSet};
use tracing::{debug, trace};
@@ -512,9 +513,6 @@ pub fn resolve_with_previous<'cfg>(
registry,
&version_prefs,
Some(ws.config()),
- ws.unstable_features()
- .require(Feature::public_dependency())
- .is_ok(),
)?;
let patches: Vec<_> = registry
.patches()
@@ -530,6 +528,9 @@ pub fn resolve_with_previous<'cfg>(
if let Some(previous) = previous {
resolved.merge_from(previous)?;
}
+ let config = ws.config();
+ let mut deferred = config.deferred_global_last_use()?;
+ deferred.save_no_error(config);
Ok(resolved)
}
diff --git a/src/tools/cargo/src/cargo/ops/tree/mod.rs b/src/tools/cargo/src/cargo/ops/tree/mod.rs
index ce3bae8cc..6928ec5f9 100644
--- a/src/tools/cargo/src/cargo/ops/tree/mod.rs
+++ b/src/tools/cargo/src/cargo/ops/tree/mod.rs
@@ -4,7 +4,7 @@ use self::format::Pattern;
use crate::core::compiler::{CompileKind, RustcTargetData};
use crate::core::dependency::DepKind;
use crate::core::resolver::{features::CliFeatures, ForceAllTargets, HasDevUnits};
-use crate::core::{Package, PackageId, PackageIdSpec, Workspace};
+use crate::core::{Package, PackageId, PackageIdSpec, PackageIdSpecQuery, Workspace};
use crate::ops::{self, Packages};
use crate::util::{CargoResult, Config};
use crate::{drop_print, drop_println};
diff --git a/src/tools/cargo/src/cargo/sources/directory.rs b/src/tools/cargo/src/cargo/sources/directory.rs
index 7195fd72d..01c3c4330 100644
--- a/src/tools/cargo/src/cargo/sources/directory.rs
+++ b/src/tools/cargo/src/cargo/sources/directory.rs
@@ -3,10 +3,11 @@ use std::fmt::{self, Debug, Formatter};
use std::path::{Path, PathBuf};
use std::task::Poll;
-use crate::core::{Dependency, Package, PackageId, SourceId, Summary};
+use crate::core::{Dependency, Package, PackageId, SourceId};
use crate::sources::source::MaybePackage;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
+use crate::sources::IndexSummary;
use crate::sources::PathSource;
use crate::util::errors::CargoResult;
use crate::util::Config;
@@ -99,7 +100,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
if !self.updated {
return Poll::Pending;
@@ -110,7 +111,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
QueryKind::Fuzzy => true,
});
for summary in matches.map(|pkg| pkg.summary().clone()) {
- f(summary);
+ f(IndexSummary::Candidate(summary));
}
Poll::Ready(Ok(()))
}
diff --git a/src/tools/cargo/src/cargo/sources/git/known_hosts.rs b/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
index 0b0dd3208..f316cc253 100644
--- a/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
+++ b/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
@@ -538,7 +538,7 @@ fn user_known_host_location() -> Option<PathBuf> {
// - OpenSSH (most unix platforms): Uses `pw->pw_dir` from `getpwuid()`.
//
// This doesn't do anything close to that. home_dir's behavior is:
- // - Windows: $USERPROFILE, or SHGetFolderPathW()
+ // - Windows: $USERPROFILE, or SHGetKnownFolderPath()
// - Unix: $HOME, or getpwuid_r()
//
// Since there is a mismatch here, the location returned here might be
diff --git a/src/tools/cargo/src/cargo/sources/git/source.rs b/src/tools/cargo/src/cargo/sources/git/source.rs
index a75c1ec6d..664c64bfe 100644
--- a/src/tools/cargo/src/cargo/sources/git/source.rs
+++ b/src/tools/cargo/src/cargo/sources/git/source.rs
@@ -1,16 +1,19 @@
//! See [GitSource].
+use crate::core::global_cache_tracker;
use crate::core::GitReference;
use crate::core::SourceId;
-use crate::core::{Dependency, Package, PackageId, Summary};
+use crate::core::{Dependency, Package, PackageId};
use crate::sources::git::utils::GitRemote;
use crate::sources::source::MaybePackage;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
+use crate::sources::IndexSummary;
use crate::sources::PathSource;
use crate::util::cache_lock::CacheLockMode;
use crate::util::errors::CargoResult;
use crate::util::hex::short_hash;
+use crate::util::interning::InternedString;
use crate::util::Config;
use anyhow::Context;
use cargo_util::paths::exclude_from_backups_and_indexing;
@@ -73,10 +76,21 @@ pub struct GitSource<'cfg> {
/// The unique identifier of this source.
source_id: SourceId,
/// The underlying path source to discover packages inside the Git repository.
+ ///
+ /// This gets set to `Some` after the git repo has been checked out
+ /// (automatically handled via [`GitSource::block_until_ready`]).
path_source: Option<PathSource<'cfg>>,
+ /// A short string that uniquely identifies the version of the checkout.
+ ///
+ /// This is typically a 7-character string of the OID hash, automatically
+ /// increasing in size if it is ambiguous.
+ ///
+ /// This is set to `Some` after the git repo has been checked out
+ /// (automatically handled via [`GitSource::block_until_ready`]).
+ short_id: Option<InternedString>,
/// The identifier of this source for Cargo's Git cache directory.
/// See [`ident`] for more.
- ident: String,
+ ident: InternedString,
config: &'cfg Config,
/// Disables status messages.
quiet: bool,
@@ -104,7 +118,8 @@ impl<'cfg> GitSource<'cfg> {
locked_rev,
source_id,
path_source: None,
- ident,
+ short_id: None,
+ ident: ident.into(),
config,
quiet: false,
};
@@ -127,6 +142,17 @@ impl<'cfg> GitSource<'cfg> {
}
self.path_source.as_mut().unwrap().read_packages()
}
+
+ fn mark_used(&self, size: Option<u64>) -> CargoResult<()> {
+ self.config
+ .deferred_global_last_use()?
+ .mark_git_checkout_used(global_cache_tracker::GitCheckout {
+ encoded_git_name: self.ident,
+ short_name: self.short_id.expect("update before download"),
+ size,
+ });
+ Ok(())
+ }
}
/// Create an identifier from a URL,
@@ -177,7 +203,7 @@ impl<'cfg> Source for GitSource<'cfg> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
if let Some(src) = self.path_source.as_mut() {
src.query(dep, kind, f)
@@ -200,6 +226,7 @@ impl<'cfg> Source for GitSource<'cfg> {
fn block_until_ready(&mut self) -> CargoResult<()> {
if self.path_source.is_some() {
+ self.mark_used(None)?;
return Ok(());
}
@@ -290,8 +317,16 @@ impl<'cfg> Source for GitSource<'cfg> {
let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config);
self.path_source = Some(path_source);
+ self.short_id = Some(short_id.as_str().into());
self.locked_rev = Some(actual_rev);
- self.path_source.as_mut().unwrap().update()
+ self.path_source.as_mut().unwrap().update()?;
+
        // Hopefully this shouldn't incur too much of a performance hit,
        // since most of this data should already be in the filesystem
        // cache — it was just extracted.
+ let size = global_cache_tracker::du_git_checkout(&checkout_path)?;
+ self.mark_used(Some(size))?;
+ Ok(())
}
fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
@@ -300,6 +335,7 @@ impl<'cfg> Source for GitSource<'cfg> {
id,
self.remote
);
+ self.mark_used(None)?;
self.path_source
.as_mut()
.expect("BUG: `update()` must be called before `get()`")
diff --git a/src/tools/cargo/src/cargo/sources/mod.rs b/src/tools/cargo/src/cargo/sources/mod.rs
index 7da1d652d..1d2f51a37 100644
--- a/src/tools/cargo/src/cargo/sources/mod.rs
+++ b/src/tools/cargo/src/cargo/sources/mod.rs
@@ -30,7 +30,9 @@ pub use self::config::SourceConfigMap;
pub use self::directory::DirectorySource;
pub use self::git::GitSource;
pub use self::path::PathSource;
-pub use self::registry::{RegistrySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+pub use self::registry::{
+ IndexSummary, RegistrySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY,
+};
pub use self::replaced::ReplacedSource;
pub mod config;
diff --git a/src/tools/cargo/src/cargo/sources/path.rs b/src/tools/cargo/src/cargo/sources/path.rs
index 0cc639976..bbf6f056b 100644
--- a/src/tools/cargo/src/cargo/sources/path.rs
+++ b/src/tools/cargo/src/cargo/sources/path.rs
@@ -3,11 +3,12 @@ use std::fmt::{self, Debug, Formatter};
use std::path::{Path, PathBuf};
use std::task::Poll;
-use crate::core::{Dependency, Package, PackageId, SourceId, Summary};
+use crate::core::{Dependency, Package, PackageId, SourceId};
use crate::ops;
use crate::sources::source::MaybePackage;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
+use crate::sources::IndexSummary;
use crate::util::{internal, CargoResult, Config};
use anyhow::Context as _;
use cargo_util::paths;
@@ -327,7 +328,12 @@ impl<'cfg> PathSource<'cfg> {
match file_path.file_name().and_then(|s| s.to_str()) {
// The `target` directory is never included.
- Some("target") => continue,
+ Some("target") => {
                // Only filter out `target` if it's in the package root.
+ if file_path.parent().unwrap() == pkg_path {
+ continue;
+ }
+ }
// Keep track of all sub-packages found and also strip out all
// matches we've found so far. Note, though, that if we find
@@ -542,7 +548,7 @@ impl<'cfg> Source for PathSource<'cfg> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
self.update()?;
for s in self.packages.iter().map(|p| p.summary()) {
@@ -551,7 +557,7 @@ impl<'cfg> Source for PathSource<'cfg> {
QueryKind::Fuzzy => true,
};
if matched {
- f(s.clone())
+ f(IndexSummary::Candidate(s.clone()))
}
}
Poll::Ready(Ok(()))
diff --git a/src/tools/cargo/src/cargo/sources/registry/download.rs b/src/tools/cargo/src/cargo/sources/registry/download.rs
index 786432835..daf1d0537 100644
--- a/src/tools/cargo/src/cargo/sources/registry/download.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/download.rs
@@ -3,11 +3,13 @@
//! [`HttpRegistry`]: super::http_remote::HttpRegistry
//! [`RemoteRegistry`]: super::remote::RemoteRegistry
+use crate::util::interning::InternedString;
use anyhow::Context;
use cargo_credential::Operation;
use cargo_util::registry::make_dep_path;
use cargo_util::Sha256;
+use crate::core::global_cache_tracker;
use crate::core::PackageId;
use crate::sources::registry::MaybeLock;
use crate::sources::registry::RegistryConfig;
@@ -34,6 +36,7 @@ const CHECKSUM_TEMPLATE: &str = "{sha256-checksum}";
pub(super) fn download(
cache_path: &Filesystem,
config: &Config,
+ encoded_registry_name: InternedString,
pkg: PackageId,
checksum: &str,
registry_config: RegistryConfig,
@@ -50,6 +53,13 @@ pub(super) fn download(
if let Ok(dst) = File::open(path) {
let meta = dst.metadata()?;
if meta.len() > 0 {
+ config.deferred_global_last_use()?.mark_registry_crate_used(
+ global_cache_tracker::RegistryCrate {
+ encoded_registry_name,
+ crate_filename: pkg.tarball_name().into(),
+ size: meta.len(),
+ },
+ );
return Ok(MaybeLock::Ready(dst));
}
}
@@ -106,6 +116,7 @@ pub(super) fn download(
pub(super) fn finish_download(
cache_path: &Filesystem,
config: &Config,
+ encoded_registry_name: InternedString,
pkg: PackageId,
checksum: &str,
data: &[u8],
@@ -115,6 +126,13 @@ pub(super) fn finish_download(
if actual != checksum {
anyhow::bail!("failed to verify the checksum of `{}`", pkg)
}
+ config.deferred_global_last_use()?.mark_registry_crate_used(
+ global_cache_tracker::RegistryCrate {
+ encoded_registry_name,
+ crate_filename: pkg.tarball_name().into(),
+ size: data.len() as u64,
+ },
+ );
cache_path.create_dir()?;
let path = cache_path.join(&pkg.tarball_name());
diff --git a/src/tools/cargo/src/cargo/sources/registry/http_remote.rs b/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
index 3d31110c3..821cf3cd1 100644
--- a/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
@@ -1,11 +1,13 @@
//! Access to a HTTP-based crate registry. See [`HttpRegistry`] for details.
+use crate::core::global_cache_tracker;
use crate::core::{PackageId, SourceId};
use crate::sources::registry::download;
use crate::sources::registry::MaybeLock;
use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData};
use crate::util::cache_lock::CacheLockMode;
use crate::util::errors::{CargoResult, HttpNotSuccessful};
+use crate::util::interning::InternedString;
use crate::util::network::http::http_handle;
use crate::util::network::retry::{Retry, RetryResult};
use crate::util::network::sleep::SleepTracker;
@@ -52,6 +54,9 @@ const UNKNOWN: &'static str = "Unknown";
///
/// [RFC 2789]: https://github.com/rust-lang/rfcs/pull/2789
pub struct HttpRegistry<'cfg> {
+ /// The name of this source, a unique string (across all sources) used as
+ /// the directory name where its cached content is stored.
+ name: InternedString,
/// Path to the registry index (`$CARGO_HOME/registry/index/$REG-HASH`).
///
/// To be fair, `HttpRegistry` doesn't store the registry index it
@@ -199,6 +204,7 @@ impl<'cfg> HttpRegistry<'cfg> {
.expect("a url with the sparse+ stripped should still be valid");
Ok(HttpRegistry {
+ name: name.into(),
index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name),
source_id,
@@ -454,6 +460,11 @@ impl<'cfg> HttpRegistry<'cfg> {
impl<'cfg> RegistryData for HttpRegistry<'cfg> {
fn prepare(&self) -> CargoResult<()> {
+ self.config
+ .deferred_global_last_use()?
+ .mark_registry_index_used(global_cache_tracker::RegistryIndex {
+ encoded_registry_name: self.name,
+ });
Ok(())
}
@@ -750,6 +761,7 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
download::download(
&self.cache_path,
&self.config,
+ self.name.clone(),
pkg,
checksum,
registry_config,
@@ -762,7 +774,14 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
checksum: &str,
data: &[u8],
) -> CargoResult<File> {
- download::finish_download(&self.cache_path, &self.config, pkg, checksum, data)
+ download::finish_download(
+ &self.cache_path,
+ &self.config,
+ self.name.clone(),
+ pkg,
+ checksum,
+ data,
+ )
}
fn is_crate_downloaded(&self, pkg: PackageId) -> bool {
diff --git a/src/tools/cargo/src/cargo/sources/registry/index.rs b/src/tools/cargo/src/cargo/sources/registry/index.rs
index 00f21d669..aed8fc813 100644
--- a/src/tools/cargo/src/cargo/sources/registry/index.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/index.rs
@@ -92,7 +92,8 @@ use crate::sources::registry::{LoadResponse, RegistryData};
use crate::util::cache_lock::CacheLockMode;
use crate::util::interning::InternedString;
use crate::util::IntoUrl;
-use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq, RustVersion};
+use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq};
+use crate::util_schemas::manifest::RustVersion;
use anyhow::bail;
use cargo_util::{paths, registry::make_dep_path};
use semver::Version;
@@ -223,6 +224,15 @@ impl IndexSummary {
}
}
+ pub fn map_summary(self, f: impl Fn(Summary) -> Summary) -> Self {
+ match self {
+ IndexSummary::Candidate(s) => IndexSummary::Candidate(f(s)),
+ IndexSummary::Yanked(s) => IndexSummary::Yanked(f(s)),
+ IndexSummary::Offline(s) => IndexSummary::Offline(f(s)),
+ IndexSummary::Unsupported(s, v) => IndexSummary::Unsupported(f(s), v.clone()),
+ }
+ }
+
/// Extract the package id from any variant
pub fn package_id(&self) -> PackageId {
match self {
@@ -935,7 +945,7 @@ impl IndexSummary {
} = serde_json::from_slice(line)?;
let v = v.unwrap_or(1);
tracing::trace!("json parsed registry {}/{}", name, vers);
- let pkgid = PackageId::pure(name.into(), vers.clone(), source_id);
+ let pkgid = PackageId::new(name.into(), vers.clone(), source_id);
let deps = deps
.into_iter()
.map(|dep| dep.into_dep(source_id))
diff --git a/src/tools/cargo/src/cargo/sources/registry/mod.rs b/src/tools/cargo/src/cargo/sources/registry/mod.rs
index 7ee461edd..5bdd71e7d 100644
--- a/src/tools/cargo/src/cargo/sources/registry/mod.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/mod.rs
@@ -201,7 +201,8 @@ use tar::Archive;
use tracing::debug;
use crate::core::dependency::Dependency;
-use crate::core::{Package, PackageId, SourceId, Summary};
+use crate::core::global_cache_tracker;
+use crate::core::{Package, PackageId, SourceId};
use crate::sources::source::MaybePackage;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
@@ -239,6 +240,9 @@ struct LockMetadata {
///
/// For general concepts of registries, see the [module-level documentation](crate::sources::registry).
pub struct RegistrySource<'cfg> {
+ /// A unique name of the source (typically used as the directory name
+ /// where its cached content is stored).
+ name: InternedString,
/// The unique identifier of this source.
source_id: SourceId,
/// The path where crate files are extracted (`$CARGO_HOME/registry/src/$REG-HASH`).
@@ -435,12 +439,18 @@ pub enum MaybeLock {
mod download;
mod http_remote;
mod index;
+pub use index::IndexSummary;
mod local;
mod remote;
/// Generates a unique name for [`SourceId`] to have a unique path to put their
/// index files.
fn short_name(id: SourceId, is_shallow: bool) -> String {
+ // CAUTION: This should not change between versions. If you change how
+ // this is computed, it will orphan previously cached data, forcing the
+ // cache to be rebuilt and potentially wasting significant disk space. If
+ // you change it, be cautious of the impact. See `test_cratesio_hash` for
+ // a similar discussion.
let hash = hex::short_hash(&id);
let ident = id.url().host_str().unwrap_or("").to_string();
let mut name = format!("{}-{}", ident, hash);
@@ -514,6 +524,7 @@ impl<'cfg> RegistrySource<'cfg> {
yanked_whitelist: &HashSet<PackageId>,
) -> RegistrySource<'cfg> {
RegistrySource {
+ name: name.into(),
src_path: config.registry_source_path().join(name),
config,
source_id,
@@ -589,6 +600,13 @@ impl<'cfg> RegistrySource<'cfg> {
match fs::read_to_string(path) {
Ok(ok) => match serde_json::from_str::<LockMetadata>(&ok) {
Ok(lock_meta) if lock_meta.v == 1 => {
+ self.config
+ .deferred_global_last_use()?
+ .mark_registry_src_used(global_cache_tracker::RegistrySrc {
+ encoded_registry_name: self.name,
+ package_dir: package_dir.into(),
+ size: None,
+ });
return Ok(unpack_dir.to_path_buf());
}
_ => {
@@ -613,6 +631,7 @@ impl<'cfg> RegistrySource<'cfg> {
set_mask(&mut tar);
tar
};
+ let mut bytes_written = 0;
let prefix = unpack_dir.file_name().unwrap();
let parent = unpack_dir.parent().unwrap();
for entry in tar.entries()? {
@@ -644,6 +663,7 @@ impl<'cfg> RegistrySource<'cfg> {
continue;
}
// Unpacking failed
+ bytes_written += entry.size();
let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from);
if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) {
result = result.with_context(|| {
@@ -670,6 +690,14 @@ impl<'cfg> RegistrySource<'cfg> {
let lock_meta = LockMetadata { v: 1 };
write!(ok, "{}", serde_json::to_string(&lock_meta).unwrap())?;
+ self.config
+ .deferred_global_last_use()?
+ .mark_registry_src_used(global_cache_tracker::RegistrySrc {
+ encoded_registry_name: self.name,
+ package_dir: package_dir.into(),
+ size: Some(bytes_written),
+ });
+
Ok(unpack_dir.to_path_buf())
}
@@ -710,7 +738,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
let mut req = dep.version_req().clone();
@@ -736,7 +764,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
if dep.matches(s.as_summary()) {
// We are looking for a package from a lock file so we do not care about yank
called = true;
- f(s.into_summary());
+ f(s);
}
},))?;
if called {
@@ -761,7 +789,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
if matched
&& (!s.is_yanked() || self.yanked_whitelist.contains(&s.package_id()))
{
- f(s.into_summary());
+ f(s);
called = true;
}
}))?;
@@ -786,9 +814,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
}
any_pending |= self
.index
- .query_inner(name_permutation, &req, &mut *self.ops, &mut |s| {
- f(s.into_summary());
- })?
+ .query_inner(name_permutation, &req, &mut *self.ops, f)?
.is_pending();
}
}
diff --git a/src/tools/cargo/src/cargo/sources/registry/remote.rs b/src/tools/cargo/src/cargo/sources/registry/remote.rs
index ba171eac3..841ee3683 100644
--- a/src/tools/cargo/src/cargo/sources/registry/remote.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/remote.rs
@@ -1,5 +1,6 @@
//! Access to a Git index based registry. See [`RemoteRegistry`] for details.
+use crate::core::global_cache_tracker;
use crate::core::{GitReference, PackageId, SourceId};
use crate::sources::git;
use crate::sources::git::fetch::RemoteKind;
@@ -47,6 +48,9 @@ use tracing::{debug, trace};
///
/// [`HttpRegistry`]: super::http_remote::HttpRegistry
pub struct RemoteRegistry<'cfg> {
+ /// The name of this source, a unique string (across all sources) used as
+ /// the directory name where its cached content is stored.
+ name: InternedString,
/// Path to the registry index (`$CARGO_HOME/registry/index/$REG-HASH`).
index_path: Filesystem,
/// Path to the cache of `.crate` files (`$CARGO_HOME/registry/cache/$REG-HASH`).
@@ -87,6 +91,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
/// registry index are stored. Expect to be unique.
pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
RemoteRegistry {
+ name: name.into(),
index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name),
source_id,
@@ -211,6 +216,11 @@ impl<'cfg> RemoteRegistry<'cfg> {
impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
fn prepare(&self) -> CargoResult<()> {
self.repo()?;
+ self.config
+ .deferred_global_last_use()?
+ .mark_registry_index_used(global_cache_tracker::RegistryIndex {
+ encoded_registry_name: self.name,
+ });
Ok(())
}
@@ -403,6 +413,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
download::download(
&self.cache_path,
&self.config,
+ self.name,
pkg,
checksum,
registry_config,
@@ -415,7 +426,14 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
checksum: &str,
data: &[u8],
) -> CargoResult<File> {
- download::finish_download(&self.cache_path, &self.config, pkg, checksum, data)
+ download::finish_download(
+ &self.cache_path,
+ &self.config,
+ self.name.clone(),
+ pkg,
+ checksum,
+ data,
+ )
}
fn is_crate_downloaded(&self, pkg: PackageId) -> bool {
diff --git a/src/tools/cargo/src/cargo/sources/replaced.rs b/src/tools/cargo/src/cargo/sources/replaced.rs
index f224e073d..8c38a5241 100644
--- a/src/tools/cargo/src/cargo/sources/replaced.rs
+++ b/src/tools/cargo/src/cargo/sources/replaced.rs
@@ -1,7 +1,8 @@
-use crate::core::{Dependency, Package, PackageId, SourceId, Summary};
+use crate::core::{Dependency, Package, PackageId, SourceId};
use crate::sources::source::MaybePackage;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
+use crate::sources::IndexSummary;
use crate::util::errors::CargoResult;
use std::task::Poll;
@@ -59,14 +60,14 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
let (replace_with, to_replace) = (self.replace_with, self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
self.inner
.query(&dep, kind, &mut |summary| {
- f(summary.map_source(replace_with, to_replace))
+ f(summary.map_summary(|s| s.map_source(replace_with, to_replace)))
})
.map_err(|e| {
e.context(format!(
diff --git a/src/tools/cargo/src/cargo/sources/source.rs b/src/tools/cargo/src/cargo/sources/source.rs
index 5a3439e5d..dd6619e59 100644
--- a/src/tools/cargo/src/cargo/sources/source.rs
+++ b/src/tools/cargo/src/cargo/sources/source.rs
@@ -6,7 +6,8 @@ use std::task::Poll;
use crate::core::package::PackageSet;
use crate::core::SourceId;
-use crate::core::{Dependency, Package, PackageId, Summary};
+use crate::core::{Dependency, Package, PackageId};
+use crate::sources::IndexSummary;
use crate::util::{CargoResult, Config};
/// An abstraction of different sources of Cargo packages.
@@ -36,11 +37,11 @@ pub trait Source {
self.source_id()
}
- /// Returns whether or not this source will return [`Summary`] items with
+ /// Returns whether or not this source will return [`IndexSummary`] items with
/// checksums listed.
fn supports_checksums(&self) -> bool;
- /// Returns whether or not this source will return [`Summary`] items with
+ /// Returns whether or not this source will return [`IndexSummary`] items with
/// the `precise` field in the [`SourceId`] listed.
fn requires_precise(&self) -> bool;
@@ -50,17 +51,21 @@ pub trait Source {
/// wait until package information become available. Otherwise any query
/// may return a [`Poll::Pending`].
///
- /// The `f` argument is expected to get called when any [`Summary`] becomes available.
+ /// The `f` argument is expected to get called when any [`IndexSummary`] becomes available.
fn query(
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>>;
- /// Gathers the result from [`Source::query`] as a list of [`Summary`] items
+ /// Gathers the result from [`Source::query`] as a list of [`IndexSummary`] items
/// when they become available.
- fn query_vec(&mut self, dep: &Dependency, kind: QueryKind) -> Poll<CargoResult<Vec<Summary>>> {
+ fn query_vec(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ ) -> Poll<CargoResult<Vec<IndexSummary>>> {
let mut ret = Vec::new();
self.query(dep, kind, &mut |s| ret.push(s)).map_ok(|_| ret)
}
@@ -215,7 +220,7 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
(**self).query(dep, kind, f)
}
@@ -287,7 +292,7 @@ impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
&mut self,
dep: &Dependency,
kind: QueryKind,
- f: &mut dyn FnMut(Summary),
+ f: &mut dyn FnMut(IndexSummary),
) -> Poll<CargoResult<()>> {
(**self).query(dep, kind, f)
}
diff --git a/src/tools/cargo/src/cargo/util/command_prelude.rs b/src/tools/cargo/src/cargo/util/command_prelude.rs
index 3888b80c4..24c8c098d 100644
--- a/src/tools/cargo/src/cargo/util/command_prelude.rs
+++ b/src/tools/cargo/src/cargo/util/command_prelude.rs
@@ -7,11 +7,13 @@ use crate::util::important_paths::find_root_manifest_for_wd;
use crate::util::interning::InternedString;
use crate::util::is_rustup;
use crate::util::restricted_names;
-use crate::util::toml::schema::StringOrVec;
use crate::util::{
print_available_benches, print_available_binaries, print_available_examples,
print_available_packages, print_available_tests,
};
+use crate::util_schemas::manifest::ProfileName;
+use crate::util_schemas::manifest::RegistryName;
+use crate::util_schemas::manifest::StringOrVec;
use crate::CargoResult;
use anyhow::bail;
use cargo_util::paths;
@@ -363,20 +365,19 @@ pub trait CommandExt: Sized {
))
}
- fn arg_quiet(self) -> Self {
- let unsupported_silent_arg = {
- let value_parser = UnknownArgumentValueParser::suggest_arg("--quiet");
+ /// Adds a suggestion for the `--silent` or `-s` flags to use the
+ /// `--quiet` flag instead. This is to help with people familiar with
+ /// other tools that use `-s`.
+ ///
+ /// Every command should call this, unless it has its own `-s` short flag.
+ fn arg_silent_suggestion(self) -> Self {
+ let value_parser = UnknownArgumentValueParser::suggest_arg("--quiet");
+ self._arg(
flag("silent", "")
.short('s')
.value_parser(value_parser)
- .hide(true)
- };
- self.arg_quiet_without_unknown_silent_arg_tip()
- ._arg(unsupported_silent_arg)
- }
-
- fn arg_quiet_without_unknown_silent_arg_tip(self) -> Self {
- self._arg(flag("quiet", "Do not print cargo log messages").short('q'))
+ .hide(true),
+ )
}
fn arg_timings(self) -> Self {
@@ -606,7 +607,7 @@ Run `{cmd}` to see possible targets."
bail!("profile `doc` is reserved and not allowed to be explicitly specified")
}
(_, _, Some(name)) => {
- restricted_names::validate_profile_name(name)?;
+ ProfileName::new(name)?;
name
}
};
@@ -834,7 +835,7 @@ Run `{cmd}` to see possible targets."
(None, None) => config.default_registry()?.map(RegistryOrIndex::Registry),
(None, Some(i)) => Some(RegistryOrIndex::Index(i.into_url()?)),
(Some(r), None) => {
- restricted_names::validate_package_name(r, "registry name", "")?;
+ RegistryName::new(r)?;
Some(RegistryOrIndex::Registry(r.to_string()))
}
(Some(_), Some(_)) => {
@@ -849,7 +850,7 @@ Run `{cmd}` to see possible targets."
match self._value_of("registry").map(|s| s.to_string()) {
None => config.default_registry(),
Some(registry) => {
- restricted_names::validate_package_name(&registry, "registry name", "")?;
+ RegistryName::new(&registry)?;
Ok(Some(registry))
}
}
diff --git a/src/tools/cargo/src/cargo/util/config/mod.rs b/src/tools/cargo/src/cargo/util/config/mod.rs
index 50153466b..1c1b949a7 100644
--- a/src/tools/cargo/src/cargo/util/config/mod.rs
+++ b/src/tools/cargo/src/cargo/util/config/mod.rs
@@ -68,6 +68,7 @@ use std::time::Instant;
use self::ConfigValue as CV;
use crate::core::compiler::rustdoc::RustdocExternMap;
+use crate::core::global_cache_tracker::{DeferredGlobalLastUse, GlobalCacheTracker};
use crate::core::shell::Verbosity;
use crate::core::{features, CliUnstable, Shell, SourceId, Workspace, WorkspaceRootConfig};
use crate::ops::RegistryCredentialConfig;
@@ -76,9 +77,10 @@ use crate::sources::CRATES_IO_REGISTRY;
use crate::util::errors::CargoResult;
use crate::util::network::http::configure_http_handle;
use crate::util::network::http::http_handle;
+use crate::util::try_canonicalize;
use crate::util::{internal, CanonicalUrl};
-use crate::util::{try_canonicalize, validate_package_name};
use crate::util::{Filesystem, IntoUrl, IntoUrlWithBase, Rustc};
+use crate::util_schemas::manifest::RegistryName;
use anyhow::{anyhow, bail, format_err, Context as _};
use cargo_credential::Secret;
use cargo_util::paths;
@@ -244,6 +246,11 @@ pub struct Config {
pub nightly_features_allowed: bool,
/// WorkspaceRootConfigs that have been found
pub ws_roots: RefCell<HashMap<PathBuf, WorkspaceRootConfig>>,
+ /// The global cache tracker is a database used to track disk cache usage.
+ global_cache_tracker: LazyCell<RefCell<GlobalCacheTracker>>,
+ /// A cache of modifications to make to [`Config::global_cache_tracker`],
+ /// saved to disk in a batch to improve performance.
+ deferred_global_last_use: LazyCell<RefCell<DeferredGlobalLastUse>>,
}
impl Config {
@@ -317,6 +324,8 @@ impl Config {
env_config: LazyCell::new(),
nightly_features_allowed: matches!(&*features::channel(), "nightly" | "dev"),
ws_roots: RefCell::new(HashMap::new()),
+ global_cache_tracker: LazyCell::new(),
+ deferred_global_last_use: LazyCell::new(),
}
}
@@ -1195,6 +1204,8 @@ impl Config {
path.display()
);
}
+ tracing::debug!(?path, ?why_load, includes, "load config from file");
+
let contents = fs::read_to_string(path)
.with_context(|| format!("failed to read configuration file `{}`", path.display()))?;
let toml = parse_document(&contents, path, self).with_context(|| {
@@ -1541,7 +1552,7 @@ impl Config {
/// Gets the index for a registry.
pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> {
- validate_package_name(registry, "registry name", "")?;
+ RegistryName::new(registry)?;
if let Some(index) = self.get_string(&format!("registries.{}.index", registry))? {
self.resolve_registry_index(&index).with_context(|| {
format!(
@@ -1919,6 +1930,25 @@ impl Config {
) -> CargoResult<Option<CacheLock<'_>>> {
self.package_cache_lock.try_lock(self, mode)
}
+
+ /// Returns a reference to the shared [`GlobalCacheTracker`].
+ ///
+ /// The package cache lock must be held to call this function (and to use
+ /// it in general).
+ pub fn global_cache_tracker(&self) -> CargoResult<RefMut<'_, GlobalCacheTracker>> {
+ let tracker = self.global_cache_tracker.try_borrow_with(|| {
+ Ok::<_, anyhow::Error>(RefCell::new(GlobalCacheTracker::new(self)?))
+ })?;
+ Ok(tracker.borrow_mut())
+ }
+
+ /// Returns a reference to the shared [`DeferredGlobalLastUse`].
+ pub fn deferred_global_last_use(&self) -> CargoResult<RefMut<'_, DeferredGlobalLastUse>> {
+ let deferred = self.deferred_global_last_use.try_borrow_with(|| {
+ Ok::<_, anyhow::Error>(RefCell::new(DeferredGlobalLastUse::new()))
+ })?;
+ Ok(deferred.borrow_mut())
+ }
}
/// Internal error for serde errors.
diff --git a/src/tools/cargo/src/cargo/util/errors.rs b/src/tools/cargo/src/cargo/util/errors.rs
index 9589e1ae3..5752f2370 100644
--- a/src/tools/cargo/src/cargo/util/errors.rs
+++ b/src/tools/cargo/src/cargo/util/errors.rs
@@ -1,5 +1,3 @@
-#![allow(unknown_lints)]
-
use anyhow::Error;
use curl::easy::Easy;
use std::fmt::{self, Write};
diff --git a/src/tools/cargo/src/cargo/util/graph.rs b/src/tools/cargo/src/cargo/util/graph.rs
index 8c4a593ea..eed3ad4c1 100644
--- a/src/tools/cargo/src/cargo/util/graph.rs
+++ b/src/tools/cargo/src/cargo/util/graph.rs
@@ -128,30 +128,32 @@ impl<'s, N: Eq + Ord + Clone + 's, E: Default + Clone + 's> Graph<N, E> {
{
let mut back_link = BTreeMap::new();
let mut queue = VecDeque::from([pkg]);
- let mut bottom = None;
+ let mut last = pkg;
while let Some(p) = queue.pop_front() {
- bottom = Some(p);
+ last = p;
+ let mut out_edges = true;
for (child, edge) in fn_edge(&self, p) {
- bottom = None;
+ out_edges = false;
back_link.entry(child).or_insert_with(|| {
queue.push_back(child);
(p, edge)
});
}
- if bottom.is_some() {
+ if out_edges {
break;
}
}
let mut result = Vec::new();
- let mut next =
- bottom.expect("the only path was a cycle, no dependency graph has this shape");
+ let mut next = last;
while let Some((p, e)) = back_link.remove(&next) {
result.push((next, Some(e)));
next = p;
}
- result.push((next, None));
+ if result.iter().all(|(n, _)| n != &next) {
+ result.push((next, None));
+ }
result.reverse();
#[cfg(debug_assertions)]
{
@@ -165,8 +167,12 @@ impl<'s, N: Eq + Ord + Clone + 's, E: Default + Clone + 's> Graph<N, E> {
));
}
let last = result.last().unwrap().0;
- // fixme: this may sometimes be wrong when there are cycles.
- if !fn_edge(&self, last).next().is_none() {
+ let set: Vec<_> = result.iter().map(|(k, _)| k).collect();
+ if !fn_edge(&self, last)
+ .filter(|(e, _)| !set.contains(&e))
+ .next()
+ .is_none()
+ {
self.print_for_test();
unreachable!("The last element in the path should not have outgoing edges");
}
@@ -188,6 +194,14 @@ fn path_to_case() {
);
}
+#[test]
+fn path_to_self() {
+ // Extracted from #12941
+ let mut new: Graph<i32, ()> = Graph::new();
+ new.link(0, 0);
+ assert_eq!(new.path_to_bottom(&0), vec![(&0, Some(&()))]);
+}
+
impl<N: Eq + Ord + Clone, E: Default + Clone> Default for Graph<N, E> {
fn default() -> Graph<N, E> {
Graph::new()
diff --git a/src/tools/cargo/src/cargo/util/mod.rs b/src/tools/cargo/src/cargo/util/mod.rs
index fb4c4b39c..e5ecd077f 100644
--- a/src/tools/cargo/src/cargo/util/mod.rs
+++ b/src/tools/cargo/src/cargo/util/mod.rs
@@ -21,9 +21,8 @@ pub(crate) use self::io::LimitErrorReader;
pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted};
pub use self::progress::{Progress, ProgressStyle};
pub use self::queue::Queue;
-pub use self::restricted_names::validate_package_name;
pub use self::rustc::Rustc;
-pub use self::semver_ext::{OptVersionReq, RustVersion};
+pub use self::semver_ext::OptVersionReq;
pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo};
pub use self::workspace::{
add_path_args, path_args, print_available_benches, print_available_binaries,
@@ -62,6 +61,7 @@ mod queue;
pub mod restricted_names;
pub mod rustc;
mod semver_ext;
+pub mod sqlite;
pub mod style;
pub mod toml;
pub mod toml_mut;
diff --git a/src/tools/cargo/src/cargo/util/restricted_names.rs b/src/tools/cargo/src/cargo/util/restricted_names.rs
index f61249775..f047b0d66 100644
--- a/src/tools/cargo/src/cargo/util/restricted_names.rs
+++ b/src/tools/cargo/src/cargo/util/restricted_names.rs
@@ -1,7 +1,5 @@
//! Helpers for validating and checking names like package and crate names.
-use crate::util::CargoResult;
-use anyhow::bail;
use std::path::Path;
/// Returns `true` if the name contains non-ASCII characters.
@@ -36,76 +34,6 @@ pub fn is_conflicting_artifact_name(name: &str) -> bool {
["deps", "examples", "build", "incremental"].contains(&name)
}
-/// Check the base requirements for a package name.
-///
-/// This can be used for other things than package names, to enforce some
-/// level of sanity. Note that package names have other restrictions
-/// elsewhere. `cargo new` has a few restrictions, such as checking for
-/// reserved names. crates.io has even more restrictions.
-pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> {
- let mut chars = name.chars();
- if let Some(ch) = chars.next() {
- if ch.is_digit(10) {
- // A specific error for a potentially common case.
- bail!(
- "the name `{}` cannot be used as a {}, \
- the name cannot start with a digit{}",
- name,
- what,
- help
- );
- }
- if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') {
- bail!(
- "invalid character `{}` in {}: `{}`, \
- the first character must be a Unicode XID start character \
- (most letters or `_`){}",
- ch,
- what,
- name,
- help
- );
- }
- }
- for ch in chars {
- if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-') {
- bail!(
- "invalid character `{}` in {}: `{}`, \
- characters must be Unicode XID characters \
- (numbers, `-`, `_`, or most letters){}",
- ch,
- what,
- name,
- help
- );
- }
- }
- Ok(())
-}
-
-/// Ensure a package name is [valid][validate_package_name]
-pub fn sanitize_package_name(name: &str, placeholder: char) -> String {
- let mut slug = String::new();
- let mut chars = name.chars();
- while let Some(ch) = chars.next() {
- if (unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') && !ch.is_digit(10) {
- slug.push(ch);
- break;
- }
- }
- while let Some(ch) = chars.next() {
- if unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' {
- slug.push(ch);
- } else {
- slug.push(placeholder);
- }
- }
- if slug.is_empty() {
- slug.push_str("package");
- }
- slug
-}
-
/// Check the entire path for names reserved in Windows.
pub fn is_windows_reserved_path(path: &Path) -> bool {
path.iter()
@@ -120,82 +48,3 @@ pub fn is_windows_reserved_path(path: &Path) -> bool {
pub fn is_glob_pattern<T: AsRef<str>>(name: T) -> bool {
name.as_ref().contains(&['*', '?', '[', ']'][..])
}
-
-/// Validate dir-names and profile names according to RFC 2678.
-pub fn validate_profile_name(name: &str) -> CargoResult<()> {
- if let Some(ch) = name
- .chars()
- .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-')
- {
- bail!(
- "invalid character `{}` in profile name `{}`\n\
- Allowed characters are letters, numbers, underscore, and hyphen.",
- ch,
- name
- );
- }
-
- const SEE_DOCS: &str = "See https://doc.rust-lang.org/cargo/reference/profiles.html \
- for more on configuring profiles.";
-
- let lower_name = name.to_lowercase();
- if lower_name == "debug" {
- bail!(
- "profile name `{}` is reserved\n\
- To configure the default development profile, use the name `dev` \
- as in [profile.dev]\n\
- {}",
- name,
- SEE_DOCS
- );
- }
- if lower_name == "build-override" {
- bail!(
- "profile name `{}` is reserved\n\
- To configure build dependency settings, use [profile.dev.build-override] \
- and [profile.release.build-override]\n\
- {}",
- name,
- SEE_DOCS
- );
- }
-
- // These are some arbitrary reservations. We have no plans to use
- // these, but it seems safer to reserve a few just in case we want to
- // add more built-in profiles in the future. We can also uses special
- // syntax like cargo:foo if needed. But it is unlikely these will ever
- // be used.
- if matches!(
- lower_name.as_str(),
- "build"
- | "check"
- | "clean"
- | "config"
- | "fetch"
- | "fix"
- | "install"
- | "metadata"
- | "package"
- | "publish"
- | "report"
- | "root"
- | "run"
- | "rust"
- | "rustc"
- | "rustdoc"
- | "target"
- | "tmp"
- | "uninstall"
- ) || lower_name.starts_with("cargo")
- {
- bail!(
- "profile name `{}` is reserved\n\
- Please choose a different name.\n\
- {}",
- name,
- SEE_DOCS
- );
- }
-
- Ok(())
-}
diff --git a/src/tools/cargo/src/cargo/util/rustc.rs b/src/tools/cargo/src/cargo/util/rustc.rs
index f51580f29..80a9fe802 100644
--- a/src/tools/cargo/src/cargo/util/rustc.rs
+++ b/src/tools/cargo/src/cargo/util/rustc.rs
@@ -83,14 +83,20 @@ impl Rustc {
)
})?;
let commit_hash = extract("commit-hash: ").ok().map(|hash| {
- debug_assert!(
- hash.chars().all(|ch| ch.is_ascii_hexdigit()),
- "commit hash must be a hex string"
- );
- debug_assert!(
- hash.len() == 40 || hash.len() == 64,
- "hex string must be generated from sha1 or sha256"
- );
+            // Possible commit-hash values from rustc are a SHA hex string or "unknown". See:
+ // * https://github.com/rust-lang/rust/blob/531cb83fc/src/bootstrap/src/utils/channel.rs#L73
+ // * https://github.com/rust-lang/rust/blob/531cb83fc/compiler/rustc_driver_impl/src/lib.rs#L911-L913
+ #[cfg(debug_assertions)]
+ if hash != "unknown" {
+ debug_assert!(
+ hash.chars().all(|ch| ch.is_ascii_hexdigit()),
+ "commit hash must be a hex string, got: {hash:?}"
+ );
+ debug_assert!(
+ hash.len() == 40 || hash.len() == 64,
+ "hex string must be generated from sha1 or sha256 (i.e., it must be 40 or 64 characters long)\ngot: {hash:?}"
+ );
+ }
hash.to_string()
});
diff --git a/src/tools/cargo/src/cargo/util/semver_ext.rs b/src/tools/cargo/src/cargo/util/semver_ext.rs
index 561cf140e..854fab6c8 100644
--- a/src/tools/cargo/src/cargo/util/semver_ext.rs
+++ b/src/tools/cargo/src/cargo/util/semver_ext.rs
@@ -1,9 +1,7 @@
use std::fmt::{self, Display};
use semver::{Op, Version, VersionReq};
-use serde_untagged::UntaggedEnumVisitor;
-use crate::util_semver::PartialVersion;
use crate::util_semver::VersionExt as _;
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
@@ -125,48 +123,3 @@ impl From<VersionReq> for OptVersionReq {
OptVersionReq::Req(req)
}
}
-
-#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Debug, serde::Serialize)]
-#[serde(transparent)]
-pub struct RustVersion(PartialVersion);
-
-impl std::ops::Deref for RustVersion {
- type Target = PartialVersion;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-
-impl std::str::FromStr for RustVersion {
- type Err = anyhow::Error;
-
- fn from_str(value: &str) -> Result<Self, Self::Err> {
- let partial = value.parse::<PartialVersion>()?;
- if partial.pre.is_some() {
- anyhow::bail!("unexpected prerelease field, expected a version like \"1.32\"")
- }
- if partial.build.is_some() {
- anyhow::bail!("unexpected prerelease field, expected a version like \"1.32\"")
- }
- Ok(Self(partial))
- }
-}
-
-impl<'de> serde::Deserialize<'de> for RustVersion {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- UntaggedEnumVisitor::new()
- .expecting("SemVer version")
- .string(|value| value.parse().map_err(serde::de::Error::custom))
- .deserialize(deserializer)
- }
-}
-
-impl Display for RustVersion {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
- }
-}
diff --git a/src/tools/cargo/src/cargo/util/sqlite.rs b/src/tools/cargo/src/cargo/util/sqlite.rs
new file mode 100644
index 000000000..b391cc6db
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/sqlite.rs
@@ -0,0 +1,118 @@
+//! Utilities to help with working with sqlite.
+
+use crate::util::interning::InternedString;
+use crate::CargoResult;
+use rusqlite::types::{FromSql, FromSqlError, ToSql, ToSqlOutput};
+use rusqlite::{Connection, TransactionBehavior};
+
+impl FromSql for InternedString {
+ fn column_result(value: rusqlite::types::ValueRef<'_>) -> Result<Self, FromSqlError> {
+ value.as_str().map(InternedString::new)
+ }
+}
+
+impl ToSql for InternedString {
+ fn to_sql(&self) -> Result<ToSqlOutput<'_>, rusqlite::Error> {
+ Ok(ToSqlOutput::from(self.as_str()))
+ }
+}
+
+/// A function or closure representing a database migration.
+///
+/// Migrations support evolving the schema and contents of the database across
+/// new versions of cargo. The [`migrate`] function should be called
+/// immediately after opening a connection to a database in order to configure
+/// the schema. Whether or not a migration has been done is tracked by the
+/// `pragma_user_version` value in the database. Typically you include the
+/// initial `CREATE TABLE` statements in the initial list, but as time goes on
+/// you can add new tables or `ALTER TABLE` statements. The migration code
+/// will only execute statements that haven't previously been run.
+///
+/// Important things to note about how you define migrations:
+///
+/// * Never remove a migration entry from the list. Migrations are tracked by
+/// the index number in the list.
+/// * Never perform any schema modifications that would be backwards
+/// incompatible. For example, don't drop tables or columns.
+///
+/// The [`basic_migration`] function is a convenience function for specifying
+/// migrations that are simple SQL statements. If you need to do something
+/// more complex, then you can specify a closure that takes a [`Connection`]
+/// and does whatever is needed.
+///
+/// For example:
+///
+/// ```rust
+/// # use cargo::util::sqlite::*;
+/// # use rusqlite::Connection;
+/// # let mut conn = Connection::open_in_memory()?;
+/// # fn generate_name() -> String { "example".to_string() };
+/// migrate(
+/// &mut conn,
+/// &[
+/// basic_migration(
+/// "CREATE TABLE foo (
+/// id INTEGER PRIMARY KEY AUTOINCREMENT,
+/// name STRING NOT NULL
+/// )",
+/// ),
+/// Box::new(|conn| {
+/// conn.execute("INSERT INTO foo (name) VALUES (?1)", [generate_name()])?;
+/// Ok(())
+/// }),
+/// basic_migration("ALTER TABLE foo ADD COLUMN size INTEGER"),
+/// ],
+/// )?;
+/// # Ok::<(), anyhow::Error>(())
+/// ```
+pub type Migration = Box<dyn Fn(&Connection) -> CargoResult<()>>;
+
+/// A basic migration that is a single static SQL statement.
+///
+/// See [`Migration`] for more information.
+pub fn basic_migration(stmt: &'static str) -> Migration {
+ Box::new(|conn| {
+ conn.execute(stmt, [])?;
+ Ok(())
+ })
+}
+
+/// Perform one-time SQL migrations.
+///
+/// See [`Migration`] for more information.
+pub fn migrate(conn: &mut Connection, migrations: &[Migration]) -> CargoResult<()> {
+ // EXCLUSIVE ensures that it starts with an exclusive write lock. No other
+ // readers will be allowed. This generally shouldn't be needed if there is
+ // a file lock, but might be helpful in cases where cargo's `FileLock`
+ // failed.
+ let tx = conn.transaction_with_behavior(TransactionBehavior::Exclusive)?;
+ let user_version = tx.query_row("SELECT user_version FROM pragma_user_version", [], |row| {
+ row.get(0)
+ })?;
+ if user_version < migrations.len() {
+ for migration in &migrations[user_version..] {
+ migration(&tx)?;
+ }
+ tx.pragma_update(None, "user_version", &migrations.len())?;
+ }
+ tx.commit()?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn migrate_twice() -> CargoResult<()> {
+ // Check that a second migration will apply.
+ let mut conn = Connection::open_in_memory()?;
+ let mut migrations = vec![basic_migration("CREATE TABLE foo (a, b, c)")];
+ migrate(&mut conn, &migrations)?;
+ conn.execute("INSERT INTO foo VALUES (1,2,3)", [])?;
+ migrations.push(basic_migration("ALTER TABLE foo ADD COLUMN d"));
+ migrate(&mut conn, &migrations)?;
+ conn.execute("INSERT INTO foo VALUES (1,2,3,4)", [])?;
+ Ok(())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/toml/embedded.rs b/src/tools/cargo/src/cargo/util/toml/embedded.rs
index 4c57195d4..cad7abc38 100644
--- a/src/tools/cargo/src/cargo/util/toml/embedded.rs
+++ b/src/tools/cargo/src/cargo/util/toml/embedded.rs
@@ -1,6 +1,7 @@
use anyhow::Context as _;
use crate::util::restricted_names;
+use crate::util_schemas::manifest::PackageName;
use crate::CargoResult;
use crate::Config;
@@ -171,7 +172,7 @@ fn sanitize_name(name: &str) -> String {
'-'
};
- let mut name = restricted_names::sanitize_package_name(name, placeholder);
+ let mut name = PackageName::sanitize(name, placeholder).into_inner();
loop {
if restricted_names::is_keyword(&name) {
diff --git a/src/tools/cargo/src/cargo/util/toml/mod.rs b/src/tools/cargo/src/cargo/util/toml/mod.rs
index cb841476b..8affc69a4 100644
--- a/src/tools/cargo/src/cargo/util/toml/mod.rs
+++ b/src/tools/cargo/src/cargo/util/toml/mod.rs
@@ -23,14 +23,11 @@ use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, Worksp
use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY};
use crate::util::errors::{CargoResult, ManifestError};
use crate::util::interning::InternedString;
-use crate::util::restricted_names;
-use crate::util::{
- self, config::ConfigRelativePath, validate_package_name, Config, IntoUrl, OptVersionReq,
- RustVersion,
-};
+use crate::util::{self, config::ConfigRelativePath, Config, IntoUrl, OptVersionReq};
+use crate::util_schemas::manifest;
+use crate::util_schemas::manifest::RustVersion;
mod embedded;
-pub mod schema;
mod targets;
use self::targets::targets;
@@ -97,7 +94,7 @@ fn read_manifest_from_str(
let mut unused = BTreeSet::new();
let deserializer = toml::de::Deserializer::new(contents);
- let manifest: schema::TomlManifest = serde_ignored::deserialize(deserializer, |path| {
+ let manifest: manifest::TomlManifest = serde_ignored::deserialize(deserializer, |path| {
let mut key = String::new();
stringify(&mut key, &path);
unused.insert(key);
@@ -118,21 +115,16 @@ fn read_manifest_from_str(
{
for (name, dep) in deps {
if dep.is_optional() {
- bail!(
- "{} is optional, but workspace dependencies cannot be optional",
- name
- );
+ bail!("{name} is optional, but workspace dependencies cannot be optional",);
+ }
+ if dep.is_public() {
+ bail!("{name} is public, but workspace dependencies cannot be public",);
}
}
}
return if manifest.project.is_some() || manifest.package.is_some() {
- let (mut manifest, paths) = schema::TomlManifest::to_real_manifest(
- manifest,
- embedded,
- source_id,
- package_root,
- config,
- )?;
+ let (mut manifest, paths) =
+ to_real_manifest(manifest, embedded, source_id, package_root, config)?;
add_unused(manifest.warnings_mut());
if manifest.targets().iter().all(|t| t.is_custom_build()) {
bail!(
@@ -143,8 +135,7 @@ fn read_manifest_from_str(
}
Ok((EitherManifest::Real(manifest), paths))
} else {
- let (mut m, paths) =
- schema::TomlManifest::to_virtual_manifest(manifest, source_id, package_root, config)?;
+ let (mut m, paths) = to_virtual_manifest(manifest, source_id, package_root, config)?;
add_unused(m.warnings_mut());
Ok((EitherManifest::Virtual(m), paths))
};
@@ -184,890 +175,877 @@ fn warn_on_deprecated(new_path: &str, name: &str, kind: &str, warnings: &mut Vec
))
}
-impl schema::TomlManifest {
- /// Prepares the manifest for publishing.
- // - Path and git components of dependency specifications are removed.
- // - License path is updated to point within the package.
- pub fn prepare_for_publish(
- &self,
- ws: &Workspace<'_>,
- package_root: &Path,
- ) -> CargoResult<schema::TomlManifest> {
- let config = ws.config();
- let mut package = self
- .package
- .as_ref()
- .or_else(|| self.project.as_ref())
- .unwrap()
- .clone();
- package.workspace = None;
- let current_resolver = package
- .resolver
- .as_ref()
- .map(|r| ResolveBehavior::from_manifest(r))
- .unwrap_or_else(|| {
- package
- .edition
- .as_ref()
- .and_then(|e| e.as_defined())
- .map(|e| Edition::from_str(e))
- .unwrap_or(Ok(Edition::Edition2015))
- .map(|e| e.default_resolve_behavior())
- })?;
- if ws.resolve_behavior() != current_resolver {
- // This ensures the published crate if built as a root (e.g. `cargo install`) will
- // use the same resolver behavior it was tested with in the workspace.
- // To avoid forcing a higher MSRV we don't explicitly set this if it would implicitly
- // result in the same thing.
- package.resolver = Some(ws.resolve_behavior().to_manifest());
- }
- if let Some(license_file) = &package.license_file {
- let license_file = license_file
- .as_defined()
- .context("license file should have been resolved before `prepare_for_publish()`")?;
- let license_path = Path::new(&license_file);
- let abs_license_path = paths::normalize_path(&package_root.join(license_path));
- if abs_license_path.strip_prefix(package_root).is_err() {
- // This path points outside of the package root. `cargo package`
- // will copy it into the root, so adjust the path to this location.
- package.license_file = Some(schema::MaybeWorkspace::Defined(
- license_path
- .file_name()
- .unwrap()
- .to_str()
- .unwrap()
- .to_string(),
- ));
- }
+/// Prepares the manifest for publishing.
+// - Path and git components of dependency specifications are removed.
+// - License path is updated to point within the package.
+pub fn prepare_for_publish(
+ me: &manifest::TomlManifest,
+ ws: &Workspace<'_>,
+ package_root: &Path,
+) -> CargoResult<manifest::TomlManifest> {
+ let config = ws.config();
+ let mut package = me.package().unwrap().clone();
+ package.workspace = None;
+ let current_resolver = package
+ .resolver
+ .as_ref()
+ .map(|r| ResolveBehavior::from_manifest(r))
+ .unwrap_or_else(|| {
+ package
+ .edition
+ .as_ref()
+ .and_then(|e| e.as_value())
+ .map(|e| Edition::from_str(e))
+ .unwrap_or(Ok(Edition::Edition2015))
+ .map(|e| e.default_resolve_behavior())
+ })?;
+ if ws.resolve_behavior() != current_resolver {
+        // This ensures that the published crate, if built as a root (e.g. `cargo install`),
+        // will use the same resolver behavior it was tested with in the workspace.
+        // To avoid forcing a higher MSRV, we don't explicitly set this if it would
+        // implicitly result in the same thing.
+ package.resolver = Some(ws.resolve_behavior().to_manifest());
+ }
+ if let Some(license_file) = &package.license_file {
+ let license_file = license_file
+ .as_value()
+ .context("license file should have been resolved before `prepare_for_publish()`")?;
+ let license_path = Path::new(&license_file);
+ let abs_license_path = paths::normalize_path(&package_root.join(license_path));
+ if abs_license_path.strip_prefix(package_root).is_err() {
+ // This path points outside of the package root. `cargo package`
+ // will copy it into the root, so adjust the path to this location.
+ package.license_file = Some(manifest::InheritableField::Value(
+ license_path
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .to_string(),
+ ));
}
+ }
- if let Some(readme) = &package.readme {
- let readme = readme
- .as_defined()
- .context("readme should have been resolved before `prepare_for_publish()`")?;
- match readme {
- schema::StringOrBool::String(readme) => {
- let readme_path = Path::new(&readme);
- let abs_readme_path = paths::normalize_path(&package_root.join(readme_path));
- if abs_readme_path.strip_prefix(package_root).is_err() {
- // This path points outside of the package root. `cargo package`
- // will copy it into the root, so adjust the path to this location.
- package.readme = Some(schema::MaybeWorkspace::Defined(
- schema::StringOrBool::String(
- readme_path
- .file_name()
- .unwrap()
- .to_str()
- .unwrap()
- .to_string(),
- ),
- ));
- }
+ if let Some(readme) = &package.readme {
+ let readme = readme
+ .as_value()
+ .context("readme should have been resolved before `prepare_for_publish()`")?;
+ match readme {
+ manifest::StringOrBool::String(readme) => {
+ let readme_path = Path::new(&readme);
+ let abs_readme_path = paths::normalize_path(&package_root.join(readme_path));
+ if abs_readme_path.strip_prefix(package_root).is_err() {
+ // This path points outside of the package root. `cargo package`
+ // will copy it into the root, so adjust the path to this location.
+ package.readme = Some(manifest::InheritableField::Value(
+ manifest::StringOrBool::String(
+ readme_path
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .to_string(),
+ ),
+ ));
}
- schema::StringOrBool::Bool(_) => {}
}
+ manifest::StringOrBool::Bool(_) => {}
}
- let all = |_d: &schema::TomlDependency| true;
- return Ok(schema::TomlManifest {
- package: Some(package),
- project: None,
- profile: self.profile.clone(),
- lib: self.lib.clone(),
- bin: self.bin.clone(),
- example: self.example.clone(),
- test: self.test.clone(),
- bench: self.bench.clone(),
- dependencies: map_deps(config, self.dependencies.as_ref(), all)?,
- dev_dependencies: map_deps(
- config,
- self.dev_dependencies(),
- schema::TomlDependency::is_version_specified,
- )?,
- dev_dependencies2: None,
- build_dependencies: map_deps(config, self.build_dependencies(), all)?,
- build_dependencies2: None,
- features: self.features.clone(),
- target: match self.target.as_ref().map(|target_map| {
- target_map
- .iter()
- .map(|(k, v)| {
- Ok((
- k.clone(),
- schema::TomlPlatform {
- dependencies: map_deps(config, v.dependencies.as_ref(), all)?,
- dev_dependencies: map_deps(
- config,
- v.dev_dependencies(),
- schema::TomlDependency::is_version_specified,
- )?,
- dev_dependencies2: None,
- build_dependencies: map_deps(config, v.build_dependencies(), all)?,
- build_dependencies2: None,
- },
- ))
- })
- .collect()
- }) {
- Some(Ok(v)) => Some(v),
- Some(Err(e)) => return Err(e),
- None => None,
- },
- replace: None,
- patch: None,
- workspace: None,
- badges: self.badges.clone(),
- cargo_features: self.cargo_features.clone(),
- lints: self.lints.clone(),
- });
-
- fn map_deps(
- config: &Config,
- deps: Option<&BTreeMap<String, schema::MaybeWorkspaceDependency>>,
- filter: impl Fn(&schema::TomlDependency) -> bool,
- ) -> CargoResult<Option<BTreeMap<String, schema::MaybeWorkspaceDependency>>> {
- let Some(deps) = deps else { return Ok(None) };
- let deps = deps
+ }
+ let all = |_d: &manifest::TomlDependency| true;
+ return Ok(manifest::TomlManifest {
+ package: Some(package),
+ project: None,
+ profile: me.profile.clone(),
+ lib: me.lib.clone(),
+ bin: me.bin.clone(),
+ example: me.example.clone(),
+ test: me.test.clone(),
+ bench: me.bench.clone(),
+ dependencies: map_deps(config, me.dependencies.as_ref(), all)?,
+ dev_dependencies: map_deps(
+ config,
+ me.dev_dependencies(),
+ manifest::TomlDependency::is_version_specified,
+ )?,
+ dev_dependencies2: None,
+ build_dependencies: map_deps(config, me.build_dependencies(), all)?,
+ build_dependencies2: None,
+ features: me.features.clone(),
+ target: match me.target.as_ref().map(|target_map| {
+ target_map
.iter()
- .filter(|(_k, v)| {
- if let schema::MaybeWorkspace::Defined(def) = v {
- filter(def)
- } else {
- false
- }
+ .map(|(k, v)| {
+ Ok((
+ k.clone(),
+ manifest::TomlPlatform {
+ dependencies: map_deps(config, v.dependencies.as_ref(), all)?,
+ dev_dependencies: map_deps(
+ config,
+ v.dev_dependencies(),
+ manifest::TomlDependency::is_version_specified,
+ )?,
+ dev_dependencies2: None,
+ build_dependencies: map_deps(config, v.build_dependencies(), all)?,
+ build_dependencies2: None,
+ },
+ ))
})
- .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?)))
- .collect::<CargoResult<BTreeMap<_, _>>>()?;
- Ok(Some(deps))
- }
-
- fn map_dependency(
- config: &Config,
- dep: &schema::MaybeWorkspaceDependency,
- ) -> CargoResult<schema::MaybeWorkspaceDependency> {
- let dep = match dep {
- schema::MaybeWorkspace::Defined(schema::TomlDependency::Detailed(d)) => {
- let mut d = d.clone();
- // Path dependencies become crates.io deps.
- d.path.take();
- // Same with git dependencies.
- d.git.take();
- d.branch.take();
- d.tag.take();
- d.rev.take();
- // registry specifications are elaborated to the index URL
- if let Some(registry) = d.registry.take() {
- d.registry_index = Some(config.get_registry_index(&registry)?.to_string());
- }
- Ok(d)
- }
- schema::MaybeWorkspace::Defined(schema::TomlDependency::Simple(s)) => {
- Ok(schema::DetailedTomlDependency {
- version: Some(s.clone()),
- ..Default::default()
- })
+ .collect()
+ }) {
+ Some(Ok(v)) => Some(v),
+ Some(Err(e)) => return Err(e),
+ None => None,
+ },
+ replace: None,
+ patch: None,
+ workspace: None,
+ badges: me.badges.clone(),
+ cargo_features: me.cargo_features.clone(),
+ lints: me.lints.clone(),
+ });
+
+ fn map_deps(
+ config: &Config,
+ deps: Option<&BTreeMap<manifest::PackageName, manifest::InheritableDependency>>,
+ filter: impl Fn(&manifest::TomlDependency) -> bool,
+ ) -> CargoResult<Option<BTreeMap<manifest::PackageName, manifest::InheritableDependency>>> {
+ let Some(deps) = deps else { return Ok(None) };
+ let deps = deps
+ .iter()
+ .filter(|(_k, v)| {
+ if let manifest::InheritableDependency::Value(def) = v {
+ filter(def)
+ } else {
+ false
}
- _ => unreachable!(),
- };
- dep.map(schema::TomlDependency::Detailed)
- .map(schema::MaybeWorkspace::Defined)
- }
+ })
+ .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?)))
+ .collect::<CargoResult<BTreeMap<_, _>>>()?;
+ Ok(Some(deps))
}
- pub fn to_real_manifest(
- me: schema::TomlManifest,
- embedded: bool,
- source_id: SourceId,
- package_root: &Path,
+ fn map_dependency(
config: &Config,
- ) -> CargoResult<(Manifest, Vec<PathBuf>)> {
- fn get_ws(
- config: &Config,
- resolved_path: &Path,
- workspace_config: &WorkspaceConfig,
- ) -> CargoResult<schema::InheritableFields> {
- match workspace_config {
- WorkspaceConfig::Root(root) => Ok(root.inheritable().clone()),
- WorkspaceConfig::Member {
- root: Some(ref path_to_root),
- } => {
- let path = resolved_path
- .parent()
- .unwrap()
- .join(path_to_root)
- .join("Cargo.toml");
- let root_path = paths::normalize_path(&path);
- inheritable_from_path(config, root_path)
+ dep: &manifest::InheritableDependency,
+ ) -> CargoResult<manifest::InheritableDependency> {
+ let dep = match dep {
+ manifest::InheritableDependency::Value(manifest::TomlDependency::Detailed(d)) => {
+ let mut d = d.clone();
+ // Path dependencies become crates.io deps.
+ d.path.take();
+ // Same with git dependencies.
+ d.git.take();
+ d.branch.take();
+ d.tag.take();
+ d.rev.take();
+ // registry specifications are elaborated to the index URL
+ if let Some(registry) = d.registry.take() {
+ d.registry_index = Some(config.get_registry_index(&registry)?.to_string());
}
- WorkspaceConfig::Member { root: None } => {
- match find_workspace_root(&resolved_path, config)? {
- Some(path_to_root) => inheritable_from_path(config, path_to_root),
- None => Err(anyhow!("failed to find a workspace root")),
- }
+ Ok(d)
+ }
+ manifest::InheritableDependency::Value(manifest::TomlDependency::Simple(s)) => {
+ Ok(manifest::TomlDetailedDependency {
+ version: Some(s.clone()),
+ ..Default::default()
+ })
+ }
+ _ => unreachable!(),
+ };
+ dep.map(manifest::TomlDependency::Detailed)
+ .map(manifest::InheritableDependency::Value)
+ }
+}
+
+pub fn to_real_manifest(
+ me: manifest::TomlManifest,
+ embedded: bool,
+ source_id: SourceId,
+ package_root: &Path,
+ config: &Config,
+) -> CargoResult<(Manifest, Vec<PathBuf>)> {
+ fn get_ws(
+ config: &Config,
+ resolved_path: &Path,
+ workspace_config: &WorkspaceConfig,
+ ) -> CargoResult<InheritableFields> {
+ match workspace_config {
+ WorkspaceConfig::Root(root) => Ok(root.inheritable().clone()),
+ WorkspaceConfig::Member {
+ root: Some(ref path_to_root),
+ } => {
+ let path = resolved_path
+ .parent()
+ .unwrap()
+ .join(path_to_root)
+ .join("Cargo.toml");
+ let root_path = paths::normalize_path(&path);
+ inheritable_from_path(config, root_path)
+ }
+ WorkspaceConfig::Member { root: None } => {
+ match find_workspace_root(&resolved_path, config)? {
+ Some(path_to_root) => inheritable_from_path(config, path_to_root),
+ None => Err(anyhow!("failed to find a workspace root")),
}
}
}
+ }
- if !package_root.is_dir() {
- bail!(
- "package root '{}' is not a directory",
- package_root.display()
- );
- };
+ if !package_root.is_dir() {
+ bail!(
+ "package root '{}' is not a directory",
+ package_root.display()
+ );
+ };
- let mut nested_paths = vec![];
- let mut warnings = vec![];
- let mut errors = vec![];
+ let mut nested_paths = vec![];
+ let mut warnings = vec![];
+ let mut errors = vec![];
- // Parse features first so they will be available when parsing other parts of the TOML.
- let empty = Vec::new();
- let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
- let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?;
+ // Parse features first so they will be available when parsing other parts of the TOML.
+ let empty = Vec::new();
+ let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
+ let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?;
- let mut package = match (&me.package, &me.project) {
- (Some(_), Some(project)) => {
- if source_id.is_path() {
- config.shell().warn(format!(
- "manifest at `{}` contains both `project` and `package`, \
+ let mut package = match (&me.package, &me.project) {
+ (Some(_), Some(project)) => {
+ if source_id.is_path() {
+ config.shell().warn(format!(
+ "manifest at `{}` contains both `project` and `package`, \
this could become a hard error in the future",
- package_root.display()
- ))?;
- }
- project.clone()
+ package_root.display()
+ ))?;
}
- (Some(package), None) => package.clone(),
- (None, Some(project)) => {
- if source_id.is_path() {
- config.shell().warn(format!(
- "manifest at `{}` contains `[project]` instead of `[package]`, \
+ project.clone()
+ }
+ (Some(package), None) => package.clone(),
+ (None, Some(project)) => {
+ if source_id.is_path() {
+ config.shell().warn(format!(
+ "manifest at `{}` contains `[project]` instead of `[package]`, \
this could become a hard error in the future",
- package_root.display()
- ))?;
- }
- project.clone()
+ package_root.display()
+ ))?;
}
- (None, None) => bail!("no `package` section found"),
- };
+ project.clone()
+ }
+ (None, None) => bail!("no `package` section found"),
+ };
- let workspace_config = match (me.workspace.as_ref(), package.workspace.as_ref()) {
- (Some(toml_config), None) => {
- let mut inheritable = toml_config.package.clone().unwrap_or_default();
- inheritable.update_ws_path(package_root.to_path_buf());
- inheritable.update_deps(toml_config.dependencies.clone());
- let lints = toml_config.lints.clone();
- let lints = verify_lints(lints)?;
- inheritable.update_lints(lints);
- if let Some(ws_deps) = &inheritable.dependencies {
- for (name, dep) in ws_deps {
- unused_dep_keys(
- name,
- "workspace.dependencies",
- dep.unused_keys(),
- &mut warnings,
- );
- }
+ let workspace_config = match (me.workspace.as_ref(), package.workspace.as_ref()) {
+ (Some(toml_config), None) => {
+ let lints = toml_config.lints.clone();
+ let lints = verify_lints(lints)?;
+ let inheritable = InheritableFields {
+ package: toml_config.package.clone(),
+ dependencies: toml_config.dependencies.clone(),
+ lints,
+ _ws_root: package_root.to_path_buf(),
+ };
+ if let Some(ws_deps) = &inheritable.dependencies {
+ for (name, dep) in ws_deps {
+ unused_dep_keys(
+ name,
+ "workspace.dependencies",
+ dep.unused_keys(),
+ &mut warnings,
+ );
}
- let ws_root_config = WorkspaceRootConfig::new(
- package_root,
- &toml_config.members,
- &toml_config.default_members,
- &toml_config.exclude,
- &Some(inheritable),
- &toml_config.metadata,
- );
- config
- .ws_roots
- .borrow_mut()
- .insert(package_root.to_path_buf(), ws_root_config.clone());
- WorkspaceConfig::Root(ws_root_config)
}
- (None, root) => WorkspaceConfig::Member {
- root: root.cloned(),
- },
- (Some(..), Some(..)) => bail!(
- "cannot configure both `package.workspace` and \
- `[workspace]`, only one can be specified"
- ),
- };
-
- let package_name = package.name.trim();
- if package_name.is_empty() {
- bail!("package name cannot be an empty string")
+ let ws_root_config = WorkspaceRootConfig::new(
+ package_root,
+ &toml_config.members,
+ &toml_config.default_members,
+ &toml_config.exclude,
+ &Some(inheritable),
+ &toml_config.metadata,
+ );
+ config
+ .ws_roots
+ .borrow_mut()
+ .insert(package_root.to_path_buf(), ws_root_config.clone());
+ WorkspaceConfig::Root(ws_root_config)
}
+ (None, root) => WorkspaceConfig::Member {
+ root: root.cloned(),
+ },
+ (Some(..), Some(..)) => bail!(
+ "cannot configure both `package.workspace` and \
+ `[workspace]`, only one can be specified"
+ ),
+ };
- validate_package_name(package_name, "package name", "")?;
+ let package_name = package.name.trim();
- let resolved_path = package_root.join("Cargo.toml");
+ let resolved_path = package_root.join("Cargo.toml");
- let inherit_cell: LazyCell<schema::InheritableFields> = LazyCell::new();
- let inherit =
- || inherit_cell.try_borrow_with(|| get_ws(config, &resolved_path, &workspace_config));
+ let inherit_cell: LazyCell<InheritableFields> = LazyCell::new();
+ let inherit =
+ || inherit_cell.try_borrow_with(|| get_ws(config, &resolved_path, &workspace_config));
- let version = package
- .version
- .clone()
- .map(|version| version.resolve("version", || inherit()?.version()))
- .transpose()?;
+ let version = package
+ .version
+ .clone()
+ .map(|version| field_inherit_with(version, "version", || inherit()?.version()))
+ .transpose()?;
- package.version = version.clone().map(schema::MaybeWorkspace::Defined);
+ package.version = version.clone().map(manifest::InheritableField::Value);
- let pkgid = package.to_package_id(
- source_id,
- version
- .clone()
- .unwrap_or_else(|| semver::Version::new(0, 0, 0)),
- );
+ let pkgid = PackageId::new(
+ package.name.as_str().into(),
+ version
+ .clone()
+ .unwrap_or_else(|| semver::Version::new(0, 0, 0)),
+ source_id,
+ );
- let edition = if let Some(edition) = package.edition.clone() {
- let edition: Edition = edition
- .resolve("edition", || inherit()?.edition())?
- .parse()
- .with_context(|| "failed to parse the `edition` key")?;
- package.edition = Some(schema::MaybeWorkspace::Defined(edition.to_string()));
+ let edition = if let Some(edition) = package.edition.clone() {
+ let edition: Edition = field_inherit_with(edition, "edition", || inherit()?.edition())?
+ .parse()
+ .with_context(|| "failed to parse the `edition` key")?;
+ package.edition = Some(manifest::InheritableField::Value(edition.to_string()));
+ edition
+ } else {
+ Edition::Edition2015
+ };
+ // Add these lines if start a new unstable edition.
+ // ```
+ // if edition == Edition::Edition20xx {
+ // features.require(Feature::edition20xx())?;
+ // }
+ // ```
+ if edition == Edition::Edition2024 {
+ features.require(Feature::edition2024())?;
+ } else if !edition.is_stable() {
+ // Guard in case someone forgets to add .require()
+ return Err(util::errors::internal(format!(
+ "edition {} should be gated",
edition
- } else {
- Edition::Edition2015
- };
- // Add these lines if start a new unstable edition.
- // ```
- // if edition == Edition::Edition20xx {
- // features.require(Feature::edition20xx())?;
- // }
- // ```
- if edition == Edition::Edition2024 {
- features.require(Feature::edition2024())?;
- } else if !edition.is_stable() {
- // Guard in case someone forgets to add .require()
- return Err(util::errors::internal(format!(
- "edition {} should be gated",
- edition
- )));
- }
+ )));
+ }
- let rust_version = if let Some(rust_version) = &package.rust_version {
- let rust_version = rust_version
- .clone()
- .resolve("rust_version", || inherit()?.rust_version())?;
- let req = rust_version.to_caret_req();
- if let Some(first_version) = edition.first_version() {
- let unsupported =
- semver::Version::new(first_version.major, first_version.minor - 1, 9999);
- if req.matches(&unsupported) {
- bail!(
- "rust-version {} is older than first version ({}) required by \
+ let rust_version = if let Some(rust_version) = &package.rust_version {
+ let rust_version = field_inherit_with(rust_version.clone(), "rust_version", || {
+ inherit()?.rust_version()
+ })?;
+ let req = rust_version.to_caret_req();
+ if let Some(first_version) = edition.first_version() {
+ let unsupported =
+ semver::Version::new(first_version.major, first_version.minor - 1, 9999);
+ if req.matches(&unsupported) {
+ bail!(
+ "rust-version {} is older than first version ({}) required by \
the specified edition ({})",
- rust_version,
- first_version,
- edition,
- )
- }
+ rust_version,
+ first_version,
+ edition,
+ )
}
- Some(rust_version)
- } else {
- None
- };
-
- if package.metabuild.is_some() {
- features.require(Feature::metabuild())?;
}
+ Some(rust_version)
+ } else {
+ None
+ };
- let resolve_behavior = match (
- package.resolver.as_ref(),
- me.workspace.as_ref().and_then(|ws| ws.resolver.as_ref()),
- ) {
- (None, None) => None,
- (Some(s), None) | (None, Some(s)) => Some(ResolveBehavior::from_manifest(s)?),
- (Some(_), Some(_)) => {
- bail!("cannot specify `resolver` field in both `[workspace]` and `[package]`")
- }
- };
-
- // If we have no lib at all, use the inferred lib, if available.
- // If we have a lib with a path, we're done.
- // If we have a lib with no path, use the inferred lib or else the package name.
- let targets = targets(
- &features,
- &me,
- package_name,
- package_root,
- edition,
- &package.build,
- &package.metabuild,
- &mut warnings,
- &mut errors,
- )?;
+ if package.metabuild.is_some() {
+ features.require(Feature::metabuild())?;
+ }
- if targets.is_empty() {
- debug!("manifest has no build targets");
+ let resolve_behavior = match (
+ package.resolver.as_ref(),
+ me.workspace.as_ref().and_then(|ws| ws.resolver.as_ref()),
+ ) {
+ (None, None) => None,
+ (Some(s), None) | (None, Some(s)) => Some(ResolveBehavior::from_manifest(s)?),
+ (Some(_), Some(_)) => {
+ bail!("cannot specify `resolver` field in both `[workspace]` and `[package]`")
}
+ };
- if let Err(conflict_targets) = unique_build_targets(&targets, package_root) {
- conflict_targets
- .iter()
- .for_each(|(target_path, conflicts)| {
- warnings.push(format!(
- "file `{}` found to be present in multiple \
+ // If we have no lib at all, use the inferred lib, if available.
+ // If we have a lib with a path, we're done.
+ // If we have a lib with no path, use the inferred lib or else the package name.
+ let targets = targets(
+ &features,
+ &me,
+ package_name,
+ package_root,
+ edition,
+ &package.build,
+ &package.metabuild,
+ &mut warnings,
+ &mut errors,
+ )?;
+
+ if targets.is_empty() {
+ debug!("manifest has no build targets");
+ }
+
+ if let Err(conflict_targets) = unique_build_targets(&targets, package_root) {
+ conflict_targets
+ .iter()
+ .for_each(|(target_path, conflicts)| {
+ warnings.push(format!(
+ "file `{}` found to be present in multiple \
build targets:\n{}",
- target_path.display().to_string(),
- conflicts
- .iter()
- .map(|t| format!(
- " * `{}` target `{}`",
- t.kind().description(),
- t.name(),
- ))
- .join("\n")
- ));
- })
- }
+ target_path.display().to_string(),
+ conflicts
+ .iter()
+ .map(|t| format!(" * `{}` target `{}`", t.kind().description(), t.name(),))
+ .join("\n")
+ ));
+ })
+ }
- if let Some(links) = &package.links {
- if !targets.iter().any(|t| t.is_custom_build()) {
- bail!(
- "package `{}` specifies that it links to `{}` but does not \
+ if let Some(links) = &package.links {
+ if !targets.iter().any(|t| t.is_custom_build()) {
+ bail!(
+ "package `{}` specifies that it links to `{}` but does not \
have a custom build script",
- pkgid,
- links
- )
- }
+ pkgid,
+ links
+ )
}
+ }
- let mut deps = Vec::new();
+ let mut deps = Vec::new();
+
+ let mut cx = Context {
+ deps: &mut deps,
+ source_id,
+ nested_paths: &mut nested_paths,
+ config,
+ warnings: &mut warnings,
+ features: &features,
+ platform: None,
+ root: package_root,
+ };
- let mut cx = Context {
- deps: &mut deps,
- source_id,
- nested_paths: &mut nested_paths,
- config,
- warnings: &mut warnings,
- features: &features,
- platform: None,
- root: package_root,
+ fn process_dependencies(
+ cx: &mut Context<'_, '_>,
+ new_deps: Option<&BTreeMap<manifest::PackageName, manifest::InheritableDependency>>,
+ kind: Option<DepKind>,
+ workspace_config: &WorkspaceConfig,
+ inherit_cell: &LazyCell<InheritableFields>,
+ ) -> CargoResult<Option<BTreeMap<manifest::PackageName, manifest::InheritableDependency>>> {
+ let Some(dependencies) = new_deps else {
+ return Ok(None);
};
- fn process_dependencies(
- cx: &mut Context<'_, '_>,
- new_deps: Option<&BTreeMap<String, schema::MaybeWorkspaceDependency>>,
- kind: Option<DepKind>,
- workspace_config: &WorkspaceConfig,
- inherit_cell: &LazyCell<schema::InheritableFields>,
- ) -> CargoResult<Option<BTreeMap<String, schema::MaybeWorkspaceDependency>>> {
- let Some(dependencies) = new_deps else {
- return Ok(None);
- };
+ let inheritable = || {
+ inherit_cell.try_borrow_with(|| {
+ get_ws(cx.config, &cx.root.join("Cargo.toml"), &workspace_config)
+ })
+ };
- let inheritable = || {
- inherit_cell.try_borrow_with(|| {
- get_ws(cx.config, &cx.root.join("Cargo.toml"), &workspace_config)
- })
+ let mut deps: BTreeMap<manifest::PackageName, manifest::InheritableDependency> =
+ BTreeMap::new();
+ for (n, v) in dependencies.iter() {
+ let resolved = dependency_inherit_with(v.clone(), n, inheritable, cx)?;
+ let dep = dep_to_dependency(&resolved, n, cx, kind)?;
+ let name_in_toml = dep.name_in_toml().as_str();
+ let kind_name = match kind {
+ Some(k) => k.kind_table(),
+ None => "dependencies",
};
-
- let mut deps: BTreeMap<String, schema::MaybeWorkspaceDependency> = BTreeMap::new();
- for (n, v) in dependencies.iter() {
- let resolved = v
- .clone()
- .resolve_with_self(n, |dep| dep.resolve(n, inheritable, cx))?;
- let dep = resolved.to_dependency(n, cx, kind)?;
- let name_in_toml = dep.name_in_toml().as_str();
- validate_package_name(name_in_toml, "dependency name", "")?;
- let kind_name = match kind {
- Some(k) => k.kind_table(),
- None => "dependencies",
- };
- let table_in_toml = if let Some(platform) = &cx.platform {
- format!("target.{}.{kind_name}", platform.to_string())
- } else {
- kind_name.to_string()
- };
- unused_dep_keys(name_in_toml, &table_in_toml, v.unused_keys(), cx.warnings);
- cx.deps.push(dep);
- deps.insert(
- n.to_string(),
- schema::MaybeWorkspace::Defined(resolved.clone()),
- );
- }
- Ok(Some(deps))
+ let table_in_toml = if let Some(platform) = &cx.platform {
+ format!("target.{}.{kind_name}", platform.to_string())
+ } else {
+ kind_name.to_string()
+ };
+ unused_dep_keys(name_in_toml, &table_in_toml, v.unused_keys(), cx.warnings);
+ cx.deps.push(dep);
+ deps.insert(
+ n.clone(),
+ manifest::InheritableDependency::Value(resolved.clone()),
+ );
}
+ Ok(Some(deps))
+ }
- // Collect the dependencies.
- let dependencies = process_dependencies(
+ // Collect the dependencies.
+ let dependencies = process_dependencies(
+ &mut cx,
+ me.dependencies.as_ref(),
+ None,
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ if me.dev_dependencies.is_some() && me.dev_dependencies2.is_some() {
+ warn_on_deprecated("dev-dependencies", package_name, "package", cx.warnings);
+ }
+ let dev_deps = me.dev_dependencies();
+ let dev_deps = process_dependencies(
+ &mut cx,
+ dev_deps,
+ Some(DepKind::Development),
+ &workspace_config,
+ &inherit_cell,
+ )?;
+ if me.build_dependencies.is_some() && me.build_dependencies2.is_some() {
+ warn_on_deprecated("build-dependencies", package_name, "package", cx.warnings);
+ }
+ let build_deps = me.build_dependencies();
+ let build_deps = process_dependencies(
+ &mut cx,
+ build_deps,
+ Some(DepKind::Build),
+ &workspace_config,
+ &inherit_cell,
+ )?;
+
+ let lints = me
+ .lints
+ .clone()
+ .map(|mw| lints_inherit_with(mw, || inherit()?.lints()))
+ .transpose()?;
+ let lints = verify_lints(lints)?;
+ let default = manifest::TomlLints::default();
+ let rustflags = lints_to_rustflags(lints.as_ref().unwrap_or(&default));
+
+ let mut target: BTreeMap<String, manifest::TomlPlatform> = BTreeMap::new();
+ for (name, platform) in me.target.iter().flatten() {
+ cx.platform = {
+ let platform: Platform = name.parse()?;
+ platform.check_cfg_attributes(cx.warnings);
+ Some(platform)
+ };
+ let deps = process_dependencies(
&mut cx,
- me.dependencies.as_ref(),
+ platform.dependencies.as_ref(),
None,
&workspace_config,
&inherit_cell,
)?;
- if me.dev_dependencies.is_some() && me.dev_dependencies2.is_some() {
- warn_on_deprecated("dev-dependencies", package_name, "package", cx.warnings);
+ if platform.build_dependencies.is_some() && platform.build_dependencies2.is_some() {
+ warn_on_deprecated("build-dependencies", name, "platform target", cx.warnings);
}
- let dev_deps = me.dev_dependencies();
- let dev_deps = process_dependencies(
+ let build_deps = platform.build_dependencies();
+ let build_deps = process_dependencies(
&mut cx,
- dev_deps,
- Some(DepKind::Development),
+ build_deps,
+ Some(DepKind::Build),
&workspace_config,
&inherit_cell,
)?;
- if me.build_dependencies.is_some() && me.build_dependencies2.is_some() {
- warn_on_deprecated("build-dependencies", package_name, "package", cx.warnings);
+ if platform.dev_dependencies.is_some() && platform.dev_dependencies2.is_some() {
+ warn_on_deprecated("dev-dependencies", name, "platform target", cx.warnings);
}
- let build_deps = me.build_dependencies();
- let build_deps = process_dependencies(
+ let dev_deps = platform.dev_dependencies();
+ let dev_deps = process_dependencies(
&mut cx,
- build_deps,
- Some(DepKind::Build),
+ dev_deps,
+ Some(DepKind::Development),
&workspace_config,
&inherit_cell,
)?;
+ target.insert(
+ name.clone(),
+ manifest::TomlPlatform {
+ dependencies: deps,
+ build_dependencies: build_deps,
+ build_dependencies2: None,
+ dev_dependencies: dev_deps,
+ dev_dependencies2: None,
+ },
+ );
+ }
- let lints = me
- .lints
- .clone()
- .map(|mw| mw.resolve(|| inherit()?.lints()))
- .transpose()?;
- let lints = verify_lints(lints)?;
- let default = schema::TomlLints::default();
- let rustflags = lints_to_rustflags(lints.as_ref().unwrap_or(&default));
-
- let mut target: BTreeMap<String, schema::TomlPlatform> = BTreeMap::new();
- for (name, platform) in me.target.iter().flatten() {
- cx.platform = {
- let platform: Platform = name.parse()?;
- platform.check_cfg_attributes(cx.warnings);
- Some(platform)
- };
- let deps = process_dependencies(
- &mut cx,
- platform.dependencies.as_ref(),
- None,
- &workspace_config,
- &inherit_cell,
- )?;
- if platform.build_dependencies.is_some() && platform.build_dependencies2.is_some() {
- warn_on_deprecated("build-dependencies", name, "platform target", cx.warnings);
- }
- let build_deps = platform.build_dependencies();
- let build_deps = process_dependencies(
- &mut cx,
- build_deps,
- Some(DepKind::Build),
- &workspace_config,
- &inherit_cell,
- )?;
- if platform.dev_dependencies.is_some() && platform.dev_dependencies2.is_some() {
- warn_on_deprecated("dev-dependencies", name, "platform target", cx.warnings);
- }
- let dev_deps = platform.dev_dependencies();
- let dev_deps = process_dependencies(
- &mut cx,
- dev_deps,
- Some(DepKind::Development),
- &workspace_config,
- &inherit_cell,
- )?;
- target.insert(
- name.clone(),
- schema::TomlPlatform {
- dependencies: deps,
- build_dependencies: build_deps,
- build_dependencies2: None,
- dev_dependencies: dev_deps,
- dev_dependencies2: None,
- },
- );
- }
-
- let target = if target.is_empty() {
- None
- } else {
- Some(target)
- };
- let replace = me.replace(&mut cx)?;
- let patch = me.patch(&mut cx)?;
+ let target = if target.is_empty() {
+ None
+ } else {
+ Some(target)
+ };
+ let replace = replace(&me, &mut cx)?;
+ let patch = patch(&me, &mut cx)?;
- {
- let mut names_sources = BTreeMap::new();
- for dep in &deps {
- let name = dep.name_in_toml();
- let prev = names_sources.insert(name, dep.source_id());
- if prev.is_some() && prev != Some(dep.source_id()) {
- bail!(
- "Dependency '{}' has different source paths depending on the build \
+ {
+ let mut names_sources = BTreeMap::new();
+ for dep in &deps {
+ let name = dep.name_in_toml();
+ let prev = names_sources.insert(name, dep.source_id());
+ if prev.is_some() && prev != Some(dep.source_id()) {
+ bail!(
+ "Dependency '{}' has different source paths depending on the build \
target. Each dependency must have a single canonical source path \
irrespective of build target.",
- name
- );
- }
+ name
+ );
}
}
+ }
- let exclude = package
- .exclude
- .clone()
- .map(|mw| mw.resolve("exclude", || inherit()?.exclude()))
- .transpose()?
- .unwrap_or_default();
- let include = package
- .include
- .clone()
- .map(|mw| mw.resolve("include", || inherit()?.include()))
- .transpose()?
- .unwrap_or_default();
- let empty_features = BTreeMap::new();
-
- let summary = Summary::new(
- pkgid,
- deps,
- &me.features
- .as_ref()
- .unwrap_or(&empty_features)
- .iter()
- .map(|(k, v)| {
- (
- InternedString::new(k),
- v.iter().map(InternedString::from).collect(),
- )
- })
- .collect(),
- package.links.as_deref(),
- rust_version.clone(),
- )?;
+ let exclude = package
+ .exclude
+ .clone()
+ .map(|mw| field_inherit_with(mw, "exclude", || inherit()?.exclude()))
+ .transpose()?
+ .unwrap_or_default();
+ let include = package
+ .include
+ .clone()
+ .map(|mw| field_inherit_with(mw, "include", || inherit()?.include()))
+ .transpose()?
+ .unwrap_or_default();
+ let empty_features = BTreeMap::new();
+
+ let summary = Summary::new(
+ pkgid,
+ deps,
+ &me.features
+ .as_ref()
+ .unwrap_or(&empty_features)
+ .iter()
+ .map(|(k, v)| {
+ (
+ InternedString::new(k),
+ v.iter().map(InternedString::from).collect(),
+ )
+ })
+ .collect(),
+ package.links.as_deref(),
+ rust_version.clone(),
+ )?;
- let metadata = ManifestMetadata {
- description: package
- .description
- .clone()
- .map(|mw| mw.resolve("description", || inherit()?.description()))
- .transpose()?,
- homepage: package
- .homepage
- .clone()
- .map(|mw| mw.resolve("homepage", || inherit()?.homepage()))
- .transpose()?,
- documentation: package
- .documentation
- .clone()
- .map(|mw| mw.resolve("documentation", || inherit()?.documentation()))
- .transpose()?,
- readme: readme_for_package(
- package_root,
- package
- .readme
- .clone()
- .map(|mw| mw.resolve("readme", || inherit()?.readme(package_root)))
- .transpose()?
- .as_ref(),
- ),
- authors: package
- .authors
- .clone()
- .map(|mw| mw.resolve("authors", || inherit()?.authors()))
- .transpose()?
- .unwrap_or_default(),
- license: package
- .license
- .clone()
- .map(|mw| mw.resolve("license", || inherit()?.license()))
- .transpose()?,
- license_file: package
- .license_file
- .clone()
- .map(|mw| mw.resolve("license", || inherit()?.license_file(package_root)))
- .transpose()?,
- repository: package
- .repository
- .clone()
- .map(|mw| mw.resolve("repository", || inherit()?.repository()))
- .transpose()?,
- keywords: package
- .keywords
- .clone()
- .map(|mw| mw.resolve("keywords", || inherit()?.keywords()))
- .transpose()?
- .unwrap_or_default(),
- categories: package
- .categories
- .clone()
- .map(|mw| mw.resolve("categories", || inherit()?.categories()))
- .transpose()?
- .unwrap_or_default(),
- badges: me
- .badges
- .clone()
- .map(|mw| mw.resolve("badges", || inherit()?.badges()))
- .transpose()?
- .unwrap_or_default(),
- links: package.links.clone(),
- rust_version: package
- .rust_version
- .map(|mw| mw.resolve("rust-version", || inherit()?.rust_version()))
- .transpose()?,
- };
- package.description = metadata
+ let metadata = ManifestMetadata {
+ description: package
.description
.clone()
- .map(|description| schema::MaybeWorkspace::Defined(description));
- package.homepage = metadata
+ .map(|mw| field_inherit_with(mw, "description", || inherit()?.description()))
+ .transpose()?,
+ homepage: package
.homepage
.clone()
- .map(|homepage| schema::MaybeWorkspace::Defined(homepage));
- package.documentation = metadata
+ .map(|mw| field_inherit_with(mw, "homepage", || inherit()?.homepage()))
+ .transpose()?,
+ documentation: package
.documentation
.clone()
- .map(|documentation| schema::MaybeWorkspace::Defined(documentation));
- package.readme = metadata
- .readme
- .clone()
- .map(|readme| schema::MaybeWorkspace::Defined(schema::StringOrBool::String(readme)));
- package.authors = package
+ .map(|mw| field_inherit_with(mw, "documentation", || inherit()?.documentation()))
+ .transpose()?,
+ readme: readme_for_package(
+ package_root,
+ package
+ .readme
+ .clone()
+ .map(|mw| field_inherit_with(mw, "readme", || inherit()?.readme(package_root)))
+ .transpose()?
+ .as_ref(),
+ ),
+ authors: package
.authors
- .as_ref()
- .map(|_| schema::MaybeWorkspace::Defined(metadata.authors.clone()));
- package.license = metadata
+ .clone()
+ .map(|mw| field_inherit_with(mw, "authors", || inherit()?.authors()))
+ .transpose()?
+ .unwrap_or_default(),
+ license: package
.license
.clone()
- .map(|license| schema::MaybeWorkspace::Defined(license));
- package.license_file = metadata
+ .map(|mw| field_inherit_with(mw, "license", || inherit()?.license()))
+ .transpose()?,
+ license_file: package
.license_file
.clone()
- .map(|license_file| schema::MaybeWorkspace::Defined(license_file));
- package.repository = metadata
+ .map(|mw| field_inherit_with(mw, "license", || inherit()?.license_file(package_root)))
+ .transpose()?,
+ repository: package
.repository
.clone()
- .map(|repository| schema::MaybeWorkspace::Defined(repository));
- package.keywords = package
+ .map(|mw| field_inherit_with(mw, "repository", || inherit()?.repository()))
+ .transpose()?,
+ keywords: package
.keywords
- .as_ref()
- .map(|_| schema::MaybeWorkspace::Defined(metadata.keywords.clone()));
- package.categories = package
+ .clone()
+ .map(|mw| field_inherit_with(mw, "keywords", || inherit()?.keywords()))
+ .transpose()?
+ .unwrap_or_default(),
+ categories: package
.categories
- .as_ref()
- .map(|_| schema::MaybeWorkspace::Defined(metadata.categories.clone()));
- package.rust_version = rust_version
.clone()
- .map(|rv| schema::MaybeWorkspace::Defined(rv));
- package.exclude = package
- .exclude
- .as_ref()
- .map(|_| schema::MaybeWorkspace::Defined(exclude.clone()));
- package.include = package
- .include
- .as_ref()
- .map(|_| schema::MaybeWorkspace::Defined(include.clone()));
+ .map(|mw| field_inherit_with(mw, "categories", || inherit()?.categories()))
+ .transpose()?
+ .unwrap_or_default(),
+ badges: me
+ .badges
+ .clone()
+ .map(|mw| field_inherit_with(mw, "badges", || inherit()?.badges()))
+ .transpose()?
+ .unwrap_or_default(),
+ links: package.links.clone(),
+ rust_version: package
+ .rust_version
+ .map(|mw| field_inherit_with(mw, "rust-version", || inherit()?.rust_version()))
+ .transpose()?,
+ };
+ package.description = metadata
+ .description
+ .clone()
+ .map(|description| manifest::InheritableField::Value(description));
+ package.homepage = metadata
+ .homepage
+ .clone()
+ .map(|homepage| manifest::InheritableField::Value(homepage));
+ package.documentation = metadata
+ .documentation
+ .clone()
+ .map(|documentation| manifest::InheritableField::Value(documentation));
+ package.readme = metadata
+ .readme
+ .clone()
+ .map(|readme| manifest::InheritableField::Value(manifest::StringOrBool::String(readme)));
+ package.authors = package
+ .authors
+ .as_ref()
+ .map(|_| manifest::InheritableField::Value(metadata.authors.clone()));
+ package.license = metadata
+ .license
+ .clone()
+ .map(|license| manifest::InheritableField::Value(license));
+ package.license_file = metadata
+ .license_file
+ .clone()
+ .map(|license_file| manifest::InheritableField::Value(license_file));
+ package.repository = metadata
+ .repository
+ .clone()
+ .map(|repository| manifest::InheritableField::Value(repository));
+ package.keywords = package
+ .keywords
+ .as_ref()
+ .map(|_| manifest::InheritableField::Value(metadata.keywords.clone()));
+ package.categories = package
+ .categories
+ .as_ref()
+ .map(|_| manifest::InheritableField::Value(metadata.categories.clone()));
+ package.rust_version = rust_version
+ .clone()
+ .map(|rv| manifest::InheritableField::Value(rv));
+ package.exclude = package
+ .exclude
+ .as_ref()
+ .map(|_| manifest::InheritableField::Value(exclude.clone()));
+ package.include = package
+ .include
+ .as_ref()
+ .map(|_| manifest::InheritableField::Value(include.clone()));
- let profiles = me.profile.clone();
- if let Some(profiles) = &profiles {
- let cli_unstable = config.cli_unstable();
- profiles.validate(cli_unstable, &features, &mut warnings)?;
- }
+ let profiles = me.profile.clone();
+ if let Some(profiles) = &profiles {
+ let cli_unstable = config.cli_unstable();
+ validate_profiles(profiles, cli_unstable, &features, &mut warnings)?;
+ }
- let publish = package
- .publish
- .clone()
- .map(|publish| publish.resolve("publish", || inherit()?.publish()).unwrap());
+ let publish = package
+ .publish
+ .clone()
+ .map(|publish| field_inherit_with(publish, "publish", || inherit()?.publish()).unwrap());
- package.publish = publish.clone().map(|p| schema::MaybeWorkspace::Defined(p));
+ package.publish = publish
+ .clone()
+ .map(|p| manifest::InheritableField::Value(p));
- let publish = match publish {
- Some(schema::VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()),
- Some(schema::VecStringOrBool::Bool(false)) => Some(vec![]),
- Some(schema::VecStringOrBool::Bool(true)) => None,
- None => version.is_none().then_some(vec![]),
- };
+ let publish = match publish {
+ Some(manifest::VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()),
+ Some(manifest::VecStringOrBool::Bool(false)) => Some(vec![]),
+ Some(manifest::VecStringOrBool::Bool(true)) => None,
+ None => version.is_none().then_some(vec![]),
+ };
- if version.is_none() && publish != Some(vec![]) {
- bail!("`package.publish` requires `package.version` be specified");
- }
+ if version.is_none() && publish != Some(vec![]) {
+ bail!("`package.publish` requires `package.version` be specified");
+ }
- if summary.features().contains_key("default-features") {
- warnings.push(
- "`default-features = [\"..\"]` was found in [features]. \
+ if summary.features().contains_key("default-features") {
+ warnings.push(
+ "`default-features = [\"..\"]` was found in [features]. \
Did you mean to use `default = [\"..\"]`?"
- .to_string(),
- )
- }
+ .to_string(),
+ )
+ }
- if let Some(run) = &package.default_run {
- if !targets
- .iter()
- .filter(|t| t.is_bin())
- .any(|t| t.name() == run)
- {
- let suggestion =
- util::closest_msg(run, targets.iter().filter(|t| t.is_bin()), |t| t.name());
- bail!("default-run target `{}` not found{}", run, suggestion);
- }
+ if let Some(run) = &package.default_run {
+ if !targets
+ .iter()
+ .filter(|t| t.is_bin())
+ .any(|t| t.name() == run)
+ {
+ let suggestion =
+ util::closest_msg(run, targets.iter().filter(|t| t.is_bin()), |t| t.name());
+ bail!("default-run target `{}` not found{}", run, suggestion);
}
+ }
- let default_kind = package
- .default_target
- .as_ref()
- .map(|t| CompileTarget::new(&*t))
- .transpose()?
- .map(CompileKind::Target);
- let forced_kind = package
- .forced_target
+ let default_kind = package
+ .default_target
+ .as_ref()
+ .map(|t| CompileTarget::new(&*t))
+ .transpose()?
+ .map(CompileKind::Target);
+ let forced_kind = package
+ .forced_target
+ .as_ref()
+ .map(|t| CompileTarget::new(&*t))
+ .transpose()?
+ .map(CompileKind::Target);
+ let custom_metadata = package.metadata.clone();
+ let resolved_toml = manifest::TomlManifest {
+ cargo_features: me.cargo_features.clone(),
+ package: Some(package.clone()),
+ project: None,
+ profile: me.profile.clone(),
+ lib: me.lib.clone(),
+ bin: me.bin.clone(),
+ example: me.example.clone(),
+ test: me.test.clone(),
+ bench: me.bench.clone(),
+ dependencies,
+ dev_dependencies: dev_deps,
+ dev_dependencies2: None,
+ build_dependencies: build_deps,
+ build_dependencies2: None,
+ features: me.features.clone(),
+ target,
+ replace: me.replace.clone(),
+ patch: me.patch.clone(),
+ workspace: me.workspace.clone(),
+ badges: me
+ .badges
.as_ref()
- .map(|t| CompileTarget::new(&*t))
- .transpose()?
- .map(CompileKind::Target);
- let custom_metadata = package.metadata.clone();
- let resolved_toml = schema::TomlManifest {
- cargo_features: me.cargo_features.clone(),
- package: Some(package.clone()),
- project: None,
- profile: me.profile.clone(),
- lib: me.lib.clone(),
- bin: me.bin.clone(),
- example: me.example.clone(),
- test: me.test.clone(),
- bench: me.bench.clone(),
- dependencies,
- dev_dependencies: dev_deps,
- dev_dependencies2: None,
- build_dependencies: build_deps,
- build_dependencies2: None,
- features: me.features.clone(),
- target,
- replace: me.replace.clone(),
- patch: me.patch.clone(),
- workspace: me.workspace.clone(),
- badges: me
- .badges
- .as_ref()
- .map(|_| schema::MaybeWorkspace::Defined(metadata.badges.clone())),
- lints: lints.map(|lints| schema::MaybeWorkspaceLints {
- workspace: false,
- lints,
- }),
- };
- let mut manifest = Manifest::new(
- summary,
- default_kind,
- forced_kind,
- targets,
- exclude,
- include,
- package.links.clone(),
- metadata,
- custom_metadata,
- profiles,
- publish,
- replace,
- patch,
- workspace_config,
- features,
- edition,
- rust_version,
- package.im_a_teapot,
- package.default_run.clone(),
- Rc::new(resolved_toml),
- package.metabuild.clone().map(|sov| sov.0),
- resolve_behavior,
- rustflags,
- embedded,
- );
- if package.license_file.is_some() && package.license.is_some() {
- manifest.warnings_mut().add_warning(
+ .map(|_| manifest::InheritableField::Value(metadata.badges.clone())),
+ lints: lints.map(|lints| manifest::InheritableLints {
+ workspace: false,
+ lints,
+ }),
+ };
+ let mut manifest = Manifest::new(
+ summary,
+ default_kind,
+ forced_kind,
+ targets,
+ exclude,
+ include,
+ package.links.clone(),
+ metadata,
+ custom_metadata,
+ profiles,
+ publish,
+ replace,
+ patch,
+ workspace_config,
+ features,
+ edition,
+ rust_version,
+ package.im_a_teapot,
+ package.default_run.clone(),
+ Rc::new(resolved_toml),
+ package.metabuild.clone().map(|sov| sov.0),
+ resolve_behavior,
+ rustflags,
+ embedded,
+ );
+ if package.license_file.is_some() && package.license.is_some() {
+ manifest.warnings_mut().add_warning(
"only one of `license` or `license-file` is necessary\n\
`license` should be used if the package license can be expressed \
with a standard SPDX expression.\n\
@@ -1076,212 +1054,224 @@ impl schema::TomlManifest {
for more information."
.to_string(),
);
- }
- for warning in warnings {
- manifest.warnings_mut().add_warning(warning);
- }
- for error in errors {
- manifest.warnings_mut().add_critical_warning(error);
- }
+ }
+ for warning in warnings {
+ manifest.warnings_mut().add_warning(warning);
+ }
+ for error in errors {
+ manifest.warnings_mut().add_critical_warning(error);
+ }
- manifest.feature_gate()?;
+ manifest.feature_gate()?;
- Ok((manifest, nested_paths))
+ Ok((manifest, nested_paths))
+}
+
+fn to_virtual_manifest(
+ me: manifest::TomlManifest,
+ source_id: SourceId,
+ root: &Path,
+ config: &Config,
+) -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
+ if me.project.is_some() {
+ bail!("this virtual manifest specifies a [project] section, which is not allowed");
+ }
+ if me.package.is_some() {
+ bail!("this virtual manifest specifies a [package] section, which is not allowed");
+ }
+ if me.lib.is_some() {
+ bail!("this virtual manifest specifies a [lib] section, which is not allowed");
+ }
+ if me.bin.is_some() {
+ bail!("this virtual manifest specifies a [[bin]] section, which is not allowed");
+ }
+ if me.example.is_some() {
+ bail!("this virtual manifest specifies a [[example]] section, which is not allowed");
+ }
+ if me.test.is_some() {
+ bail!("this virtual manifest specifies a [[test]] section, which is not allowed");
+ }
+ if me.bench.is_some() {
+ bail!("this virtual manifest specifies a [[bench]] section, which is not allowed");
+ }
+ if me.dependencies.is_some() {
+ bail!("this virtual manifest specifies a [dependencies] section, which is not allowed");
+ }
+ if me.dev_dependencies().is_some() {
+ bail!("this virtual manifest specifies a [dev-dependencies] section, which is not allowed");
+ }
+ if me.build_dependencies().is_some() {
+ bail!(
+ "this virtual manifest specifies a [build-dependencies] section, which is not allowed"
+ );
+ }
+ if me.features.is_some() {
+ bail!("this virtual manifest specifies a [features] section, which is not allowed");
+ }
+ if me.target.is_some() {
+ bail!("this virtual manifest specifies a [target] section, which is not allowed");
+ }
+ if me.badges.is_some() {
+ bail!("this virtual manifest specifies a [badges] section, which is not allowed");
+ }
+ if me.lints.is_some() {
+ bail!("this virtual manifest specifies a [lints] section, which is not allowed");
}
- fn to_virtual_manifest(
- me: schema::TomlManifest,
- source_id: SourceId,
- root: &Path,
- config: &Config,
- ) -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
- if me.project.is_some() {
- bail!("this virtual manifest specifies a [project] section, which is not allowed");
- }
- if me.package.is_some() {
- bail!("this virtual manifest specifies a [package] section, which is not allowed");
- }
- if me.lib.is_some() {
- bail!("this virtual manifest specifies a [lib] section, which is not allowed");
- }
- if me.bin.is_some() {
- bail!("this virtual manifest specifies a [[bin]] section, which is not allowed");
- }
- if me.example.is_some() {
- bail!("this virtual manifest specifies a [[example]] section, which is not allowed");
- }
- if me.test.is_some() {
- bail!("this virtual manifest specifies a [[test]] section, which is not allowed");
- }
- if me.bench.is_some() {
- bail!("this virtual manifest specifies a [[bench]] section, which is not allowed");
- }
- if me.dependencies.is_some() {
- bail!("this virtual manifest specifies a [dependencies] section, which is not allowed");
- }
- if me.dev_dependencies().is_some() {
- bail!("this virtual manifest specifies a [dev-dependencies] section, which is not allowed");
- }
- if me.build_dependencies().is_some() {
- bail!("this virtual manifest specifies a [build-dependencies] section, which is not allowed");
- }
- if me.features.is_some() {
- bail!("this virtual manifest specifies a [features] section, which is not allowed");
- }
- if me.target.is_some() {
- bail!("this virtual manifest specifies a [target] section, which is not allowed");
- }
- if me.badges.is_some() {
- bail!("this virtual manifest specifies a [badges] section, which is not allowed");
- }
+ let mut nested_paths = Vec::new();
+ let mut warnings = Vec::new();
+ let mut deps = Vec::new();
+ let empty = Vec::new();
+ let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
+ let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?;
- let mut nested_paths = Vec::new();
- let mut warnings = Vec::new();
- let mut deps = Vec::new();
- let empty = Vec::new();
- let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
- let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?;
-
- let (replace, patch) = {
- let mut cx = Context {
- deps: &mut deps,
- source_id,
- nested_paths: &mut nested_paths,
- config,
- warnings: &mut warnings,
- platform: None,
- features: &features,
- root,
- };
- (me.replace(&mut cx)?, me.patch(&mut cx)?)
- };
- let profiles = me.profile.clone();
- if let Some(profiles) = &profiles {
- profiles.validate(config.cli_unstable(), &features, &mut warnings)?;
- }
- let resolve_behavior = me
- .workspace
- .as_ref()
- .and_then(|ws| ws.resolver.as_deref())
- .map(|r| ResolveBehavior::from_manifest(r))
- .transpose()?;
- let workspace_config = match me.workspace {
- Some(ref toml_config) => {
- let mut inheritable = toml_config.package.clone().unwrap_or_default();
- inheritable.update_ws_path(root.to_path_buf());
- inheritable.update_deps(toml_config.dependencies.clone());
- let lints = toml_config.lints.clone();
- let lints = verify_lints(lints)?;
- inheritable.update_lints(lints);
- let ws_root_config = WorkspaceRootConfig::new(
- root,
- &toml_config.members,
- &toml_config.default_members,
- &toml_config.exclude,
- &Some(inheritable),
- &toml_config.metadata,
- );
- config
- .ws_roots
- .borrow_mut()
- .insert(root.to_path_buf(), ws_root_config.clone());
- WorkspaceConfig::Root(ws_root_config)
- }
- None => {
- bail!("virtual manifests must be configured with [workspace]");
- }
+ let (replace, patch) = {
+ let mut cx = Context {
+ deps: &mut deps,
+ source_id,
+ nested_paths: &mut nested_paths,
+ config,
+ warnings: &mut warnings,
+ platform: None,
+ features: &features,
+ root,
};
- Ok((
- VirtualManifest::new(
- replace,
- patch,
- workspace_config,
- profiles,
- features,
- resolve_behavior,
- ),
- nested_paths,
- ))
+ (replace(&me, &mut cx)?, patch(&me, &mut cx)?)
+ };
+ let profiles = me.profile.clone();
+ if let Some(profiles) = &profiles {
+ validate_profiles(profiles, config.cli_unstable(), &features, &mut warnings)?;
}
-
- fn replace(&self, cx: &mut Context<'_, '_>) -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
- if self.patch.is_some() && self.replace.is_some() {
- bail!("cannot specify both [replace] and [patch]");
+ let resolve_behavior = me
+ .workspace
+ .as_ref()
+ .and_then(|ws| ws.resolver.as_deref())
+ .map(|r| ResolveBehavior::from_manifest(r))
+ .transpose()?;
+ let workspace_config = match me.workspace {
+ Some(ref toml_config) => {
+ let lints = toml_config.lints.clone();
+ let lints = verify_lints(lints)?;
+ let inheritable = InheritableFields {
+ package: toml_config.package.clone(),
+ dependencies: toml_config.dependencies.clone(),
+ lints,
+ _ws_root: root.to_path_buf(),
+ };
+ let ws_root_config = WorkspaceRootConfig::new(
+ root,
+ &toml_config.members,
+ &toml_config.default_members,
+ &toml_config.exclude,
+ &Some(inheritable),
+ &toml_config.metadata,
+ );
+ config
+ .ws_roots
+ .borrow_mut()
+ .insert(root.to_path_buf(), ws_root_config.clone());
+ WorkspaceConfig::Root(ws_root_config)
}
- let mut replace = Vec::new();
- for (spec, replacement) in self.replace.iter().flatten() {
- let mut spec = PackageIdSpec::parse(spec).with_context(|| {
- format!(
- "replacements must specify a valid semver \
+ None => {
+ bail!("virtual manifests must be configured with [workspace]");
+ }
+ };
+ Ok((
+ VirtualManifest::new(
+ replace,
+ patch,
+ workspace_config,
+ profiles,
+ features,
+ resolve_behavior,
+ ),
+ nested_paths,
+ ))
+}
+
+fn replace(
+ me: &manifest::TomlManifest,
+ cx: &mut Context<'_, '_>,
+) -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
+ if me.patch.is_some() && me.replace.is_some() {
+ bail!("cannot specify both [replace] and [patch]");
+ }
+ let mut replace = Vec::new();
+ for (spec, replacement) in me.replace.iter().flatten() {
+ let mut spec = PackageIdSpec::parse(spec).with_context(|| {
+ format!(
+ "replacements must specify a valid semver \
version to replace, but `{}` does not",
- spec
- )
- })?;
- if spec.url().is_none() {
- spec.set_url(CRATES_IO_INDEX.parse().unwrap());
- }
+ spec
+ )
+ })?;
+ if spec.url().is_none() {
+ spec.set_url(CRATES_IO_INDEX.parse().unwrap());
+ }
- if replacement.is_version_specified() {
- bail!(
- "replacements cannot specify a version \
+ if replacement.is_version_specified() {
+ bail!(
+ "replacements cannot specify a version \
requirement, but found one for `{}`",
- spec
- );
- }
-
- let mut dep = replacement.to_dependency(spec.name(), cx, None)?;
- let version = spec.version().ok_or_else(|| {
- anyhow!(
- "replacements must specify a version \
- to replace, but `{}` does not",
- spec
- )
- })?;
- unused_dep_keys(
- dep.name_in_toml().as_str(),
- "replace",
- replacement.unused_keys(),
- &mut cx.warnings,
+ spec
);
- dep.set_version_req(OptVersionReq::exact(&version));
- replace.push((spec, dep));
}
- Ok(replace)
- }
-
- fn patch(&self, cx: &mut Context<'_, '_>) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
- let mut patch = HashMap::new();
- for (toml_url, deps) in self.patch.iter().flatten() {
- let url = match &toml_url[..] {
- CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(),
- _ => cx
- .config
- .get_registry_index(toml_url)
- .or_else(|_| toml_url.into_url())
- .with_context(|| {
- format!(
- "[patch] entry `{}` should be a URL or registry name",
- toml_url
- )
- })?,
- };
- patch.insert(
- url,
- deps.iter()
- .map(|(name, dep)| {
- unused_dep_keys(
- name,
- &format!("patch.{toml_url}",),
- dep.unused_keys(),
- &mut cx.warnings,
- );
- dep.to_dependency(name, cx, None)
- })
- .collect::<CargoResult<Vec<_>>>()?,
- );
- }
- Ok(patch)
+
+ let mut dep = dep_to_dependency(replacement, spec.name(), cx, None)?;
+ let version = spec.version().ok_or_else(|| {
+ anyhow!(
+ "replacements must specify a version \
+ to replace, but `{}` does not",
+ spec
+ )
+ })?;
+ unused_dep_keys(
+ dep.name_in_toml().as_str(),
+ "replace",
+ replacement.unused_keys(),
+ &mut cx.warnings,
+ );
+ dep.set_version_req(OptVersionReq::exact(&version));
+ replace.push((spec, dep));
+ }
+ Ok(replace)
+}
+
+fn patch(
+ me: &manifest::TomlManifest,
+ cx: &mut Context<'_, '_>,
+) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
+ let mut patch = HashMap::new();
+ for (toml_url, deps) in me.patch.iter().flatten() {
+ let url = match &toml_url[..] {
+ CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(),
+ _ => cx
+ .config
+ .get_registry_index(toml_url)
+ .or_else(|_| toml_url.into_url())
+ .with_context(|| {
+ format!(
+ "[patch] entry `{}` should be a URL or registry name",
+ toml_url
+ )
+ })?,
+ };
+ patch.insert(
+ url,
+ deps.iter()
+ .map(|(name, dep)| {
+ unused_dep_keys(
+ name,
+ &format!("patch.{toml_url}",),
+ dep.unused_keys(),
+ &mut cx.warnings,
+ );
+ dep_to_dependency(dep, name, cx, None)
+ })
+ .collect::<CargoResult<Vec<_>>>()?,
+ );
}
+ Ok(patch)
}
struct Context<'a, 'b> {
@@ -1295,7 +1285,7 @@ struct Context<'a, 'b> {
features: &'a Features,
}
-fn verify_lints(lints: Option<schema::TomlLints>) -> CargoResult<Option<schema::TomlLints>> {
+fn verify_lints(lints: Option<manifest::TomlLints>) -> CargoResult<Option<manifest::TomlLints>> {
let Some(lints) = lints else {
return Ok(None);
};
@@ -1326,12 +1316,18 @@ fn verify_lints(lints: Option<schema::TomlLints>) -> CargoResult<Option<schema::
Ok(Some(lints))
}
-fn lints_to_rustflags(lints: &schema::TomlLints) -> Vec<String> {
+fn lints_to_rustflags(lints: &manifest::TomlLints) -> Vec<String> {
let mut rustflags = lints
.iter()
.flat_map(|(tool, lints)| {
lints.iter().map(move |(name, config)| {
- let flag = config.level().flag();
+ let flag = match config.level() {
+ manifest::TomlLintLevel::Forbid => "--forbid",
+ manifest::TomlLintLevel::Deny => "--deny",
+ manifest::TomlLintLevel::Warn => "--warn",
+ manifest::TomlLintLevel::Allow => "--allow",
+ };
+
let option = if tool == "rust" {
format!("{flag}={name}")
} else {
@@ -1366,7 +1362,7 @@ fn unused_dep_keys(
fn inheritable_from_path(
config: &Config,
workspace_path: PathBuf,
-) -> CargoResult<schema::InheritableFields> {
+) -> CargoResult<InheritableFields> {
// Workspace path should have Cargo.toml at the end
let workspace_path_root = workspace_path.parent().unwrap();
@@ -1393,17 +1389,17 @@ fn inheritable_from_path(
}
}
-/// Returns the name of the README file for a [`schema::TomlPackage`].
+/// Returns the name of the README file for a [`manifest::TomlPackage`].
fn readme_for_package(
package_root: &Path,
- readme: Option<&schema::StringOrBool>,
+ readme: Option<&manifest::StringOrBool>,
) -> Option<String> {
match &readme {
None => default_readme_from_package_root(package_root),
Some(value) => match value {
- schema::StringOrBool::Bool(false) => None,
- schema::StringOrBool::Bool(true) => Some("README.md".to_string()),
- schema::StringOrBool::String(v) => Some(v.clone()),
+ manifest::StringOrBool::Bool(false) => None,
+ manifest::StringOrBool::Bool(true) => Some("README.md".to_string()),
+ manifest::StringOrBool::String(v) => Some(v.clone()),
},
}
}
@@ -1449,13 +1445,13 @@ fn unique_build_targets(
}
/// Defines simple getter methods for inheritable fields.
-macro_rules! inheritable_field_getter {
+macro_rules! package_field_getter {
( $(($key:literal, $field:ident -> $ret:ty),)* ) => (
$(
- #[doc = concat!("Gets the field `workspace.", $key, "`.")]
+ #[doc = concat!("Gets the field `workspace.package", $key, "`.")]
fn $field(&self) -> CargoResult<$ret> {
- let Some(val) = &self.$field else {
- bail!("`workspace.{}` was not defined", $key);
+ let Some(val) = self.package.as_ref().and_then(|p| p.$field.as_ref()) else {
+ bail!("`workspace.package.{}` was not defined", $key);
};
Ok(val.clone())
}
@@ -1463,25 +1459,35 @@ macro_rules! inheritable_field_getter {
)
}
-impl schema::InheritableFields {
- inheritable_field_getter! {
+/// A group of fields that are inheritable by members of the workspace
+#[derive(Clone, Debug, Default)]
+pub struct InheritableFields {
+ package: Option<manifest::InheritablePackage>,
+ dependencies: Option<BTreeMap<manifest::PackageName, manifest::TomlDependency>>,
+ lints: Option<manifest::TomlLints>,
+
+ // Bookkeeping to help when resolving values from above
+ _ws_root: PathBuf,
+}
+
+impl InheritableFields {
+ package_field_getter! {
// Please keep this list lexicographically ordered.
- ("lints", lints -> schema::TomlLints),
- ("package.authors", authors -> Vec<String>),
- ("package.badges", badges -> BTreeMap<String, BTreeMap<String, String>>),
- ("package.categories", categories -> Vec<String>),
- ("package.description", description -> String),
- ("package.documentation", documentation -> String),
- ("package.edition", edition -> String),
- ("package.exclude", exclude -> Vec<String>),
- ("package.homepage", homepage -> String),
- ("package.include", include -> Vec<String>),
- ("package.keywords", keywords -> Vec<String>),
- ("package.license", license -> String),
- ("package.publish", publish -> schema::VecStringOrBool),
- ("package.repository", repository -> String),
- ("package.rust-version", rust_version -> RustVersion),
- ("package.version", version -> semver::Version),
+ ("authors", authors -> Vec<String>),
+ ("badges", badges -> BTreeMap<String, BTreeMap<String, String>>),
+ ("categories", categories -> Vec<String>),
+ ("description", description -> String),
+ ("documentation", documentation -> String),
+ ("edition", edition -> String),
+ ("exclude", exclude -> Vec<String>),
+ ("homepage", homepage -> String),
+ ("include", include -> Vec<String>),
+ ("keywords", keywords -> Vec<String>),
+ ("license", license -> String),
+ ("publish", publish -> manifest::VecStringOrBool),
+ ("repository", repository -> String),
+ ("rust-version", rust_version -> RustVersion),
+ ("version", version -> semver::Version),
}
/// Gets a workspace dependency with the `name`.
@@ -1489,7 +1495,7 @@ impl schema::InheritableFields {
&self,
name: &str,
package_root: &Path,
- ) -> CargoResult<schema::TomlDependency> {
+ ) -> CargoResult<manifest::TomlDependency> {
let Some(deps) = &self.dependencies else {
bail!("`workspace.dependencies` was not defined");
};
@@ -1497,815 +1503,640 @@ impl schema::InheritableFields {
bail!("`dependency.{name}` was not found in `workspace.dependencies`");
};
let mut dep = dep.clone();
- if let schema::TomlDependency::Detailed(detailed) = &mut dep {
- detailed.resolve_path(name, self.ws_root(), package_root)?;
+ if let manifest::TomlDependency::Detailed(detailed) = &mut dep {
+ if let Some(rel_path) = &detailed.path {
+ detailed.path = Some(resolve_relative_path(
+ name,
+ self.ws_root(),
+ package_root,
+ rel_path,
+ )?);
+ }
}
Ok(dep)
}
+ /// Gets the field `workspace.lint`.
+ fn lints(&self) -> CargoResult<manifest::TomlLints> {
+ let Some(val) = &self.lints else {
+ bail!("`workspace.lints` was not defined");
+ };
+ Ok(val.clone())
+ }
+
/// Gets the field `workspace.package.license-file`.
fn license_file(&self, package_root: &Path) -> CargoResult<String> {
- let Some(license_file) = &self.license_file else {
+ let Some(license_file) = self.package.as_ref().and_then(|p| p.license_file.as_ref()) else {
bail!("`workspace.package.license-file` was not defined");
};
- resolve_relative_path("license-file", &self.ws_root, package_root, license_file)
+ resolve_relative_path("license-file", &self._ws_root, package_root, license_file)
}
/// Gets the field `workspace.package.readme`.
- fn readme(&self, package_root: &Path) -> CargoResult<schema::StringOrBool> {
- let Some(readme) = readme_for_package(self.ws_root.as_path(), self.readme.as_ref()) else {
+ fn readme(&self, package_root: &Path) -> CargoResult<manifest::StringOrBool> {
+ let Some(readme) = readme_for_package(
+ self._ws_root.as_path(),
+ self.package.as_ref().and_then(|p| p.readme.as_ref()),
+ ) else {
bail!("`workspace.package.readme` was not defined");
};
- resolve_relative_path("readme", &self.ws_root, package_root, &readme)
- .map(schema::StringOrBool::String)
+ resolve_relative_path("readme", &self._ws_root, package_root, &readme)
+ .map(manifest::StringOrBool::String)
}
fn ws_root(&self) -> &PathBuf {
- &self.ws_root
- }
-
- fn update_deps(&mut self, deps: Option<BTreeMap<String, schema::TomlDependency>>) {
- self.dependencies = deps;
- }
-
- fn update_lints(&mut self, lints: Option<schema::TomlLints>) {
- self.lints = lints;
- }
-
- fn update_ws_path(&mut self, ws_root: PathBuf) {
- self.ws_root = ws_root;
- }
-}
-
-impl schema::TomlPackage {
- fn to_package_id(&self, source_id: SourceId, version: semver::Version) -> PackageId {
- PackageId::pure(self.name.as_str().into(), version, source_id)
+ &self._ws_root
}
}
-/// This Trait exists to make [`schema::MaybeWorkspace::Workspace`] generic. It makes deserialization of
-/// [`schema::MaybeWorkspace`] much easier, as well as making error messages for
-/// [`schema::MaybeWorkspace::resolve`] much nicer
-///
-/// Implementors should have a field `workspace` with the type of `bool`. It is used to ensure
-/// `workspace` is not `false` in a `Cargo.toml`
-pub trait WorkspaceInherit {
- /// This is the workspace table that is being inherited from.
- /// For example `[workspace.dependencies]` would be the table "dependencies"
- fn inherit_toml_table(&self) -> &str;
-
- /// This is used to output the value of the implementors `workspace` field
- fn workspace(&self) -> bool;
-}
-
-impl<T, W: WorkspaceInherit> schema::MaybeWorkspace<T, W> {
- fn resolve<'a>(
- self,
- label: &str,
- get_ws_inheritable: impl FnOnce() -> CargoResult<T>,
- ) -> CargoResult<T> {
- match self {
- schema::MaybeWorkspace::Defined(value) => Ok(value),
- schema::MaybeWorkspace::Workspace(w) => get_ws_inheritable().with_context(|| {
+fn field_inherit_with<'a, T>(
+ field: manifest::InheritableField<T>,
+ label: &str,
+ get_ws_inheritable: impl FnOnce() -> CargoResult<T>,
+) -> CargoResult<T> {
+ match field {
+ manifest::InheritableField::Value(value) => Ok(value),
+ manifest::InheritableField::Inherit(_) => get_ws_inheritable().with_context(|| {
format!(
- "error inheriting `{label}` from workspace root manifest's `workspace.{}.{label}`",
- w.inherit_toml_table(),
+ "error inheriting `{label}` from workspace root manifest's `workspace.package.{label}`",
)
}),
}
- }
-
- fn resolve_with_self<'a>(
- self,
- label: &str,
- get_ws_inheritable: impl FnOnce(&W) -> CargoResult<T>,
- ) -> CargoResult<T> {
- match self {
- schema::MaybeWorkspace::Defined(value) => Ok(value),
- schema::MaybeWorkspace::Workspace(w) => get_ws_inheritable(&w).with_context(|| {
- format!(
- "error inheriting `{label}` from workspace root manifest's `workspace.{}.{label}`",
- w.inherit_toml_table(),
- )
- }),
- }
- }
+}
- fn as_defined(&self) -> Option<&T> {
- match self {
- schema::MaybeWorkspace::Workspace(_) => None,
- schema::MaybeWorkspace::Defined(defined) => Some(defined),
+fn lints_inherit_with(
+ lints: manifest::InheritableLints,
+ get_ws_inheritable: impl FnOnce() -> CargoResult<manifest::TomlLints>,
+) -> CargoResult<manifest::TomlLints> {
+ if lints.workspace {
+ if !lints.lints.is_empty() {
+ anyhow::bail!("cannot override `workspace.lints` in `lints`, either remove the overrides or `lints.workspace = true` and manually specify the lints");
}
+ get_ws_inheritable().with_context(|| {
+ "error inheriting `lints` from workspace root manifest's `workspace.lints`"
+ })
+ } else {
+ Ok(lints.lints)
}
}
-impl WorkspaceInherit for schema::TomlWorkspaceField {
- fn inherit_toml_table(&self) -> &str {
- "package"
- }
-
- fn workspace(&self) -> bool {
- self.workspace
- }
+fn dependency_inherit_with<'a>(
+ dependency: manifest::InheritableDependency,
+ name: &str,
+ inheritable: impl FnOnce() -> CargoResult<&'a InheritableFields>,
+ cx: &mut Context<'_, '_>,
+) -> CargoResult<manifest::TomlDependency> {
+ match dependency {
+ manifest::InheritableDependency::Value(value) => Ok(value),
+ manifest::InheritableDependency::Inherit(w) => {
+ inner_dependency_inherit_with(w, name, inheritable, cx).with_context(|| {
+ format!(
+ "error inheriting `{name}` from workspace root manifest's `workspace.dependencies.{name}`",
+ )
+ })
+ }
+ }
}
-impl schema::TomlWorkspaceDependency {
- fn resolve<'a>(
- &self,
- name: &str,
- inheritable: impl FnOnce() -> CargoResult<&'a schema::InheritableFields>,
- cx: &mut Context<'_, '_>,
- ) -> CargoResult<schema::TomlDependency> {
- fn default_features_msg(label: &str, ws_def_feat: Option<bool>, cx: &mut Context<'_, '_>) {
- let ws_def_feat = match ws_def_feat {
- Some(true) => "true",
- Some(false) => "false",
- None => "not specified",
- };
- cx.warnings.push(format!(
- "`default-features` is ignored for {label}, since `default-features` was \
+fn inner_dependency_inherit_with<'a>(
+ dependency: manifest::TomlInheritedDependency,
+ name: &str,
+ inheritable: impl FnOnce() -> CargoResult<&'a InheritableFields>,
+ cx: &mut Context<'_, '_>,
+) -> CargoResult<manifest::TomlDependency> {
+ fn default_features_msg(label: &str, ws_def_feat: Option<bool>, cx: &mut Context<'_, '_>) {
+ let ws_def_feat = match ws_def_feat {
+ Some(true) => "true",
+ Some(false) => "false",
+ None => "not specified",
+ };
+ cx.warnings.push(format!(
+ "`default-features` is ignored for {label}, since `default-features` was \
{ws_def_feat} for `workspace.dependencies.{label}`, \
this could become a hard error in the future"
- ))
- }
- if self.default_features.is_some() && self.default_features2.is_some() {
- warn_on_deprecated("default-features", name, "dependency", cx.warnings);
- }
- inheritable()?.get_dependency(name, cx.root).map(|d| {
- match d {
- schema::TomlDependency::Simple(s) => {
- if let Some(false) = self.default_features() {
- default_features_msg(name, None, cx);
- }
- if self.optional.is_some() || self.features.is_some() || self.public.is_some() {
- schema::TomlDependency::Detailed(schema::DetailedTomlDependency {
- version: Some(s),
- optional: self.optional,
- features: self.features.clone(),
- public: self.public,
- ..Default::default()
- })
- } else {
- schema::TomlDependency::Simple(s)
- }
+ ))
+ }
+ if dependency.default_features.is_some() && dependency.default_features2.is_some() {
+ warn_on_deprecated("default-features", name, "dependency", cx.warnings);
+ }
+ inheritable()?.get_dependency(name, cx.root).map(|d| {
+ match d {
+ manifest::TomlDependency::Simple(s) => {
+ if let Some(false) = dependency.default_features() {
+ default_features_msg(name, None, cx);
+ }
+ if dependency.optional.is_some()
+ || dependency.features.is_some()
+ || dependency.public.is_some()
+ {
+ manifest::TomlDependency::Detailed(manifest::TomlDetailedDependency {
+ version: Some(s),
+ optional: dependency.optional,
+ features: dependency.features.clone(),
+ public: dependency.public,
+ ..Default::default()
+ })
+ } else {
+ manifest::TomlDependency::Simple(s)
}
- schema::TomlDependency::Detailed(d) => {
- let mut d = d.clone();
- match (self.default_features(), d.default_features()) {
- // member: default-features = true and
- // workspace: default-features = false should turn on
- // default-features
- (Some(true), Some(false)) => {
- d.default_features = Some(true);
- }
- // member: default-features = false and
- // workspace: default-features = true should ignore member
- // default-features
- (Some(false), Some(true)) => {
- default_features_msg(name, Some(true), cx);
- }
- // member: default-features = false and
- // workspace: dep = "1.0" should ignore member default-features
- (Some(false), None) => {
- default_features_msg(name, None, cx);
- }
- _ => {}
+ }
+ manifest::TomlDependency::Detailed(d) => {
+ let mut d = d.clone();
+ match (dependency.default_features(), d.default_features()) {
+ // member: default-features = true and
+ // workspace: default-features = false should turn on
+ // default-features
+ (Some(true), Some(false)) => {
+ d.default_features = Some(true);
+ }
+ // member: default-features = false and
+ // workspace: default-features = true should ignore member
+ // default-features
+ (Some(false), Some(true)) => {
+ default_features_msg(name, Some(true), cx);
}
- // Inherit the workspace configuration for `public` unless
- // it's explicitly specified for this dependency.
- if let Some(public) = self.public {
- d.public = Some(public);
+ // member: default-features = false and
+ // workspace: dep = "1.0" should ignore member default-features
+ (Some(false), None) => {
+ default_features_msg(name, None, cx);
}
- d.add_features(self.features.clone());
- d.update_optional(self.optional);
- schema::TomlDependency::Detailed(d)
+ _ => {}
}
+ d.features = match (d.features.clone(), dependency.features.clone()) {
+ (Some(dep_feat), Some(inherit_feat)) => Some(
+ dep_feat
+ .into_iter()
+ .chain(inherit_feat)
+ .collect::<Vec<String>>(),
+ ),
+ (Some(dep_fet), None) => Some(dep_fet),
+ (None, Some(inherit_feat)) => Some(inherit_feat),
+ (None, None) => None,
+ };
+ d.optional = dependency.optional;
+ manifest::TomlDependency::Detailed(d)
}
- })
- }
+ }
+ })
}
-impl WorkspaceInherit for schema::TomlWorkspaceDependency {
- fn inherit_toml_table(&self) -> &str {
- "dependencies"
- }
-
- fn workspace(&self) -> bool {
- self.workspace
- }
+pub(crate) fn to_dependency<P: ResolveToPath + Clone>(
+ dep: &manifest::TomlDependency<P>,
+ name: &str,
+ source_id: SourceId,
+ nested_paths: &mut Vec<PathBuf>,
+ config: &Config,
+ warnings: &mut Vec<String>,
+ platform: Option<Platform>,
+ root: &Path,
+ features: &Features,
+ kind: Option<DepKind>,
+) -> CargoResult<Dependency> {
+ dep_to_dependency(
+ dep,
+ name,
+ &mut Context {
+ deps: &mut Vec::new(),
+ source_id,
+ nested_paths,
+ config,
+ warnings,
+ platform,
+ root,
+ features,
+ },
+ kind,
+ )
}
-impl<P: ResolveToPath + Clone> schema::TomlDependency<P> {
- pub(crate) fn to_dependency_split(
- &self,
- name: &str,
- source_id: SourceId,
- nested_paths: &mut Vec<PathBuf>,
- config: &Config,
- warnings: &mut Vec<String>,
- platform: Option<Platform>,
- root: &Path,
- features: &Features,
- kind: Option<DepKind>,
- ) -> CargoResult<Dependency> {
- self.to_dependency(
- name,
- &mut Context {
- deps: &mut Vec::new(),
- source_id,
- nested_paths,
- config,
- warnings,
- platform,
- root,
- features,
- },
- kind,
- )
- }
-
- fn to_dependency(
- &self,
- name: &str,
- cx: &mut Context<'_, '_>,
- kind: Option<DepKind>,
- ) -> CargoResult<Dependency> {
- match *self {
- schema::TomlDependency::Simple(ref version) => schema::DetailedTomlDependency::<P> {
+fn dep_to_dependency<P: ResolveToPath + Clone>(
+ orig: &manifest::TomlDependency<P>,
+ name: &str,
+ cx: &mut Context<'_, '_>,
+ kind: Option<DepKind>,
+) -> CargoResult<Dependency> {
+ match *orig {
+ manifest::TomlDependency::Simple(ref version) => detailed_dep_to_dependency(
+ &manifest::TomlDetailedDependency::<P> {
version: Some(version.clone()),
..Default::default()
- }
- .to_dependency(name, cx, kind),
- schema::TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind),
- }
- }
-}
-
-impl schema::DetailedTomlDependency {
- fn add_features(&mut self, features: Option<Vec<String>>) {
- self.features = match (self.features.clone(), features.clone()) {
- (Some(dep_feat), Some(inherit_feat)) => Some(
- dep_feat
- .into_iter()
- .chain(inherit_feat)
- .collect::<Vec<String>>(),
- ),
- (Some(dep_fet), None) => Some(dep_fet),
- (None, Some(inherit_feat)) => Some(inherit_feat),
- (None, None) => None,
- };
- }
-
- fn update_optional(&mut self, optional: Option<bool>) {
- self.optional = optional;
- }
-
- fn resolve_path(
- &mut self,
- name: &str,
- root_path: &Path,
- package_root: &Path,
- ) -> CargoResult<()> {
- if let Some(rel_path) = &self.path {
- self.path = Some(resolve_relative_path(
- name,
- root_path,
- package_root,
- rel_path,
- )?)
+ },
+ name,
+ cx,
+ kind,
+ ),
+ manifest::TomlDependency::Detailed(ref details) => {
+ detailed_dep_to_dependency(details, name, cx, kind)
}
- Ok(())
}
}
-impl<P: ResolveToPath + Clone> schema::DetailedTomlDependency<P> {
- fn to_dependency(
- &self,
- name_in_toml: &str,
- cx: &mut Context<'_, '_>,
- kind: Option<DepKind>,
- ) -> CargoResult<Dependency> {
- if self.version.is_none() && self.path.is_none() && self.git.is_none() {
- let msg = format!(
- "dependency ({}) specified without \
+fn detailed_dep_to_dependency<P: ResolveToPath + Clone>(
+ orig: &manifest::TomlDetailedDependency<P>,
+ name_in_toml: &str,
+ cx: &mut Context<'_, '_>,
+ kind: Option<DepKind>,
+) -> CargoResult<Dependency> {
+ if orig.version.is_none() && orig.path.is_none() && orig.git.is_none() {
+ let msg = format!(
+ "dependency ({}) specified without \
providing a local path, Git repository, version, or \
workspace dependency to use. This will be considered an \
error in future versions",
- name_in_toml
- );
- cx.warnings.push(msg);
- }
+ name_in_toml
+ );
+ cx.warnings.push(msg);
+ }
- if let Some(version) = &self.version {
- if version.contains('+') {
- cx.warnings.push(format!(
- "version requirement `{}` for dependency `{}` \
+ if let Some(version) = &orig.version {
+ if version.contains('+') {
+ cx.warnings.push(format!(
+ "version requirement `{}` for dependency `{}` \
includes semver metadata which will be ignored, removing the \
metadata is recommended to avoid confusion",
- version, name_in_toml
- ));
- }
+ version, name_in_toml
+ ));
}
+ }
- if self.git.is_none() {
- let git_only_keys = [
- (&self.branch, "branch"),
- (&self.tag, "tag"),
- (&self.rev, "rev"),
- ];
-
- for &(key, key_name) in &git_only_keys {
- if key.is_some() {
- bail!(
- "key `{}` is ignored for dependency ({}).",
- key_name,
- name_in_toml
- );
- }
+ if orig.git.is_none() {
+ let git_only_keys = [
+ (&orig.branch, "branch"),
+ (&orig.tag, "tag"),
+ (&orig.rev, "rev"),
+ ];
+
+ for &(key, key_name) in &git_only_keys {
+ if key.is_some() {
+ bail!(
+ "key `{}` is ignored for dependency ({}).",
+ key_name,
+ name_in_toml
+ );
}
}
+ }
- // Early detection of potentially misused feature syntax
- // instead of generating a "feature not found" error.
- if let Some(features) = &self.features {
- for feature in features {
- if feature.contains('/') {
- bail!(
- "feature `{}` in dependency `{}` is not allowed to contain slashes\n\
+ // Early detection of potentially misused feature syntax
+ // instead of generating a "feature not found" error.
+ if let Some(features) = &orig.features {
+ for feature in features {
+ if feature.contains('/') {
+ bail!(
+ "feature `{}` in dependency `{}` is not allowed to contain slashes\n\
If you want to enable features of a transitive dependency, \
the direct dependency needs to re-export those features from \
the `[features]` table.",
- feature,
- name_in_toml
- );
- }
- if feature.starts_with("dep:") {
- bail!(
- "feature `{}` in dependency `{}` is not allowed to use explicit \
+ feature,
+ name_in_toml
+ );
+ }
+ if feature.starts_with("dep:") {
+ bail!(
+ "feature `{}` in dependency `{}` is not allowed to use explicit \
`dep:` syntax\n\
If you want to enable an optional dependency, specify the name \
of the optional dependency without the `dep:` prefix, or specify \
a feature from the dependency's `[features]` table that enables \
the optional dependency.",
- feature,
- name_in_toml
- );
- }
+ feature,
+ name_in_toml
+ );
}
}
+ }
- let new_source_id = match (
- self.git.as_ref(),
- self.path.as_ref(),
- self.registry.as_ref(),
- self.registry_index.as_ref(),
- ) {
- (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!(
- "dependency ({}) specification is ambiguous. \
+ let new_source_id = match (
+ orig.git.as_ref(),
+ orig.path.as_ref(),
+ orig.registry.as_ref(),
+ orig.registry_index.as_ref(),
+ ) {
+ (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!(
+ "dependency ({}) specification is ambiguous. \
Only one of `git` or `registry` is allowed.",
- name_in_toml
- ),
- (_, _, Some(_), Some(_)) => bail!(
- "dependency ({}) specification is ambiguous. \
+ name_in_toml
+ ),
+ (_, _, Some(_), Some(_)) => bail!(
+ "dependency ({}) specification is ambiguous. \
Only one of `registry` or `registry-index` is allowed.",
- name_in_toml
- ),
- (Some(git), maybe_path, _, _) => {
- if maybe_path.is_some() {
- bail!(
- "dependency ({}) specification is ambiguous. \
+ name_in_toml
+ ),
+ (Some(git), maybe_path, _, _) => {
+ if maybe_path.is_some() {
+ bail!(
+ "dependency ({}) specification is ambiguous. \
Only one of `git` or `path` is allowed.",
- name_in_toml
- );
- }
+ name_in_toml
+ );
+ }
- let n_details = [&self.branch, &self.tag, &self.rev]
- .iter()
- .filter(|d| d.is_some())
- .count();
+ let n_details = [&orig.branch, &orig.tag, &orig.rev]
+ .iter()
+ .filter(|d| d.is_some())
+ .count();
- if n_details > 1 {
- bail!(
- "dependency ({}) specification is ambiguous. \
+ if n_details > 1 {
+ bail!(
+ "dependency ({}) specification is ambiguous. \
Only one of `branch`, `tag` or `rev` is allowed.",
- name_in_toml
- );
- }
+ name_in_toml
+ );
+ }
- let reference = self
- .branch
- .clone()
- .map(GitReference::Branch)
- .or_else(|| self.tag.clone().map(GitReference::Tag))
- .or_else(|| self.rev.clone().map(GitReference::Rev))
- .unwrap_or(GitReference::DefaultBranch);
- let loc = git.into_url()?;
-
- if let Some(fragment) = loc.fragment() {
- let msg = format!(
- "URL fragment `#{}` in git URL is ignored for dependency ({}). \
+ let reference = orig
+ .branch
+ .clone()
+ .map(GitReference::Branch)
+ .or_else(|| orig.tag.clone().map(GitReference::Tag))
+ .or_else(|| orig.rev.clone().map(GitReference::Rev))
+ .unwrap_or(GitReference::DefaultBranch);
+ let loc = git.into_url()?;
+
+ if let Some(fragment) = loc.fragment() {
+ let msg = format!(
+ "URL fragment `#{}` in git URL is ignored for dependency ({}). \
If you were trying to specify a specific git revision, \
use `rev = \"{}\"` in the dependency declaration.",
- fragment, name_in_toml, fragment
- );
- cx.warnings.push(msg)
- }
-
- SourceId::for_git(&loc, reference)?
- }
- (None, Some(path), _, _) => {
- let path = path.resolve(cx.config);
- cx.nested_paths.push(path.clone());
- // If the source ID for the package we're parsing is a path
- // source, then we normalize the path here to get rid of
- // components like `..`.
- //
- // The purpose of this is to get a canonical ID for the package
- // that we're depending on to ensure that builds of this package
- // always end up hashing to the same value no matter where it's
- // built from.
- if cx.source_id.is_path() {
- let path = cx.root.join(path);
- let path = paths::normalize_path(&path);
- SourceId::for_path(&path)?
- } else {
- cx.source_id
- }
- }
- (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
- (None, None, None, Some(registry_index)) => {
- let url = registry_index.into_url()?;
- SourceId::for_registry(&url)?
+ fragment, name_in_toml, fragment
+ );
+ cx.warnings.push(msg)
}
- (None, None, None, None) => SourceId::crates_io(cx.config)?,
- };
- let (pkg_name, explicit_name_in_toml) = match self.package {
- Some(ref s) => (&s[..], Some(name_in_toml)),
- None => (name_in_toml, None),
- };
-
- let version = self.version.as_deref();
- let mut dep = Dependency::parse(pkg_name, version, new_source_id)?;
- if self.default_features.is_some() && self.default_features2.is_some() {
- warn_on_deprecated("default-features", name_in_toml, "dependency", cx.warnings);
- }
- dep.set_features(self.features.iter().flatten())
- .set_default_features(self.default_features().unwrap_or(true))
- .set_optional(self.optional.unwrap_or(false))
- .set_platform(cx.platform.clone());
- if let Some(registry) = &self.registry {
- let registry_id = SourceId::alt_registry(cx.config, registry)?;
- dep.set_registry_id(registry_id);
+ SourceId::for_git(&loc, reference)?
+ }
+ (None, Some(path), _, _) => {
+ let path = path.resolve(cx.config);
+ cx.nested_paths.push(path.clone());
+ // If the source ID for the package we're parsing is a path
+ // source, then we normalize the path here to get rid of
+ // components like `..`.
+ //
+ // The purpose of this is to get a canonical ID for the package
+ // that we're depending on to ensure that builds of this package
+ // always end up hashing to the same value no matter where it's
+ // built from.
+ if cx.source_id.is_path() {
+ let path = cx.root.join(path);
+ let path = paths::normalize_path(&path);
+ SourceId::for_path(&path)?
+ } else {
+ cx.source_id
+ }
}
- if let Some(registry_index) = &self.registry_index {
+ (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
+ (None, None, None, Some(registry_index)) => {
let url = registry_index.into_url()?;
- let registry_id = SourceId::for_registry(&url)?;
- dep.set_registry_id(registry_id);
+ SourceId::for_registry(&url)?
}
+ (None, None, None, None) => SourceId::crates_io(cx.config)?,
+ };
- if let Some(kind) = kind {
- dep.set_kind(kind);
- }
- if let Some(name_in_toml) = explicit_name_in_toml {
- dep.set_explicit_name_in_toml(name_in_toml);
- }
+ let (pkg_name, explicit_name_in_toml) = match orig.package {
+ Some(ref s) => (&s[..], Some(name_in_toml)),
+ None => (name_in_toml, None),
+ };
- if let Some(p) = self.public {
- cx.features.require(Feature::public_dependency())?;
+ let version = orig.version.as_deref();
+ let mut dep = Dependency::parse(pkg_name, version, new_source_id)?;
+ if orig.default_features.is_some() && orig.default_features2.is_some() {
+ warn_on_deprecated("default-features", name_in_toml, "dependency", cx.warnings);
+ }
+ dep.set_features(orig.features.iter().flatten())
+ .set_default_features(orig.default_features().unwrap_or(true))
+ .set_optional(orig.optional.unwrap_or(false))
+ .set_platform(cx.platform.clone());
+ if let Some(registry) = &orig.registry {
+ let registry_id = SourceId::alt_registry(cx.config, registry)?;
+ dep.set_registry_id(registry_id);
+ }
+ if let Some(registry_index) = &orig.registry_index {
+ let url = registry_index.into_url()?;
+ let registry_id = SourceId::for_registry(&url)?;
+ dep.set_registry_id(registry_id);
+ }
- if dep.kind() != DepKind::Normal {
- bail!("'public' specifier can only be used on regular dependencies, not {:?} dependencies", dep.kind());
- }
+ if let Some(kind) = kind {
+ dep.set_kind(kind);
+ }
+ if let Some(name_in_toml) = explicit_name_in_toml {
+ dep.set_explicit_name_in_toml(name_in_toml);
+ }
- dep.set_public(p);
+ if let Some(p) = orig.public {
+ cx.features.require(Feature::public_dependency())?;
+
+ if dep.kind() != DepKind::Normal {
+ bail!("'public' specifier can only be used on regular dependencies, not {:?} dependencies", dep.kind());
}
- if let (Some(artifact), is_lib, target) = (
- self.artifact.as_ref(),
- self.lib.unwrap_or(false),
- self.target.as_deref(),
- ) {
- if cx.config.cli_unstable().bindeps {
- let artifact = Artifact::parse(&artifact.0, is_lib, target)?;
- if dep.kind() != DepKind::Build
- && artifact.target() == Some(ArtifactTarget::BuildDependencyAssumeTarget)
- {
- bail!(
- r#"`target = "target"` in normal- or dev-dependencies has no effect ({})"#,
- name_in_toml
- );
- }
- dep.set_artifact(artifact)
- } else {
- bail!("`artifact = …` requires `-Z bindeps` ({})", name_in_toml);
- }
- } else if self.lib.is_some() || self.target.is_some() {
- for (is_set, specifier) in [
- (self.lib.is_some(), "lib"),
- (self.target.is_some(), "target"),
- ] {
- if !is_set {
- continue;
- }
+ dep.set_public(p);
+ }
+
+ if let (Some(artifact), is_lib, target) = (
+ orig.artifact.as_ref(),
+ orig.lib.unwrap_or(false),
+ orig.target.as_deref(),
+ ) {
+ if cx.config.cli_unstable().bindeps {
+ let artifact = Artifact::parse(&artifact.0, is_lib, target)?;
+ if dep.kind() != DepKind::Build
+ && artifact.target() == Some(ArtifactTarget::BuildDependencyAssumeTarget)
+ {
bail!(
- "'{}' specifier cannot be used without an 'artifact = …' value ({})",
- specifier,
+ r#"`target = "target"` in normal- or dev-dependencies has no effect ({})"#,
name_in_toml
- )
+ );
+ }
+ dep.set_artifact(artifact)
+ } else {
+ bail!("`artifact = …` requires `-Z bindeps` ({})", name_in_toml);
+ }
+ } else if orig.lib.is_some() || orig.target.is_some() {
+ for (is_set, specifier) in [
+ (orig.lib.is_some(), "lib"),
+ (orig.target.is_some(), "target"),
+ ] {
+ if !is_set {
+ continue;
}
+ bail!(
+ "'{}' specifier cannot be used without an 'artifact = …' value ({})",
+ specifier,
+ name_in_toml
+ )
}
- Ok(dep)
}
+ Ok(dep)
}
-impl schema::TomlProfiles {
- /// Checks syntax validity and unstable feature gate for each profile.
- ///
- /// It's a bit unfortunate both `-Z` flags and `cargo-features` are required,
- /// because profiles can now be set in either `Cargo.toml` or `config.toml`.
- fn validate(
- &self,
- cli_unstable: &CliUnstable,
- features: &Features,
- warnings: &mut Vec<String>,
- ) -> CargoResult<()> {
- for (name, profile) in &self.0 {
- profile.validate(name, cli_unstable, features, warnings)?;
- }
- Ok(())
+/// Checks syntax validity and unstable feature gate for each profile.
+///
+/// It's a bit unfortunate both `-Z` flags and `cargo-features` are required,
+/// because profiles can now be set in either `Cargo.toml` or `config.toml`.
+fn validate_profiles(
+ profiles: &manifest::TomlProfiles,
+ cli_unstable: &CliUnstable,
+ features: &Features,
+ warnings: &mut Vec<String>,
+) -> CargoResult<()> {
+ for (name, profile) in &profiles.0 {
+ validate_profile(profile, name, cli_unstable, features, warnings)?;
}
+ Ok(())
}
-impl schema::TomlProfile {
- /// Checks stytax validity and unstable feature gate for a given profile.
- pub fn validate(
- &self,
- name: &str,
- cli_unstable: &CliUnstable,
- features: &Features,
- warnings: &mut Vec<String>,
- ) -> CargoResult<()> {
- self.validate_profile(name, cli_unstable, features)?;
- if let Some(ref profile) = self.build_override {
- profile.validate_override("build-override")?;
- profile.validate_profile(&format!("{name}.build-override"), cli_unstable, features)?;
- }
- if let Some(ref packages) = self.package {
- for (override_name, profile) in packages {
- profile.validate_override("package")?;
- profile.validate_profile(
- &format!("{name}.package.{override_name}"),
- cli_unstable,
- features,
- )?;
- }
+/// Checks stytax validity and unstable feature gate for a given profile.
+pub fn validate_profile(
+ root: &manifest::TomlProfile,
+ name: &str,
+ cli_unstable: &CliUnstable,
+ features: &Features,
+ warnings: &mut Vec<String>,
+) -> CargoResult<()> {
+ validate_profile_layer(root, name, cli_unstable, features)?;
+ if let Some(ref profile) = root.build_override {
+ validate_profile_override(profile, "build-override")?;
+ validate_profile_layer(
+ profile,
+ &format!("{name}.build-override"),
+ cli_unstable,
+ features,
+ )?;
+ }
+ if let Some(ref packages) = root.package {
+ for (override_name, profile) in packages {
+ validate_profile_override(profile, "package")?;
+ validate_profile_layer(
+ profile,
+ &format!("{name}.package.{override_name}"),
+ cli_unstable,
+ features,
+ )?;
}
+ }
- // Profile name validation
- restricted_names::validate_profile_name(name)?;
-
- if let Some(dir_name) = &self.dir_name {
- // This is disabled for now, as we would like to stabilize named
- // profiles without this, and then decide in the future if it is
- // needed. This helps simplify the UI a little.
- bail!(
- "dir-name=\"{}\" in profile `{}` is not currently allowed, \
+ if let Some(dir_name) = &root.dir_name {
+ // This is disabled for now, as we would like to stabilize named
+ // profiles without this, and then decide in the future if it is
+ // needed. This helps simplify the UI a little.
+ bail!(
+ "dir-name=\"{}\" in profile `{}` is not currently allowed, \
directory names are tied to the profile name for custom profiles",
- dir_name,
- name
- );
- }
+ dir_name,
+ name
+ );
+ }
- // `inherits` validation
- if matches!(self.inherits.as_deref(), Some("debug")) {
- bail!(
- "profile.{}.inherits=\"debug\" should be profile.{}.inherits=\"dev\"",
- name,
- name
- );
- }
+ // `inherits` validation
+ if matches!(root.inherits.as_deref(), Some("debug")) {
+ bail!(
+ "profile.{}.inherits=\"debug\" should be profile.{}.inherits=\"dev\"",
+ name,
+ name
+ );
+ }
- match name {
- "doc" => {
- warnings.push("profile `doc` is deprecated and has no effect".to_string());
- }
- "test" | "bench" => {
- if self.panic.is_some() {
- warnings.push(format!("`panic` setting is ignored for `{}` profile", name))
- }
+ match name {
+ "doc" => {
+ warnings.push("profile `doc` is deprecated and has no effect".to_string());
+ }
+ "test" | "bench" => {
+ if root.panic.is_some() {
+ warnings.push(format!("`panic` setting is ignored for `{}` profile", name))
}
- _ => {}
}
+ _ => {}
+ }
- if let Some(panic) = &self.panic {
- if panic != "unwind" && panic != "abort" {
- bail!(
- "`panic` setting of `{}` is not a valid setting, \
+ if let Some(panic) = &root.panic {
+ if panic != "unwind" && panic != "abort" {
+ bail!(
+ "`panic` setting of `{}` is not a valid setting, \
must be `unwind` or `abort`",
- panic
- );
- }
+ panic
+ );
}
+ }
- if let Some(schema::StringOrBool::String(arg)) = &self.lto {
- if arg == "true" || arg == "false" {
- bail!(
- "`lto` setting of string `\"{arg}\"` for `{name}` profile is not \
+ if let Some(manifest::StringOrBool::String(arg)) = &root.lto {
+ if arg == "true" || arg == "false" {
+ bail!(
+ "`lto` setting of string `\"{arg}\"` for `{name}` profile is not \
a valid setting, must be a boolean (`true`/`false`) or a string \
(`\"thin\"`/`\"fat\"`/`\"off\"`) or omitted.",
- );
- }
+ );
}
-
- Ok(())
}
- /// Validates a profile.
- ///
- /// This is a shallow check, which is reused for the profile itself and any overrides.
- fn validate_profile(
- &self,
- name: &str,
- cli_unstable: &CliUnstable,
- features: &Features,
- ) -> CargoResult<()> {
- if let Some(codegen_backend) = &self.codegen_backend {
- match (
- features.require(Feature::codegen_backend()),
- cli_unstable.codegen_backend,
- ) {
- (Err(e), false) => return Err(e),
- _ => {}
- }
+ Ok(())
+}
- if codegen_backend.contains(|c: char| !c.is_ascii_alphanumeric() && c != '_') {
- bail!(
- "`profile.{}.codegen-backend` setting of `{}` is not a valid backend name.",
- name,
- codegen_backend,
- );
- }
- }
- if self.rustflags.is_some() {
- match (
- features.require(Feature::profile_rustflags()),
- cli_unstable.profile_rustflags,
- ) {
- (Err(e), false) => return Err(e),
- _ => {}
- }
- }
- if self.trim_paths.is_some() {
- match (
- features.require(Feature::trim_paths()),
- cli_unstable.trim_paths,
- ) {
- (Err(e), false) => return Err(e),
- _ => {}
- }
+/// Validates a profile.
+///
+/// This is a shallow check, which is reused for the profile itself and any overrides.
+fn validate_profile_layer(
+ profile: &manifest::TomlProfile,
+ name: &str,
+ cli_unstable: &CliUnstable,
+ features: &Features,
+) -> CargoResult<()> {
+ if let Some(codegen_backend) = &profile.codegen_backend {
+ match (
+ features.require(Feature::codegen_backend()),
+ cli_unstable.codegen_backend,
+ ) {
+ (Err(e), false) => return Err(e),
+ _ => {}
}
- Ok(())
- }
- /// Validation that is specific to an override.
- fn validate_override(&self, which: &str) -> CargoResult<()> {
- if self.package.is_some() {
- bail!("package-specific profiles cannot be nested");
- }
- if self.build_override.is_some() {
- bail!("build-override profiles cannot be nested");
- }
- if self.panic.is_some() {
- bail!("`panic` may not be specified in a `{}` profile", which)
- }
- if self.lto.is_some() {
- bail!("`lto` may not be specified in a `{}` profile", which)
- }
- if self.rpath.is_some() {
- bail!("`rpath` may not be specified in a `{}` profile", which)
+ if codegen_backend.contains(|c: char| !c.is_ascii_alphanumeric() && c != '_') {
+ bail!(
+ "`profile.{}.codegen-backend` setting of `{}` is not a valid backend name.",
+ name,
+ codegen_backend,
+ );
}
- Ok(())
}
-
- /// Overwrite self's values with the given profile.
- pub fn merge(&mut self, profile: &schema::TomlProfile) {
- if let Some(v) = &profile.opt_level {
- self.opt_level = Some(v.clone());
- }
-
- if let Some(v) = &profile.lto {
- self.lto = Some(v.clone());
- }
-
- if let Some(v) = &profile.codegen_backend {
- self.codegen_backend = Some(v.clone());
- }
-
- if let Some(v) = profile.codegen_units {
- self.codegen_units = Some(v);
- }
-
- if let Some(v) = profile.debug {
- self.debug = Some(v);
- }
-
- if let Some(v) = profile.debug_assertions {
- self.debug_assertions = Some(v);
- }
-
- if let Some(v) = &profile.split_debuginfo {
- self.split_debuginfo = Some(v.clone());
- }
-
- if let Some(v) = profile.rpath {
- self.rpath = Some(v);
- }
-
- if let Some(v) = &profile.panic {
- self.panic = Some(v.clone());
- }
-
- if let Some(v) = profile.overflow_checks {
- self.overflow_checks = Some(v);
- }
-
- if let Some(v) = profile.incremental {
- self.incremental = Some(v);
- }
-
- if let Some(v) = &profile.rustflags {
- self.rustflags = Some(v.clone());
- }
-
- if let Some(other_package) = &profile.package {
- match &mut self.package {
- Some(self_package) => {
- for (spec, other_pkg_profile) in other_package {
- match self_package.get_mut(spec) {
- Some(p) => p.merge(other_pkg_profile),
- None => {
- self_package.insert(spec.clone(), other_pkg_profile.clone());
- }
- }
- }
- }
- None => self.package = Some(other_package.clone()),
- }
- }
-
- if let Some(other_bo) = &profile.build_override {
- match &mut self.build_override {
- Some(self_bo) => self_bo.merge(other_bo),
- None => self.build_override = Some(other_bo.clone()),
- }
- }
-
- if let Some(v) = &profile.inherits {
- self.inherits = Some(v.clone());
- }
-
- if let Some(v) = &profile.dir_name {
- self.dir_name = Some(v.clone());
- }
-
- if let Some(v) = &profile.strip {
- self.strip = Some(v.clone());
- }
-
- if let Some(v) = &profile.trim_paths {
- self.trim_paths = Some(v.clone())
+ if profile.rustflags.is_some() {
+ match (
+ features.require(Feature::profile_rustflags()),
+ cli_unstable.profile_rustflags,
+ ) {
+ (Err(e), false) => return Err(e),
+ _ => {}
}
}
-}
-
-impl schema::MaybeWorkspaceLints {
- fn resolve<'a>(
- self,
- get_ws_inheritable: impl FnOnce() -> CargoResult<schema::TomlLints>,
- ) -> CargoResult<schema::TomlLints> {
- if self.workspace {
- if !self.lints.is_empty() {
- anyhow::bail!("cannot override `workspace.lints` in `lints`, either remove the overrides or `lints.workspace = true` and manually specify the lints");
- }
- get_ws_inheritable().with_context(|| {
- "error inheriting `lints` from workspace root manifest's `workspace.lints`"
- })
- } else {
- Ok(self.lints)
+ if profile.trim_paths.is_some() {
+ match (
+ features.require(Feature::trim_paths()),
+ cli_unstable.trim_paths,
+ ) {
+ (Err(e), false) => return Err(e),
+ _ => {}
}
}
+ Ok(())
}
-impl schema::TomlLintLevel {
- fn flag(&self) -> &'static str {
- match self {
- Self::Forbid => "--forbid",
- Self::Deny => "--deny",
- Self::Warn => "--warn",
- Self::Allow => "--allow",
- }
+/// Validation that is specific to an override.
+fn validate_profile_override(profile: &manifest::TomlProfile, which: &str) -> CargoResult<()> {
+ if profile.package.is_some() {
+ bail!("package-specific profiles cannot be nested");
}
+ if profile.build_override.is_some() {
+ bail!("build-override profiles cannot be nested");
+ }
+ if profile.panic.is_some() {
+ bail!("`panic` may not be specified in a `{}` profile", which)
+ }
+ if profile.lto.is_some() {
+ bail!("`lto` may not be specified in a `{}` profile", which)
+ }
+ if profile.rpath.is_some() {
+ bail!("`rpath` may not be specified in a `{}` profile", which)
+ }
+ Ok(())
}
pub trait ResolveToPath {
diff --git a/src/tools/cargo/src/cargo/util/toml/targets.rs b/src/tools/cargo/src/cargo/util/toml/targets.rs
index 9d456ffd7..7d0f1891e 100644
--- a/src/tools/cargo/src/cargo/util/toml/targets.rs
+++ b/src/tools/cargo/src/cargo/util/toml/targets.rs
@@ -14,16 +14,16 @@ use std::collections::HashSet;
use std::fs::{self, DirEntry};
use std::path::{Path, PathBuf};
-use super::schema::{
- PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget,
- TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget,
-};
use crate::core::compiler::rustdoc::RustdocScrapeExamples;
use crate::core::compiler::CrateType;
use crate::core::{Edition, Feature, Features, Target};
use crate::util::errors::CargoResult;
use crate::util::restricted_names;
use crate::util::toml::warn_on_deprecated;
+use crate::util_schemas::manifest::{
+ PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget,
+ TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget,
+};
use anyhow::Context as _;
@@ -127,7 +127,7 @@ pub(super) fn targets(
// Verify names match available build deps.
let bdeps = manifest.build_dependencies.as_ref();
for name in &metabuild.0 {
- if !bdeps.map_or(false, |bd| bd.contains_key(name)) {
+ if !bdeps.map_or(false, |bd| bd.contains_key(name.as_str())) {
anyhow::bail!(
"metabuild package `{}` must be specified in `build-dependencies`",
name
@@ -1004,7 +1004,7 @@ fn name_or_panic(target: &TomlTarget) -> &str {
}
fn validate_proc_macro(target: &TomlTarget, kind: &str, warnings: &mut Vec<String>) {
- if target.proc_macro_raw.is_some() && target.proc_macro_raw2.is_some() {
+ if target.proc_macro.is_some() && target.proc_macro2.is_some() {
warn_on_deprecated(
"proc-macro",
name_or_panic(target),
diff --git a/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs b/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
index 88298fa8d..6ffd214b4 100644
--- a/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
+++ b/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
@@ -25,6 +25,9 @@ pub struct Dependency {
/// Whether the dependency is opted-in with a feature flag.
pub optional: Option<bool>,
+ /// Whether the dependency is marked as public.
+ pub public: Option<bool>,
+
/// List of features to add (or None to keep features unchanged).
pub features: Option<IndexSet<String>>,
/// Whether default features are enabled.
@@ -48,6 +51,7 @@ impl Dependency {
Self {
name: name.into(),
optional: None,
+ public: None,
features: None,
default_features: None,
inherited_features: None,
@@ -163,6 +167,11 @@ impl Dependency {
self.optional
}
+ /// Get whether the dep is public.
+ pub fn public(&self) -> Option<bool> {
+ self.public
+ }
+
/// Get the SourceID for this dependency.
pub fn source_id(&self, config: &Config) -> CargoResult<MaybeWorkspace<SourceId>> {
match &self.source.as_ref() {
@@ -325,16 +334,18 @@ impl Dependency {
};
let optional = table.get("optional").and_then(|v| v.as_bool());
+ let public = table.get("public").and_then(|v| v.as_bool());
let dep = Self {
name,
- rename,
- source: Some(source),
- registry,
- default_features,
- features,
optional,
+ public,
+ features,
+ default_features,
inherited_features: None,
+ source: Some(source),
+ registry,
+ rename,
};
Ok(dep)
} else {
@@ -366,6 +377,7 @@ impl Dependency {
crate_root.display()
);
let table: toml_edit::Item = match (
+ self.public.unwrap_or(false),
self.optional.unwrap_or(false),
self.features.as_ref(),
self.default_features.unwrap_or(true),
@@ -376,20 +388,21 @@ impl Dependency {
// Extra short when version flag only
(
false,
+ false,
None,
true,
Some(Source::Registry(RegistrySource { version: v })),
None,
None,
) => toml_edit::value(v),
- (false, None, true, Some(Source::Workspace(WorkspaceSource {})), None, None) => {
+ (false, false, None, true, Some(Source::Workspace(WorkspaceSource {})), None, None) => {
let mut table = toml_edit::InlineTable::default();
table.set_dotted(true);
table.insert("workspace", true.into());
toml_edit::value(toml_edit::Value::InlineTable(table))
}
// Other cases are represented as an inline table
- (_, _, _, _, _, _) => {
+ (_, _, _, _, _, _, _) => {
let mut table = toml_edit::InlineTable::default();
match &self.source {
@@ -442,6 +455,9 @@ impl Dependency {
if let Some(v) = self.optional {
table.insert("optional", v.into());
}
+ if let Some(v) = self.public {
+ table.insert("public", v.into());
+ }
toml_edit::value(toml_edit::Value::InlineTable(table))
}
@@ -579,6 +595,15 @@ impl Dependency {
table.remove("optional");
}
}
+ match self.public {
+ Some(v) => {
+ table.set_dotted(false);
+ overwrite_value(table, "public", v);
+ }
+ None => {
+ table.remove("public");
+ }
+ }
} else {
unreachable!("Invalid dependency type: {}", item.type_name());
}
diff --git a/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs b/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs
index e859af215..3e3b4e69a 100644
--- a/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs
+++ b/src/tools/cargo/src/cargo/util/toml_mut/manifest.rs
@@ -420,7 +420,7 @@ impl LocalManifest {
}
}
- fn is_explicit_dep_activation(&self, dep_key: &str) -> bool {
+ pub fn is_explicit_dep_activation(&self, dep_key: &str) -> bool {
if let Some(toml_edit::Item::Table(feature_table)) = self.data.as_table().get("features") {
for values in feature_table
.iter()
diff --git a/src/tools/cargo/src/cargo/util_schemas/core/mod.rs b/src/tools/cargo/src/cargo/util_schemas/core/mod.rs
new file mode 100644
index 000000000..2001a6bc7
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util_schemas/core/mod.rs
@@ -0,0 +1,6 @@
+mod package_id_spec;
+mod source_kind;
+
+pub use package_id_spec::PackageIdSpec;
+pub use source_kind::GitReference;
+pub use source_kind::SourceKind;
diff --git a/src/tools/cargo/src/cargo/util_schemas/core/package_id_spec.rs b/src/tools/cargo/src/cargo/util_schemas/core/package_id_spec.rs
new file mode 100644
index 000000000..015bfa928
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util_schemas/core/package_id_spec.rs
@@ -0,0 +1,593 @@
+use std::fmt;
+
+use anyhow::bail;
+use anyhow::Result;
+use semver::Version;
+use serde::{de, ser};
+use url::Url;
+
+use crate::util_schemas::core::GitReference;
+use crate::util_schemas::core::SourceKind;
+use crate::util_schemas::manifest::PackageName;
+use crate::util_semver::PartialVersion;
+
+/// Some or all of the data required to identify a package:
+///
+/// 1. the package name (a `String`, required)
+/// 2. the package version (a `Version`, optional)
+/// 3. the package source (a `Url`, optional)
+///
+/// If any of the optional fields are omitted, then the package ID may be ambiguous, there may be
+/// more than one package/version/url combo that will match. However, often just the name is
+/// sufficient to uniquely define a package ID.
+#[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
+pub struct PackageIdSpec {
+ name: String,
+ version: Option<PartialVersion>,
+ url: Option<Url>,
+ kind: Option<SourceKind>,
+}
+
+impl PackageIdSpec {
+ pub fn new(name: String) -> Self {
+ Self {
+ name,
+ version: None,
+ url: None,
+ kind: None,
+ }
+ }
+
+ pub fn with_version(mut self, version: PartialVersion) -> Self {
+ self.version = Some(version);
+ self
+ }
+
+ pub fn with_url(mut self, url: Url) -> Self {
+ self.url = Some(url);
+ self
+ }
+
+ pub fn with_kind(mut self, kind: SourceKind) -> Self {
+ self.kind = Some(kind);
+ self
+ }
+
+ /// Parses a spec string and returns a `PackageIdSpec` if the string was valid.
+ ///
+ /// # Examples
+ /// Some examples of valid strings
+ ///
+ /// ```
+ /// use cargo::core::PackageIdSpec;
+ ///
+ /// let specs = vec![
+ /// "https://crates.io/foo",
+ /// "https://crates.io/foo#1.2.3",
+ /// "https://crates.io/foo#bar:1.2.3",
+ /// "https://crates.io/foo#bar@1.2.3",
+ /// "foo",
+ /// "foo:1.2.3",
+ /// "foo@1.2.3",
+ /// ];
+ /// for spec in specs {
+ ///     assert!(PackageIdSpec::parse(spec).is_ok());
+ /// }
+ /// ```
+ pub fn parse(spec: &str) -> Result<PackageIdSpec> {
+ if spec.contains("://") {
+ if let Ok(url) = Url::parse(spec) {
+ return PackageIdSpec::from_url(url);
+ }
+ } else if spec.contains('/') || spec.contains('\\') {
+ let abs = std::env::current_dir().unwrap_or_default().join(spec);
+ if abs.exists() {
+ let maybe_url = Url::from_file_path(abs)
+ .map_or_else(|_| "a file:// URL".to_string(), |url| url.to_string());
+ bail!(
+ "package ID specification `{}` looks like a file path, \
+ maybe try {}",
+ spec,
+ maybe_url
+ );
+ }
+ }
+ let mut parts = spec.splitn(2, [':', '@']);
+ let name = parts.next().unwrap();
+ let version = match parts.next() {
+ Some(version) => Some(version.parse::<PartialVersion>()?),
+ None => None,
+ };
+ PackageName::new(name)?;
+ Ok(PackageIdSpec {
+ name: String::from(name),
+ version,
+ url: None,
+ kind: None,
+ })
+ }
+
+ /// Tries to convert a valid `Url` to a `PackageIdSpec`.
+ fn from_url(mut url: Url) -> Result<PackageIdSpec> {
+ let mut kind = None;
+ if let Some((kind_str, scheme)) = url.scheme().split_once('+') {
+ match kind_str {
+ "git" => {
+ let git_ref = GitReference::from_query(url.query_pairs());
+ url.set_query(None);
+ kind = Some(SourceKind::Git(git_ref));
+ url = strip_url_protocol(&url);
+ }
+ "registry" => {
+ if url.query().is_some() {
+ bail!("cannot have a query string in a pkgid: {url}")
+ }
+ kind = Some(SourceKind::Registry);
+ url = strip_url_protocol(&url);
+ }
+ "sparse" => {
+ if url.query().is_some() {
+ bail!("cannot have a query string in a pkgid: {url}")
+ }
+ kind = Some(SourceKind::SparseRegistry);
+ // Leave `sparse` as part of URL, see `SourceId::new`
+ // url = strip_url_protocol(&url);
+ }
+ "path" => {
+ if url.query().is_some() {
+ bail!("cannot have a query string in a pkgid: {url}")
+ }
+ if scheme != "file" {
+ anyhow::bail!("`path+{scheme}` is unsupported; `path+file` and `file` schemes are supported");
+ }
+ kind = Some(SourceKind::Path);
+ url = strip_url_protocol(&url);
+ }
+ kind => anyhow::bail!("unsupported source protocol: {kind}"),
+ }
+ } else {
+ if url.query().is_some() {
+ bail!("cannot have a query string in a pkgid: {url}")
+ }
+ }
+
+ let frag = url.fragment().map(|s| s.to_owned());
+ url.set_fragment(None);
+
+ let (name, version) = {
+ let mut path = url
+ .path_segments()
+ .ok_or_else(|| anyhow::format_err!("pkgid urls must have a path: {}", url))?;
+ let path_name = path.next_back().ok_or_else(|| {
+ anyhow::format_err!(
+ "pkgid urls must have at least one path \
+ component: {}",
+ url
+ )
+ })?;
+ match frag {
+ Some(fragment) => match fragment.split_once([':', '@']) {
+ Some((name, part)) => {
+ let version = part.parse::<PartialVersion>()?;
+ (String::from(name), Some(version))
+ }
+ None => {
+ if fragment.chars().next().unwrap().is_alphabetic() {
+ (String::from(fragment.as_str()), None)
+ } else {
+ let version = fragment.parse::<PartialVersion>()?;
+ (String::from(path_name), Some(version))
+ }
+ }
+ },
+ None => (String::from(path_name), None),
+ }
+ };
+ PackageName::new(&name)?;
+ Ok(PackageIdSpec {
+ name,
+ version,
+ url: Some(url),
+ kind,
+ })
+ }
+
+ pub fn name(&self) -> &str {
+ self.name.as_str()
+ }
+
+ /// Full `semver::Version`, if present
+ pub fn version(&self) -> Option<Version> {
+ self.version.as_ref().and_then(|v| v.to_version())
+ }
+
+ pub fn partial_version(&self) -> Option<&PartialVersion> {
+ self.version.as_ref()
+ }
+
+ pub fn url(&self) -> Option<&Url> {
+ self.url.as_ref()
+ }
+
+ pub fn set_url(&mut self, url: Url) {
+ self.url = Some(url);
+ }
+
+ pub fn kind(&self) -> Option<&SourceKind> {
+ self.kind.as_ref()
+ }
+
+ pub fn set_kind(&mut self, kind: SourceKind) {
+ self.kind = Some(kind);
+ }
+}
+
+fn strip_url_protocol(url: &Url) -> Url {
+ // Ridiculous hoop because `Url::set_scheme` errors when changing to http/https
+ let raw = url.to_string();
+ raw.split_once('+').unwrap().1.parse().unwrap()
+}
+
+impl fmt::Display for PackageIdSpec {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut printed_name = false;
+ match self.url {
+ Some(ref url) => {
+ if let Some(protocol) = self.kind.as_ref().and_then(|k| k.protocol()) {
+ write!(f, "{protocol}+")?;
+ }
+ write!(f, "{}", url)?;
+ if let Some(SourceKind::Git(git_ref)) = self.kind.as_ref() {
+ if let Some(pretty) = git_ref.pretty_ref(true) {
+ write!(f, "?{}", pretty)?;
+ }
+ }
+ if url.path_segments().unwrap().next_back().unwrap() != &*self.name {
+ printed_name = true;
+ write!(f, "#{}", self.name)?;
+ }
+ }
+ None => {
+ printed_name = true;
+ write!(f, "{}", self.name)?;
+ }
+ }
+ if let Some(ref v) = self.version {
+ write!(f, "{}{}", if printed_name { "@" } else { "#" }, v)?;
+ }
+ Ok(())
+ }
+}
+
+impl ser::Serialize for PackageIdSpec {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
+
+impl<'de> de::Deserialize<'de> for PackageIdSpec {
+ fn deserialize<D>(d: D) -> Result<PackageIdSpec, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ let string = String::deserialize(d)?;
+ PackageIdSpec::parse(&string).map_err(de::Error::custom)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::PackageIdSpec;
+ use crate::util_schemas::core::{GitReference, SourceKind};
+ use url::Url;
+
+ #[test]
+ fn good_parsing() {
+ #[track_caller]
+ fn ok(spec: &str, expected: PackageIdSpec, expected_rendered: &str) {
+ let parsed = PackageIdSpec::parse(spec).unwrap();
+ assert_eq!(parsed, expected);
+ let rendered = parsed.to_string();
+ assert_eq!(rendered, expected_rendered);
+ let reparsed = PackageIdSpec::parse(&rendered).unwrap();
+ assert_eq!(reparsed, expected);
+ }
+
+ ok(
+ "https://crates.io/foo",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: None,
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: None,
+ },
+ "https://crates.io/foo",
+ );
+ ok(
+ "https://crates.io/foo#1.2.3",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.2.3".parse().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: None,
+ },
+ "https://crates.io/foo#1.2.3",
+ );
+ ok(
+ "https://crates.io/foo#1.2",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.2".parse().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: None,
+ },
+ "https://crates.io/foo#1.2",
+ );
+ ok(
+ "https://crates.io/foo#bar:1.2.3",
+ PackageIdSpec {
+ name: String::from("bar"),
+ version: Some("1.2.3".parse().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: None,
+ },
+ "https://crates.io/foo#bar@1.2.3",
+ );
+ ok(
+ "https://crates.io/foo#bar@1.2.3",
+ PackageIdSpec {
+ name: String::from("bar"),
+ version: Some("1.2.3".parse().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: None,
+ },
+ "https://crates.io/foo#bar@1.2.3",
+ );
+ ok(
+ "https://crates.io/foo#bar@1.2",
+ PackageIdSpec {
+ name: String::from("bar"),
+ version: Some("1.2".parse().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: None,
+ },
+ "https://crates.io/foo#bar@1.2",
+ );
+ ok(
+ "registry+https://crates.io/foo#bar@1.2",
+ PackageIdSpec {
+ name: String::from("bar"),
+ version: Some("1.2".parse().unwrap()),
+ url: Some(Url::parse("https://crates.io/foo").unwrap()),
+ kind: Some(SourceKind::Registry),
+ },
+ "registry+https://crates.io/foo#bar@1.2",
+ );
+ ok(
+ "sparse+https://crates.io/foo#bar@1.2",
+ PackageIdSpec {
+ name: String::from("bar"),
+ version: Some("1.2".parse().unwrap()),
+ url: Some(Url::parse("sparse+https://crates.io/foo").unwrap()),
+ kind: Some(SourceKind::SparseRegistry),
+ },
+ "sparse+https://crates.io/foo#bar@1.2",
+ );
+ ok(
+ "foo",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: None,
+ url: None,
+ kind: None,
+ },
+ "foo",
+ );
+ ok(
+ "foo:1.2.3",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.2.3".parse().unwrap()),
+ url: None,
+ kind: None,
+ },
+ "foo@1.2.3",
+ );
+ ok(
+ "foo@1.2.3",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.2.3".parse().unwrap()),
+ url: None,
+ kind: None,
+ },
+ "foo@1.2.3",
+ );
+ ok(
+ "foo@1.2",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.2".parse().unwrap()),
+ url: None,
+ kind: None,
+ },
+ "foo@1.2",
+ );
+
+ // pkgid-spec.md
+ ok(
+ "regex",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: None,
+ url: None,
+ kind: None,
+ },
+ "regex",
+ );
+ ok(
+ "regex@1.4",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4".parse().unwrap()),
+ url: None,
+ kind: None,
+ },
+ "regex@1.4",
+ );
+ ok(
+ "regex@1.4.3",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4.3".parse().unwrap()),
+ url: None,
+ kind: None,
+ },
+ "regex@1.4.3",
+ );
+ ok(
+ "https://github.com/rust-lang/crates.io-index#regex",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: None,
+ url: Some(Url::parse("https://github.com/rust-lang/crates.io-index").unwrap()),
+ kind: None,
+ },
+ "https://github.com/rust-lang/crates.io-index#regex",
+ );
+ ok(
+ "https://github.com/rust-lang/crates.io-index#regex@1.4.3",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4.3".parse().unwrap()),
+ url: Some(Url::parse("https://github.com/rust-lang/crates.io-index").unwrap()),
+ kind: None,
+ },
+ "https://github.com/rust-lang/crates.io-index#regex@1.4.3",
+ );
+ ok(
+ "sparse+https://github.com/rust-lang/crates.io-index#regex@1.4.3",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4.3".parse().unwrap()),
+ url: Some(
+ Url::parse("sparse+https://github.com/rust-lang/crates.io-index").unwrap(),
+ ),
+ kind: Some(SourceKind::SparseRegistry),
+ },
+ "sparse+https://github.com/rust-lang/crates.io-index#regex@1.4.3",
+ );
+ ok(
+ "https://github.com/rust-lang/cargo#0.52.0",
+ PackageIdSpec {
+ name: String::from("cargo"),
+ version: Some("0.52.0".parse().unwrap()),
+ url: Some(Url::parse("https://github.com/rust-lang/cargo").unwrap()),
+ kind: None,
+ },
+ "https://github.com/rust-lang/cargo#0.52.0",
+ );
+ ok(
+ "https://github.com/rust-lang/cargo#cargo-platform@0.1.2",
+ PackageIdSpec {
+ name: String::from("cargo-platform"),
+ version: Some("0.1.2".parse().unwrap()),
+ url: Some(Url::parse("https://github.com/rust-lang/cargo").unwrap()),
+ kind: None,
+ },
+ "https://github.com/rust-lang/cargo#cargo-platform@0.1.2",
+ );
+ ok(
+ "ssh://git@github.com/rust-lang/regex.git#regex@1.4.3",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4.3".parse().unwrap()),
+ url: Some(Url::parse("ssh://git@github.com/rust-lang/regex.git").unwrap()),
+ kind: None,
+ },
+ "ssh://git@github.com/rust-lang/regex.git#regex@1.4.3",
+ );
+ ok(
+ "git+ssh://git@github.com/rust-lang/regex.git#regex@1.4.3",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4.3".parse().unwrap()),
+ url: Some(Url::parse("ssh://git@github.com/rust-lang/regex.git").unwrap()),
+ kind: Some(SourceKind::Git(GitReference::DefaultBranch)),
+ },
+ "git+ssh://git@github.com/rust-lang/regex.git#regex@1.4.3",
+ );
+ ok(
+ "git+ssh://git@github.com/rust-lang/regex.git?branch=dev#regex@1.4.3",
+ PackageIdSpec {
+ name: String::from("regex"),
+ version: Some("1.4.3".parse().unwrap()),
+ url: Some(Url::parse("ssh://git@github.com/rust-lang/regex.git").unwrap()),
+ kind: Some(SourceKind::Git(GitReference::Branch("dev".to_owned()))),
+ },
+ "git+ssh://git@github.com/rust-lang/regex.git?branch=dev#regex@1.4.3",
+ );
+ ok(
+ "file:///path/to/my/project/foo",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: None,
+ url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()),
+ kind: None,
+ },
+ "file:///path/to/my/project/foo",
+ );
+ ok(
+ "file:///path/to/my/project/foo#1.1.8",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.1.8".parse().unwrap()),
+ url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()),
+ kind: None,
+ },
+ "file:///path/to/my/project/foo#1.1.8",
+ );
+ ok(
+ "path+file:///path/to/my/project/foo#1.1.8",
+ PackageIdSpec {
+ name: String::from("foo"),
+ version: Some("1.1.8".parse().unwrap()),
+ url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()),
+ kind: Some(SourceKind::Path),
+ },
+ "path+file:///path/to/my/project/foo#1.1.8",
+ );
+ }
+
+ #[test]
+ fn bad_parsing() {
+ assert!(PackageIdSpec::parse("baz:").is_err());
+ assert!(PackageIdSpec::parse("baz:*").is_err());
+ assert!(PackageIdSpec::parse("baz@").is_err());
+ assert!(PackageIdSpec::parse("baz@*").is_err());
+ assert!(PackageIdSpec::parse("baz@^1.0").is_err());
+ assert!(PackageIdSpec::parse("https://baz:1.0").is_err());
+ assert!(PackageIdSpec::parse("https://#baz:1.0").is_err());
+ assert!(
+ PackageIdSpec::parse("foobar+https://github.com/rust-lang/crates.io-index").is_err()
+ );
+ assert!(PackageIdSpec::parse("path+https://github.com/rust-lang/crates.io-index").is_err());
+
+ // Only `git+` can use `?`
+ assert!(PackageIdSpec::parse("file:///path/to/my/project/foo?branch=dev").is_err());
+ assert!(PackageIdSpec::parse("path+file:///path/to/my/project/foo?branch=dev").is_err());
+ assert!(PackageIdSpec::parse(
+ "registry+https://github.com/rust-lang/cargo#0.52.0?branch=dev"
+ )
+ .is_err());
+ assert!(PackageIdSpec::parse(
+ "sparse+https://github.com/rust-lang/cargo#0.52.0?branch=dev"
+ )
+ .is_err());
+ assert!(PackageIdSpec::parse("@1.2.3").is_err());
+ assert!(PackageIdSpec::parse("registry+https://github.com").is_err());
+ assert!(PackageIdSpec::parse("https://crates.io/1foo#1.2.3").is_err())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util_schemas/core/source_kind.rs b/src/tools/cargo/src/cargo/util_schemas/core/source_kind.rs
new file mode 100644
index 000000000..7b2ecaeec
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util_schemas/core/source_kind.rs
@@ -0,0 +1,201 @@
+use std::cmp::Ordering;
+
+/// The possible kinds of code source.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum SourceKind {
+ /// A git repository.
+ Git(GitReference),
+ /// A local path.
+ Path,
+ /// A remote registry.
+ Registry,
+ /// A sparse registry.
+ SparseRegistry,
+ /// A local filesystem-based registry.
+ LocalRegistry,
+ /// A directory-based registry.
+ Directory,
+}
+
+impl SourceKind {
+ pub fn protocol(&self) -> Option<&str> {
+ match self {
+ SourceKind::Path => Some("path"),
+ SourceKind::Git(_) => Some("git"),
+ SourceKind::Registry => Some("registry"),
+ // Sparse registry URL already includes the `sparse+` prefix, see `SourceId::new`
+ SourceKind::SparseRegistry => None,
+ SourceKind::LocalRegistry => Some("local-registry"),
+ SourceKind::Directory => Some("directory"),
+ }
+ }
+}
+
+/// Note that this is specifically not derived on `SourceKind` although the
+/// implementation here is very similar to what it might look like if it were
+/// otherwise derived.
+///
+/// The reason for this is somewhat obtuse. First of all the hash value of
+/// `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX`
+/// which means that changes to the hash means that all Rust users need to
+/// redownload the crates.io index and all their crates. If possible we strive
+/// to not change this to make this redownloading behavior happen as little as
+/// possible. How is this connected to `Ord` you might ask? That's a good
+/// question!
+///
+/// Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for
+/// the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522,
+/// however, the implementation of `Ord` changed. This handwritten implementation
+/// forgot to sync itself with the originally derived implementation, namely
+/// placing git dependencies as sorted after all other dependencies instead of
+/// first as before.
+///
+/// This regression in #8522 (Rust 1.47) went unnoticed. When we switched back
+/// to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically
+/// saw an issue (#9334). In #9334 it was observed that stable Rust at the time
+/// (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort
+/// git dependencies first. This is because the `PartialOrd` implementation in
+/// 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52
+/// it was (unknowingly) restored to the pre-1.47 behavior with git dependencies
+/// first.
+///
+/// Because the breakage was only witnessed after the original breakage, this
+/// trait implementation is preserving the "broken" behavior. Put a different way:
+///
+/// * Rust pre-1.47 sorted git deps first.
+/// * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that
+/// was never noticed.
+/// * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did
+/// so), and breakage was witnessed by actual users due to difference with
+/// 1.51.
+/// * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51
+/// behavior (#9383), which is now considered intentionally breaking from the
+/// pre-1.47 behavior.
+///
+/// Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was
+/// in beta. #9133 was in both beta and nightly at the time of discovery. For
+/// 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly
+/// (1.53) #9397 was created to fix the regression introduced by #9133 relative
+/// to the current stable (1.51).
+///
+/// That's all a long winded way of saying "it's weird that git deps hash first
+/// and are sorted last, but it's the way it is right now". The author of this
+/// comment chose to handwrite the `Ord` implementation instead of the `Hash`
+/// implementation, but it's only required that at most one of them is
+/// hand-written because the other can be derived. Perhaps one day in
+/// the future someone can figure out how to remove this behavior.
+impl Ord for SourceKind {
+ fn cmp(&self, other: &SourceKind) -> Ordering {
+ match (self, other) {
+ (SourceKind::Path, SourceKind::Path) => Ordering::Equal,
+ (SourceKind::Path, _) => Ordering::Less,
+ (_, SourceKind::Path) => Ordering::Greater,
+
+ (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal,
+ (SourceKind::Registry, _) => Ordering::Less,
+ (_, SourceKind::Registry) => Ordering::Greater,
+
+ (SourceKind::SparseRegistry, SourceKind::SparseRegistry) => Ordering::Equal,
+ (SourceKind::SparseRegistry, _) => Ordering::Less,
+ (_, SourceKind::SparseRegistry) => Ordering::Greater,
+
+ (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal,
+ (SourceKind::LocalRegistry, _) => Ordering::Less,
+ (_, SourceKind::LocalRegistry) => Ordering::Greater,
+
+ (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal,
+ (SourceKind::Directory, _) => Ordering::Less,
+ (_, SourceKind::Directory) => Ordering::Greater,
+
+ (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b),
+ }
+ }
+}
+
+/// Forwards to `Ord`
+impl PartialOrd for SourceKind {
+ fn partial_cmp(&self, other: &SourceKind) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// Information to find a specific commit in a Git repository.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum GitReference {
+ /// From a tag.
+ Tag(String),
+ /// From a branch.
+ Branch(String),
+ /// From a specific revision. Can be a commit hash (either short or full),
+ /// or a named reference like `refs/pull/493/head`.
+ Rev(String),
+ /// The default branch of the repository, the reference named `HEAD`.
+ DefaultBranch,
+}
+
+impl GitReference {
+ pub fn from_query(
+ query_pairs: impl Iterator<Item = (impl AsRef<str>, impl AsRef<str>)>,
+ ) -> Self {
+ let mut reference = GitReference::DefaultBranch;
+ for (k, v) in query_pairs {
+ let v = v.as_ref();
+ match k.as_ref() {
+ // Map older 'ref' to branch.
+ "branch" | "ref" => reference = GitReference::Branch(v.to_owned()),
+
+ "rev" => reference = GitReference::Rev(v.to_owned()),
+ "tag" => reference = GitReference::Tag(v.to_owned()),
+ _ => {}
+ }
+ }
+ reference
+ }
+
+ /// Returns a `Display`able view of this git reference, or None if using
+ /// the head of the default branch
+ pub fn pretty_ref(&self, url_encoded: bool) -> Option<PrettyRef<'_>> {
+ match self {
+ GitReference::DefaultBranch => None,
+ _ => Some(PrettyRef {
+ inner: self,
+ url_encoded,
+ }),
+ }
+ }
+}
+
+/// A git reference that can be `Display`ed
+pub struct PrettyRef<'a> {
+ inner: &'a GitReference,
+ url_encoded: bool,
+}
+
+impl<'a> std::fmt::Display for PrettyRef<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let value: &str;
+ match self.inner {
+ GitReference::Branch(s) => {
+ write!(f, "branch=")?;
+ value = s;
+ }
+ GitReference::Tag(s) => {
+ write!(f, "tag=")?;
+ value = s;
+ }
+ GitReference::Rev(s) => {
+ write!(f, "rev=")?;
+ value = s;
+ }
+ GitReference::DefaultBranch => unreachable!(),
+ }
+ if self.url_encoded {
+ for value in url::form_urlencoded::byte_serialize(value.as_bytes()) {
+ write!(f, "{value}")?;
+ }
+ } else {
+ write!(f, "{value}")?;
+ }
+ Ok(())
+ }
+}
diff --git a/src/tools/cargo/src/cargo/util/toml/schema.rs b/src/tools/cargo/src/cargo/util_schemas/manifest.rs
index 6ea93e021..390658b0e 100644
--- a/src/tools/cargo/src/cargo/util/toml/schema.rs
+++ b/src/tools/cargo/src/cargo/util_schemas/manifest.rs
@@ -1,15 +1,24 @@
+//! `Cargo.toml` / Manifest schema definition
+//!
+//! ## Style
+//!
+//! - Fields duplicated for an alias will have an accessor with the primary field's name
+//! - Keys that exist for bookkeeping but don't correspond to the schema have a `_` prefix
+
use std::collections::BTreeMap;
use std::fmt::{self, Display, Write};
use std::path::PathBuf;
use std::str;
+use anyhow::Result;
use serde::de::{self, IntoDeserializer as _, Unexpected};
use serde::ser;
use serde::{Deserialize, Serialize};
use serde_untagged::UntaggedEnumVisitor;
-use crate::core::PackageIdSpec;
-use crate::util::RustVersion;
+use crate::util_schemas::core::PackageIdSpec;
+use crate::util_schemas::restricted_names;
+use crate::util_semver::PartialVersion;
/// This type is used to deserialize `Cargo.toml` files.
#[derive(Debug, Deserialize, Serialize)]
@@ -24,20 +33,21 @@ pub struct TomlManifest {
pub example: Option<Vec<TomlExampleTarget>>,
pub test: Option<Vec<TomlTestTarget>>,
pub bench: Option<Vec<TomlTestTarget>>,
- pub dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
- pub dev_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ pub dependencies: Option<BTreeMap<PackageName, InheritableDependency>>,
+ pub dev_dependencies: Option<BTreeMap<PackageName, InheritableDependency>>,
#[serde(rename = "dev_dependencies")]
- pub dev_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
- pub build_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ pub dev_dependencies2: Option<BTreeMap<PackageName, InheritableDependency>>,
+ pub build_dependencies: Option<BTreeMap<PackageName, InheritableDependency>>,
#[serde(rename = "build_dependencies")]
- pub build_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
- pub features: Option<BTreeMap<String, Vec<String>>>,
+ pub build_dependencies2: Option<BTreeMap<PackageName, InheritableDependency>>,
+ pub features: Option<BTreeMap<FeatureName, Vec<String>>>,
pub target: Option<BTreeMap<String, TomlPlatform>>,
pub replace: Option<BTreeMap<String, TomlDependency>>,
- pub patch: Option<BTreeMap<String, BTreeMap<String, TomlDependency>>>,
+ pub patch: Option<BTreeMap<String, BTreeMap<PackageName, TomlDependency>>>,
pub workspace: Option<TomlWorkspace>,
- pub badges: Option<MaybeWorkspaceBtreeMap>,
- pub lints: Option<MaybeWorkspaceLints>,
+ pub badges: Option<InheritableBtreeMap>,
+ pub lints: Option<InheritableLints>,
+ // when adding new fields, be sure to check whether `to_virtual_manifest` should disallow them
}
impl TomlManifest {
@@ -45,19 +55,23 @@ impl TomlManifest {
self.profile.is_some()
}
- pub fn dev_dependencies(&self) -> Option<&BTreeMap<String, MaybeWorkspaceDependency>> {
+ pub fn package(&self) -> Option<&Box<TomlPackage>> {
+ self.package.as_ref().or(self.project.as_ref())
+ }
+
+ pub fn dev_dependencies(&self) -> Option<&BTreeMap<PackageName, InheritableDependency>> {
self.dev_dependencies
.as_ref()
.or(self.dev_dependencies2.as_ref())
}
- pub fn build_dependencies(&self) -> Option<&BTreeMap<String, MaybeWorkspaceDependency>> {
+ pub fn build_dependencies(&self) -> Option<&BTreeMap<PackageName, InheritableDependency>> {
self.build_dependencies
.as_ref()
.or(self.build_dependencies2.as_ref())
}
- pub fn features(&self) -> Option<&BTreeMap<String, Vec<String>>> {
+ pub fn features(&self) -> Option<&BTreeMap<FeatureName, Vec<String>>> {
self.features.as_ref()
}
}
@@ -72,22 +86,15 @@ pub struct TomlWorkspace {
pub metadata: Option<toml::Value>,
// Properties that can be inherited by members.
- pub package: Option<InheritableFields>,
- pub dependencies: Option<BTreeMap<String, TomlDependency>>,
+ pub package: Option<InheritablePackage>,
+ pub dependencies: Option<BTreeMap<PackageName, TomlDependency>>,
pub lints: Option<TomlLints>,
}
/// A group of fields that are inheritable by members of the workspace
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
-pub struct InheritableFields {
- // We use skip here since it will never be present when deserializing
- // and we don't want it present when serializing
- #[serde(skip)]
- pub dependencies: Option<BTreeMap<String, TomlDependency>>,
- #[serde(skip)]
- pub lints: Option<TomlLints>,
-
+pub struct InheritablePackage {
pub version: Option<semver::Version>,
pub authors: Option<Vec<String>>,
pub description: Option<String>,
@@ -105,10 +112,6 @@ pub struct InheritableFields {
pub exclude: Option<Vec<String>>,
pub include: Option<Vec<String>>,
pub rust_version: Option<RustVersion>,
- // We use skip here since it will never be present when deserializing
- // and we don't want it present when serializing
- #[serde(skip)]
- pub ws_root: PathBuf,
}
/// Represents the `package`/`project` sections of a `Cargo.toml`.
@@ -120,19 +123,19 @@ pub struct InheritableFields {
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct TomlPackage {
- pub edition: Option<MaybeWorkspaceString>,
- pub rust_version: Option<MaybeWorkspaceRustVersion>,
- pub name: String,
- pub version: Option<MaybeWorkspaceSemverVersion>,
- pub authors: Option<MaybeWorkspaceVecString>,
+ pub edition: Option<InheritableString>,
+ pub rust_version: Option<InheritableRustVersion>,
+ pub name: PackageName,
+ pub version: Option<InheritableSemverVersion>,
+ pub authors: Option<InheritableVecString>,
pub build: Option<StringOrBool>,
pub metabuild: Option<StringOrVec>,
pub default_target: Option<String>,
pub forced_target: Option<String>,
pub links: Option<String>,
- pub exclude: Option<MaybeWorkspaceVecString>,
- pub include: Option<MaybeWorkspaceVecString>,
- pub publish: Option<MaybeWorkspaceVecStringOrBool>,
+ pub exclude: Option<InheritableVecString>,
+ pub include: Option<InheritableVecString>,
+ pub publish: Option<InheritableVecStringOrBool>,
pub workspace: Option<String>,
pub im_a_teapot: Option<bool>,
pub autobins: Option<bool>,
@@ -142,15 +145,15 @@ pub struct TomlPackage {
pub default_run: Option<String>,
// Package metadata.
- pub description: Option<MaybeWorkspaceString>,
- pub homepage: Option<MaybeWorkspaceString>,
- pub documentation: Option<MaybeWorkspaceString>,
- pub readme: Option<MaybeWorkspaceStringOrBool>,
- pub keywords: Option<MaybeWorkspaceVecString>,
- pub categories: Option<MaybeWorkspaceVecString>,
- pub license: Option<MaybeWorkspaceString>,
- pub license_file: Option<MaybeWorkspaceString>,
- pub repository: Option<MaybeWorkspaceString>,
+ pub description: Option<InheritableString>,
+ pub homepage: Option<InheritableString>,
+ pub documentation: Option<InheritableString>,
+ pub readme: Option<InheritableStringOrBool>,
+ pub keywords: Option<InheritableVecString>,
+ pub categories: Option<InheritableVecString>,
+ pub license: Option<InheritableString>,
+ pub license_file: Option<InheritableString>,
+ pub repository: Option<InheritableString>,
pub resolver: Option<String>,
pub metadata: Option<toml::Value>,
@@ -163,16 +166,25 @@ pub struct TomlPackage {
/// An enum that allows for inheriting keys from a workspace in a Cargo.toml.
#[derive(Serialize, Copy, Clone, Debug)]
#[serde(untagged)]
-pub enum MaybeWorkspace<T, W> {
- /// The "defined" type, or the type that that is used when not inheriting from a workspace.
- Defined(T),
+pub enum InheritableField<T> {
+ /// The type that is used when not inheriting from a workspace.
+ Value(T),
/// The type when inheriting from a workspace.
- Workspace(W),
+ Inherit(TomlInheritedField),
+}
+
+impl<T> InheritableField<T> {
+ pub fn as_value(&self) -> Option<&T> {
+ match self {
+ InheritableField::Inherit(_) => None,
+ InheritableField::Value(defined) => Some(defined),
+ }
+ }
}
//. This already has a `Deserialize` impl from version_trim_whitespace
-pub type MaybeWorkspaceSemverVersion = MaybeWorkspace<semver::Version, TomlWorkspaceField>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceSemverVersion {
+pub type InheritableSemverVersion = InheritableField<semver::Version>;
+impl<'de> de::Deserialize<'de> for InheritableSemverVersion {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -181,17 +193,17 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceSemverVersion {
.expecting("SemVer version")
.string(
|value| match value.trim().parse().map_err(de::Error::custom) {
- Ok(parsed) => Ok(MaybeWorkspace::Defined(parsed)),
+ Ok(parsed) => Ok(InheritableField::Value(parsed)),
Err(e) => Err(e),
},
)
- .map(|value| value.deserialize().map(MaybeWorkspace::Workspace))
+ .map(|value| value.deserialize().map(InheritableField::Inherit))
.deserialize(d)
}
}
-pub type MaybeWorkspaceString = MaybeWorkspace<String, TomlWorkspaceField>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceString {
+pub type InheritableString = InheritableField<String>;
+impl<'de> de::Deserialize<'de> for InheritableString {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -199,7 +211,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceString {
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
- type Value = MaybeWorkspaceString;
+ type Value = InheritableString;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
f.write_str("a string or workspace")
@@ -209,7 +221,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceString {
where
E: de::Error,
{
- Ok(MaybeWorkspaceString::Defined(value))
+ Ok(InheritableString::Value(value))
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
@@ -217,7 +229,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceString {
V: de::MapAccess<'de>,
{
let mvd = de::value::MapAccessDeserializer::new(map);
- TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ TomlInheritedField::deserialize(mvd).map(InheritableField::Inherit)
}
}
@@ -225,8 +237,8 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceString {
}
}
-pub type MaybeWorkspaceRustVersion = MaybeWorkspace<RustVersion, TomlWorkspaceField>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceRustVersion {
+pub type InheritableRustVersion = InheritableField<RustVersion>;
+impl<'de> de::Deserialize<'de> for InheritableRustVersion {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -234,7 +246,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceRustVersion {
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
- type Value = MaybeWorkspaceRustVersion;
+ type Value = InheritableRustVersion;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
f.write_str("a semver or workspace")
@@ -245,7 +257,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceRustVersion {
E: de::Error,
{
let value = value.parse::<RustVersion>().map_err(|e| E::custom(e))?;
- Ok(MaybeWorkspaceRustVersion::Defined(value))
+ Ok(InheritableRustVersion::Value(value))
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
@@ -253,7 +265,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceRustVersion {
V: de::MapAccess<'de>,
{
let mvd = de::value::MapAccessDeserializer::new(map);
- TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ TomlInheritedField::deserialize(mvd).map(InheritableField::Inherit)
}
}
@@ -261,8 +273,8 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceRustVersion {
}
}
-pub type MaybeWorkspaceVecString = MaybeWorkspace<Vec<String>, TomlWorkspaceField>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecString {
+pub type InheritableVecString = InheritableField<Vec<String>>;
+impl<'de> de::Deserialize<'de> for InheritableVecString {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -270,7 +282,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecString {
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
- type Value = MaybeWorkspaceVecString;
+ type Value = InheritableVecString;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.write_str("a vector of strings or workspace")
@@ -280,7 +292,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecString {
A: de::SeqAccess<'de>,
{
let seq = de::value::SeqAccessDeserializer::new(v);
- Vec::deserialize(seq).map(MaybeWorkspace::Defined)
+ Vec::deserialize(seq).map(InheritableField::Value)
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
@@ -288,7 +300,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecString {
V: de::MapAccess<'de>,
{
let mvd = de::value::MapAccessDeserializer::new(map);
- TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ TomlInheritedField::deserialize(mvd).map(InheritableField::Inherit)
}
}
@@ -296,8 +308,8 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecString {
}
}
-pub type MaybeWorkspaceStringOrBool = MaybeWorkspace<StringOrBool, TomlWorkspaceField>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
+pub type InheritableStringOrBool = InheritableField<StringOrBool>;
+impl<'de> de::Deserialize<'de> for InheritableStringOrBool {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -305,7 +317,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
- type Value = MaybeWorkspaceStringOrBool;
+ type Value = InheritableStringOrBool;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.write_str("a string, a bool, or workspace")
@@ -316,7 +328,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
E: de::Error,
{
let b = de::value::BoolDeserializer::new(v);
- StringOrBool::deserialize(b).map(MaybeWorkspace::Defined)
+ StringOrBool::deserialize(b).map(InheritableField::Value)
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
@@ -324,7 +336,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
E: de::Error,
{
let string = de::value::StringDeserializer::new(v);
- StringOrBool::deserialize(string).map(MaybeWorkspace::Defined)
+ StringOrBool::deserialize(string).map(InheritableField::Value)
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
@@ -332,7 +344,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
V: de::MapAccess<'de>,
{
let mvd = de::value::MapAccessDeserializer::new(map);
- TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ TomlInheritedField::deserialize(mvd).map(InheritableField::Inherit)
}
}
@@ -340,8 +352,8 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceStringOrBool {
}
}
-pub type MaybeWorkspaceVecStringOrBool = MaybeWorkspace<VecStringOrBool, TomlWorkspaceField>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
+pub type InheritableVecStringOrBool = InheritableField<VecStringOrBool>;
+impl<'de> de::Deserialize<'de> for InheritableVecStringOrBool {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -349,7 +361,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
- type Value = MaybeWorkspaceVecStringOrBool;
+ type Value = InheritableVecStringOrBool;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.write_str("a boolean, a vector of strings, or workspace")
@@ -360,7 +372,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
E: de::Error,
{
let b = de::value::BoolDeserializer::new(v);
- VecStringOrBool::deserialize(b).map(MaybeWorkspace::Defined)
+ VecStringOrBool::deserialize(b).map(InheritableField::Value)
}
fn visit_seq<A>(self, v: A) -> Result<Self::Value, A::Error>
@@ -368,7 +380,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
A: de::SeqAccess<'de>,
{
let seq = de::value::SeqAccessDeserializer::new(v);
- VecStringOrBool::deserialize(seq).map(MaybeWorkspace::Defined)
+ VecStringOrBool::deserialize(seq).map(InheritableField::Value)
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
@@ -376,7 +388,7 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
V: de::MapAccess<'de>,
{
let mvd = de::value::MapAccessDeserializer::new(map);
- TomlWorkspaceField::deserialize(mvd).map(MaybeWorkspace::Workspace)
+ TomlInheritedField::deserialize(mvd).map(InheritableField::Inherit)
}
}
@@ -384,35 +396,46 @@ impl<'de> de::Deserialize<'de> for MaybeWorkspaceVecStringOrBool {
}
}
-pub type MaybeWorkspaceBtreeMap =
- MaybeWorkspace<BTreeMap<String, BTreeMap<String, String>>, TomlWorkspaceField>;
+pub type InheritableBtreeMap = InheritableField<BTreeMap<String, BTreeMap<String, String>>>;
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceBtreeMap {
+impl<'de> de::Deserialize<'de> for InheritableBtreeMap {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
let value = serde_value::Value::deserialize(deserializer)?;
- if let Ok(w) = TomlWorkspaceField::deserialize(
+ if let Ok(w) = TomlInheritedField::deserialize(
serde_value::ValueDeserializer::<D::Error>::new(value.clone()),
) {
return if w.workspace {
- Ok(MaybeWorkspace::Workspace(w))
+ Ok(InheritableField::Inherit(w))
} else {
Err(de::Error::custom("`workspace` cannot be false"))
};
}
BTreeMap::deserialize(serde_value::ValueDeserializer::<D::Error>::new(value))
- .map(MaybeWorkspace::Defined)
+ .map(InheritableField::Value)
}
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
-pub struct TomlWorkspaceField {
+pub struct TomlInheritedField {
#[serde(deserialize_with = "bool_no_false")]
- pub workspace: bool,
+ workspace: bool,
+}
+
+impl TomlInheritedField {
+ pub fn new() -> Self {
+ TomlInheritedField { workspace: true }
+ }
+}
+
+impl Default for TomlInheritedField {
+ fn default() -> Self {
+ Self::new()
+ }
}
fn bool_no_false<'de, D: de::Deserializer<'de>>(deserializer: D) -> Result<bool, D::Error> {
@@ -424,42 +447,49 @@ fn bool_no_false<'de, D: de::Deserializer<'de>>(deserializer: D) -> Result<bool,
}
}
-pub type MaybeWorkspaceDependency = MaybeWorkspace<TomlDependency, TomlWorkspaceDependency>;
+#[derive(Serialize, Clone, Debug)]
+#[serde(untagged)]
+pub enum InheritableDependency {
+ /// The type that is used when not inheriting from a workspace.
+ Value(TomlDependency),
+ /// The type when inheriting from a workspace.
+ Inherit(TomlInheritedDependency),
+}
-impl MaybeWorkspaceDependency {
+impl InheritableDependency {
pub fn unused_keys(&self) -> Vec<String> {
match self {
- MaybeWorkspaceDependency::Defined(d) => d.unused_keys(),
- MaybeWorkspaceDependency::Workspace(w) => w.unused_keys.keys().cloned().collect(),
+ InheritableDependency::Value(d) => d.unused_keys(),
+ InheritableDependency::Inherit(w) => w._unused_keys.keys().cloned().collect(),
}
}
}
-impl<'de> de::Deserialize<'de> for MaybeWorkspaceDependency {
+impl<'de> de::Deserialize<'de> for InheritableDependency {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
let value = serde_value::Value::deserialize(deserializer)?;
- if let Ok(w) = TomlWorkspaceDependency::deserialize(serde_value::ValueDeserializer::<
+ if let Ok(w) = TomlInheritedDependency::deserialize(serde_value::ValueDeserializer::<
D::Error,
>::new(value.clone()))
{
return if w.workspace {
- Ok(MaybeWorkspace::Workspace(w))
+ Ok(InheritableDependency::Inherit(w))
} else {
Err(de::Error::custom("`workspace` cannot be false"))
};
}
TomlDependency::deserialize(serde_value::ValueDeserializer::<D::Error>::new(value))
- .map(MaybeWorkspace::Defined)
+ .map(InheritableDependency::Value)
}
}
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
-pub struct TomlWorkspaceDependency {
+pub struct TomlInheritedDependency {
pub workspace: bool,
pub features: Option<Vec<String>>,
pub default_features: Option<bool>,
@@ -471,10 +501,10 @@ pub struct TomlWorkspaceDependency {
/// This is here to provide a way to see the "unused manifest keys" when deserializing
#[serde(skip_serializing)]
#[serde(flatten)]
- pub unused_keys: BTreeMap<String, toml::Value>,
+ pub _unused_keys: BTreeMap<String, toml::Value>,
}
-impl TomlWorkspaceDependency {
+impl TomlInheritedDependency {
pub fn default_features(&self) -> Option<bool> {
self.default_features.or(self.default_features2)
}
@@ -489,7 +519,7 @@ pub enum TomlDependency<P: Clone = String> {
/// The simple format is equivalent to a detailed dependency
/// specifying only a version, eg.
/// `package = { version = "<version>" }`
- Detailed(DetailedTomlDependency<P>),
+ Detailed(TomlDetailedDependency<P>),
}
impl TomlDependency {
@@ -507,10 +537,17 @@ impl TomlDependency {
}
}
+ pub fn is_public(&self) -> bool {
+ match self {
+ TomlDependency::Detailed(d) => d.public.unwrap_or(false),
+ TomlDependency::Simple(..) => false,
+ }
+ }
+
pub fn unused_keys(&self) -> Vec<String> {
match self {
TomlDependency::Simple(_) => vec![],
- TomlDependency::Detailed(detailed) => detailed.unused_keys.keys().cloned().collect(),
+ TomlDependency::Detailed(detailed) => detailed._unused_keys.keys().cloned().collect(),
}
}
}
@@ -533,9 +570,9 @@ impl<'de, P: Deserialize<'de> + Clone> de::Deserialize<'de> for TomlDependency<P
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
-pub struct DetailedTomlDependency<P: Clone = String> {
+pub struct TomlDetailedDependency<P: Clone = String> {
pub version: Option<String>,
- pub registry: Option<String>,
+ pub registry: Option<RegistryName>,
/// The URL of the `registry` field.
/// This is an internal implementation detail. When Cargo creates a
/// package, it replaces `registry` with `registry-index` so that the
@@ -555,7 +592,7 @@ pub struct DetailedTomlDependency<P: Clone = String> {
pub default_features: Option<bool>,
#[serde(rename = "default_features")]
pub default_features2: Option<bool>,
- pub package: Option<String>,
+ pub package: Option<PackageName>,
pub public: Option<bool>,
/// One or more of `bin`, `cdylib`, `staticlib`, `bin:<name>`.
@@ -568,17 +605,17 @@ pub struct DetailedTomlDependency<P: Clone = String> {
/// This is here to provide a way to see the "unused manifest keys" when deserializing
#[serde(skip_serializing)]
#[serde(flatten)]
- pub unused_keys: BTreeMap<String, toml::Value>,
+ pub _unused_keys: BTreeMap<String, toml::Value>,
}
-impl<P: Clone> DetailedTomlDependency<P> {
+impl<P: Clone> TomlDetailedDependency<P> {
pub fn default_features(&self) -> Option<bool> {
self.default_features.or(self.default_features2)
}
}
// Explicit implementation so we avoid pulling in P: Default
-impl<P: Clone> Default for DetailedTomlDependency<P> {
+impl<P: Clone> Default for TomlDetailedDependency<P> {
fn default() -> Self {
Self {
version: Default::default(),
@@ -598,16 +635,16 @@ impl<P: Clone> Default for DetailedTomlDependency<P> {
artifact: Default::default(),
lib: Default::default(),
target: Default::default(),
- unused_keys: Default::default(),
+ _unused_keys: Default::default(),
}
}
}
#[derive(Deserialize, Serialize, Clone, Debug, Default)]
-pub struct TomlProfiles(pub BTreeMap<String, TomlProfile>);
+pub struct TomlProfiles(pub BTreeMap<ProfileName, TomlProfile>);
impl TomlProfiles {
- pub fn get_all(&self) -> &BTreeMap<String, TomlProfile> {
+ pub fn get_all(&self) -> &BTreeMap<ProfileName, TomlProfile> {
&self.0
}
@@ -643,6 +680,98 @@ pub struct TomlProfile {
pub trim_paths: Option<TomlTrimPaths>,
}
+impl TomlProfile {
+ /// Overwrite self's values with the given profile.
+ pub fn merge(&mut self, profile: &Self) {
+ if let Some(v) = &profile.opt_level {
+ self.opt_level = Some(v.clone());
+ }
+
+ if let Some(v) = &profile.lto {
+ self.lto = Some(v.clone());
+ }
+
+ if let Some(v) = &profile.codegen_backend {
+ self.codegen_backend = Some(v.clone());
+ }
+
+ if let Some(v) = profile.codegen_units {
+ self.codegen_units = Some(v);
+ }
+
+ if let Some(v) = profile.debug {
+ self.debug = Some(v);
+ }
+
+ if let Some(v) = profile.debug_assertions {
+ self.debug_assertions = Some(v);
+ }
+
+ if let Some(v) = &profile.split_debuginfo {
+ self.split_debuginfo = Some(v.clone());
+ }
+
+ if let Some(v) = profile.rpath {
+ self.rpath = Some(v);
+ }
+
+ if let Some(v) = &profile.panic {
+ self.panic = Some(v.clone());
+ }
+
+ if let Some(v) = profile.overflow_checks {
+ self.overflow_checks = Some(v);
+ }
+
+ if let Some(v) = profile.incremental {
+ self.incremental = Some(v);
+ }
+
+ if let Some(v) = &profile.rustflags {
+ self.rustflags = Some(v.clone());
+ }
+
+ if let Some(other_package) = &profile.package {
+ match &mut self.package {
+ Some(self_package) => {
+ for (spec, other_pkg_profile) in other_package {
+ match self_package.get_mut(spec) {
+ Some(p) => p.merge(other_pkg_profile),
+ None => {
+ self_package.insert(spec.clone(), other_pkg_profile.clone());
+ }
+ }
+ }
+ }
+ None => self.package = Some(other_package.clone()),
+ }
+ }
+
+ if let Some(other_bo) = &profile.build_override {
+ match &mut self.build_override {
+ Some(self_bo) => self_bo.merge(other_bo),
+ None => self.build_override = Some(other_bo.clone()),
+ }
+ }
+
+ if let Some(v) = &profile.inherits {
+ self.inherits = Some(v.clone());
+ }
+
+ if let Some(v) = &profile.dir_name {
+ self.dir_name = Some(v.clone());
+ }
+
+ if let Some(v) = &profile.strip {
+ self.strip = Some(v.clone());
+ }
+
+ if let Some(v) = &profile.trim_paths {
+ self.trim_paths = Some(v.clone())
+ }
+ }
+}
+
#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub enum ProfilePackageSpec {
Spec(PackageIdSpec),
@@ -950,10 +1079,9 @@ pub struct TomlTarget {
pub doc: Option<bool>,
pub plugin: Option<bool>,
pub doc_scrape_examples: Option<bool>,
- #[serde(rename = "proc-macro")]
- pub proc_macro_raw: Option<bool>,
+ pub proc_macro: Option<bool>,
#[serde(rename = "proc_macro")]
- pub proc_macro_raw2: Option<bool>,
+ pub proc_macro2: Option<bool>,
pub harness: Option<bool>,
pub required_features: Option<Vec<String>>,
pub edition: Option<String>,
@@ -965,7 +1093,7 @@ impl TomlTarget {
}
pub fn proc_macro(&self) -> Option<bool> {
- self.proc_macro_raw.or(self.proc_macro_raw2).or_else(|| {
+ self.proc_macro.or(self.proc_macro2).or_else(|| {
if let Some(types) = self.crate_types() {
if types.contains(&"proc-macro".to_string()) {
return Some(true);
@@ -982,27 +1110,138 @@ impl TomlTarget {
}
}
+macro_rules! str_newtype {
+ ($name:ident) => {
+ /// Verified string newtype
+ #[derive(Serialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+ #[serde(transparent)]
+ pub struct $name<T: AsRef<str> = String>(T);
+
+ impl<T: AsRef<str>> $name<T> {
+ pub fn into_inner(self) -> T {
+ self.0
+ }
+ }
+
+ impl<T: AsRef<str>> AsRef<str> for $name<T> {
+ fn as_ref(&self) -> &str {
+ self.0.as_ref()
+ }
+ }
+
+ impl<T: AsRef<str>> std::ops::Deref for $name<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+
+ impl<T: AsRef<str>> std::borrow::Borrow<str> for $name<T> {
+ fn borrow(&self) -> &str {
+ self.0.as_ref()
+ }
+ }
+
+ impl<'a> std::str::FromStr for $name<String> {
+ type Err = anyhow::Error;
+
+ fn from_str(value: &str) -> Result<Self, Self::Err> {
+ Self::new(value.to_owned())
+ }
+ }
+
+ impl<'de, T: AsRef<str> + serde::Deserialize<'de>> serde::Deserialize<'de> for $name<T> {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let inner = T::deserialize(deserializer)?;
+ Self::new(inner).map_err(serde::de::Error::custom)
+ }
+ }
+
+ impl<T: AsRef<str>> Display for $name<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.as_ref().fmt(f)
+ }
+ }
+ };
+}
+
+str_newtype!(PackageName);
+
+impl<T: AsRef<str>> PackageName<T> {
+ /// Validated package name
+ pub fn new(name: T) -> Result<Self> {
+ restricted_names::validate_package_name(name.as_ref(), "package name", "")?;
+ Ok(Self(name))
+ }
+}
+
+impl PackageName {
+ /// Coerce a value to be a valid package name
+ ///
+ /// Replaces invalid values with `placeholder`
+ pub fn sanitize(name: impl AsRef<str>, placeholder: char) -> Self {
+ PackageName(restricted_names::sanitize_package_name(
+ name.as_ref(),
+ placeholder,
+ ))
+ }
+}
+
+str_newtype!(RegistryName);
+
+impl<T: AsRef<str>> RegistryName<T> {
+ /// Validated registry name
+ pub fn new(name: T) -> Result<Self> {
+ restricted_names::validate_package_name(name.as_ref(), "registry name", "")?;
+ Ok(Self(name))
+ }
+}
+
+str_newtype!(ProfileName);
+
+impl<T: AsRef<str>> ProfileName<T> {
+ /// Validated profile name
+ pub fn new(name: T) -> Result<Self> {
+ restricted_names::validate_profile_name(name.as_ref())?;
+ Ok(Self(name))
+ }
+}
+
+str_newtype!(FeatureName);
+
+impl<T: AsRef<str>> FeatureName<T> {
+ /// Validated feature name
+ pub fn new(name: T) -> Result<Self> {
+ restricted_names::validate_feature_name(name.as_ref())?;
+ Ok(Self(name))
+ }
+}
+
/// Corresponds to a `target` entry, but `TomlTarget` is already used.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct TomlPlatform {
- pub dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
- pub build_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ pub dependencies: Option<BTreeMap<PackageName, InheritableDependency>>,
+ pub build_dependencies: Option<BTreeMap<PackageName, InheritableDependency>>,
#[serde(rename = "build_dependencies")]
- pub build_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
- pub dev_dependencies: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ pub build_dependencies2: Option<BTreeMap<PackageName, InheritableDependency>>,
+ pub dev_dependencies: Option<BTreeMap<PackageName, InheritableDependency>>,
#[serde(rename = "dev_dependencies")]
- pub dev_dependencies2: Option<BTreeMap<String, MaybeWorkspaceDependency>>,
+ pub dev_dependencies2: Option<BTreeMap<PackageName, InheritableDependency>>,
}
impl TomlPlatform {
- pub fn dev_dependencies(&self) -> Option<&BTreeMap<String, MaybeWorkspaceDependency>> {
+ pub fn dev_dependencies(&self) -> Option<&BTreeMap<PackageName, InheritableDependency>> {
self.dev_dependencies
.as_ref()
.or(self.dev_dependencies2.as_ref())
}
- pub fn build_dependencies(&self) -> Option<&BTreeMap<String, MaybeWorkspaceDependency>> {
+ pub fn build_dependencies(&self) -> Option<&BTreeMap<PackageName, InheritableDependency>> {
self.build_dependencies
.as_ref()
.or(self.build_dependencies2.as_ref())
@@ -1012,7 +1251,7 @@ impl TomlPlatform {
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(expecting = "a lints table")]
#[serde(rename_all = "kebab-case")]
-pub struct MaybeWorkspaceLints {
+pub struct InheritableLints {
#[serde(skip_serializing_if = "is_false")]
#[serde(deserialize_with = "bool_no_false", default)]
pub workspace: bool,
@@ -1082,6 +1321,51 @@ pub enum TomlLintLevel {
Allow,
}
+#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Debug, serde::Serialize)]
+#[serde(transparent)]
+pub struct RustVersion(PartialVersion);
+
+impl std::ops::Deref for RustVersion {
+ type Target = PartialVersion;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl std::str::FromStr for RustVersion {
+ type Err = anyhow::Error;
+
+ fn from_str(value: &str) -> Result<Self, Self::Err> {
+ let partial = value.parse::<PartialVersion>()?;
+ if partial.pre.is_some() {
+ anyhow::bail!("unexpected prerelease field, expected a version like \"1.32\"")
+ }
+ if partial.build.is_some() {
+ anyhow::bail!("unexpected build field, expected a version like \"1.32\"")
+ }
+ Ok(Self(partial))
+ }
+}
+
+impl<'de> serde::Deserialize<'de> for RustVersion {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ UntaggedEnumVisitor::new()
+ .expecting("SemVer version")
+ .string(|value| value.parse().map_err(serde::de::Error::custom))
+ .deserialize(deserializer)
+ }
+}
+
+impl Display for RustVersion {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
#[derive(Copy, Clone, Debug)]
pub struct InvalidCargoFeatures {}
diff --git a/src/tools/cargo/src/cargo/util_schemas/mod.rs b/src/tools/cargo/src/cargo/util_schemas/mod.rs
new file mode 100644
index 000000000..84b6c39a8
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util_schemas/mod.rs
@@ -0,0 +1,11 @@
+//! Low-level Cargo format schemas
+//!
+//! These are types with logic mostly focused on `serde` and `FromStr` for use in reading files and
+//! parsing command-lines.
+//! Any logic for getting final semantics from these will likely need other tools to process, like
+//! `cargo metadata`.
+
+pub mod core;
+pub mod manifest;
+
+mod restricted_names;
diff --git a/src/tools/cargo/src/cargo/util_schemas/restricted_names.rs b/src/tools/cargo/src/cargo/util_schemas/restricted_names.rs
new file mode 100644
index 000000000..2d22ce4f2
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util_schemas/restricted_names.rs
@@ -0,0 +1,218 @@
+//! Helpers for validating and checking names like package and crate names.
+
+use anyhow::bail;
+use anyhow::Result;
+
+/// Check the base requirements for a package name.
+///
+/// This can be used for other things than package names, to enforce some
+/// level of sanity. Note that package names have other restrictions
+/// elsewhere. `cargo new` has a few restrictions, such as checking for
+/// reserved names. crates.io has even more restrictions.
+pub fn validate_package_name(name: &str, what: &str, help: &str) -> Result<()> {
+ if name.is_empty() {
+ bail!("{what} cannot be empty");
+ }
+
+ let mut chars = name.chars();
+ if let Some(ch) = chars.next() {
+ if ch.is_digit(10) {
+ // A specific error for a potentially common case.
+ bail!(
+ "the name `{}` cannot be used as a {}, \
+ the name cannot start with a digit{}",
+ name,
+ what,
+ help
+ );
+ }
+ if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') {
+ bail!(
+ "invalid character `{}` in {}: `{}`, \
+ the first character must be a Unicode XID start character \
+ (most letters or `_`){}",
+ ch,
+ what,
+ name,
+ help
+ );
+ }
+ }
+ for ch in chars {
+ if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-') {
+ bail!(
+ "invalid character `{}` in {}: `{}`, \
+ characters must be Unicode XID characters \
+ (numbers, `-`, `_`, or most letters){}",
+ ch,
+ what,
+ name,
+ help
+ );
+ }
+ }
+ Ok(())
+}
+
+/// Ensure a package name is [valid][validate_package_name]
+pub fn sanitize_package_name(name: &str, placeholder: char) -> String {
+ let mut slug = String::new();
+ let mut chars = name.chars();
+ while let Some(ch) = chars.next() {
+ if (unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') && !ch.is_digit(10) {
+ slug.push(ch);
+ break;
+ }
+ }
+ while let Some(ch) = chars.next() {
+ if unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' {
+ slug.push(ch);
+ } else {
+ slug.push(placeholder);
+ }
+ }
+ if slug.is_empty() {
+ slug.push_str("package");
+ }
+ slug
+}
+
+/// Validate dir-names and profile names according to RFC 2678.
+pub fn validate_profile_name(name: &str) -> Result<()> {
+ if let Some(ch) = name
+ .chars()
+ .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-')
+ {
+ bail!(
+ "invalid character `{}` in profile name `{}`\n\
+ Allowed characters are letters, numbers, underscore, and hyphen.",
+ ch,
+ name
+ );
+ }
+
+ const SEE_DOCS: &str = "See https://doc.rust-lang.org/cargo/reference/profiles.html \
+ for more on configuring profiles.";
+
+ let lower_name = name.to_lowercase();
+ if lower_name == "debug" {
+ bail!(
+ "profile name `{}` is reserved\n\
+ To configure the default development profile, use the name `dev` \
+ as in [profile.dev]\n\
+ {}",
+ name,
+ SEE_DOCS
+ );
+ }
+ if lower_name == "build-override" {
+ bail!(
+ "profile name `{}` is reserved\n\
+ To configure build dependency settings, use [profile.dev.build-override] \
+ and [profile.release.build-override]\n\
+ {}",
+ name,
+ SEE_DOCS
+ );
+ }
+
+ // These are some arbitrary reservations. We have no plans to use
+ // these, but it seems safer to reserve a few just in case we want to
+ // add more built-in profiles in the future. We can also uses special
+ // syntax like cargo:foo if needed. But it is unlikely these will ever
+ // be used.
+ if matches!(
+ lower_name.as_str(),
+ "build"
+ | "check"
+ | "clean"
+ | "config"
+ | "fetch"
+ | "fix"
+ | "install"
+ | "metadata"
+ | "package"
+ | "publish"
+ | "report"
+ | "root"
+ | "run"
+ | "rust"
+ | "rustc"
+ | "rustdoc"
+ | "target"
+ | "tmp"
+ | "uninstall"
+ ) || lower_name.starts_with("cargo")
+ {
+ bail!(
+ "profile name `{}` is reserved\n\
+ Please choose a different name.\n\
+ {}",
+ name,
+ SEE_DOCS
+ );
+ }
+
+ Ok(())
+}
+
+pub fn validate_feature_name(name: &str) -> Result<()> {
+ if name.is_empty() {
+ bail!("feature name cannot be empty");
+ }
+
+ if name.starts_with("dep:") {
+ bail!("feature named `{name}` is not allowed to start with `dep:`",);
+ }
+ if name.contains('/') {
+ bail!("feature named `{name}` is not allowed to contain slashes",);
+ }
+ let mut chars = name.chars();
+ if let Some(ch) = chars.next() {
+ if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' || ch.is_digit(10)) {
+ bail!(
+ "invalid character `{ch}` in feature `{name}`, \
+ the first character must be a Unicode XID start character or digit \
+ (most letters or `_` or `0` to `9`)",
+ );
+ }
+ }
+ for ch in chars {
+ if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' || ch == '+' || ch == '.') {
+ bail!(
+ "invalid character `{ch}` in feature `{name}`, \
+ characters must be Unicode XID characters, '-', `+`, or `.` \
+ (numbers, `+`, `-`, `_`, `.`, or most letters)",
+ );
+ }
+ }
+ Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn valid_feature_names() {
+ assert!(validate_feature_name("c++17").is_ok());
+ assert!(validate_feature_name("128bit").is_ok());
+ assert!(validate_feature_name("_foo").is_ok());
+ assert!(validate_feature_name("feat-name").is_ok());
+ assert!(validate_feature_name("feat_name").is_ok());
+ assert!(validate_feature_name("foo.bar").is_ok());
+
+ assert!(validate_feature_name("").is_err());
+ assert!(validate_feature_name("+foo").is_err());
+ assert!(validate_feature_name("-foo").is_err());
+ assert!(validate_feature_name(".foo").is_err());
+ assert!(validate_feature_name("dep:bar").is_err());
+ assert!(validate_feature_name("foo/bar").is_err());
+ assert!(validate_feature_name("foo:bar").is_err());
+ assert!(validate_feature_name("foo?").is_err());
+ assert!(validate_feature_name("?foo").is_err());
+ assert!(validate_feature_name("ⒶⒷⒸ").is_err());
+ assert!(validate_feature_name("a¼").is_err());
+ assert!(validate_feature_name("").is_err());
+ }
+}
diff --git a/src/tools/cargo/src/doc/contrib/src/issues.md b/src/tools/cargo/src/doc/contrib/src/issues.md
index b82492d27..c77c083b2 100644
--- a/src/tools/cargo/src/doc/contrib/src/issues.md
+++ b/src/tools/cargo/src/doc/contrib/src/issues.md
@@ -33,10 +33,9 @@ relevant are:
* [`rust-lang/crates.io`] --- Home for the [crates.io] website.
Issues with [`cargo fix`] can be tricky to know where they should be filed,
-since the fixes are driven by `rustc`, processed by [`rustfix`], and the
-front-interface is implemented in Cargo. Feel free to file in the Cargo issue
-tracker, and it will get moved to one of the other issue trackers if
-necessary.
+since the fixes are driven by `rustc`, and the front-interface is implemented
+in Cargo. Feel free to file in the Cargo issue tracker, and it will get moved
+to the [`rust-lang/rust`] issue tracker if necessary.
[Process]: process/index.md
[security policy]: https://www.rust-lang.org/security.html
@@ -51,7 +50,6 @@ necessary.
[`rustup`]: https://rust-lang.github.io/rustup/
[`rust-lang/crates.io`]: https://github.com/rust-lang/crates.io
[crates.io]: https://crates.io/
-[`rustfix`]: https://github.com/rust-lang/rustfix/
[`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html
## Issue labels
diff --git a/src/tools/cargo/src/doc/contrib/src/process/release.md b/src/tools/cargo/src/doc/contrib/src/process/release.md
index 169d63ed8..3c3062f32 100644
--- a/src/tools/cargo/src/doc/contrib/src/process/release.md
+++ b/src/tools/cargo/src/doc/contrib/src/process/release.md
@@ -146,10 +146,9 @@ Cargo team, or the Release team) should publish it manually using `cargo
publish`.
Some packages are not published automatically because they are not part of the
-Rust release train. These currently include all of the [`credential`] packages
-and the [`home`] package. These are published manually on an as-needed or
-as-requested basis by whoever has permissions (currently [@ehuss] or the
-Release/Infra team).
+Rust release train. This currently only includes the [`home`] package. These
+are published manually on an as-needed or as-requested basis by whoever has
+permissions (currently [@ehuss] or the Release/Infra team).
In the future, these manual publishing options should be integrated with
GitHub Actions so that any team member can trigger them. Likely that should
@@ -158,7 +157,6 @@ Secrets, and setting up GitHub Actions workflows with the appropriate
permissions which can be manually triggered to launch a release.
[`home`]: https://github.com/rust-lang/cargo/tree/master/crates/home
-[`credential`]: https://github.com/rust-lang/cargo/tree/master/credential
[`publish.py` script]: https://github.com/rust-lang/cargo/blob/master/publish.py
## Beta backports
diff --git a/src/tools/cargo/src/doc/contrib/src/team.md b/src/tools/cargo/src/doc/contrib/src/team.md
index 51b8eeddd..39b428849 100644
--- a/src/tools/cargo/src/doc/contrib/src/team.md
+++ b/src/tools/cargo/src/doc/contrib/src/team.md
@@ -61,6 +61,10 @@ Members are given privileges, such as:
The team meets on a weekly basis on a video chat.
If you are interested in participating, feel free to contact us on [Zulip].
+Minutes for all meetings are recorded on [HackMD][meeting-minutes].
+
+[meeting-minutes]: https://hackmd.io/team/rust-cargo-team?nav=overview&tags=%5B%22meetings%22%5D
+
### Becoming a member
A contributor can become a member of the Cargo Team by requesting a review or being nominated by one of the existing members.
@@ -71,6 +75,27 @@ Contributors who wish to join the team should exhibit an interest in carrying th
Members may leave at any time, preferably by letting the team know ahead of time.
+## Team resources
+
+### Zulip
+
+The Cargo team has several streams on Zulip:
+
+- [`#t-cargo`](https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo) --- General public channel for discussing any topics related to Cargo.
+- [`#t-cargo/build-integration`](https://rust-lang.zulipchat.com/#narrow/stream/334885-t-cargo.2Fbuild-integration) --- Discussions about integration with build systems.
+- [`#t-cargo/PubGrub`](https://rust-lang.zulipchat.com/#narrow/stream/260232-t-cargo.2FPubGrub) --- Discussions about the [PubGrub](https://github.com/pubgrub-rs/pubgrub) project.
+
+The following are private streams for the Cargo team. The team should avoid using them unless necessary (for example, discussing a security issue or team and meeting organization).
+
+- [`#t-cargo/private`](https://rust-lang.zulipchat.com/#narrow/stream/296752-t-cargo.2Fprivate) --- Private channel for discussions among the team.
+- [`#t-cargo/meetings`](https://rust-lang.zulipchat.com/#narrow/stream/364532-t-cargo.2Fmeetings) --- Private channel for discussions about team meetings, including non-members who regularly attend the meetings.
+
+### HackMD
+
+The Cargo team has a shared, public workspace on HackMD at <https://hackmd.io/team/rust-cargo-team> for drafting documents and recording meeting minutes.
+
+Since this HackMD workspace is using the free service, it does not support private documents. If you need to draft a private document, create it in your personal workspace and use private channels to share the link.
+
## Decision process
The team uses a consensus-driven process for making decisions ranging from new features and major changes to management of the team itself.
diff --git a/src/tools/cargo/src/doc/man/cargo-search.md b/src/tools/cargo/src/doc/man/cargo-search.md
index f3d87cb12..fbecb79ec 100644
--- a/src/tools/cargo/src/doc/man/cargo-search.md
+++ b/src/tools/cargo/src/doc/man/cargo-search.md
@@ -2,7 +2,7 @@
## NAME
-cargo-search --- Search packages in crates.io
+cargo-search --- Search packages in the registry. Default registry is crates.io
## SYNOPSIS
@@ -49,4 +49,5 @@ Limit the number of results (default: 10, max: 100).
cargo search serde
## SEE ALSO
+
{{man "cargo" 1}}, {{man "cargo-install" 1}}, {{man "cargo-publish" 1}}
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt
index 74bbda9f7..9fd89b67e 100644
--- a/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-search.txt
@@ -1,7 +1,8 @@
CARGO-SEARCH(1)
NAME
- cargo-search — Search packages in crates.io
+ cargo-search — Search packages in the registry. Default registry is
+ crates.io
SYNOPSIS
cargo search [options] [query…]
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-search.md b/src/tools/cargo/src/doc/src/commands/cargo-search.md
index 72e2accf3..1a98f2869 100644
--- a/src/tools/cargo/src/doc/src/commands/cargo-search.md
+++ b/src/tools/cargo/src/doc/src/commands/cargo-search.md
@@ -2,7 +2,7 @@
## NAME
-cargo-search --- Search packages in crates.io
+cargo-search --- Search packages in the registry. Default registry is crates.io
## SYNOPSIS
@@ -131,4 +131,5 @@ details on environment variables that Cargo reads.
cargo search serde
## SEE ALSO
+
[cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html), [cargo-publish(1)](cargo-publish.html)
diff --git a/src/tools/cargo/src/doc/src/guide/continuous-integration.md b/src/tools/cargo/src/doc/src/guide/continuous-integration.md
index 0c3cec889..282ca1ab8 100644
--- a/src/tools/cargo/src/doc/src/guide/continuous-integration.md
+++ b/src/tools/cargo/src/doc/src/guide/continuous-integration.md
@@ -156,6 +156,30 @@ jobs:
For projects with higher risks of per-platform or per-Rust version failures,
more combinations may want to be tested.
+## Verifying `rust-version`
+
+When publishing packages that specify [`rust-version`](../reference/manifest.md#the-rust-version-field),
+it is important to verify the correctness of that field.
+
+Some third-party tools that can help with this include:
+- [`cargo-msrv`](https://crates.io/crates/cargo-msrv)
+- [`cargo-hack`](https://crates.io/crates/cargo-hack)
+
+An example of one way to do this, using GitHub Actions:
+```yaml
+jobs:
+ msrv:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: taiki-e/install-action@cargo-hack
+ - run: cargo hack check --rust-version --workspace --all-targets --ignore-private
+```
+This tries to balance thoroughness with turnaround time:
+- A single platform is used as most projects are platform-agnostic, trusting platform-specific dependencies to verify their behavior.
+- `cargo check` is used as most issues contributors will run into are API availability and not behavior.
+- Unpublished packages are skipped as this assumes only consumers of the verified project, through a registry, will care about `rust-version`.
+
[`cargo add`]: ../commands/cargo-add.md
[`cargo install`]: ../commands/cargo-install.md
[Dependabot]: https://docs.github.com/en/code-security/dependabot/working-with-dependabot
diff --git a/src/tools/cargo/src/doc/src/reference/environment-variables.md b/src/tools/cargo/src/doc/src/reference/environment-variables.md
index 3f0552613..40a0d32f1 100644
--- a/src/tools/cargo/src/doc/src/reference/environment-variables.md
+++ b/src/tools/cargo/src/doc/src/reference/environment-variables.md
@@ -265,6 +265,7 @@ corresponding environment variable is set to the empty string, `""`.
where integration tests or benchmarks are free to put any data needed by
the tests/benches. Cargo initially creates this directory but doesn't
manage its content in any way, this is the responsibility of the test code.
+* `CARGO_RUSTC_CURRENT_DIR` --- This is a path that `rustc` is invoked from **(nightly only)**.
[Cargo target]: cargo-targets.md
[binaries]: cargo-targets.md#binaries
@@ -352,6 +353,8 @@ let out_dir = env::var("OUT_DIR").unwrap();
* `CARGO_CFG_TARGET_POINTER_WIDTH=64` --- The CPU [pointer width].
* `CARGO_CFG_TARGET_ENDIAN=little` --- The CPU [target endianness].
* `CARGO_CFG_TARGET_FEATURE=mmx,sse` --- List of CPU [target features] enabled.
+  > Note that different [target triples][Target Triple] have different sets of `cfg` values,
+  > hence variables present in one target triple might not be available in another.
* `OUT_DIR` --- the folder in which all output and intermediate artifacts should
be placed. This folder is inside the build directory for the
package being built, and it is unique for the package in question.
diff --git a/src/tools/cargo/src/doc/src/reference/manifest.md b/src/tools/cargo/src/doc/src/reference/manifest.md
index e3168a47f..cb6974537 100644
--- a/src/tools/cargo/src/doc/src/reference/manifest.md
+++ b/src/tools/cargo/src/doc/src/reference/manifest.md
@@ -185,6 +185,10 @@ The `rust-version` may be ignored using the `--ignore-rust-version` option.
Setting the `rust-version` key in `[package]` will affect all targets/crates in
the package, including test suites, benchmarks, binaries, examples, etc.
+To find the minimum `rust-version` compatible with your project, you can use third-party tools like [`cargo-msrv`](https://crates.io/crates/cargo-msrv).
+
+When used on packages that get published, we recommend [verifying the `rust-version`](../guide/continuous-integration.md#verifying-rust-version).
+
### The `description` field
The description is a short blurb about the package. [crates.io] will display
@@ -551,7 +555,7 @@ This is short-hand for:
unsafe_code = { level = "forbid", priority = 0 }
```
-`level` corresponds to the lint levels in `rustc`:
+`level` corresponds to the [lint levels](https://doc.rust-lang.org/rustc/lints/levels.html) in `rustc`:
- `forbid`
- `deny`
- `warn`
@@ -576,6 +580,11 @@ unsafe_code = "forbid"
enum_glob_use = "deny"
```
+Generally, these will only affect local development of the current package.
+Cargo only applies these to the current package and not to dependencies.
+As for dependents, Cargo suppresses lints from non-path dependencies with features like
+[`--cap-lints`](../../rustc/lints/levels.html#capping-lints).
+
## The `[badges]` section
The `[badges]` section is for specifying status badges that can be displayed
diff --git a/src/tools/cargo/src/doc/src/reference/pkgid-spec.md b/src/tools/cargo/src/doc/src/reference/pkgid-spec.md
index 7f20973b5..b2dfe827b 100644
--- a/src/tools/cargo/src/doc/src/reference/pkgid-spec.md
+++ b/src/tools/cargo/src/doc/src/reference/pkgid-spec.md
@@ -21,11 +21,13 @@ qualified with a version to make it unique, such as `regex@1.4.3`.
The formal grammar for a Package Id Specification is:
```notrust
-spec := pkgname
- | proto "://" hostname-and-path [ "#" ( pkgname | semver ) ]
+spec := pkgname |
+        | [ kind "+" ] proto "://" hostname-and-path [ "?" query ] [ "#" ( pkgname | semver ) ]
+query := ( "branch" | "tag" | "rev" ) "=" ref
pkgname := name [ ("@" | ":" ) semver ]
semver := digits [ "." digits [ "." digits [ "-" prerelease ] [ "+" build ]]]
+kind = "registry" | "git" | "file"
proto := "http" | "git" | ...
```
@@ -38,28 +40,32 @@ that come from different sources such as different registries.
The following are references to the `regex` package on `crates.io`:
-| Spec | Name | Version |
-|:------------------------------------------------------------|:-------:|:-------:|
-| `regex` | `regex` | `*` |
-| `regex@1.4` | `regex` | `1.4.*` |
-| `regex@1.4.3` | `regex` | `1.4.3` |
-| `https://github.com/rust-lang/crates.io-index#regex` | `regex` | `*` |
-| `https://github.com/rust-lang/crates.io-index#regex@1.4.3` | `regex` | `1.4.3` |
+| Spec | Name | Version |
+|:------------------------------------------------------------------|:-------:|:-------:|
+| `regex` | `regex` | `*` |
+| `regex@1.4` | `regex` | `1.4.*` |
+| `regex@1.4.3` | `regex` | `1.4.3` |
+| `https://github.com/rust-lang/crates.io-index#regex` | `regex` | `*` |
+| `https://github.com/rust-lang/crates.io-index#regex@1.4.3` | `regex` | `1.4.3` |
+| `registry+https://github.com/rust-lang/crates.io-index#regex@1.4.3` | `regex` | `1.4.3` |
The following are some examples of specs for several different git dependencies:
-| Spec | Name | Version |
-|:----------------------------------------------------------|:----------------:|:--------:|
-| `https://github.com/rust-lang/cargo#0.52.0` | `cargo` | `0.52.0` |
-| `https://github.com/rust-lang/cargo#cargo-platform@0.1.2` | <nobr>`cargo-platform`</nobr> | `0.1.2` |
-| `ssh://git@github.com/rust-lang/regex.git#regex@1.4.3` | `regex` | `1.4.3` |
+| Spec | Name | Version |
+|:-----------------------------------------------------------|:----------------:|:--------:|
+| `https://github.com/rust-lang/cargo#0.52.0` | `cargo` | `0.52.0` |
+| `https://github.com/rust-lang/cargo#cargo-platform@0.1.2` | <nobr>`cargo-platform`</nobr> | `0.1.2` |
+| `ssh://git@github.com/rust-lang/regex.git#regex@1.4.3` | `regex` | `1.4.3` |
+| `git+ssh://git@github.com/rust-lang/regex.git#regex@1.4.3` | `regex` | `1.4.3` |
+| `git+ssh://git@github.com/rust-lang/regex.git?branch=dev#regex@1.4.3` | `regex` | `1.4.3` |
Local packages on the filesystem can use `file://` URLs to reference them:
-| Spec | Name | Version |
-|:---------------------------------------|:-----:|:-------:|
-| `file:///path/to/my/project/foo` | `foo` | `*` |
-| `file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` |
+| Spec | Name | Version |
+|:--------------------------------------------|:-----:|:-------:|
+| `file:///path/to/my/project/foo` | `foo` | `*` |
+| `file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` |
+| `path+file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` |
### Brevity of specifications
diff --git a/src/tools/cargo/src/doc/src/reference/profiles.md b/src/tools/cargo/src/doc/src/reference/profiles.md
index 165b41d60..fac84f836 100644
--- a/src/tools/cargo/src/doc/src/reference/profiles.md
+++ b/src/tools/cargo/src/doc/src/reference/profiles.md
@@ -52,7 +52,7 @@ It is recommended to experiment with different levels to find the right
balance for your project. There may be surprising results, such as level `3`
being slower than `2`, or the `"s"` and `"z"` levels not being necessarily
smaller. You may also want to reevaluate your settings over time as newer
-versions of `rustc` changes optimization behavior.
+versions of `rustc` change optimization behavior.
See also [Profile Guided Optimization] for more advanced optimization
techniques.
diff --git a/src/tools/cargo/src/doc/src/reference/registry-authentication.md b/src/tools/cargo/src/doc/src/reference/registry-authentication.md
index f07bf7066..0508900c0 100644
--- a/src/tools/cargo/src/doc/src/reference/registry-authentication.md
+++ b/src/tools/cargo/src/doc/src/reference/registry-authentication.md
@@ -10,7 +10,7 @@ provider is used if no providers are configured.
Cargo also includes platform-specific providers that use the operating system to securely store
tokens. The `cargo:token` provider is also included which stores credentials in unencrypted plain
-text in the [credentials](config.md#credentials) file.
+text in the [credentials](config.md#credentials) file.
## Recommended configuration
It's recommended to configure a global credential provider list in `$CARGO_HOME/config.toml`
@@ -71,8 +71,12 @@ The Keychain Access app can be used to view stored tokens.
### `cargo:libsecret`
Uses [libsecret](https://wiki.gnome.org/Projects/Libsecret) to store tokens.
-On GNOME, credentials can be viewed using [GNOME Keyring](https://wiki.gnome.org/Projects/GnomeKeyring)
-applications.
+Any password manager with libsecret support can be used to view stored tokens.
+The following are a few examples (non-exhaustive):
+
+- [GNOME Keyring](https://wiki.gnome.org/Projects/GnomeKeyring)
+- [KDE Wallet Manager](https://apps.kde.org/kwalletmanager5/) (since KDE Frameworks 5.97.0)
+- [KeePassXC](https://keepassxc.org/) (since 2.5.0)
### `cargo:token-from-stdout <command> <args>`
Launch a subprocess that returns a token on stdout. Newlines will be trimmed.
diff --git a/src/tools/cargo/src/doc/src/reference/registry-web-api.md b/src/tools/cargo/src/doc/src/reference/registry-web-api.md
index 7b0f3511b..d36b4f16d 100644
--- a/src/tools/cargo/src/doc/src/reference/registry-web-api.md
+++ b/src/tools/cargo/src/doc/src/reference/registry-web-api.md
@@ -10,7 +10,7 @@ visit the registry's website to obtain a token, and Cargo can store the token
using the [`cargo login`] command, or by passing the token on the
command-line.
-Responses use the 200 response code for success.
+Responses use a 2xx response code for success.
Errors should use an appropriate response code, such as 404.
Failure
responses should have a JSON object with the following structure:
diff --git a/src/tools/cargo/src/doc/src/reference/semver.md b/src/tools/cargo/src/doc/src/reference/semver.md
index f09250f1a..ed7095ae7 100644
--- a/src/tools/cargo/src/doc/src/reference/semver.md
+++ b/src/tools/cargo/src/doc/src/reference/semver.md
@@ -319,7 +319,7 @@ fn main() {
#### Minor: `repr(C)` add enum variant {#repr-c-enum-variant-new}
-It is usually safe to add variants to a `repr(C)` enum, if the enum uses `non_exhastive`.
+It is usually safe to add variants to a `repr(C)` enum, if the enum uses `non_exhaustive`.
See [enum-variant-new](#enum-variant-new) for more discussion.
Note that this may be a breaking change since it changes the size and alignment of the type.
diff --git a/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md b/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
index 2bdbbceee..c9e1e4f93 100644
--- a/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
+++ b/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
@@ -113,6 +113,7 @@ Here are some examples of comparison requirements:
= 1.2.3
```
+<span id="multiple-requirements"></span>
### Multiple version requirements
As shown in the examples above, multiple version requirements can be
diff --git a/src/tools/cargo/src/doc/src/reference/unstable.md b/src/tools/cargo/src/doc/src/reference/unstable.md
index 0683daa3c..813276cb6 100644
--- a/src/tools/cargo/src/doc/src/reference/unstable.md
+++ b/src/tools/cargo/src/doc/src/reference/unstable.md
@@ -85,6 +85,7 @@ For the latest nightly, see the [nightly version] of this page.
* [check-cfg](#check-cfg) --- Compile-time validation of `cfg` expressions.
* [host-config](#host-config) --- Allows setting `[target]`-like configuration settings for host build targets.
* [target-applies-to-host](#target-applies-to-host) --- Alters whether certain flags will be passed to host build targets.
+ * [gc](#gc) --- Global cache garbage collection.
* rustdoc
* [rustdoc-map](#rustdoc-map) --- Provides mappings for documentation to link to external sites like [docs.rs](https://docs.rs/).
* [scrape-examples](#scrape-examples) --- Shows examples within documentation.
@@ -303,6 +304,9 @@ my_dep = { version = "1.2.3", public = true }
private_dep = "2.0.0" # Will be 'private' by default
```
+Documentation updates:
+- For workspace's "The `dependencies` table" section, include `public` as an unsupported field for `workspace.dependencies`
+
## msrv-policy
- [#9930](https://github.com/rust-lang/cargo/issues/9930) (MSRV-aware resolver)
- [#10653](https://github.com/rust-lang/cargo/issues/10653) (MSRV-aware cargo-add)
@@ -1383,6 +1387,78 @@ This will not affect any hard-coded paths in the source code, such as in strings
Common paths requiring sanitization include `OUT_DIR` and `CARGO_MANIFEST_DIR`,
plus any other introduced by the build script, such as include directories.
+## gc
+
+* Tracking Issue: [#12633](https://github.com/rust-lang/cargo/issues/12633)
+
+The `-Zgc` flag enables garbage collection within cargo's global cache, located in the cargo home directory.
+This includes downloaded dependencies such as compressed `.crate` files, extracted `src` directories, registry index caches, and git dependencies.
+When `-Zgc` is present, cargo will track the last time any index and dependency was used,
+and then uses those timestamps to manually or automatically delete cache entries that have not been used for a while.
+
+```sh
+cargo build -Zgc
+```
+
+### Automatic garbage collection
+
+Automatic deletion happens on commands that are already doing a significant amount of work,
+such as all of the build commands (`cargo build`, `cargo test`, `cargo check`, etc.), and `cargo fetch`.
+The deletion happens just after resolution and packages have been downloaded.
+Automatic deletion is only done once per day (see `gc.auto.frequency` to configure).
+Automatic deletion is disabled if cargo is offline such as with `--offline` or `--frozen` to avoid deleting artifacts that may need to be used if you are offline for a long period of time.
+
+#### Automatic gc configuration
+
+The automatic gc behavior can be specified via a cargo configuration setting.
+The settings available are:
+
+```toml
+# Example config.toml file.
+
+# This table defines the behavior for automatic garbage collection.
+[gc.auto]
+# The maximum frequency that automatic garbage collection happens.
+# Can be "never" to disable automatic-gc, or "always" to run on every command.
+frequency = "1 day"
+# Anything older than this duration will be deleted in the source cache.
+max-src-age = "1 month"
+# Anything older than this duration will be deleted in the compressed crate cache.
+max-crate-age = "3 months"
+# Any index older than this duration will be deleted from the index cache.
+max-index-age = "3 months"
+# Any git checkout older than this duration will be deleted from the checkout cache.
+max-git-co-age = "1 month"
+# Any git clone older than this duration will be deleted from the git cache.
+max-git-db-age = "3 months"
+```
+
+### Manual garbage collection with `cargo clean`
+
+Manual deletion can be done with the `cargo clean gc` command.
+Deletion of cache contents can be performed by passing one of the cache options:
+
+- `--max-src-age=DURATION` --- Deletes source cache files that have not been used since the given age.
+- `--max-crate-age=DURATION` --- Deletes crate cache files that have not been used since the given age.
+- `--max-index-age=DURATION` --- Deletes registry indexes that have not been used since the given age (including their `.crate` and `src` files).
+- `--max-git-co-age=DURATION` --- Deletes git dependency checkouts that have not been used since the given age.
+- `--max-git-db-age=DURATION` --- Deletes git dependency clones that have not been used since the given age.
+- `--max-download-age=DURATION` --- Deletes any downloaded cache data that has not been used since the given age.
+- `--max-src-size=SIZE` --- Deletes the oldest source cache files until the cache is under the given size.
+- `--max-crate-size=SIZE` --- Deletes the oldest crate cache files until the cache is under the given size.
+- `--max-git-size=SIZE` --- Deletes the oldest git dependency caches until the cache is under the given size.
+- `--max-download-size=SIZE` --- Deletes the oldest downloaded cache data until the cache is under the given size.
+
+A DURATION is specified in the form "N seconds/minutes/days/weeks/months" where N is an integer.
+
+A SIZE is specified in the form "N *suffix*" where *suffix* is B, kB, MB, GB, kiB, MiB, or GiB, and N is an integer or floating point number. If no suffix is specified, the number is the number of bytes.
+
+```sh
+cargo clean gc
+cargo clean gc --max-download-age=1week
+cargo clean gc --max-git-size=0 --max-download-size=100MB
+```
+
# Stabilized and removed features
## Compile progress
diff --git a/src/tools/cargo/src/doc/src/reference/workspaces.md b/src/tools/cargo/src/doc/src/reference/workspaces.md
index 17637d6c7..abd2feeca 100644
--- a/src/tools/cargo/src/doc/src/reference/workspaces.md
+++ b/src/tools/cargo/src/doc/src/reference/workspaces.md
@@ -227,7 +227,7 @@ rand.workspace = true
The `workspace.lints` table is where you define lint configuration to be inherited by members of a workspace.
-Specifying a workspace lint configuration is similar to package lints.
+Specifying a workspace lint configuration is similar to [package lints](manifest.md#the-lints-section).
Example:
diff --git a/src/tools/cargo/src/etc/cargo.bashcomp.sh b/src/tools/cargo/src/etc/cargo.bashcomp.sh
index a1e800bc3..c0ba62752 100644
--- a/src/tools/cargo/src/etc/cargo.bashcomp.sh
+++ b/src/tools/cargo/src/etc/cargo.bashcomp.sh
@@ -209,12 +209,12 @@ _get_names_from_array()
line=${line##*=}
line=${line%%\"}
line=${line##*\"}
- names+=($line)
+ names+=("$line")
fi
fi
last_line=$line
- done < $manifest
+ done < "$manifest"
echo "${names[@]}"
}
diff --git a/src/tools/cargo/src/etc/man/cargo-search.1 b/src/tools/cargo/src/etc/man/cargo-search.1
index 245d4e65d..b1d458bd5 100644
--- a/src/tools/cargo/src/etc/man/cargo-search.1
+++ b/src/tools/cargo/src/etc/man/cargo-search.1
@@ -4,7 +4,7 @@
.ad l
.ss \n[.ss] 0
.SH "NAME"
-cargo\-search \[em] Search packages in crates.io
+cargo\-search \[em] Search packages in the registry. Default registry is crates.io
.SH "SYNOPSIS"
\fBcargo search\fR [\fIoptions\fR] [\fIquery\fR\[u2026]]
.SH "DESCRIPTION"
diff --git a/src/tools/cargo/tests/build-std/main.rs b/src/tools/cargo/tests/build-std/main.rs
index 47a4bb671..c905deb49 100644
--- a/src/tools/cargo/tests/build-std/main.rs
+++ b/src/tools/cargo/tests/build-std/main.rs
@@ -18,6 +18,8 @@
//! `CARGO_RUN_BUILD_STD_TESTS` env var to be set to actually run these tests.
//! Otherwise the tests are skipped.
+#![allow(clippy::disallowed_methods)]
+
use cargo_test_support::*;
use std::env;
use std::path::Path;
@@ -227,3 +229,46 @@ fn custom_test_framework() {
.build_std_arg("core")
.run();
}
+
+// Fixing rust-lang/rust#117839.
+// on macOS it never gets remapped.
+// Might be a separate issue, so only run on Linux.
+#[cargo_test(build_std_real)]
+#[cfg(target_os = "linux")]
+fn remap_path_scope() {
+ let p = project()
+ .file(
+ "src/main.rs",
+ "
+ fn main() {
+ panic!(\"remap to /rustc/<hash>\");
+ }
+ ",
+ )
+ .file(
+ ".cargo/config.toml",
+ "
+ [profile.release]
+ debug = \"line-tables-only\"
+ ",
+ )
+ .build();
+
+ p.cargo("run --release -Ztrim-paths")
+ .masquerade_as_nightly_cargo(&["-Ztrim-paths"])
+ .env("RUST_BACKTRACE", "1")
+ .build_std()
+ .target_host()
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[FINISHED] release [optimized + debuginfo] [..]
+[RUNNING] [..]
+[..]thread '[..]' panicked at [..]src/main.rs:3:[..]",
+ )
+ .with_stderr_contains("remap to /rustc/<hash>")
+ .with_stderr_contains("[..]at /rustc/[..]/library/std/src/[..]")
+ .with_stderr_contains("[..]at src/main.rs:3[..]")
+ .with_stderr_contains("[..]at /rustc/[..]/library/core/src/[..]")
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/alt_registry.rs b/src/tools/cargo/tests/testsuite/alt_registry.rs
index d6d7dd531..e347af1c7 100644
--- a/src/tools/cargo/tests/testsuite/alt_registry.rs
+++ b/src/tools/cargo/tests/testsuite/alt_registry.rs
@@ -715,7 +715,14 @@ fn bad_registry_name() {
[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
Caused by:
- invalid character ` ` in registry name: `bad name`, [..]",
+ TOML parse error at line 7, column 17
+ |
+ 7 | [dependencies.bar]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ invalid character ` ` in registry name: `bad name`, [..]
+
+
+",
)
.run();
@@ -1546,3 +1553,86 @@ or use environment variable CARGO_REGISTRY_TOKEN",
)
.run();
}
+
+#[cargo_test]
+fn config_empty_registry_name() {
+ let _ = RegistryBuilder::new()
+ .no_configure_token()
+ .alternative()
+ .build();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config.toml",
+ "[registry.'']
+ ",
+ )
+ .build();
+
+ p.cargo("publish")
+ .arg("--registry")
+ .arg("")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] registry name cannot be empty",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn empty_registry_flag() {
+ let p = project().file("src/lib.rs", "").build();
+
+ p.cargo("publish")
+ .arg("--registry")
+ .arg("")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] registry name cannot be empty",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn empty_dependency_registry() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = { version = "0.1.0", registry = "" }
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate bar;
+ pub fn f() { bar::bar(); }
+ ",
+ )
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ TOML parse error at line 7, column 23
+ |
+ 7 | bar = { version = \"0.1.0\", registry = \"\" }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ registry name cannot be empty
+
+
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/artifact_dep.rs b/src/tools/cargo/tests/testsuite/artifact_dep.rs
index c51298735..01b4ecf47 100644
--- a/src/tools/cargo/tests/testsuite/artifact_dep.rs
+++ b/src/tools/cargo/tests/testsuite/artifact_dep.rs
@@ -2167,8 +2167,11 @@ fn doc_lib_true() {
p.cargo("doc -Z bindeps")
.masquerade_as_nightly_cargo(&["bindeps"])
- .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint")
- .with_stdout("")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+[GENERATED] [CWD]/target/doc/foo/index.html",
+ )
.run();
assert!(p.root().join("target/doc").is_dir());
diff --git a/src/tools/cargo/tests/testsuite/build.rs b/src/tools/cargo/tests/testsuite/build.rs
index 23840ad9a..dd67161d6 100644
--- a/src/tools/cargo/tests/testsuite/build.rs
+++ b/src/tools/cargo/tests/testsuite/build.rs
@@ -460,7 +460,11 @@ fn cargo_compile_with_empty_package_name() {
[ERROR] failed to parse manifest at `[..]`
Caused by:
- package name cannot be an empty string
+ TOML parse error at line 3, column 16
+ |
+ 3 | name = \"\"
+ | ^^
+ package name cannot be empty
",
)
.run();
@@ -479,6 +483,10 @@ fn cargo_compile_with_invalid_package_name() {
[ERROR] failed to parse manifest at `[..]`
Caused by:
+ TOML parse error at line 3, column 16
+ |
+ 3 | name = \"foo::bar\"
+ | ^^^^^^^^^^
invalid character `:` in package name: `foo::bar`, [..]
",
)
@@ -760,7 +768,7 @@ fn cargo_compile_with_invalid_code() {
p.cargo("build")
.with_status(101)
.with_stderr_contains(
- "[ERROR] could not compile `foo` (bin \"foo\") due to previous error\n",
+ "[ERROR] could not compile `foo` (bin \"foo\") due to 1 previous error\n",
)
.run();
assert!(p.root().join("Cargo.lock").is_file());
@@ -1182,7 +1190,11 @@ fn cargo_compile_with_invalid_dep_rename() {
error: failed to parse manifest at `[..]`
Caused by:
- invalid character ` ` in dependency name: `haha this isn't a valid name 🐛`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters)
+ TOML parse error at line 7, column 17
+ |
+ 7 | \"haha this isn't a valid name 🐛\" = { package = \"libc\", version = \"0.1\" }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ invalid character ` ` in package name: `haha this isn't a valid name 🐛`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters)
",
)
.run();
@@ -1545,6 +1557,11 @@ fn crate_env_vars() {
// Verify CARGO_TARGET_TMPDIR isn't set for bins
assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
+
+                // Verify CARGO_RUSTC_CURRENT_DIR is set for bins
+ let workspace_dir = std::path::Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
}
"#,
)
@@ -1581,14 +1598,26 @@ fn crate_env_vars() {
// Check that CARGO_TARGET_TMPDIR isn't set for lib code
assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
env::var("CARGO_TARGET_TMPDIR").unwrap_err();
+
+                // Verify CARGO_RUSTC_CURRENT_DIR is set for lib code
+ let workspace_dir = std::path::Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
}
#[test]
- fn env() {
+ fn unit_env_cargo_target_tmpdir() {
// Check that CARGO_TARGET_TMPDIR isn't set for unit tests
assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
env::var("CARGO_TARGET_TMPDIR").unwrap_err();
}
+
+ #[test]
+ fn unit_env_cargo_rustc_current_dir() {
+ let workspace_dir = std::path::Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
+ }
"#,
)
.file(
@@ -1605,6 +1634,11 @@ fn crate_env_vars() {
// Verify CARGO_TARGET_TMPDIR isn't set for examples
assert!(option_env!("CARGO_TARGET_TMPDIR").is_none());
+
+ // Verify CARGO_RUSTC_CURRENT_DIR is set for examples
+ let workspace_dir = std::path::Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
}
"#,
)
@@ -1612,9 +1646,16 @@ fn crate_env_vars() {
"tests/env.rs",
r#"
#[test]
- fn env() {
+ fn integration_env_cargo_target_tmpdir() {
foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR"));
}
+
+ #[test]
+ fn integration_env_cargo_rustc_current_dir() {
+ let workspace_dir = std::path::Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
+ }
"#,
);
@@ -1627,9 +1668,16 @@ fn crate_env_vars() {
use test::Bencher;
#[bench]
- fn env(_: &mut Bencher) {
+ fn bench_env_cargo_target_tmpdir(_: &mut Bencher) {
foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR"));
}
+
+ #[test]
+ fn bench_env_cargo_rustc_current_dir() {
+ let workspace_dir = std::path::Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
+ }
"#,
)
.build()
@@ -1638,7 +1686,9 @@ fn crate_env_vars() {
};
println!("build");
- p.cargo("build -v").run();
+ p.cargo("build -v")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .run();
println!("bin");
p.process(&p.bin("foo-bar"))
@@ -1646,15 +1696,175 @@ fn crate_env_vars() {
.run();
println!("example");
- p.cargo("run --example ex-env-vars -v").run();
+ p.cargo("run --example ex-env-vars -v")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .run();
println!("test");
- p.cargo("test -v").run();
+ p.cargo("test -v")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .run();
if is_nightly() {
println!("bench");
- p.cargo("bench -v").run();
+ p.cargo("bench -v")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .run();
+ }
+}
+
+#[cargo_test]
+fn cargo_rustc_current_dir_foreign_workspace_dep() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz.path = "../baz"
+ baz_member.path = "../baz/baz_member"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ let _baz = project()
+ .at("baz")
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["baz_member"]
+
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "tests/env.rs",
+ r#"
+ use std::path::Path;
+
+ #[test]
+ fn baz_env() {
+ let workspace_dir = Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let manifest_dir = Path::new(option_env!("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR"));
+ let current_dir = std::env::current_dir().expect("current_dir");
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
+ let workspace_dir = std::fs::canonicalize(current_dir.join(workspace_dir)).expect("CARGO_RUSTC_CURRENT_DIR");
+ let manifest_dir = std::fs::canonicalize(current_dir.join(manifest_dir)).expect("CARGO_MANIFEST_DIR");
+ assert_eq!(workspace_dir, manifest_dir);
+ }
+ "#,
+ )
+ .file(
+ "baz_member/Cargo.toml",
+ r#"
+ [package]
+ name = "baz_member"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("baz_member/src/lib.rs", "")
+ .file(
+ "baz_member/tests/env.rs",
+ r#"
+ use std::path::Path;
+
+ #[test]
+ fn baz_member_env() {
+ let workspace_dir = Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
+ }
+ "#,
+ )
+ .build();
+
+ // Verify it works from a different workspace
+ foo.cargo("test -p baz")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .with_stdout_contains("running 1 test\ntest baz_env ... ok")
+ .run();
+ foo.cargo("test -p baz_member")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .with_stdout_contains("running 1 test\ntest baz_member_env ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_rustc_current_dir_non_local_dep() {
+ Package::new("bar", "0.1.0")
+ .file(
+ "tests/bar_env.rs",
+ r#"
+ use std::path::Path;
+
+ #[test]
+ fn bar_env() {
+ let workspace_dir = Path::new(option_env!("CARGO_RUSTC_CURRENT_DIR").expect("CARGO_RUSTC_CURRENT_DIR"));
+ let manifest_dir = Path::new(option_env!("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR"));
+ let current_dir = std::env::current_dir().expect("current_dir");
+ let file_path = workspace_dir.join(file!());
+ assert!(file_path.exists(), "{}", file_path.display());
+ let workspace_dir = std::fs::canonicalize(current_dir.join(workspace_dir)).expect("CARGO_RUSTC_CURRENT_DIR");
+ let manifest_dir = std::fs::canonicalize(current_dir.join(manifest_dir)).expect("CARGO_MANIFEST_DIR");
+ assert_eq!(workspace_dir, manifest_dir);
+ }
+ "#,
+ )
+ .publish();
+
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .build();
+
+ p.cargo("test -p bar")
+ .masquerade_as_nightly_cargo(&["CARGO_RUSTC_CURRENT_DIR"])
+ .with_stdout_contains("running 1 test\ntest bar_env ... ok")
+ .run();
+}
+
+#[cargo_test]
+fn cargo_rustc_current_dir_is_not_stable() {
+ if is_nightly() {
+ return;
}
+ let p = project()
+ .file(
+ "tests/env.rs",
+ r#"
+ use std::path::Path;
+
+ #[test]
+ fn env() {
+ assert_eq!(option_env!("CARGO_RUSTC_CURRENT_DIR"), None);
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("test").run();
}
#[cargo_test]
@@ -2959,12 +3169,12 @@ fn freshness_ignores_excluded() {
// Smoke test to make sure it doesn't compile again
println!("first pass");
- foo.cargo("build").with_stdout("").run();
+ foo.cargo("build").with_stderr("[FINISHED] [..]").run();
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
foo.change_file("src/bar.rs", "");
- foo.cargo("build").with_stdout("").run();
+ foo.cargo("build").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -3064,7 +3274,7 @@ fn recompile_space_in_name() {
.build();
foo.cargo("build").run();
foo.root().move_into_the_past();
- foo.cargo("build").with_stdout("").run();
+ foo.cargo("build").with_stderr("[FINISHED] [..]").run();
}
#[cfg(unix)]
diff --git a/src/tools/cargo/tests/testsuite/build_script.rs b/src/tools/cargo/tests/testsuite/build_script.rs
index 408ce6457..f7361fcf3 100644
--- a/src/tools/cargo/tests/testsuite/build_script.rs
+++ b/src/tools/cargo/tests/testsuite/build_script.rs
@@ -1028,7 +1028,7 @@ versions that meet the requirements `*` are: 0.5.0
the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
package `foo v0.5.0 ([..])`
-Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
+Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the `links = \"a\"` value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
failed to select a version for `a-sys` which could resolve this conflict
").run();
@@ -1148,7 +1148,7 @@ versions that meet the requirements `*` are: 0.5.0
the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
package `foo v0.5.0 ([..])`
-Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
+Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the `links = \"a\"` value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
failed to select a version for `a-sys` which could resolve this conflict
").run();
@@ -1716,7 +1716,7 @@ fn build_deps_not_for_normal() {
.with_stderr_contains("[..]can't find crate for `aaaaa`[..]")
.with_stderr_contains(
"\
-[ERROR] could not compile `foo` (lib) due to previous error
+[ERROR] could not compile `foo` (lib) due to 1 previous error
Caused by:
process didn't exit successfully: [..]
@@ -3245,7 +3245,6 @@ fn fresh_builds_possible_with_multiple_metadata_overrides() {
.run();
p.cargo("build -v")
- .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint=info")
.with_stderr(
"\
[FRESH] foo v0.5.0 ([..])
@@ -3472,7 +3471,7 @@ fn rebuild_only_on_explicit_paths() {
// random other files do not affect freshness
println!("run baz");
- p.change_file("baz", "");
+ p.change_file("baz", "// modified");
p.cargo("build -v")
.with_stderr(
"\
@@ -3484,7 +3483,7 @@ fn rebuild_only_on_explicit_paths() {
// but changing dependent files does
println!("run foo change");
- p.change_file("foo", "");
+ p.change_file("foo", "// modified");
p.cargo("build -v")
.with_stderr(
"\
@@ -4382,7 +4381,7 @@ versions that meet the requirements `*` are: 0.5.0
the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
package `foo v0.5.0 ([..])`
-Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
+Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the `links = \"a\"` value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links.
failed to select a version for `a` which could resolve this conflict
").run();
diff --git a/src/tools/cargo/tests/testsuite/build_script_env.rs b/src/tools/cargo/tests/testsuite/build_script_env.rs
index afa2925f1..5220506a7 100644
--- a/src/tools/cargo/tests/testsuite/build_script_env.rs
+++ b/src/tools/cargo/tests/testsuite/build_script_env.rs
@@ -96,7 +96,7 @@ fn rerun_if_env_or_file_changes() {
.with_stderr("[FINISHED] [..]")
.run();
sleep_ms(1000);
- p.change_file("foo", "");
+ p.change_file("foo", "// modified");
p.cargo("check")
.env("FOO", "bar")
.with_stderr(
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml
index 70cd31826..bc29fac8e 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/change_rename_target/out/Cargo.toml
@@ -6,3 +6,6 @@ version = "0.0.0"
[dependencies]
some-package = { package = "my-package2", version = "99999.0.0", optional = true }
+
+[features]
+some-package = ["dep:some-package"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml
index 6dd7fb6d6..38ff36eb3 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_optional/out/primary/Cargo.toml
@@ -4,3 +4,6 @@ version = "0.0.0"
[dependencies]
foo = { workspace = true, optional = true }
+
+[features]
+foo = ["dep:foo"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/dependency/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/Cargo.toml
new file mode 100644
index 000000000..b5923a106
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/Cargo.toml
@@ -0,0 +1,4 @@
+cargo-features = ["public-dependency"]
+[package]
+name = "bar"
+version = "0.0.0" \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/in/primary/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/mod.rs
new file mode 100644
index 000000000..680d4c4e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .args(["foo", "-p", "bar", "--public"])
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/Cargo.toml
new file mode 100644
index 000000000..24c50556b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = ["primary", "dependency"]
+
+[workspace.dependencies]
+foo = { version = "0.0.0", path = "./dependency"} \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/dependency/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/dependency/Cargo.toml
new file mode 100644
index 000000000..2d247d4d2
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/dependency/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "foo"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/primary/Cargo.toml
new file mode 100644
index 000000000..665c6ae5e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/out/primary/Cargo.toml
@@ -0,0 +1,7 @@
+cargo-features = ["public-dependency"]
+[package]
+name = "bar"
+version = "0.0.0"
+
+[dependencies]
+foo = { workspace = true, public = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stderr.log
new file mode 100644
index 000000000..efa1ae9fa
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stderr.log
@@ -0,0 +1 @@
+ Adding foo (workspace) to public dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/detect_workspace_inherit_public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/in b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/in
new file mode 120000
index 000000000..6c6a27fcf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/in
@@ -0,0 +1 @@
+../add-basic.in \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/mod.rs
new file mode 100644
index 000000000..d7044ee11
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/mod.rs
@@ -0,0 +1,24 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("@1.2.3")
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/out/Cargo.toml
new file mode 100644
index 000000000..3ecdb6681
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/out/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stderr.log
new file mode 100644
index 000000000..d9547a42a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stderr.log
@@ -0,0 +1 @@
+error: package name cannot be empty
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/empty_dep_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log
index cf2a91313..d2931ae9c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log
@@ -32,6 +32,17 @@ Options:
The package will be removed from your features.
+ --public
+ Mark the dependency as public
+
+ The dependency can be referenced in your library's public API.
+
+ --no-public
+ Mark the dependency as private
+
+ While you can use the crate in your implementation, it cannot be referenced in your public
+ API.
+
--rename <NAME>
Rename the dependency
@@ -45,12 +56,12 @@ Options:
-n, --dry-run
Don't actually write the manifest
- -q, --quiet
- Do not print cargo log messages
-
-v, --verbose...
Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet
+ Do not print cargo log messages
+
--color <WHEN>
Coloring: auto, always, never
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
index e8633b0c4..653c2db94 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
@@ -12,10 +12,12 @@ mod deprecated_section;
mod detect_workspace_inherit;
mod detect_workspace_inherit_features;
mod detect_workspace_inherit_optional;
+mod detect_workspace_inherit_public;
mod dev;
mod dev_build_conflict;
mod dev_prefer_existing_version;
mod dry_run;
+mod empty_dep_name;
mod empty_dep_table;
mod features;
mod features_activated_over_limit;
@@ -65,6 +67,7 @@ mod namever;
mod no_args;
mod no_default_features;
mod no_optional;
+mod no_public;
mod offline_empty_cache;
mod optional;
mod overwrite_default_features;
@@ -81,11 +84,16 @@ mod overwrite_no_default_features;
mod overwrite_no_default_features_with_default_features;
mod overwrite_no_optional;
mod overwrite_no_optional_with_optional;
+mod overwrite_no_public;
+mod overwrite_no_public_with_public;
mod overwrite_optional;
mod overwrite_optional_with_no_optional;
+mod overwrite_optional_with_optional;
mod overwrite_path_noop;
mod overwrite_path_with_version;
mod overwrite_preserves_inline_table;
+mod overwrite_public;
+mod overwrite_public_with_no_public;
mod overwrite_rename_with_no_rename;
mod overwrite_rename_with_rename;
mod overwrite_rename_with_rename_noop;
@@ -103,6 +111,7 @@ mod preserve_dep_std_table;
mod preserve_features_table;
mod preserve_sorted;
mod preserve_unsorted;
+mod public;
mod quiet;
mod registry;
mod rename;
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs
index 9145528bf..cc7e79b97 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/mod.rs
@@ -7,19 +7,7 @@ use cargo_test_support::curr_dir;
#[cargo_test]
fn case() {
cargo_test_support::registry::init();
- for name in ["my-package1", "my-package2"] {
- for ver in [
- "0.1.1+my-package",
- "0.2.0+my-package",
- "0.2.3+my-package",
- "0.4.1+my-package",
- "20.0.0+my-package",
- "99999.0.0+my-package",
- "99999.0.0-alpha.1+my-package",
- ] {
- cargo_test_support::registry::Package::new(name, ver).publish();
- }
- }
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
let project = Project::from_template(curr_dir!().join("in"));
let project_root = project.root();
@@ -27,7 +15,7 @@ fn case() {
snapbox::cmd::Command::cargo_ui()
.arg("add")
- .arg_line("my-package1 my-package2@0.4.1 --no-optional")
+ .arg_line("my-package --no-optional")
.current_dir(cwd)
.assert()
.success()
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml
index c5e017892..496ac8a62 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/out/Cargo.toml
@@ -5,5 +5,4 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = "99999.0.0"
-my-package2 = "0.4.1"
+my-package = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log
index fb8d4903d..8e025739f 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_optional/stderr.log
@@ -1,3 +1,2 @@
Updating `dummy-registry` index
- Adding my-package1 v99999.0.0 to dependencies.
- Adding my-package2 v0.4.1 to dependencies.
+ Adding my-package v0.1.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/no_public/in/Cargo.toml
new file mode 100644
index 000000000..e9087535b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_public/in/Cargo.toml
@@ -0,0 +1,6 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_public/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/no_public/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_public/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/no_public/mod.rs
new file mode 100644
index 000000000..912ac3fd3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --no-public")
+ .current_dir(cwd)
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/no_public/out/Cargo.toml
new file mode 100644
index 000000000..b9f045116
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_public/out/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/no_public/stderr.log
new file mode 100644
index 000000000..8e025739f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_public/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v0.1.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/no_public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/no_public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/no_public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs
index 408a46ed3..8daaa961d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/mod.rs
@@ -7,19 +7,7 @@ use cargo_test_support::curr_dir;
#[cargo_test]
fn case() {
cargo_test_support::registry::init();
- for name in ["my-package1", "my-package2"] {
- for ver in [
- "0.1.1+my-package",
- "0.2.0+my-package",
- "0.2.3+my-package",
- "0.4.1+my-package",
- "20.0.0+my-package",
- "99999.0.0+my-package",
- "99999.0.0-alpha.1+my-package",
- ] {
- cargo_test_support::registry::Package::new(name, ver).publish();
- }
- }
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
let project = Project::from_template(curr_dir!().join("in"));
let project_root = project.root();
@@ -27,7 +15,7 @@ fn case() {
snapbox::cmd::Command::cargo_ui()
.arg("add")
- .arg_line("my-package1 my-package2@0.4.1 --optional")
+ .arg_line("my-package --optional")
.current_dir(cwd)
.assert()
.success()
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml
index eda5445c5..a8789b033 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/out/Cargo.toml
@@ -5,5 +5,7 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = { version = "99999.0.0", optional = true }
-my-package2 = { version = "0.4.1", optional = true }
+my-package = { version = "0.1.0", optional = true }
+
+[features]
+my-package = ["dep:my-package"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log
index 8cf4812cf..595ac276b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/optional/stderr.log
@@ -1,3 +1,2 @@
Updating `dummy-registry` index
- Adding my-package1 v99999.0.0 to optional dependencies.
- Adding my-package2 v0.4.1 to optional dependencies.
+ Adding my-package v0.1.0 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml
index ad1205481..27e6e175d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_git_with_path/out/primary/Cargo.toml
@@ -6,3 +6,6 @@ version = "0.0.0"
[dependencies]
cargo-list-test-fixture-dependency = { optional = true, path = "../dependency", version = "0.0.0" }
+
+[features]
+cargo-list-test-fixture-dependency = ["dep:cargo-list-test-fixture-dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml
index 6dd7fb6d6..38ff36eb3 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_inherit_optional_noop/out/primary/Cargo.toml
@@ -4,3 +4,6 @@ version = "0.0.0"
[dependencies]
foo = { workspace = true, optional = true }
+
+[features]
+foo = ["dep:foo"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml
index bbaf4f552..717252191 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_name_noop/out/Cargo.toml
@@ -7,3 +7,6 @@ version = "0.0.0"
[dependencies]
your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" }
+
+[features]
+your-face = ["dep:your-face"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml
index c5e017892..496ac8a62 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/in/Cargo.toml
@@ -5,5 +5,4 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = "99999.0.0"
-my-package2 = "0.4.1"
+my-package = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs
index 9145528bf..cc7e79b97 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/mod.rs
@@ -7,19 +7,7 @@ use cargo_test_support::curr_dir;
#[cargo_test]
fn case() {
cargo_test_support::registry::init();
- for name in ["my-package1", "my-package2"] {
- for ver in [
- "0.1.1+my-package",
- "0.2.0+my-package",
- "0.2.3+my-package",
- "0.4.1+my-package",
- "20.0.0+my-package",
- "99999.0.0+my-package",
- "99999.0.0-alpha.1+my-package",
- ] {
- cargo_test_support::registry::Package::new(name, ver).publish();
- }
- }
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
let project = Project::from_template(curr_dir!().join("in"));
let project_root = project.root();
@@ -27,7 +15,7 @@ fn case() {
snapbox::cmd::Command::cargo_ui()
.arg("add")
- .arg_line("my-package1 my-package2@0.4.1 --no-optional")
+ .arg_line("my-package --no-optional")
.current_dir(cwd)
.assert()
.success()
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml
index c5e017892..496ac8a62 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/out/Cargo.toml
@@ -5,5 +5,4 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = "99999.0.0"
-my-package2 = "0.4.1"
+my-package = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log
index fb8d4903d..8e025739f 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional/stderr.log
@@ -1,3 +1,2 @@
Updating `dummy-registry` index
- Adding my-package1 v99999.0.0 to dependencies.
- Adding my-package2 v0.4.1 to dependencies.
+ Adding my-package v0.1.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml
index 8cd2616d4..98e2f7da1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/in/Cargo.toml
@@ -5,5 +5,4 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = { version = "99999.0.0", optional = false }
-my-package2 = { version = "0.4.1", optional = false }
+my-package = { version = "0.1.0", optional = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs
index 408a46ed3..8daaa961d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/mod.rs
@@ -7,19 +7,7 @@ use cargo_test_support::curr_dir;
#[cargo_test]
fn case() {
cargo_test_support::registry::init();
- for name in ["my-package1", "my-package2"] {
- for ver in [
- "0.1.1+my-package",
- "0.2.0+my-package",
- "0.2.3+my-package",
- "0.4.1+my-package",
- "20.0.0+my-package",
- "99999.0.0+my-package",
- "99999.0.0-alpha.1+my-package",
- ] {
- cargo_test_support::registry::Package::new(name, ver).publish();
- }
- }
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
let project = Project::from_template(curr_dir!().join("in"));
let project_root = project.root();
@@ -27,7 +15,7 @@ fn case() {
snapbox::cmd::Command::cargo_ui()
.arg("add")
- .arg_line("my-package1 my-package2@0.4.1 --optional")
+ .arg_line("my-package --optional")
.current_dir(cwd)
.assert()
.success()
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml
index eda5445c5..a8789b033 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/out/Cargo.toml
@@ -5,5 +5,7 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = { version = "99999.0.0", optional = true }
-my-package2 = { version = "0.4.1", optional = true }
+my-package = { version = "0.1.0", optional = true }
+
+[features]
+my-package = ["dep:my-package"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log
index 8cf4812cf..595ac276b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_optional_with_optional/stderr.log
@@ -1,3 +1,2 @@
Updating `dummy-registry` index
- Adding my-package1 v99999.0.0 to optional dependencies.
- Adding my-package2 v0.4.1 to optional dependencies.
+ Adding my-package v0.1.0 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/Cargo.toml
new file mode 100644
index 000000000..43d0d8238
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "0.1.0" \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/mod.rs
new file mode 100644
index 000000000..912ac3fd3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --no-public")
+ .current_dir(cwd)
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/out/Cargo.toml
new file mode 100644
index 000000000..b9f045116
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/out/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stderr.log
new file mode 100644
index 000000000..8e025739f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v0.1.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/Cargo.toml
new file mode 100644
index 000000000..c6e61bdca
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "0.1.0", public = false }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/mod.rs
new file mode 100644
index 000000000..bbf8d65a6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --public")
+ .current_dir(cwd)
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/out/Cargo.toml
new file mode 100644
index 000000000..77fccaa6a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/out/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "0.1.0", public = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stderr.log
new file mode 100644
index 000000000..5259bbde8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v0.1.0 to public dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_no_public_with_public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml
index c5e017892..496ac8a62 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/in/Cargo.toml
@@ -5,5 +5,4 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = "99999.0.0"
-my-package2 = "0.4.1"
+my-package = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs
index 408a46ed3..8daaa961d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/mod.rs
@@ -7,19 +7,7 @@ use cargo_test_support::curr_dir;
#[cargo_test]
fn case() {
cargo_test_support::registry::init();
- for name in ["my-package1", "my-package2"] {
- for ver in [
- "0.1.1+my-package",
- "0.2.0+my-package",
- "0.2.3+my-package",
- "0.4.1+my-package",
- "20.0.0+my-package",
- "99999.0.0+my-package",
- "99999.0.0-alpha.1+my-package",
- ] {
- cargo_test_support::registry::Package::new(name, ver).publish();
- }
- }
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
let project = Project::from_template(curr_dir!().join("in"));
let project_root = project.root();
@@ -27,7 +15,7 @@ fn case() {
snapbox::cmd::Command::cargo_ui()
.arg("add")
- .arg_line("my-package1 my-package2@0.4.1 --optional")
+ .arg_line("my-package --optional")
.current_dir(cwd)
.assert()
.success()
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml
index eda5445c5..a8789b033 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/out/Cargo.toml
@@ -5,5 +5,7 @@ name = "cargo-list-test-fixture"
version = "0.0.0"
[dependencies]
-my-package1 = { version = "99999.0.0", optional = true }
-my-package2 = { version = "0.4.1", optional = true }
+my-package = { version = "0.1.0", optional = true }
+
+[features]
+my-package = ["dep:my-package"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log
index 8cf4812cf..595ac276b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional/stderr.log
@@ -1,3 +1,2 @@
Updating `dummy-registry` index
- Adding my-package1 v99999.0.0 to optional dependencies.
- Adding my-package2 v0.4.1 to optional dependencies.
+ Adding my-package v0.1.0 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml
index 5ef953209..a7722da07 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/in/Cargo.toml
@@ -10,4 +10,3 @@ other = ["your-face/nose"]
[dependencies]
your-face = { version = "99999.0.0", optional = true }
-my-package2 = { version = "0.4.1", optional = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs
index 3090a7527..511b31e29 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/mod.rs
@@ -7,17 +7,7 @@ use cargo_test_support::curr_dir;
#[cargo_test]
fn case() {
cargo_test_support::registry::init();
- for ver in [
- "0.1.1+my-package",
- "0.2.0+my-package",
- "0.2.3+my-package",
- "0.4.1+my-package",
- "20.0.0+my-package",
- "99999.0.0+my-package",
- "99999.0.0-alpha.1+my-package",
- ] {
- cargo_test_support::registry::Package::new("my-package2", ver).publish();
- }
+
cargo_test_support::registry::Package::new("your-face", "99999.0.0+my-package")
.feature("nose", &[])
.feature("mouth", &[])
@@ -31,7 +21,7 @@ fn case() {
snapbox::cmd::Command::cargo_ui()
.arg("add")
- .arg_line("your-face my-package2@0.4.1 --no-optional")
+ .arg_line("your-face --no-optional")
.current_dir(cwd)
.assert()
.success()
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml
index bf6c52963..b57286ed5 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/out/Cargo.toml
@@ -10,4 +10,3 @@ other = ["your-face/nose"]
[dependencies]
your-face = { version = "99999.0.0" }
-my-package2 = { version = "0.4.1" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log
index 5fe113e86..796b9601b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_no_optional/stderr.log
@@ -5,4 +5,3 @@
- eyes
- mouth
- nose
- Adding my-package2 v0.4.1 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/Cargo.toml
new file mode 100644
index 000000000..e446eca38
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/Cargo.toml
@@ -0,0 +1,11 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", optional = true }
+
+[features]
+default = ["dep:my-package1"] \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/mod.rs
new file mode 100644
index 000000000..434124e93
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package1", "99999.0.0").publish();
+
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package1 --optional")
+ .current_dir(cwd)
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/out/Cargo.toml
new file mode 100644
index 000000000..e68fcdb3c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/out/Cargo.toml
@@ -0,0 +1,11 @@
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package1 = { version = "99999.0.0", optional = true }
+
+[features]
+default = ["dep:my-package1"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stderr.log
new file mode 100644
index 000000000..ba9cb313d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package1 v99999.0.0 to optional dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_optional_with_optional/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml
index bbaf4f552..717252191 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_noop/out/Cargo.toml
@@ -7,3 +7,6 @@ version = "0.0.0"
[dependencies]
your-face = { version = "0.0.0", path = "dependency", optional = true, default-features = false, features = ["nose", "mouth"], registry = "alternative" }
+
+[features]
+your-face = ["dep:your-face"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml
index a20f2095d..c45f79491 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_path_with_version/out/primary/Cargo.toml
@@ -6,3 +6,6 @@ version = "0.0.0"
[dependencies]
cargo-list-test-fixture-dependency = { optional = true, version = "20.0" }
+
+[features]
+cargo-list-test-fixture-dependency = ["dep:cargo-list-test-fixture-dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/Cargo.toml
new file mode 100644
index 000000000..43d0d8238
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = "0.1.0" \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/mod.rs
new file mode 100644
index 000000000..bbf8d65a6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --public")
+ .current_dir(cwd)
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/out/Cargo.toml
new file mode 100644
index 000000000..77fccaa6a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/out/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "0.1.0", public = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stderr.log
new file mode 100644
index 000000000..5259bbde8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v0.1.0 to public dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/Cargo.toml
new file mode 100644
index 000000000..cc7ec1a9b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "0.1.0", public = true } \ No newline at end of file
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/mod.rs
new file mode 100644
index 000000000..912ac3fd3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --no-public")
+ .current_dir(cwd)
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/out/Cargo.toml
new file mode 100644
index 000000000..cfa80cc13
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/out/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "0.1.0" }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stderr.log
new file mode 100644
index 000000000..8e025739f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v0.1.0 to dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_public_with_no_public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml
index 450229245..0217d4176 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_rename_with_rename_noop/out/Cargo.toml
@@ -6,3 +6,6 @@ version = "0.0.0"
[dependencies]
a1 = { package = "versioned-package", version = "0.1.1", optional = true }
+
+[features]
+a1 = ["dep:a1"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml
index 260014024..a3eabd065 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_git/out/Cargo.toml
@@ -6,3 +6,6 @@ version = "0.0.0"
[dependencies]
versioned-package = { version = "0.3.0", optional = true, git = "[ROOTURL]/versioned-package" }
+
+[features]
+versioned-package = ["dep:versioned-package"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml
index 07253670a..bd460a11b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_add/overwrite_version_with_path/out/primary/Cargo.toml
@@ -6,3 +6,6 @@ version = "0.0.0"
[dependencies]
cargo-list-test-fixture-dependency = { version = "0.0.0", optional = true, path = "../dependency" }
+
+[features]
+cargo-list-test-fixture-dependency = ["dep:cargo-list-test-fixture-dependency"]
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/public/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/public/in/Cargo.toml
new file mode 100644
index 000000000..e9087535b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/public/in/Cargo.toml
@@ -0,0 +1,6 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/public/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_add/public/in/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/public/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/public/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/public/mod.rs
new file mode 100644
index 000000000..bbf8d65a6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/public/mod.rs
@@ -0,0 +1,26 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::prelude::*;
+use cargo_test_support::Project;
+
+use cargo_test_support::curr_dir;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("my-package", "0.1.0").publish();
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg_line("my-package --public")
+ .current_dir(cwd)
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/public/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_add/public/out/Cargo.toml
new file mode 100644
index 000000000..77fccaa6a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/public/out/Cargo.toml
@@ -0,0 +1,9 @@
+cargo-features = ["public-dependency"]
+[workspace]
+
+[package]
+name = "cargo-list-test-fixture"
+version = "0.0.0"
+
+[dependencies]
+my-package = { version = "0.1.0", public = true }
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/public/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/public/stderr.log
new file mode 100644
index 000000000..5259bbde8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/public/stderr.log
@@ -0,0 +1,2 @@
+ Updating `dummy-registry` index
+ Adding my-package v0.1.0 to public dependencies.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/public/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/public/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/public/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/mod.rs
index f8aac0ad8..0404d12b4 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/mod.rs
@@ -26,7 +26,7 @@ fn case() {
.current_dir(cwd)
.masquerade_as_nightly_cargo(&["msrv-policy"])
.assert()
- .code(101)
+ .code(0)
.stdout_matches_path(curr_dir!().join("stdout.log"))
.stderr_matches_path(curr_dir!().join("stderr.log"));
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/stderr.log b/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/stderr.log
index 96bcbddc2..430abe31b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/rust_version_ignore/stderr.log
@@ -1,7 +1,2 @@
Updating `dummy-registry` index
Adding rust-version-user v0.2.1 to dependencies.
-error: failed to select a version for the requirement `rust-version-user = "^0.2.1"`
-candidate versions found which didn't match: 0.2.1, 0.1.0
-location searched: `dummy-registry` index (which is replacing registry `crates-io`)
-required by package `cargo-list-test-fixture v0.0.0 ([ROOT]/case)`
-perhaps a crate was updated and forgotten to be re-vendored?
diff --git a/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log
index 95546b4a3..cfea6e01e 100644
--- a/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log
@@ -11,8 +11,8 @@ Options:
--no-fail-fast Run all benchmarks regardless of failure
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log
index 58b12cdcd..3918fd44a 100644
--- a/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log
@@ -6,8 +6,8 @@ Options:
--ignore-rust-version Ignore `rust-version` specification in packages
--future-incompat-report Outputs a future incompatibility report at the end of the build
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log
index bbf090d1d..7b6289798 100644
--- a/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log
@@ -6,8 +6,8 @@ Options:
--ignore-rust-version Ignore `rust-version` specification in packages
--future-incompat-report Outputs a future incompatibility report at the end of the build
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log
index 6e9e82772..80571e5dc 100644
--- a/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log
@@ -4,9 +4,9 @@ Usage: cargo[EXE] clean [OPTIONS]
Options:
--doc Whether or not to clean just the documentation directory
- -q, --quiet Do not print cargo log messages
-n, --dry-run Display what would be deleted without deleting anything
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log
index 50caca72a..5c14335fc 100644
--- a/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log
@@ -7,6 +7,7 @@ Commands:
Options:
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log
index 8ff5f9b72..e1a19bedd 100644
--- a/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log
@@ -8,8 +8,8 @@ Options:
--document-private-items Document private items
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_features.rs b/src/tools/cargo/tests/testsuite/cargo_features.rs
index d319ed686..8b7daa214 100644
--- a/src/tools/cargo/tests/testsuite/cargo_features.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_features.rs
@@ -296,7 +296,7 @@ fn allow_features_to_rustc() {
"src/lib.rs",
r#"
#![allow(internal_features)]
- #![feature(test_2018_feature)]
+ #![feature(rustc_attrs)]
"#,
)
.build();
@@ -307,7 +307,7 @@ fn allow_features_to_rustc() {
.with_stderr_contains("[..]E0725[..]")
.run();
- p.cargo("-Zallow-features=test_2018_feature check")
+ p.cargo("-Zallow-features=rustc_attrs check")
.masquerade_as_nightly_cargo(&["allow-features"])
.with_stderr(
"\
diff --git a/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log
index 32f29f1b3..5645a6c03 100644
--- a/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log
@@ -3,8 +3,8 @@ Fetch dependencies of a package from the network
Usage: cargo[EXE] fetch [OPTIONS]
Options:
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log
index 3e8b1427f..a93215c50 100644
--- a/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log
@@ -11,8 +11,8 @@ Options:
--allow-staged Fix code even if the working directory has staged changes
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log
index 07eff888a..5d0bf1359 100644
--- a/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log
@@ -3,8 +3,8 @@ Generate the lockfile for a package
Usage: cargo[EXE] generate-lockfile [OPTIONS]
Options:
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log
index a03946b45..1f7a710d6 100644
--- a/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log
@@ -7,6 +7,7 @@ Arguments:
Options:
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log
index 588b45ccf..197a5f8d1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log
@@ -15,8 +15,8 @@ Options:
2021, 2024]
--name <NAME> Set the resulting package name, defaults to the directory name
--registry <REGISTRY> Registry to use
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log
index 5e3458d37..baaba7a72 100644
--- a/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log
@@ -20,9 +20,9 @@ Options:
--list list all installed packages and their versions
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
--debug Build in debug mode (with the 'dev' profile) instead of release mode
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log
index 1c6ea7b25..f39d61b7a 100644
--- a/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log
@@ -5,8 +5,8 @@ Usage: cargo[EXE] locate-project [OPTIONS]
Options:
--workspace Locate Cargo.toml of the workspace root
--message-format <FMT> Output representation [possible values: json, plain]
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log
index e0d5e7e69..0a699f72f 100644
--- a/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log
@@ -8,8 +8,8 @@ Arguments:
Options:
--registry <REGISTRY> Registry to use
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log
index fe328d765..3f9679f9b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log
@@ -4,8 +4,8 @@ Usage: cargo[EXE] logout [OPTIONS]
Options:
--registry <REGISTRY> Registry to use
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log
index 939fc40c9..f44f66c88 100644
--- a/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log
@@ -8,8 +8,8 @@ Options:
--no-deps Output information only about the workspace members and don't
fetch dependencies
--format-version <VERSION> Format version [possible values: 1]
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/empty_name/in/.keep b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/in/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/in/.keep
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/empty_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/mod.rs
new file mode 100644
index 000000000..a7d56630f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/mod.rs
@@ -0,0 +1,22 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+#[cargo_test]
+fn case() {
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+ let cwd = &project_root;
+
+ snapbox::cmd::Command::cargo_ui()
+ .arg("new")
+ .args(["foo", "--name", ""])
+ .current_dir(cwd)
+ .assert()
+ .failure()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/empty_name/out/.keep b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/out/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/out/.keep
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/empty_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/stderr.log
new file mode 100644
index 000000000..d9547a42a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/stderr.log
@@ -0,0 +1 @@
+error: package name cannot be empty
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/empty_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/empty_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log
index 3df5eceb8..52a6f83a1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log
@@ -15,8 +15,8 @@ Options:
2021, 2024]
--name <NAME> Set the resulting package name, defaults to the directory name
--registry <REGISTRY> Registry to use
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/mod.rs b/src/tools/cargo/tests/testsuite/cargo_new/mod.rs
index da0304409..806bd2ec3 100644
--- a/src/tools/cargo/tests/testsuite/cargo_new/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_new/mod.rs
@@ -4,6 +4,7 @@ mod add_members_to_workspace_with_absolute_package_path;
mod add_members_to_workspace_with_empty_members;
mod add_members_to_workspace_with_exclude_list;
mod add_members_to_workspace_with_members_glob;
+mod empty_name;
mod help;
mod inherit_workspace_lints;
mod inherit_workspace_package_table;
diff --git a/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log
index 110df8e9a..b6f436d04 100644
--- a/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log
@@ -12,8 +12,8 @@ Options:
--index <INDEX> Registry index URL to modify owners for
--registry <REGISTRY> Registry to modify owners for
--token <TOKEN> API token to use when authenticating
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log
index 5079c2a6f..ec2464a8d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log
@@ -7,8 +7,8 @@ Options:
--no-verify Don't verify the contents by building them
--no-metadata Ignore warnings about a lack of human-usable metadata
--allow-dirty Allow dirty working directories to be packaged
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log
index 5971e88dc..657bb9e5d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log
@@ -6,8 +6,8 @@ Arguments:
[SPEC]
Options:
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log
index df2594fb4..d598c93d6 100644
--- a/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log
@@ -9,8 +9,8 @@ Options:
--token <TOKEN> Token to use when uploading
--no-verify Don't verify the contents by building them
--allow-dirty Allow dirty working directories to be packaged
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log
index 83db5413d..a645ea3c2 100644
--- a/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log
@@ -5,8 +5,8 @@ Deprecated, use `cargo metadata --no-deps` instead.
Usage: cargo[EXE] read-manifest [OPTIONS]
Options:
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log
index 47d2c87ad..c3dc74b2d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log
@@ -7,8 +7,8 @@ Arguments:
Options:
-n, --dry-run Don't actually write the manifest
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock
index 06c2052d5..2302220f2 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/in/Cargo.lock
@@ -43,13 +43,13 @@ checksum = "31162e7d23a085553c42dee375787b451a481275473f7779c4a63bcc267a24fd"
name = "semver"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3031434e07edc922bf1b8262f075fac1522694f17b1ee7ad314c4cabd5d2723f"
+checksum = "106bee742e3199d9e59f4269e458dfc825c1b4648c483b1c2b7a45cd2610a308"
[[package]]
name = "serde"
version = "1.0.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75d9264696ebbf5315a6b068e9910c4df9274365afac2d88abf66525df660218"
+checksum = "be7d269f612a60e3c2c4a4a120e2d878a3f3298a5285eda6e95453905a107d9a"
[[package]]
name = "toml"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock
index bd8c90f46..0946cee47 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/out/Cargo.lock
@@ -36,13 +36,13 @@ checksum = "84949cb53285a6c481d0133065a7b669871acfd9e20f273f4ce1283c309775d5"
name = "semver"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3031434e07edc922bf1b8262f075fac1522694f17b1ee7ad314c4cabd5d2723f"
+checksum = "106bee742e3199d9e59f4269e458dfc825c1b4648c483b1c2b7a45cd2610a308"
[[package]]
name = "serde"
version = "1.0.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75d9264696ebbf5315a6b068e9910c4df9274365afac2d88abf66525df660218"
+checksum = "be7d269f612a60e3c2c4a4a120e2d878a3f3298a5285eda6e95453905a107d9a"
[[package]]
name = "toml"
diff --git a/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log
index 67819de55..95872662e 100644
--- a/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log
@@ -7,6 +7,7 @@ Commands:
Options:
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log
index 97c13382a..2e39d4f9e 100644
--- a/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log
@@ -8,8 +8,8 @@ Arguments:
Options:
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log
index 60069f526..8952330b0 100644
--- a/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log
@@ -11,8 +11,8 @@ Options:
--future-incompat-report Outputs a future incompatibility report at the end of the build
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log
index 67ee27e6b..a0a3cde5d 100644
--- a/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log
@@ -9,8 +9,8 @@ Options:
--open Opens the docs in a browser after the operation
--ignore-rust-version Ignore `rust-version` specification in packages
--message-format <FMT> Error format
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log
index 9cc508bba..e2024a990 100644
--- a/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log
@@ -1,4 +1,4 @@
-Search packages in crates.io
+Search packages in the registry. Default registry is crates.io
Usage: cargo[EXE] search [OPTIONS] [QUERY]...
@@ -9,8 +9,8 @@ Options:
--limit <LIMIT> Limit the number of results (default: 10, max: 100)
--index <INDEX> Registry index URL to search packages in
--registry <REGISTRY> Registry to search packages in
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log
index 4170583a8..9865fd59e 100644
--- a/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log
@@ -3,7 +3,6 @@ Display a tree visualization of a dependency graph
Usage: cargo[EXE] tree [OPTIONS]
Options:
- -q, --quiet Do not print cargo log messages
-e, --edges <KINDS> The kinds of dependencies to display (features, normal, build, dev, all,
no-normal, no-build, no-dev, no-proc-macro)
-i, --invert [<SPEC>] Invert the tree direction and focus on the given package
@@ -17,6 +16,7 @@ Options:
ascii]
-f, --format <FORMAT> Format string used for printing dependencies [default: {p}]
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log
index efdf11c03..1988e7a0e 100644
--- a/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log
@@ -7,8 +7,8 @@ Arguments:
Options:
--root <DIR> Directory to uninstall packages from
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log
index 92caeb656..8e0bf2ccb 100644
--- a/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log
@@ -6,8 +6,8 @@ Options:
-n, --dry-run Don't actually write the lockfile
--recursive Force updating all dependencies of [SPEC]... as well
--precise <PRECISE> Update [SPEC] to exactly PRECISE
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log
index 7f37ab56e..4e05e75c8 100644
--- a/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log
@@ -10,8 +10,8 @@ Options:
-s, --sync <TOML> Additional `Cargo.toml` to sync and vendor
--respect-source-config Respect `[source]` config in `.cargo/config`
--versioned-dirs Always include version in subdir name
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
diff --git a/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log
index a61534500..7adc34e6c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log
@@ -3,8 +3,8 @@ Check correctness of crate manifest
Usage: cargo[EXE] verify-project [OPTIONS]
Options:
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log
index 3f79051ad..2ad1c551c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log
@@ -3,8 +3,8 @@ Show version information
Usage: cargo[EXE] version [OPTIONS]
Options:
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log
index 61dc800c7..072ceaac7 100644
--- a/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log
@@ -11,8 +11,8 @@ Options:
--index <INDEX> Registry index URL to yank from
--registry <REGISTRY> Registry to yank from
--token <TOKEN> API token to use when authenticating
- -q, --quiet Do not print cargo log messages
-v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
--color <WHEN> Coloring: auto, always, never
--config <KEY=VALUE> Override a configuration value
-Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
diff --git a/src/tools/cargo/tests/testsuite/check.rs b/src/tools/cargo/tests/testsuite/check.rs
index 03611ae67..b7ad3eb1d 100644
--- a/src/tools/cargo/tests/testsuite/check.rs
+++ b/src/tools/cargo/tests/testsuite/check.rs
@@ -3,11 +3,12 @@
use std::fmt::{self, Write};
use crate::messages::raw_rustc_output;
+use cargo_test_support::compare;
use cargo_test_support::install::exe;
use cargo_test_support::paths::CargoPathExt;
use cargo_test_support::registry::Package;
+use cargo_test_support::tools;
use cargo_test_support::{basic_bin_manifest, basic_manifest, git, project};
-use cargo_test_support::{tools, wrapped_clippy_driver};
#[cargo_test]
fn check_success() {
@@ -804,7 +805,7 @@ fn short_message_format() {
.with_stderr_contains(
"\
src/lib.rs:1:27: error[E0308]: mismatched types
-error: could not compile `foo` (lib) due to previous error
+error: could not compile `foo` (lib) due to 1 previous error
",
)
.run();
@@ -1250,7 +1251,7 @@ fn check_fixable_error_no_fix() {
[CHECKING] foo v0.0.1 ([..])
{}\
[WARNING] `foo` (lib) generated 1 warning
-[ERROR] could not compile `foo` (lib) due to previous error; 1 warning emitted
+[ERROR] could not compile `foo` (lib) due to 1 previous error; 1 warning emitted
",
rustc_message
);
@@ -1432,7 +1433,7 @@ fn check_fixable_warning_for_clippy() {
foo.cargo("check")
// We can't use `clippy` so we use a `rustc` workspace wrapper instead
- .env("RUSTC_WORKSPACE_WRAPPER", wrapped_clippy_driver())
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::wrapped_clippy_driver())
.with_stderr_contains("[..] (run `cargo clippy --fix --lib -p foo` to apply 1 suggestion)")
.run();
}
@@ -1519,3 +1520,43 @@ fn versionless_package() {
)
.run();
}
+
+#[cargo_test]
+fn pkgid_querystring_works() {
+ let git_project = git::new("gitdep", |p| {
+ p.file("Cargo.toml", &basic_manifest("gitdep", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+
+ [dependencies]
+ gitdep = {{ git = "{}", branch = "master" }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("generate-lockfile").run();
+
+ let output = p.cargo("pkgid").arg("gitdep").exec_with_output().unwrap();
+ let gitdep_pkgid = String::from_utf8(output.stdout).unwrap();
+ let gitdep_pkgid = gitdep_pkgid.trim();
+ compare::assert_match_exact("git+file://[..]/gitdep?branch=master#1.0.0", &gitdep_pkgid);
+
+ p.cargo("build -p")
+ .arg(gitdep_pkgid)
+ .with_stderr(
+ "\
+[COMPILING] gitdep v1.0.0 (file:///[..]/gitdep?branch=master#[..])
+[FINISHED] dev [..]",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/check_cfg.rs b/src/tools/cargo/tests/testsuite/check_cfg.rs
index 57d5f8053..42cfe6065 100644
--- a/src/tools/cargo/tests/testsuite/check_cfg.rs
+++ b/src/tools/cargo/tests/testsuite/check_cfg.rs
@@ -15,16 +15,16 @@ macro_rules! x {
$what, '(', $($who,)* ')', "'", "[..]")
}
}};
- ($tool:tt => $what:tt of $who:tt with $($first_value:tt $($other_values:tt)*)?) => {{
+ ($tool:tt => $what:tt of $who:tt with $first_value:tt $($other_values:tt)*) => {{
#[cfg(windows)]
{
concat!("[RUNNING] [..]", $tool, "[..] --check-cfg \"",
- $what, '(', $who, ", values(", $("/\"", $first_value, "/\"", $(", ", "/\"", $other_values, "/\"",)*)* "))", '"', "[..]")
+ $what, '(', $who, ", values(", "/\"", $first_value, "/\"", $(", ", "/\"", $other_values, "/\"",)* "))", '"', "[..]")
}
#[cfg(not(windows))]
{
concat!("[RUNNING] [..]", $tool, "[..] --check-cfg '",
- $what, '(', $who, ", values(", $("\"", $first_value, "\"", $(", ", "\"", $other_values, "\"",)*)* "))", "'", "[..]")
+ $what, '(', $who, ", values(", "\"", $first_value, "\"", $(", ", "\"", $other_values, "\"",)* "))", "'", "[..]")
}
}};
}
@@ -142,6 +142,77 @@ fn features_with_namespaced_features() {
}
#[cargo_test(nightly, reason = "--check-cfg is unstable")]
+fn features_fingerprint() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ f_b = []
+ "#,
+ )
+ .file("src/lib.rs", "#[cfg(feature = \"f_b\")] fn entry() {}")
+ .build();
+
+ p.cargo("check -v -Zcheck-cfg")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_contains(x!("rustc" => "cfg" of "feature" with "f_a" "f_b"))
+ .with_stderr_does_not_contain("[..]unexpected_cfgs[..]")
+ .run();
+
+ p.cargo("check -v -Zcheck-cfg")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_does_not_contain("[..]rustc[..]")
+ .run();
+
+    // checking that re-ordering the features does not invalidate the fingerprint
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_b = []
+ f_a = []
+ "#,
+ );
+
+ p.cargo("check -v -Zcheck-cfg")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ .with_stderr_does_not_contain("[..]rustc[..]")
+ .run();
+
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [features]
+ f_a = []
+ "#,
+ );
+
+ p.cargo("check -v -Zcheck-cfg")
+ .masquerade_as_nightly_cargo(&["check-cfg"])
+ // we check that the fingerprint is indeed dirty
+ .with_stderr_contains("[..]Dirty[..]the list of declared features changed")
+        // which in turn causes rustc to be called again with the new check-cfg args
+ .with_stderr_contains(x!("rustc" => "cfg" of "feature" with "f_a"))
+ // and that we indeed found a new warning from the unexpected_cfgs lint
+ .with_stderr_contains("[..]unexpected_cfgs[..]")
+ .run();
+}
+
+#[cargo_test(nightly, reason = "--check-cfg is unstable")]
fn well_known_names_values() {
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
@@ -150,7 +221,7 @@ fn well_known_names_values() {
p.cargo("check -v -Zcheck-cfg")
.masquerade_as_nightly_cargo(&["check-cfg"])
- .with_stderr_contains(x!("rustc" => "cfg" of "feature" with))
+ .with_stderr_contains(x!("rustc" => "cfg"))
.run();
}
@@ -213,7 +284,7 @@ fn well_known_names_values_test() {
p.cargo("test -v -Zcheck-cfg")
.masquerade_as_nightly_cargo(&["check-cfg"])
- .with_stderr_contains(x!("rustc" => "cfg" of "feature" with))
+ .with_stderr_contains(x!("rustc" => "cfg"))
.run();
}
@@ -226,8 +297,8 @@ fn well_known_names_values_doctest() {
p.cargo("test -v --doc -Zcheck-cfg")
.masquerade_as_nightly_cargo(&["check-cfg"])
- .with_stderr_contains(x!("rustc" => "cfg" of "feature" with))
- .with_stderr_contains(x!("rustdoc" => "cfg" of "feature" with))
+ .with_stderr_contains(x!("rustc" => "cfg"))
+ .with_stderr_contains(x!("rustdoc" => "cfg"))
.run();
}
diff --git a/src/tools/cargo/tests/testsuite/clean.rs b/src/tools/cargo/tests/testsuite/clean.rs
index fbb4d3e5b..913bf19cb 100644
--- a/src/tools/cargo/tests/testsuite/clean.rs
+++ b/src/tools/cargo/tests/testsuite/clean.rs
@@ -1,5 +1,6 @@
//! Tests for the `cargo clean` command.
+use cargo_test_support::paths::CargoPathExt;
use cargo_test_support::registry::Package;
use cargo_test_support::{
basic_bin_manifest, basic_manifest, git, main_file, project, project_in, rustc_host,
@@ -33,7 +34,10 @@ fn different_dir() {
p.cargo("build").run();
assert!(p.build_dir().is_dir());
- p.cargo("clean").cwd("src").with_stdout("").run();
+ p.cargo("clean")
+ .cwd("src")
+ .with_stderr("[REMOVED] [..]")
+ .run();
assert!(!p.build_dir().is_dir());
}
@@ -81,7 +85,7 @@ fn clean_multiple_packages() {
p.cargo("clean -p d1 -p d2")
.cwd("src")
- .with_stdout("")
+ .with_stderr("[REMOVED] [..]")
.run();
assert!(p.bin("foo").is_file());
assert!(!d1_path.is_file());
@@ -226,7 +230,9 @@ fn clean_release() {
p.cargo("build --release").run();
p.cargo("clean -p foo").run();
- p.cargo("build --release").with_stdout("").run();
+ p.cargo("build --release")
+ .with_stderr("[FINISHED] [..]")
+ .run();
p.cargo("clean -p foo --release").run();
p.cargo("build --release")
@@ -354,7 +360,7 @@ fn clean_git() {
.build();
p.cargo("build").run();
- p.cargo("clean -p dep").with_stdout("").run();
+ p.cargo("clean -p dep").with_stderr("[REMOVED] [..]").run();
p.cargo("build").run();
}
@@ -379,7 +385,7 @@ fn registry() {
Package::new("bar", "0.1.0").publish();
p.cargo("build").run();
- p.cargo("clean -p bar").with_stdout("").run();
+ p.cargo("clean -p bar").with_stderr("[REMOVED] [..]").run();
p.cargo("build").run();
}
@@ -805,15 +811,6 @@ fn clean_dry_run() {
.file("src/lib.rs", "")
.build();
- let ls_r = || -> Vec<_> {
- let mut file_list: Vec<_> = walkdir::WalkDir::new(p.build_dir())
- .into_iter()
- .filter_map(|e| e.map(|e| e.path().to_owned()).ok())
- .collect();
- file_list.sort();
- file_list
- };
-
// Start with no files.
p.cargo("clean --dry-run")
.with_stdout("")
@@ -823,7 +820,7 @@ fn clean_dry_run() {
)
.run();
p.cargo("check").run();
- let before = ls_r();
+ let before = p.build_dir().ls_r();
p.cargo("clean --dry-run")
.with_stderr(
"[SUMMARY] [..] files, [..] total\n\
@@ -831,7 +828,7 @@ fn clean_dry_run() {
)
.run();
// Verify it didn't delete anything.
- let after = ls_r();
+ let after = p.build_dir().ls_r();
assert_eq!(before, after);
let expected = cargo::util::iter_join(before.iter().map(|p| p.to_str().unwrap()), "\n");
eprintln!("{expected}");
@@ -854,3 +851,29 @@ fn doc_with_package_selection() {
.with_stderr("error: --doc cannot be used with -p")
.run();
}
+
+#[cargo_test]
+fn quiet_does_not_show_summary() {
+ // Checks that --quiet works with `cargo clean`, since there was a
+ // subtle issue with how the flag is defined as a global flag.
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("clean --quiet --dry-run")
+ .with_stdout("")
+ .with_stderr("")
+ .run();
+    // Verify the exact same command without -q would actually display something.
+ p.cargo("clean --dry-run")
+ .with_stdout("")
+ .with_stderr(
+ "\
+[SUMMARY] [..] files, [..] total
+[WARNING] no files deleted due to --dry-run
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/config.rs b/src/tools/cargo/tests/testsuite/config.rs
index e5078bd8e..bcd126020 100644
--- a/src/tools/cargo/tests/testsuite/config.rs
+++ b/src/tools/cargo/tests/testsuite/config.rs
@@ -2,9 +2,9 @@
use cargo::core::{PackageIdSpec, Shell};
use cargo::util::config::{self, Config, Definition, JobsConfig, SslVersionConfig, StringList};
-use cargo::util::toml::schema::TomlTrimPaths;
-use cargo::util::toml::schema::TomlTrimPathsValue;
-use cargo::util::toml::schema::{self as cargo_toml, TomlDebugInfo, VecStringOrBool as VSOB};
+use cargo::util_schemas::manifest::TomlTrimPaths;
+use cargo::util_schemas::manifest::TomlTrimPathsValue;
+use cargo::util_schemas::manifest::{self as cargo_toml, TomlDebugInfo, VecStringOrBool as VSOB};
use cargo::CargoResult;
use cargo_test_support::compare;
use cargo_test_support::{panic_error, paths, project, symlink_supported, t};
diff --git a/src/tools/cargo/tests/testsuite/custom_target.rs b/src/tools/cargo/tests/testsuite/custom_target.rs
index a04029075..45cb3ac9f 100644
--- a/src/tools/cargo/tests/testsuite/custom_target.rs
+++ b/src/tools/cargo/tests/testsuite/custom_target.rs
@@ -178,6 +178,8 @@ fn changing_spec_rebuilds() {
}
#[cargo_test(nightly, reason = "requires features no_core, lang_items")]
+// This is randomly crashing in lld. See https://github.com/rust-lang/rust/issues/115985
+#[cfg_attr(all(windows, target_env = "gnu"), ignore = "windows-gnu lld crashing")]
fn changing_spec_relearns_crate_types() {
// Changing the .json file will invalidate the cache of crate types.
let p = project()
diff --git a/src/tools/cargo/tests/testsuite/doc.rs b/src/tools/cargo/tests/testsuite/doc.rs
index 65169d214..37dd47d76 100644
--- a/src/tools/cargo/tests/testsuite/doc.rs
+++ b/src/tools/cargo/tests/testsuite/doc.rs
@@ -75,7 +75,14 @@ fn doc_twice() {
)
.run();
- p.cargo("doc").with_stdout("").run();
+ p.cargo("doc")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+[GENERATED] [CWD]/target/doc/foo/index.html
+",
+ )
+ .run();
}
#[cargo_test]
@@ -118,9 +125,14 @@ fn doc_deps() {
assert_eq!(p.glob("target/debug/**/*.rlib").count(), 0);
assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 1);
+ // Make sure it doesn't recompile.
p.cargo("doc")
- .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint")
- .with_stdout("")
+ .with_stderr(
+ "\
+[FINISHED] [..]
+[GENERATED] [CWD]/target/doc/foo/index.html
+",
+ )
.run();
assert!(p.root().join("target/doc").is_dir());
@@ -1686,6 +1698,7 @@ fn doc_message_format() {
r#"
{
"message": {
+ "$message_type": "diagnostic",
"children": "{...}",
"code": "{...}",
"level": "error",
diff --git a/src/tools/cargo/tests/testsuite/docscrape.rs b/src/tools/cargo/tests/testsuite/docscrape.rs
index d4d011ff3..91871be04 100644
--- a/src/tools/cargo/tests/testsuite/docscrape.rs
+++ b/src/tools/cargo/tests/testsuite/docscrape.rs
@@ -48,6 +48,84 @@ fn basic() {
assert!(p.build_dir().join("doc/src/ex/ex.rs.html").exists());
}
+// This test ensures that even if there is no `[workspace]` in the top-level `Cargo.toml` file, the
+// dependencies will get their examples scraped and that they appear in the generated documentation.
+#[cargo_test(nightly, reason = "-Zrustdoc-scrape-examples is unstable")]
+fn scrape_examples_for_non_workspace_reexports() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ edition = "2021"
+ authors = []
+
+ [dependencies]
+ a = { path = "crates/a" }
+ "#,
+ )
+ .file("src/lib.rs", "pub use a::*;")
+ // Example
+ .file(
+ "examples/one.rs",
+ r#"use foo::*;
+fn main() {
+ let foo = Foo::new("yes".into());
+ foo.maybe();
+}"#,
+ )
+ // `a` crate
+ .file(
+ "crates/a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "crates/a/src/lib.rs",
+ r#"
+#[derive(Debug)]
+pub struct Foo {
+ foo: String,
+ yes: bool,
+}
+
+impl Foo {
+ pub fn new(foo: String) -> Self {
+ Self { foo, yes: true }
+ }
+
+ pub fn maybe(&self) {
+ if self.yes {
+ println!("{}", self.foo)
+ }
+ }
+}"#,
+ )
+ .build();
+
+ p.cargo("doc -Zunstable-options -Zrustdoc-scrape-examples --no-deps")
+ .masquerade_as_nightly_cargo(&["rustdoc-scrape-examples"])
+ .with_stderr_unordered(
+ "\
+[CHECKING] a v0.0.1 ([CWD]/crates/a)
+[CHECKING] foo v0.0.1 ([CWD])
+[SCRAPING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+[GENERATED] [CWD]/target/doc/foo/index.html",
+ )
+ .run();
+
+ let doc_html = p.read_file("target/doc/foo/struct.Foo.html");
+ assert!(doc_html.contains("Examples found in repository"));
+}
+
#[cargo_test(nightly, reason = "rustdoc scrape examples flags are unstable")]
fn avoid_build_script_cycle() {
let p = project()
diff --git a/src/tools/cargo/tests/testsuite/features.rs b/src/tools/cargo/tests/testsuite/features.rs
index 4b7455c37..febdf52fe 100644
--- a/src/tools/cargo/tests/testsuite/features.rs
+++ b/src/tools/cargo/tests/testsuite/features.rs
@@ -60,6 +60,10 @@ fn empty_feature_name() {
[ERROR] failed to parse manifest at `[..]`
Caused by:
+ TOML parse error at line 8, column 17
+ |
+ 8 | \"\" = []
+ | ^^
feature name cannot be empty
",
)
@@ -627,7 +631,14 @@ fn cyclic_feature2() {
.file("src/main.rs", "fn main() {}")
.build();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[CHECKING] foo [..]
+[FINISHED] [..]
+",
+ )
+ .run();
}
#[cargo_test]
@@ -1047,8 +1058,8 @@ fn no_rebuild_when_frobbing_default_feature() {
.build();
p.cargo("check").run();
- p.cargo("check").with_stdout("").run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -1098,8 +1109,8 @@ fn unions_work_with_no_default_features() {
.build();
p.cargo("check").run();
- p.cargo("check").with_stdout("").run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -2048,7 +2059,11 @@ fn invalid_feature_names_error() {
error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
Caused by:
- invalid character `+` in feature `+foo` in package foo v0.1.0 ([ROOT]/foo), \
+ TOML parse error at line 8, column 17
+ |
+ 8 | \"+foo\" = []
+ | ^^^^^^
+ invalid character `+` in feature `+foo`, \
the first character must be a Unicode XID start character or digit \
(most letters or `_` or `0` to `9`)
",
@@ -2075,7 +2090,11 @@ Caused by:
error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
Caused by:
- invalid character `&` in feature `a&b` in package foo v0.1.0 ([ROOT]/foo), \
+ TOML parse error at line 8, column 13
+ |
+ 8 | \"a&b\" = []
+ | ^^^^^
+ invalid character `&` in feature `a&b`, \
characters must be Unicode XID characters, '-', `+`, or `.` \
(numbers, `+`, `-`, `_`, `.`, or most letters)
",
@@ -2108,6 +2127,10 @@ fn invalid_feature_name_slash_error() {
error: failed to parse manifest at `[CWD]/Cargo.toml`
Caused by:
+ TOML parse error at line 7, column 17
+ |
+ 7 | \"foo/bar\" = []
+ | ^^^^^^^^^
feature named `foo/bar` is not allowed to contain slashes
",
)
diff --git a/src/tools/cargo/tests/testsuite/features_namespaced.rs b/src/tools/cargo/tests/testsuite/features_namespaced.rs
index f24186c15..b79be55e8 100644
--- a/src/tools/cargo/tests/testsuite/features_namespaced.rs
+++ b/src/tools/cargo/tests/testsuite/features_namespaced.rs
@@ -439,6 +439,10 @@ fn crate_syntax_bad_name() {
[ERROR] failed to parse manifest at [..]/foo/Cargo.toml`
Caused by:
+ TOML parse error at line 10, column 17
+ |
+ 10 | \"dep:bar\" = []
+ | ^^^^^^^^^
feature named `dep:bar` is not allowed to start with `dep:`
",
)
diff --git a/src/tools/cargo/tests/testsuite/fetch.rs b/src/tools/cargo/tests/testsuite/fetch.rs
index f90131a59..9be5f79d0 100644
--- a/src/tools/cargo/tests/testsuite/fetch.rs
+++ b/src/tools/cargo/tests/testsuite/fetch.rs
@@ -11,7 +11,7 @@ fn no_deps() {
.file("src/a.rs", "")
.build();
- p.cargo("fetch").with_stdout("").run();
+ p.cargo("fetch").with_stderr("").run();
}
#[cargo_test]
diff --git a/src/tools/cargo/tests/testsuite/fix.rs b/src/tools/cargo/tests/testsuite/fix.rs
index 33de721cd..7cb5bd65e 100644
--- a/src/tools/cargo/tests/testsuite/fix.rs
+++ b/src/tools/cargo/tests/testsuite/fix.rs
@@ -5,8 +5,8 @@ use cargo_test_support::compare::assert_match_exact;
use cargo_test_support::git::{self, init};
use cargo_test_support::paths::{self, CargoPathExt};
use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::tools;
use cargo_test_support::{basic_manifest, is_nightly, project, Project};
-use cargo_test_support::{tools, wrapped_clippy_driver};
#[cargo_test]
fn do_not_fix_broken_builds() {
@@ -29,7 +29,7 @@ fn do_not_fix_broken_builds() {
p.cargo("fix --allow-no-vcs")
.env("__CARGO_FIX_YOLO", "1")
.with_status(101)
- .with_stderr_contains("[ERROR] could not compile `foo` (lib) due to previous error")
+ .with_stderr_contains("[ERROR] could not compile `foo` (lib) due to 1 previous error")
.run();
assert!(p.read_file("src/lib.rs").contains("let mut x = 3;"));
}
@@ -110,6 +110,7 @@ fn rustc_shim_for_cargo_fix() -> Project {
}
let status = Command::new("rustc")
.args(env::args().skip(1))
+ .env_remove("CARGO_MAKEFLAGS")
.status()
.expect("failed to run rustc");
process::exit(status.code().unwrap_or(2));
@@ -193,7 +194,7 @@ fn broken_clippy_fixes_backed_out() {
.env("__CARGO_FIX_YOLO", "1")
.env("RUSTC", p.root().join("foo/target/debug/foo"))
// We can't use `clippy` so we use a `rustc` workspace wrapper instead
- .env("RUSTC_WORKSPACE_WRAPPER", wrapped_clippy_driver())
+ .env("RUSTC_WORKSPACE_WRAPPER", tools::wrapped_clippy_driver())
.with_stderr_contains(
"warning: failed to automatically apply fixes suggested by rustc \
to crate `bar`\n\
@@ -1857,9 +1858,22 @@ fn non_edition_lint_migration() {
assert!(contents.contains("from_utf8(crate::foo::FOO)"));
}
-// For rust-lang/cargo#9857
#[cargo_test]
fn fix_in_dependency() {
+ // Tests what happens if rustc emits a suggestion to modify a file from a
+ // dependency in cargo's home directory. This should never happen, and
+ // indicates a bug in rustc. However, there are several known bugs in
+ // rustc where it does this (often involving macros), so `cargo fix` has a
+ // guard that says if the suggestion points to some location in CARGO_HOME
+ // to not apply it.
+ //
+ // See https://github.com/rust-lang/cargo/issues/9857 for some other
+ // examples.
+ //
+ // This test uses a simulated rustc which replays a suggestion via a JSON
+ // message that points into CARGO_HOME. This does not use the real rustc
+ // because as the bugs are fixed in the real rustc, that would cause this
+ // test to stop working.
Package::new("bar", "1.0.0")
.file(
"src/lib.rs",
@@ -1895,8 +1909,146 @@ fn fix_in_dependency() {
"#,
)
.build();
+ p.cargo("fetch").run();
+
+ // The path in CARGO_HOME.
+ let bar_path = std::fs::read_dir(paths::home().join(".cargo/registry/src"))
+ .unwrap()
+ .next()
+ .unwrap()
+ .unwrap()
+ .path();
+ // Since this is a substitution into a Rust string (representing a JSON
+ // string), deal with backslashes like on Windows.
+ let bar_path_str = bar_path.to_str().unwrap().replace("\\", "/");
+
+ // This is a fake rustc that will emit a JSON message when the `foo` crate
+ // builds that tells cargo to modify a file it shouldn't.
+ let rustc = project()
+ .at("rustc-replay")
+ .file("Cargo.toml", &basic_manifest("rustc-replay", "1.0.0"))
+ .file("src/main.rs",
+ &r##"
+ fn main() {
+ let pkg_name = match std::env::var("CARGO_PKG_NAME") {
+ Ok(pkg_name) => pkg_name,
+ Err(_) => {
+ let r = std::process::Command::new("rustc")
+ .args(std::env::args_os().skip(1))
+ .status();
+ std::process::exit(r.unwrap().code().unwrap_or(2));
+ }
+ };
+ if pkg_name == "foo" {
+ eprintln!("{}", r#"{
+ "$message_type": "diagnostic",
+ "message": "unused variable: `abc`",
+ "code":
+ {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans":
+ [
+ {
+ "file_name": "__BAR_PATH__/bar-1.0.0/src/lib.rs",
+ "byte_start": 127,
+ "byte_end": 129,
+ "line_start": 5,
+ "line_end": 5,
+ "column_start": 29,
+ "column_end": 31,
+ "is_primary": true,
+ "text":
+ [
+ {
+ "text": " let $i = 1;",
+ "highlight_start": 29,
+ "highlight_end": 31
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children":
+ [
+ {
+ "message": "`#[warn(unused_variables)]` on by default",
+ "code": null,
+ "level": "note",
+ "spans":
+ [],
+ "children":
+ [],
+ "rendered": null
+ },
+ {
+ "message": "if this is intentional, prefix it with an underscore",
+ "code": null,
+ "level": "help",
+ "spans":
+ [
+ {
+ "file_name": "__BAR_PATH__/bar-1.0.0/src/lib.rs",
+ "byte_start": 127,
+ "byte_end": 129,
+ "line_start": 5,
+ "line_end": 5,
+ "column_start": 29,
+ "column_end": 31,
+ "is_primary": true,
+ "text":
+ [
+ {
+ "text": " let $i = 1;",
+ "highlight_start": 29,
+ "highlight_end": 31
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_abc",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children":
+ [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `abc`\n --> __BAR_PATH__/bar-1.0.0/src/lib.rs:5:29\n |\n5 | let $i = 1;\n | ^^ help: if this is intentional, prefix it with an underscore: `_abc`\n |\n = note: `#[warn(unused_variables)]` on by default\n\n"
+ }"#.replace("\n", ""));
+ }
+ }
+ "##.replace("__BAR_PATH__", &bar_path_str))
+ .build();
+ rustc.cargo("build").run();
+ let rustc_bin = rustc.bin("rustc-replay");
- p.cargo("fix --allow-no-vcs")
- .with_stderr_does_not_contain("[FIXED] [..]")
+ // The output here should not say `Fixed`.
+ //
+ // It is OK to compare the full diagnostic output here because the text is
+ // hard-coded in rustc-replay. Normally tests should not be checking the
+ // compiler output.
+ p.cargo("fix --lib --allow-no-vcs")
+ .env("RUSTC", &rustc_bin)
+ .with_stderr("\
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+warning: unused variable: `abc`
+ --> [ROOT]/home/.cargo/registry/src/[..]/bar-1.0.0/src/lib.rs:5:29
+ |
+5 | let $i = 1;
+ | ^^ help: if this is intentional, prefix it with an underscore: `_abc`
+ |
+ = note: `#[warn(unused_variables)]` on by default
+
+warning: `foo` (lib) generated 1 warning (run `cargo fix --lib -p foo` to apply 1 suggestion)
+[FINISHED] [..]
+")
.run();
}
diff --git a/src/tools/cargo/tests/testsuite/freshness.rs b/src/tools/cargo/tests/testsuite/freshness.rs
index d450cbbd9..2d9b3df68 100644
--- a/src/tools/cargo/tests/testsuite/freshness.rs
+++ b/src/tools/cargo/tests/testsuite/freshness.rs
@@ -34,7 +34,7 @@ fn modifying_and_moving() {
)
.run();
- p.cargo("build").with_stdout("").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
p.root().move_into_the_past();
p.root().join("target").move_into_the_past();
@@ -223,7 +223,7 @@ fn changing_lib_features_caches_targets() {
.with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
.run();
- p.cargo("build").with_stdout("").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
p.cargo("build --features foo")
.with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
@@ -666,7 +666,7 @@ fn rerun_if_changed_in_dep() {
.build();
p.cargo("build").run();
- p.cargo("build").with_stdout("").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -1522,7 +1522,7 @@ fn bust_patched_dep() {
sleep_ms(1000);
}
- p.change_file("reg1new/src/lib.rs", "");
+ p.change_file("reg1new/src/lib.rs", "// modified");
if is_coarse_mtime() {
sleep_ms(1000);
}
diff --git a/src/tools/cargo/tests/testsuite/generate_lockfile.rs b/src/tools/cargo/tests/testsuite/generate_lockfile.rs
index d2b633605..ed282fc19 100644
--- a/src/tools/cargo/tests/testsuite/generate_lockfile.rs
+++ b/src/tools/cargo/tests/testsuite/generate_lockfile.rs
@@ -161,13 +161,13 @@ fn cargo_update_generate_lockfile() {
let lockfile = p.root().join("Cargo.lock");
assert!(!lockfile.is_file());
- p.cargo("update").with_stdout("").run();
+ p.cargo("update").with_stderr("").run();
assert!(lockfile.is_file());
fs::remove_file(p.root().join("Cargo.lock")).unwrap();
assert!(!lockfile.is_file());
- p.cargo("update").with_stdout("").run();
+ p.cargo("update").with_stderr("").run();
assert!(lockfile.is_file());
}
diff --git a/src/tools/cargo/tests/testsuite/git.rs b/src/tools/cargo/tests/testsuite/git.rs
index e27315346..d9289acc6 100644
--- a/src/tools/cargo/tests/testsuite/git.rs
+++ b/src/tools/cargo/tests/testsuite/git.rs
@@ -591,12 +591,12 @@ fn recompilation() {
.run();
// Don't recompile the second time
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
// Modify a file manually, shouldn't trigger a recompile
git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#);
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
p.cargo("update")
.with_stderr(&format!(
@@ -605,7 +605,7 @@ fn recompilation() {
))
.run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
// Commit the changes and make sure we don't trigger a recompile because the
// lock file says not to change
@@ -614,7 +614,7 @@ fn recompilation() {
git::commit(&repo);
println!("compile after commit");
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
p.root().move_into_the_past();
// Update the dependency and carry on!
@@ -638,7 +638,7 @@ fn recompilation() {
.run();
// Make sure clean only cleans one dep
- p.cargo("clean -p foo").with_stdout("").run();
+ p.cargo("clean -p foo").with_stderr("[REMOVED] [..]").run();
p.cargo("check")
.with_stderr(
"[CHECKING] foo v0.5.0 ([CWD])\n\
@@ -742,7 +742,14 @@ fn update_with_shared_deps() {
// By default, not transitive updates
println!("dep1 update");
- p.cargo("update dep1").with_stdout("").run();
+ p.cargo("update dep1")
+ .with_stderr(
+ "\
+[UPDATING] git repository [..]
+[UPDATING] bar v0.5.0 [..]
+",
+ )
+ .run();
// Don't do anything bad on a weird --precise argument
println!("bar bad precise update");
@@ -766,7 +773,7 @@ Caused by:
println!("bar precise update");
p.cargo("update bar --precise")
.arg(&old_head.to_string())
- .with_stdout("")
+ .with_stderr("[UPDATING] bar v0.5.0 [..]")
.run();
// Updating recursively should, however, update the repo.
@@ -1496,12 +1503,12 @@ fn git_build_cmd_freshness() {
// Smoke test to make sure it doesn't compile again
println!("first pass");
- foo.cargo("check").with_stdout("").run();
+ foo.cargo("check").with_stderr("[FINISHED] [..]").run();
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
foo.change_file("src/bar.rs", "");
- foo.cargo("check").with_stdout("").run();
+ foo.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -1636,7 +1643,7 @@ fn git_repo_changing_no_rebuild() {
// And now for the real test! Make sure that p1 doesn't get rebuilt
// even though the git repo has changed.
- p1.cargo("check").with_stdout("").run();
+ p1.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -1741,7 +1748,7 @@ fn fetch_downloads() {
))
.run();
- p.cargo("fetch").with_stdout("").run();
+ p.cargo("fetch").with_stderr("").run();
}
#[cargo_test]
@@ -1786,7 +1793,7 @@ fn fetch_downloads_with_git2_first_then_with_gitoxide_and_vice_versa() {
.run();
Package::new("bar", "1.0.0").publish(); // trigger a crates-index change.
- p.cargo("fetch").with_stdout("").run();
+ p.cargo("fetch").with_stderr("").run();
}
#[cargo_test]
diff --git a/src/tools/cargo/tests/testsuite/git_auth.rs b/src/tools/cargo/tests/testsuite/git_auth.rs
index b6e68fa3d..c79ae7ce0 100644
--- a/src/tools/cargo/tests/testsuite/git_auth.rs
+++ b/src/tools/cargo/tests/testsuite/git_auth.rs
@@ -105,11 +105,6 @@ fn setup_failed_auth_test() -> (SocketAddr, JoinHandle<()>, Arc<AtomicUsize>) {
// Tests that HTTP auth is offered from `credential.helper`.
#[cargo_test]
fn http_auth_offered() {
- // TODO(Seb): remove this once possible.
- if cargo_uses_gitoxide() {
- // Without the fixes in https://github.com/Byron/gitoxide/releases/tag/gix-v0.41.0 this test is flaky.
- return;
- }
let (addr, t, connections) = setup_failed_auth_test();
let p = project()
.file(
@@ -372,11 +367,6 @@ Caused by:
#[cargo_test]
fn instead_of_url_printed() {
- // TODO(Seb): remove this once possible.
- if cargo_uses_gitoxide() {
- // Without the fixes in https://github.com/Byron/gitoxide/releases/tag/gix-v0.41.0 this test is flaky.
- return;
- }
let (addr, t, _connections) = setup_failed_auth_test();
let config = paths::home().join(".gitconfig");
let mut config = git2::Config::open(&config).unwrap();
diff --git a/src/tools/cargo/tests/testsuite/global_cache_tracker.rs b/src/tools/cargo/tests/testsuite/global_cache_tracker.rs
new file mode 100644
index 000000000..68a606902
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/global_cache_tracker.rs
@@ -0,0 +1,1862 @@
+//! Tests for last-use tracking and auto-gc.
+//!
+//! Cargo supports an environment variable called `__CARGO_TEST_LAST_USE_NOW`
+//! to have cargo pretend that the current time is the given time (in seconds
+//! since the unix epoch). This is used throughout these tests to simulate
+//! what happens when time passes. The [`days_ago_unix`] and
+//! [`months_ago_unix`] functions help with setting this value.
+
+use super::config::ConfigBuilder;
+use cargo::core::global_cache_tracker::{self, DeferredGlobalLastUse, GlobalCacheTracker};
+use cargo::util::cache_lock::CacheLockMode;
+use cargo::util::interning::InternedString;
+use cargo::Config;
+use cargo_test_support::paths::{self, CargoPathExt};
+use cargo_test_support::registry::{Package, RegistryBuilder};
+use cargo_test_support::{
+ basic_manifest, cargo_process, execs, git, project, retry, sleep_ms, thread_wait_timeout,
+ Project,
+};
+use itertools::Itertools;
+use std::fmt::Write;
+use std::path::PathBuf;
+use std::process::Stdio;
+use std::time::{Duration, SystemTime};
+
+/// Helper to create a simple `foo` project which depends on a registry
+/// dependency called `bar`.
+fn basic_foo_bar_project() -> Project {
+ Package::new("bar", "1.0.0").publish();
+ project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build()
+}
+
+/// Helper to get the names of files in a directory as strings.
+fn get_names(glob: &str) -> Vec<String> {
+ let mut names: Vec<_> = glob::glob(paths::home().join(glob).to_str().unwrap())
+ .unwrap()
+ .map(|p| p.unwrap().file_name().unwrap().to_str().unwrap().to_owned())
+ .collect();
+ names.sort();
+ names
+}
+
+fn get_registry_names(which: &str) -> Vec<String> {
+ get_names(&format!(".cargo/registry/{which}/*/*"))
+}
+
+fn get_index_names() -> Vec<String> {
+ get_names(&format!(".cargo/registry/index/*"))
+}
+
+fn get_git_db_names() -> Vec<String> {
+ get_names(&format!(".cargo/git/db/*"))
+}
+
+fn get_git_checkout_names(db_name: &str) -> Vec<String> {
+ get_names(&format!(".cargo/git/checkouts/{db_name}/*"))
+}
+
+fn days_ago(n: u64) -> SystemTime {
+ SystemTime::now() - Duration::from_secs(60 * 60 * 24 * n)
+}
+
+/// Helper for simulating running cargo in the past. Use with the
+/// __CARGO_TEST_LAST_USE_NOW environment variable.
+fn days_ago_unix(n: u64) -> String {
+ days_ago(n)
+ .duration_since(SystemTime::UNIX_EPOCH)
+ .unwrap()
+ .as_secs()
+ .to_string()
+}
+
+/// Helper for simulating running cargo in the past. Use with the
+/// __CARGO_TEST_LAST_USE_NOW environment variable.
+fn months_ago_unix(n: u64) -> String {
+ days_ago_unix(n * 30)
+}
+
+/// Populates last-use database and the cache files.
+///
+/// This makes it easier to more accurately specify exact sizes. Creating
+/// specific sizes with `Package` is too difficult.
+fn populate_cache(config: &Config, test_crates: &[(&str, u64, u64, u64)]) -> (PathBuf, PathBuf) {
+ let cache_dir = paths::home().join(".cargo/registry/cache/example.com-a6c4a5adcb232b9a");
+ let src_dir = paths::home().join(".cargo/registry/src/example.com-a6c4a5adcb232b9a");
+
+ GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .rm_rf();
+
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let mut tracker = GlobalCacheTracker::new(&config).unwrap();
+ let mut deferred = DeferredGlobalLastUse::new();
+
+ cache_dir.rm_rf();
+ cache_dir.mkdir_p();
+ src_dir.rm_rf();
+ src_dir.mkdir_p();
+ paths::home()
+ .join(".cargo/registry/index/example.com-a6c4a5adcb232b9a")
+ .mkdir_p();
+ let mut create = |name: &str, age, crate_size: u64, src_size: u64| {
+ let crate_filename = InternedString::new(&format!("{name}.crate"));
+ deferred.mark_registry_crate_used_stamp(
+ global_cache_tracker::RegistryCrate {
+ encoded_registry_name: "example.com-a6c4a5adcb232b9a".into(),
+ crate_filename,
+ size: crate_size,
+ },
+ Some(&days_ago(age)),
+ );
+ deferred.mark_registry_src_used_stamp(
+ global_cache_tracker::RegistrySrc {
+ encoded_registry_name: "example.com-a6c4a5adcb232b9a".into(),
+ package_dir: name.into(),
+ size: Some(src_size),
+ },
+ Some(&days_ago(age)),
+ );
+ std::fs::write(
+ cache_dir.join(crate_filename),
+ "x".repeat(crate_size as usize),
+ )
+ .unwrap();
+ let path = src_dir.join(name);
+ path.mkdir_p();
+ std::fs::write(path.join("data"), "x".repeat(src_size as usize)).unwrap()
+ };
+
+ for (name, age, crate_size, src_size) in test_crates {
+ create(name, *age, *crate_size, *src_size);
+ }
+ deferred.save(&mut tracker).unwrap();
+
+ (cache_dir, src_dir)
+}
+
+#[cargo_test]
+fn auto_gc_gated() {
+ // Requires -Zgc to both track last-use data and to run auto-gc.
+ let p = basic_foo_bar_project();
+ p.cargo("check")
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ // Check that it did not create a database or delete anything.
+ let config = ConfigBuilder::new().build();
+ assert!(!GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .exists());
+ assert_eq!(get_index_names().len(), 1);
+
+ // Again in the future, shouldn't auto-gc.
+ p.cargo("check").run();
+ assert!(!GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .exists());
+ assert_eq!(get_index_names().len(), 1);
+}
+
+#[cargo_test]
+fn clean_gc_gated() {
+ cargo_process("clean gc")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: the `cargo clean gc` command is unstable, and only available on the \
+nightly channel of Cargo, but this is the `stable` channel
+See [..]
+See [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn implies_source() {
+ // Checks that when a src, crate, or checkout is marked as used, the
+ // corresponding index or git db also gets marked as used.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let mut deferred = DeferredGlobalLastUse::new();
+ let mut tracker = GlobalCacheTracker::new(&config).unwrap();
+
+ deferred.mark_registry_crate_used(global_cache_tracker::RegistryCrate {
+ encoded_registry_name: "example.com-a6c4a5adcb232b9a".into(),
+ crate_filename: "regex-1.8.4.crate".into(),
+ size: 123,
+ });
+ deferred.mark_registry_src_used(global_cache_tracker::RegistrySrc {
+ encoded_registry_name: "index.crates.io-6f17d22bba15001f".into(),
+ package_dir: "rand-0.8.5".into(),
+ size: None,
+ });
+ deferred.mark_git_checkout_used(global_cache_tracker::GitCheckout {
+ encoded_git_name: "cargo-e7ff1db891893a9e".into(),
+ short_name: "f0a4ee0".into(),
+ size: None,
+ });
+ deferred.save(&mut tracker).unwrap();
+
+ let mut indexes = tracker.registry_index_all().unwrap();
+ assert_eq!(indexes.len(), 2);
+ indexes.sort_by(|a, b| a.0.encoded_registry_name.cmp(&b.0.encoded_registry_name));
+ assert_eq!(
+ indexes[0].0.encoded_registry_name,
+ "example.com-a6c4a5adcb232b9a"
+ );
+ assert_eq!(
+ indexes[1].0.encoded_registry_name,
+ "index.crates.io-6f17d22bba15001f"
+ );
+
+ let dbs = tracker.git_db_all().unwrap();
+ assert_eq!(dbs.len(), 1);
+ assert_eq!(dbs[0].0.encoded_git_name, "cargo-e7ff1db891893a9e");
+}
+
+#[cargo_test]
+fn auto_gc_defaults() {
+ // Checks that the auto-gc deletes old entries, and leaves new ones intact.
+ Package::new("old", "1.0.0").publish();
+ Package::new("new", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ old = "1.0"
+ new = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Populate the last-use data.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ assert_eq!(get_registry_names("src"), ["new-1.0.0", "old-1.0.0"]);
+ assert_eq!(
+ get_registry_names("cache"),
+ ["new-1.0.0.crate", "old-1.0.0.crate"]
+ );
+
+ // Run again with just one package. Make sure the old src gets deleted,
+ // but .crate does not.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ new = "1.0"
+ "#,
+ );
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(2))
+ .run();
+ assert_eq!(get_registry_names("src"), ["new-1.0.0"]);
+ assert_eq!(
+ get_registry_names("cache"),
+ ["new-1.0.0.crate", "old-1.0.0.crate"]
+ );
+
+ // Run again after the .crate should have aged out.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ assert_eq!(get_registry_names("src"), ["new-1.0.0"]);
+ assert_eq!(get_registry_names("cache"), ["new-1.0.0.crate"]);
+}
+
+#[cargo_test]
+fn auto_gc_config() {
+ // Can configure auto gc settings.
+ Package::new("old", "1.0.0").publish();
+ Package::new("new", "1.0.0").publish();
+ let p = project()
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [gc.auto]
+ frequency = "always"
+ max-src-age = "1 day"
+ max-crate-age = "3 days"
+ max-index-age = "3 days"
+ max-git-co-age = "1 day"
+ max-git-db-age = "3 days"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ old = "1.0"
+ new = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Populate the last-use data.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(4))
+ .run();
+ assert_eq!(get_registry_names("src"), ["new-1.0.0", "old-1.0.0"]);
+ assert_eq!(
+ get_registry_names("cache"),
+ ["new-1.0.0.crate", "old-1.0.0.crate"]
+ );
+
+ // Run again with just one package. Make sure the old src gets deleted,
+ // but .crate does not.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ new = "1.0"
+ "#,
+ );
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(2))
+ .run();
+ assert_eq!(get_registry_names("src"), ["new-1.0.0"]);
+ assert_eq!(
+ get_registry_names("cache"),
+ ["new-1.0.0.crate", "old-1.0.0.crate"]
+ );
+
+ // Run again after the .crate should have aged out.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ assert_eq!(get_registry_names("src"), ["new-1.0.0"]);
+ assert_eq!(get_registry_names("cache"), ["new-1.0.0.crate"]);
+}
+
+#[cargo_test]
+fn frequency() {
+ // gc.auto.frequency settings
+ let p = basic_foo_bar_project();
+ p.change_file(
+ ".cargo/config.toml",
+ r#"
+ [gc.auto]
+ frequency = "never"
+ "#,
+ );
+ // Populate data in the past.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ assert_eq!(get_index_names().len(), 1);
+ assert_eq!(get_registry_names("src"), ["bar-1.0.0"]);
+ assert_eq!(get_registry_names("cache"), ["bar-1.0.0.crate"]);
+
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.2.0"));
+
+ // Try after the default expiration time, with "never" it shouldn't gc.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ assert_eq!(get_index_names().len(), 1);
+ assert_eq!(get_registry_names("src"), ["bar-1.0.0"]);
+ assert_eq!(get_registry_names("cache"), ["bar-1.0.0.crate"]);
+
+ // Try again with a setting that allows it to run.
+ p.cargo("check -Zgc")
+ .env("CARGO_GC_AUTO_FREQUENCY", "1 day")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ assert_eq!(get_index_names().len(), 0);
+ assert_eq!(get_registry_names("src").len(), 0);
+ assert_eq!(get_registry_names("cache").len(), 0);
+}
+
+#[cargo_test]
+fn auto_gc_index() {
+ // Deletes the index if it hasn't been used in a while.
+ let p = basic_foo_bar_project();
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ assert_eq!(get_index_names().len(), 1);
+
+ // Make sure it stays within the time frame.
+ p.change_file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ );
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(2))
+ .run();
+ assert_eq!(get_index_names().len(), 1);
+
+ // After it expires, it should be deleted.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ assert_eq!(get_index_names().len(), 0);
+}
+
+#[cargo_test]
+fn auto_gc_git() {
+ // auto-gc should delete git checkouts and dbs.
+
+ // Returns the short git name of a checkout.
+ let short_id = |repo: &git2::Repository| -> String {
+ let head = repo.revparse_single("HEAD").unwrap();
+ let short_id = head.short_id().unwrap();
+ short_id.as_str().unwrap().to_owned()
+ };
+
+ // Set up a git dependency and fetch it and populate the database,
+ // 6 months in the past.
+ let (git_project, git_repo) = git::new_repo("bar", |p| {
+ p.file("Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(6))
+ .run();
+ let db_names = get_git_db_names();
+ assert_eq!(db_names.len(), 1);
+ let first_short_oid = short_id(&git_repo);
+ assert_eq!(
+ get_git_checkout_names(&db_names[0]),
+ [first_short_oid.clone()]
+ );
+
+ // Use a new git checkout, should keep both.
+ git_project.change_file("src/lib.rs", "// modified");
+ git::add(&git_repo);
+ git::commit(&git_repo);
+ p.cargo("update -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(6))
+ .run();
+ assert_eq!(get_git_db_names().len(), 1);
+ let second_short_oid = short_id(&git_repo);
+ let mut both = vec![first_short_oid, second_short_oid.clone()];
+ both.sort();
+ assert_eq!(get_git_checkout_names(&db_names[0]), both);
+
+ // In the future, using the second checkout should delete the first.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ assert_eq!(get_git_db_names().len(), 1);
+ assert_eq!(
+ get_git_checkout_names(&db_names[0]),
+ [second_short_oid.clone()]
+ );
+
+ // After three months, the db should get deleted.
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.2.0"));
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ assert_eq!(get_git_db_names().len(), 0);
+ assert_eq!(get_git_checkout_names(&db_names[0]).len(), 0);
+}
+
+#[cargo_test]
+fn auto_gc_various_commands() {
+ // Checks that auto gc works with a variety of commands.
+ //
+ // Auto-gc is only run on a subset of commands. Generally it is run on
+ // commands that are already doing a lot of work, or heavily involve the
+ // use of the registry.
+ Package::new("bar", "1.0.0").publish();
+ let cmds = ["check", "fetch"];
+ for cmd in cmds {
+ eprintln!("checking command {cmd}");
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Populate the last-use data.
+ p.cargo(cmd)
+ .arg("-Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let indexes = tracker.registry_index_all().unwrap();
+ assert_eq!(indexes.len(), 1);
+ let crates = tracker.registry_crate_all().unwrap();
+ assert_eq!(crates.len(), 1);
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), 1);
+ drop(lock);
+
+ // After everything is aged out, it should all be deleted.
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.2.0"));
+ p.cargo(cmd)
+ .arg("-Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let indexes = tracker.registry_index_all().unwrap();
+ assert_eq!(indexes.len(), 0);
+ let crates = tracker.registry_crate_all().unwrap();
+ assert_eq!(crates.len(), 0);
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), 0);
+ drop(tracker);
+ drop(lock);
+ paths::home().join(".cargo/registry").rm_rf();
+ GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .rm_rf();
+ }
+}
+
+#[cargo_test]
+fn updates_last_use_various_commands() {
+ // Checks that last-use tracking is updated by various commands.
+ //
+ // Not *all* commands update the index tracking, even though they
+ // technically involve reading the index. There isn't a convenient place
+ // to ensure it gets saved while avoiding saving too often in other
+ // commands. For the most part, this should be fine, since these commands
+ // usually aren't run without running one of the commands that does save
+ // the tracking. Some of the commands are:
+ //
+ // - login, owner, yank, search
+ // - report future-incompatibilities
+ // - package --no-verify
+ // - fetch --locked
+ Package::new("bar", "1.0.0").publish();
+ let cmds = [
+ // name, expected_crates (0=doesn't download)
+ ("check", 1),
+ ("fetch", 1),
+ ("tree", 1),
+ ("generate-lockfile", 0),
+ ("update", 0),
+ ("metadata", 1),
+ ("vendor --respect-source-config", 1),
+ ];
+ for (cmd, expected_crates) in cmds {
+ eprintln!("checking command {cmd}");
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Populate the last-use data.
+ p.cargo(cmd)
+ .arg("-Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let indexes = tracker.registry_index_all().unwrap();
+ assert_eq!(indexes.len(), 1);
+ let crates = tracker.registry_crate_all().unwrap();
+ assert_eq!(crates.len(), expected_crates);
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), expected_crates);
+ drop(tracker);
+ drop(lock);
+ paths::home().join(".cargo/registry").rm_rf();
+ GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .rm_rf();
+ }
+}
+
+#[cargo_test]
+fn both_git_and_http_index_cleans() {
+ // Checks that either the git or http index cache gets cleaned.
+ let _crates_io = RegistryBuilder::new().build();
+ let _alternative = RegistryBuilder::new().alternative().http_index().build();
+ Package::new("from_git", "1.0.0").publish();
+ Package::new("from_http", "1.0.0")
+ .alternative(true)
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ from_git = "1.0"
+ from_http = { version = "1.0", registry = "alternative" }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("update -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let indexes = tracker.registry_index_all().unwrap();
+ assert_eq!(indexes.len(), 2);
+ assert_eq!(get_index_names().len(), 2);
+ drop(lock);
+
+ // Running in the future without these indexes should delete them.
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.2.0"));
+ p.cargo("clean gc -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let indexes = tracker.registry_index_all().unwrap();
+ assert_eq!(indexes.len(), 0);
+ assert_eq!(get_index_names().len(), 0);
+ drop(lock);
+}
+
+#[cargo_test]
+fn clean_gc_dry_run() {
+ // Basic `clean --gc --dry-run` test.
+ let p = basic_foo_bar_project();
+ // Populate the last-use data.
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+
+ let registry_root = paths::home().join(".cargo/registry");
+ let glob_registry = |name| -> PathBuf {
+ let mut paths: Vec<_> = glob::glob(registry_root.join(name).join("*").to_str().unwrap())
+ .unwrap()
+ .map(|p| p.unwrap())
+ .collect();
+ assert_eq!(paths.len(), 1);
+ paths.pop().unwrap()
+ };
+ let index = glob_registry("index").ls_r();
+ let src = glob_registry("src").ls_r();
+ let cache = glob_registry("cache").ls_r();
+ let expected_files = index
+ .iter()
+ .chain(src.iter())
+ .chain(cache.iter())
+ .map(|p| p.to_str().unwrap())
+ .join("\n");
+
+ p.cargo("clean gc --dry-run -v -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stdout_unordered(&expected_files)
+ .with_stderr(
+ "[SUMMARY] [..] files, [..] total\n\
+ [WARNING] no files deleted due to --dry-run",
+ )
+ .run();
+
+ // Again, make sure the information is still tracked.
+ p.cargo("clean gc --dry-run -v -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stdout_unordered(&expected_files)
+ .with_stderr(
+ "[SUMMARY] [..] files, [..] total\n\
+ [WARNING] no files deleted due to --dry-run",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn clean_default_gc() {
+ // `clean gc` without options should also gc
+ let p = basic_foo_bar_project();
+ // Populate the last-use data.
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ p.cargo("clean gc -v -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr_unordered(
+ "\
+[REMOVING] [ROOT]/home/.cargo/registry/index/[..]
+[REMOVING] [ROOT]/home/.cargo/registry/src/[..]
+[REMOVING] [ROOT]/home/.cargo/registry/cache/[..]
+[REMOVED] [..] files, [..] total
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn tracks_sizes() {
+ // Checks that sizes are properly tracked in the db.
+ Package::new("dep1", "1.0.0")
+ .file("src/lib.rs", "")
+ .publish();
+ Package::new("dep2", "1.0.0")
+ .file("src/lib.rs", "")
+ .file("data", &"abcdefghijklmnopqrstuvwxyz".repeat(1000))
+ .publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ dep1 = "1.0"
+ dep2 = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+
+ // Check that the crate sizes are the same as on disk.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let mut crates = tracker.registry_crate_all().unwrap();
+ crates.sort_by(|a, b| a.0.crate_filename.cmp(&b.0.crate_filename));
+ let db_sizes: Vec<_> = crates.iter().map(|c| c.0.size).collect();
+
+ let mut actual: Vec<_> = p
+ .glob(paths::home().join(".cargo/registry/cache/*/*"))
+ .map(|p| p.unwrap())
+ .collect();
+ actual.sort();
+ let actual_sizes: Vec<_> = actual
+ .iter()
+ .map(|path| std::fs::metadata(path).unwrap().len())
+ .collect();
+ assert_eq!(db_sizes, actual_sizes);
+
+ // Also check the src sizes are computed.
+ let mut srcs = tracker.registry_src_all().unwrap();
+ srcs.sort_by(|a, b| a.0.package_dir.cmp(&b.0.package_dir));
+ let db_sizes: Vec<_> = srcs.iter().map(|c| c.0.size.unwrap()).collect();
+ let mut actual: Vec<_> = p
+ .glob(paths::home().join(".cargo/registry/src/*/*"))
+ .map(|p| p.unwrap())
+ .collect();
+ actual.sort();
+ // .cargo-ok is not tracked in the size.
+ actual.iter().for_each(|p| p.join(".cargo-ok").rm_rf());
+ let actual_sizes: Vec<_> = actual
+ .iter()
+ .map(|path| cargo_util::du(path, &[]).unwrap())
+ .collect();
+ assert_eq!(db_sizes, actual_sizes);
+ assert!(db_sizes[1] > 26000);
+}
+
+#[cargo_test]
+fn max_size() {
+ // Checks --max-crate-size and --max-src-size with various cleaning thresholds.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+
+ let test_crates = [
+ // name, age, crate_size, src_size
+ ("a-1.0.0", 5, 1, 1),
+ ("b-1.0.0", 6, 2, 2),
+ ("c-1.0.0", 3, 3, 3),
+ ("d-1.0.0", 2, 4, 4),
+ ("e-1.0.0", 2, 5, 5),
+ ("f-1.0.0", 9, 6, 6),
+ ("g-1.0.0", 1, 1, 1),
+ ];
+
+ // Determine the order things get deleted so they can be verified.
+ let mut names_by_timestamp: Vec<_> = test_crates
+ .iter()
+ .map(|(name, age, _, _)| (days_ago_unix(*age), name))
+ .collect();
+ names_by_timestamp.sort();
+ let names_by_timestamp: Vec<_> = names_by_timestamp
+ .into_iter()
+ .map(|(_, name)| name)
+ .collect();
+
+ // This exercises the different boundary conditions.
+ for (clean_size, files, bytes) in [
+ (22, 0, 0),
+ (21, 1, 6),
+ (16, 1, 6),
+ (15, 2, 8),
+ (14, 2, 8),
+ (13, 3, 9),
+ (12, 4, 12),
+ (10, 4, 12),
+ (9, 5, 16),
+ (6, 5, 16),
+ (5, 6, 21),
+ (1, 6, 21),
+ (0, 7, 22),
+ ] {
+ let (removed, kept) = names_by_timestamp.split_at(files);
+ // --max-crate-size
+ let (cache_dir, src_dir) = populate_cache(&config, &test_crates);
+ let mut stderr = String::new();
+ for name in removed {
+ writeln!(stderr, "[REMOVING] [..]{name}.crate").unwrap();
+ }
+ let total_display = if removed.is_empty() {
+ String::new()
+ } else {
+ format!(", {bytes}B total")
+ };
+ let files_display = if files == 1 {
+ format!("1 file")
+ } else {
+ format!("{files} files")
+ };
+ write!(stderr, "[REMOVED] {files_display}{total_display}").unwrap();
+ cargo_process(&format!("clean gc -Zgc -v --max-crate-size={clean_size}"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr_unordered(&stderr)
+ .run();
+ for name in kept {
+ assert!(cache_dir.join(format!("{name}.crate")).exists());
+ }
+ for name in removed {
+ assert!(!cache_dir.join(format!("{name}.crate")).exists());
+ }
+
+ // --max-src-size
+ populate_cache(&config, &test_crates);
+ let mut stderr = String::new();
+ for name in removed {
+ writeln!(stderr, "[REMOVING] [..]{name}").unwrap();
+ }
+ let total_display = if files == 0 {
+ String::new()
+ } else {
+ format!(", {bytes}B total")
+ };
+ write!(stderr, "[REMOVED] {files_display}{total_display}").unwrap();
+ cargo_process(&format!("clean gc -Zgc -v --max-src-size={clean_size}"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr_unordered(&stderr)
+ .run();
+ for name in kept {
+ assert!(src_dir.join(name).exists());
+ }
+ for name in removed {
+ assert!(!src_dir.join(name).exists());
+ }
+ }
+}
+
+#[cargo_test]
+fn max_size_untracked_crate() {
+ // When a .crate file exists from an older version of cargo that did not
+ // track sizes, `clean --max-crate-size` should populate the db with the
+ // sizes.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let cache = paths::home().join(".cargo/registry/cache/example.com-a6c4a5adcb232b9a");
+ cache.mkdir_p();
+ paths::home()
+ .join(".cargo/registry/index/example.com-a6c4a5adcb232b9a")
+ .mkdir_p();
+ // Create the `.crate` files.
+ let test_crates = [
+ // name, size
+ ("a-1.0.0.crate", 1234),
+ ("b-1.0.0.crate", 42),
+ ("c-1.0.0.crate", 0),
+ ];
+ for (name, size) in test_crates {
+ std::fs::write(cache.join(name), "x".repeat(size as usize)).unwrap()
+ }
+ // This should scan the directory and populate the db with the size information.
+ cargo_process("clean gc -Zgc -v --max-crate-size=100000")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[REMOVED] 0 files")
+ .run();
+ // Check that it stored the size data.
+ let _lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let crates = tracker.registry_crate_all().unwrap();
+ let mut actual: Vec<_> = crates
+ .iter()
+ .map(|(rc, _time)| (rc.crate_filename.as_str(), rc.size))
+ .collect();
+ actual.sort();
+ assert_eq!(test_crates, actual.as_slice());
+}
+
+/// Helper to prepare the max-size test.
+fn max_size_untracked_prepare() -> (Config, Project) {
+ // First, publish and download a dependency.
+ let p = basic_foo_bar_project();
+ p.cargo("fetch").run();
+ // Pretend it was an older version that did not track last-use.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .rm_rf();
+ (config, p)
+}
+
+/// Helper to verify the max-size test.
+fn max_size_untracked_verify(config: &Config) {
+ let actual: Vec<_> = glob::glob(
+ paths::home()
+ .join(".cargo/registry/src/*/*")
+ .to_str()
+ .unwrap(),
+ )
+ .unwrap()
+ .map(|p| p.unwrap())
+ .collect();
+ assert_eq!(actual.len(), 1);
+ let actual_size = cargo_util::du(&actual[0], &[]).unwrap();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), 1);
+ assert_eq!(srcs[0].0.size, Some(actual_size));
+ drop(lock);
+}
+
+#[cargo_test]
+fn max_size_untracked_src_from_use() {
+ // When a src directory exists from an older version of cargo that did not
+ // track sizes, doing a build should populate the db with an entry with an
+ // unknown size. `clean --max-src-size` should then fix the size.
+ let (config, p) = max_size_untracked_prepare();
+
+ // Run a command that will update the db with an unknown src size.
+ p.cargo("tree -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ // Check that it is None.
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), 1);
+ assert_eq!(srcs[0].0.size, None);
+ drop(lock);
+
+ // Fix the size.
+ p.cargo("clean gc -v --max-src-size=10000 -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[REMOVED] 0 files")
+ .run();
+ max_size_untracked_verify(&config);
+}
+
+#[cargo_test]
+fn max_size_untracked_src_from_clean() {
+ // When a src directory exists from an older version of cargo that did not
+ // track sizes, `clean --max-src-size` should populate the db with the
+ // sizes.
+ let (config, p) = max_size_untracked_prepare();
+
+ // Clean should scan the src and update the db.
+ p.cargo("clean gc -v --max-src-size=10000 -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[REMOVED] 0 files")
+ .run();
+ max_size_untracked_verify(&config);
+}
+
+#[cargo_test]
+fn max_download_size() {
+ // --max-download-size
+ //
+ // This creates some sample crates of specific sizes, and then tries
+ // deleting at various specific size thresholds that exercise different
+ // edge conditions.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+
+ let test_crates = [
+ // name, age, crate_size, src_size
+ ("d-1.0.0", 4, 4, 5),
+ ("c-1.0.0", 3, 3, 3),
+ ("a-1.0.0", 1, 2, 5),
+ ("b-1.0.0", 1, 1, 7),
+ ];
+
+ for (max_size, num_deleted, files_deleted, bytes) in [
+ (30, 0, 0, 0),
+ (29, 1, 1, 5),
+ (24, 2, 2, 9),
+ (20, 3, 3, 12),
+ (1, 7, 7, 29),
+ (0, 8, 8, 30),
+ ] {
+ populate_cache(&config, &test_crates);
+ // Determine the order things will be deleted.
+ let delete_order: Vec<String> = test_crates
+ .iter()
+ .flat_map(|(name, _, _, _)| [name.to_string(), format!("{name}.crate")])
+ .collect();
+ let (removed, _kept) = delete_order.split_at(num_deleted);
+ let mut stderr = String::new();
+ for name in removed {
+ writeln!(stderr, "[REMOVING] [..]{name}").unwrap();
+ }
+ let files_display = if files_deleted == 1 {
+ format!("1 file")
+ } else {
+ format!("{files_deleted} files")
+ };
+ let total_display = if removed.is_empty() {
+ String::new()
+ } else {
+ format!(", {bytes}B total")
+ };
+ write!(stderr, "[REMOVED] {files_display}{total_display}",).unwrap();
+ cargo_process(&format!("clean gc -Zgc -v --max-download-size={max_size}"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr_unordered(&stderr)
+ .run();
+ }
+}
+
+#[cargo_test]
+fn package_cache_lock_during_build() {
+ // Verifies that a shared lock is held during a build. Resolution and
+ // downloads should be OK while that is held, but mutation should block.
+ //
+ // This works by launching a build with a build script that will pause.
+ // Then it performs other cargo commands and verifies their behavior.
+ Package::new("bar", "1.0.0").publish();
+ let p_foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ std::fs::write("blocking", "").unwrap();
+ let path = std::path::Path::new("ready");
+ loop {
+ if path.exists() {
+ break;
+ } else {
+ std::thread::sleep(std::time::Duration::from_millis(100))
+ }
+ }
+ }
+ "#,
+ )
+ .build();
+ let p_foo2 = project()
+ .at("foo2")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo2"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ // Start a build that will pause once the build starts.
+ let mut foo_child = p_foo
+ .cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .build_command()
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+
+ // Wait for it to enter build script.
+ retry(100, || p_foo.root().join("blocking").exists().then_some(()));
+
+ // Start a build with a different target directory. It should not block,
+ // even though it gets a download lock, and then a shared lock.
+ //
+ // Also verify that auto-gc gets disabled.
+ p_foo2
+ .cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("CARGO_GC_AUTO_FREQUENCY", "always")
+ .env("CARGO_LOG", "gc=debug")
+ .with_stderr_contains("[UPDATING] `dummy-registry` index")
+ .with_stderr_contains("[CHECKING] bar v1.0.0")
+ .with_stderr_contains("[CHECKING] foo2 v0.1.0 [..]")
+ .with_stderr_contains("[FINISHED] [..]")
+ .with_stderr_contains("[..]unable to acquire mutate lock, auto gc disabled")
+ .run();
+
+ // Ensure that the first build really blocked.
+ assert!(matches!(foo_child.try_wait(), Ok(None)));
+
+ // Cleaning while a command is running should block.
+ let mut clean_cmd = p_foo2
+ .cargo("clean gc --max-download-size=0 -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .build_command();
+ clean_cmd.stderr(Stdio::piped());
+ let mut clean_child = clean_cmd.spawn().unwrap();
+
+ // Give the clean command a chance to finish (it shouldn't).
+ sleep_ms(500);
+ // They should both still be running.
+ assert!(matches!(foo_child.try_wait(), Ok(None)));
+ assert!(matches!(clean_child.try_wait(), Ok(None)));
+
+ // Let the original build finish.
+ p_foo.change_file("ready", "");
+
+ // Wait for clean to finish.
+ let thread = std::thread::spawn(|| clean_child.wait_with_output().unwrap());
+ let output = thread_wait_timeout(100, thread);
+ assert!(output.status.success());
+ // Validate the output of the clean.
+ execs()
+ .with_stderr(
+ "\
+[BLOCKING] waiting for file lock on package cache mutation
+[REMOVED] [..]
+",
+ )
+ .run_output(&output);
+}
+
+#[cargo_test]
+fn read_only_locking_auto_gc() {
+ // Tests the behavior for auto-gc on a read-only directory.
+ let p = basic_foo_bar_project();
+ // Populate cache.
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ let cargo_home = paths::home().join(".cargo");
+ let mut perms = std::fs::metadata(&cargo_home).unwrap().permissions();
+ // Test when it can't update auto-gc db.
+ perms.set_readonly(true);
+ std::fs::set_permissions(&cargo_home, perms.clone()).unwrap();
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[CHECKING] bar v1.0.0
+[CHECKING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
+ // Try again without the last-use existing (such as if the cache was
+ // populated by an older version of cargo).
+ perms.set_readonly(false);
+ std::fs::set_permissions(&cargo_home, perms.clone()).unwrap();
+ let config = ConfigBuilder::new().build();
+ GlobalCacheTracker::db_path(&config)
+ .into_path_unlocked()
+ .rm_rf();
+ perms.set_readonly(true);
+ std::fs::set_permissions(&cargo_home, perms.clone()).unwrap();
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[FINISHED] [..]")
+ .run();
+ perms.set_readonly(false);
+ std::fs::set_permissions(&cargo_home, perms).unwrap();
+}
+
+#[cargo_test]
+fn delete_index_also_deletes_crates() {
+ // Checks that when an index is deleted, the src and cache directories also get deleted.
+ let p = basic_foo_bar_project();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+
+ assert_eq!(get_registry_names("src"), ["bar-1.0.0"]);
+ assert_eq!(get_registry_names("cache"), ["bar-1.0.0.crate"]);
+
+ p.cargo("clean gc")
+ .arg("--max-index-age=0 days")
+ .arg("-Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[REMOVED] [..]")
+ .run();
+
+ assert_eq!(get_registry_names("src").len(), 0);
+ assert_eq!(get_registry_names("cache").len(), 0);
+}
+
+#[cargo_test]
+fn clean_syncs_missing_files() {
+ // When files go missing in the cache, clean operations that need to track
+ // the size should also remove them from the database.
+ Package::new("bar", "1.0.0").publish();
+ Package::new("baz", "1.0.0").publish();
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = "1.0"
+ baz = "1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+
+ // Verify things are tracked.
+ let config = ConfigBuilder::new().unstable_flag("gc").build();
+ let lock = config
+ .acquire_package_cache_lock(CacheLockMode::MutateExclusive)
+ .unwrap();
+ let tracker = GlobalCacheTracker::new(&config).unwrap();
+ let crates = tracker.registry_crate_all().unwrap();
+ assert_eq!(crates.len(), 2);
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), 2);
+ drop(lock);
+
+ // Remove the files.
+ for pattern in [
+ ".cargo/registry/cache/*/bar-1.0.0.crate",
+ ".cargo/registry/src/*/bar-1.0.0",
+ ] {
+ p.glob(paths::home().join(pattern))
+ .map(|p| p.unwrap())
+ .next()
+ .unwrap()
+ .rm_rf();
+ }
+
+ // Clean should update the db.
+ p.cargo("clean gc -v --max-download-size=1GB -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[REMOVED] 0 files")
+ .run();
+
+ // Verify
+ let crates = tracker.registry_crate_all().unwrap();
+ assert_eq!(crates.len(), 1);
+ let srcs = tracker.registry_src_all().unwrap();
+ assert_eq!(srcs.len(), 1);
+}
+
+#[cargo_test]
+fn offline_doesnt_auto_gc() {
+ // When running offline, auto-gc shouldn't run.
+ let p = basic_foo_bar_project();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ // Remove the dependency.
+ p.change_file("Cargo.toml", &basic_manifest("foo", "0.1.0"));
+ // Run offline, make sure it doesn't delete anything
+ p.cargo("check --offline -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[CHECKING] foo v0.1.0[..]\n[FINISHED][..]")
+ .run();
+ assert_eq!(get_registry_names("src"), ["bar-1.0.0"]);
+ assert_eq!(get_registry_names("cache"), ["bar-1.0.0.crate"]);
+ // Run online, make sure auto-gc runs.
+ p.cargo("check -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[FINISHED][..]")
+ .run();
+ assert_eq!(get_registry_names("src"), &[] as &[String]);
+ assert_eq!(get_registry_names("cache"), &[] as &[String]);
+}
+
+#[cargo_test]
+fn can_handle_future_schema() -> anyhow::Result<()> {
+ // It should work when a future version of cargo has made schema changes
+ // to the database.
+ let p = basic_foo_bar_project();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ // Modify the schema to pretend this is done by a future version of cargo.
+ let config = ConfigBuilder::new().build();
+ let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked();
+ let conn = rusqlite::Connection::open(&db_path)?;
+ let user_version: u32 =
+ conn.query_row("SELECT user_version FROM pragma_user_version", [], |row| {
+ row.get(0)
+ })?;
+ conn.execute("ALTER TABLE global_data ADD COLUMN foo DEFAULT 123", [])?;
+ conn.pragma_update(None, "user_version", &(user_version + 1))?;
+ drop(conn);
+ // Verify it doesn't blow up.
+ p.cargo("clean gc --max-download-size=0 -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr("[REMOVED] 4 files, [..] total")
+ .run();
+ Ok(())
+}
+
+#[cargo_test]
+fn clean_max_git_age() {
+ // --max-git-*-age flags
+ let (git_a, git_a_repo) = git::new_repo("git_a", |p| {
+ p.file("Cargo.toml", &basic_manifest("git_a", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ git_a = {{ git = '{}' }}
+ "#,
+ git_a.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Populate last-use tracking.
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(4))
+ .run();
+ // Update git_a to create a separate checkout.
+ git_a.change_file("src/lib.rs", "// test");
+ git::add(&git_a_repo);
+ git::commit(&git_a_repo);
+ // Update last-use tracking, where the first git checkout will stay "old".
+ p.cargo("update -p git_a -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(2))
+ .with_stderr(
+ "\
+[UPDATING] git repository [..]
+[UPDATING] git_a v1.0.0 [..]
+",
+ )
+ .run();
+
+ let db_names = get_git_db_names();
+ assert_eq!(db_names.len(), 1);
+ let db_name = &db_names[0];
+ let co_names = get_git_checkout_names(&db_name);
+ assert_eq!(co_names.len(), 2);
+
+ // Delete the first checkout
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-git-co-age=3 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/git_a-[..]/[..]
+[REMOVED] [..]
+",
+ )
+ .run();
+
+ let db_names = get_git_db_names();
+ assert_eq!(db_names.len(), 1);
+ let co_names = get_git_checkout_names(&db_name);
+ assert_eq!(co_names.len(), 1);
+
+ // delete the second checkout
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-git-co-age=0 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/git_a-[..]/[..]
+[REMOVED] [..]
+",
+ )
+ .run();
+
+ let db_names = get_git_db_names();
+ assert_eq!(db_names.len(), 1);
+ let co_names = get_git_checkout_names(&db_name);
+ assert_eq!(co_names.len(), 0);
+
+ // delete the db
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-git-db-age=1 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/db/git_a-[..]
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/git_a-[..]
+[REMOVED] [..]
+",
+ )
+ .run();
+
+ let db_names = get_git_db_names();
+ assert_eq!(db_names.len(), 0);
+ let co_names = get_git_checkout_names(&db_name);
+ assert_eq!(co_names.len(), 0);
+}
+
+#[cargo_test]
+fn clean_max_src_crate_age() {
+ // --max-src-age and --max-crate-age flags
+ let p = basic_foo_bar_project();
+ // Populate last-use tracking.
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(4))
+ .run();
+ // Update bar to create a separate copy with a different timestamp.
+ Package::new("bar", "1.0.1").publish();
+ p.cargo("update -p bar -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(2))
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[UPDATING] bar v1.0.0 -> v1.0.1
+",
+ )
+ .run();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(2))
+ .with_stderr(
+ "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.0.1 [..]
+",
+ )
+ .run();
+
+ assert_eq!(get_registry_names("src"), ["bar-1.0.0", "bar-1.0.1"]);
+ assert_eq!(
+ get_registry_names("cache"),
+ ["bar-1.0.0.crate", "bar-1.0.1.crate"]
+ );
+
+ // Delete the old src.
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-src-age=3 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [..]/bar-1.0.0
+[REMOVED] [..]
+",
+ )
+ .run();
+
+ // delete the second src
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-src-age=0 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [..]/bar-1.0.1
+[REMOVED] [..]
+",
+ )
+ .run();
+
+ // delete the old crate
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-crate-age=3 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [..]/bar-1.0.0.crate
+[REMOVED] [..]
+",
+ )
+ .run();
+
+ // delete the second crate
+ p.cargo("clean gc -v -Zgc")
+ .arg("--max-crate-age=0 days")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [..]/bar-1.0.1.crate
+[REMOVED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn clean_max_git_size() {
+ // clean --max-git-size
+ //
+ // Creates two checkouts. Then sets a size threshold to delete one. And
+ // then with 0 max size to delete everything.
+ let (git_project, git_repo) = git::new_repo("bar", |p| {
+ p.file("Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ // Fetch and populate db.
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(3))
+ .run();
+
+ // Figure out the name of the first checkout.
+ let git_root = paths::home().join(".cargo/git");
+ let db_names = get_git_db_names();
+ assert_eq!(db_names.len(), 1);
+ let db_name = &db_names[0];
+ let co_names = get_git_checkout_names(&db_name);
+ assert_eq!(co_names.len(), 1);
+ let first_co_name = &co_names[0];
+
+ // Make an update and create a new checkout.
+ git_project.change_file("src/lib.rs", "// modified");
+ git::add(&git_repo);
+ git::commit(&git_repo);
+ p.cargo("update -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ // Use a different time so that the first checkout timestamp is less
+ // than the second.
+ .env("__CARGO_TEST_LAST_USE_NOW", days_ago_unix(2))
+ .run();
+
+ // Figure out the threshold to use.
+ let mut co_names = get_git_checkout_names(&db_name);
+ assert_eq!(co_names.len(), 2);
+ co_names.retain(|name| name != first_co_name);
+ assert_eq!(co_names.len(), 1);
+ let second_co_name = &co_names[0];
+ let second_co_path = git_root
+ .join("checkouts")
+ .join(db_name)
+ .join(second_co_name);
+ let second_co_size = cargo_util::du(&second_co_path, &["!.git"]).unwrap();
+
+ let db_size = cargo_util::du(&git_root.join("db").join(db_name), &[]).unwrap();
+
+ let threshold = db_size + second_co_size;
+
+ p.cargo(&format!("clean gc --max-git-size={threshold} -Zgc -v"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(&format!(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/{db_name}/{first_co_name}
+[REMOVED] [..]
+"
+ ))
+ .run();
+
+ // And then try cleaning everything.
+ p.cargo("clean gc --max-git-size=0 -Zgc -v")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr_unordered(&format!(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/{db_name}/{second_co_name}
+[REMOVING] [ROOT]/home/.cargo/git/db/{db_name}
+[REMOVED] [..]
+"
+ ))
+ .run();
+}
+
+// Helper for setting up fake git sizes for git size cleaning.
+fn setup_fake_git_sizes(db_name: &str, db_size: usize, co_sizes: &[usize]) {
+ let base_git = paths::home().join(".cargo/git");
+ let db_path = base_git.join("db").join(db_name);
+ db_path.mkdir_p();
+ std::fs::write(db_path.join("test"), "x".repeat(db_size)).unwrap();
+ let base_co = base_git.join("checkouts").join(db_name);
+ for (i, size) in co_sizes.iter().enumerate() {
+ let co_name = format!("co{i}");
+ let co_path = base_co.join(co_name);
+ co_path.mkdir_p();
+ std::fs::write(co_path.join("test"), "x".repeat(*size)).unwrap();
+ }
+}
+
+#[cargo_test]
+fn clean_max_git_size_untracked() {
+ // If there are git directories that aren't tracked in the database,
+ // `--max-git-size` should pick them up.
+ //
+ // The db_name of "example" depends on the sorting order of the names ("e"
+ // should be after "c"), so that the db comes after the checkouts.
+ setup_fake_git_sizes("example", 5000, &[1000, 2000]);
+ cargo_process(&format!("clean gc -Zgc -v --max-git-size=7000"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/example/co0
+[REMOVED] [..]
+",
+ )
+ .run();
+ cargo_process(&format!("clean gc -Zgc -v --max-git-size=5000"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/example/co1
+[REMOVED] [..]
+",
+ )
+ .run();
+ cargo_process(&format!("clean gc -Zgc -v --max-git-size=0"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/db/example
+[REMOVED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn clean_max_git_size_deletes_co_from_db() {
+ // In the scenario where it thinks it needs to delete the db, it should
+ // also delete all the checkouts.
+ //
+ // The db_name of "abc" depends on the sorting order of the names ("a"
+ // should be before "c"), so that the db comes before the checkouts.
+ setup_fake_git_sizes("abc", 5000, &[1000, 2000]);
+ // This deletes everything because it tries to delete the db, which then
+ // deletes all checkouts.
+ cargo_process(&format!("clean gc -Zgc -v --max-git-size=3000"))
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/db/abc
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/abc/co1
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/abc/co0
+[REMOVED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn handles_missing_index() {
+ // Checks behavior when index is missing.
+ let p = basic_foo_bar_project();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ paths::home().join(".cargo/registry/index").rm_rf();
+ cargo_process("clean gc -v --max-download-size=0 -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr_unordered(
+ "\
+[REMOVING] [ROOT]/home/.cargo/registry/cache/[..]
+[REMOVING] [ROOT]/home/.cargo/registry/src/[..]
+[REMOVED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn handles_missing_git_db() {
+ // Checks behavior when git db is missing.
+ let git_project = git::new("bar", |p| {
+ p.file("Cargo.toml", &basic_manifest("bar", "1.0.0"))
+ .file("src/lib.rs", "")
+ });
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .run();
+ paths::home().join(".cargo/git/db").rm_rf();
+ cargo_process("clean gc -v --max-git-size=0 -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stderr(
+ "\
+[REMOVING] [ROOT]/home/.cargo/git/checkouts/[..]
+[REMOVED] [..]
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn clean_gc_quiet_is_quiet() {
+ // Checks that --quiet works with `cargo clean gc`, since there was a
+ // subtle issue with how the flag is defined as a global flag.
+ let p = basic_foo_bar_project();
+ p.cargo("fetch -Zgc")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .env("__CARGO_TEST_LAST_USE_NOW", months_ago_unix(4))
+ .run();
+ p.cargo("clean gc --quiet -Zgc --dry-run")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stdout("")
+ .with_stderr("")
+ .run();
+ // Verify exact same command without -q would actually display something.
+ p.cargo("clean gc -Zgc --dry-run")
+ .masquerade_as_nightly_cargo(&["gc"])
+ .with_stdout("")
+ .with_stderr(
+ "\
+[SUMMARY] [..] files, [..] total
+[WARNING] no files deleted due to --dry-run
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/install.rs b/src/tools/cargo/tests/testsuite/install.rs
index fd53b607b..16ed32ee7 100644
--- a/src/tools/cargo/tests/testsuite/install.rs
+++ b/src/tools/cargo/tests/testsuite/install.rs
@@ -3,6 +3,7 @@
use std::fs::{self, OpenOptions};
use std::io::prelude::*;
use std::path::Path;
+use std::thread;
use cargo_test_support::compare;
use cargo_test_support::cross_compile;
@@ -11,10 +12,10 @@ use cargo_test_support::registry::{self, registry_path, Package};
use cargo_test_support::{
basic_manifest, cargo_process, no_such_file_err_msg, project, project_in, symlink_supported, t,
};
-use cargo_util::ProcessError;
+use cargo_util::{ProcessBuilder, ProcessError};
use cargo_test_support::install::{
- assert_has_installed_exe, assert_has_not_installed_exe, cargo_home,
+ assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, exe,
};
use cargo_test_support::paths::{self, CargoPathExt};
use std::env;
@@ -1010,7 +1011,7 @@ fn compile_failure() {
.with_status(101)
.with_stderr_contains(
"\
-[ERROR] could not compile `foo` (bin \"foo\") due to previous error
+[ERROR] could not compile `foo` (bin \"foo\") due to 1 previous error
[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be \
found at `[..]target`.\nTo reuse those artifacts with a future compilation, \
set the environment variable `CARGO_TARGET_DIR` to that path.
@@ -2507,3 +2508,118 @@ fn install_incompat_msrv() {
")
.with_status(101).run();
}
+
+fn assert_tracker_noexistence(key: &str) {
+ let v1_data: toml::Value =
+ toml::from_str(&fs::read_to_string(cargo_home().join(".crates.toml")).unwrap()).unwrap();
+ let v2_data: serde_json::Value =
+ serde_json::from_str(&fs::read_to_string(cargo_home().join(".crates2.json")).unwrap())
+ .unwrap();
+
+ assert!(v1_data["v1"].get(key).is_none());
+ assert!(v2_data["installs"][key].is_null());
+}
+
+#[cargo_test]
+fn uninstall_running_binary() {
+ use std::io::Write;
+
+ Package::new("foo", "0.0.1")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ use std::net::TcpStream;
+ use std::env::var;
+ use std::io::Read;
+ fn main() {
+ for i in 0..2 {
+ TcpStream::connect(&var("__ADDR__").unwrap()[..])
+ .unwrap()
+ .read_to_end(&mut Vec::new())
+ .unwrap();
+ }
+ }
+ "#,
+ )
+ .publish();
+
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry [..])
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+ assert_has_installed_exe(cargo_home(), "foo");
+
+ let foo_bin = cargo_home().join("bin").join(exe("foo"));
+ let l = std::net::TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = l.local_addr().unwrap().to_string();
+ let t = thread::spawn(move || {
+ ProcessBuilder::new(foo_bin)
+ .env("__ADDR__", addr)
+ .exec()
+ .unwrap();
+ });
+ let key = "foo 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)";
+
+ #[cfg(windows)]
+ {
+ // Ensure foo is running before the first `cargo uninstall` call
+ l.accept().unwrap().0.write_all(&[1]).unwrap();
+ cargo_process("uninstall foo")
+ .with_status(101)
+ .with_stderr_contains("[ERROR] failed to remove file `[CWD]/home/.cargo/bin/foo[EXE]`")
+ .run();
+ // Ensure foo is stopped before the second `cargo uninstall` call
+ l.accept().unwrap().0.write_all(&[1]).unwrap();
+ t.join().unwrap();
+ cargo_process("uninstall foo")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
+ .run();
+ };
+
+ #[cfg(not(windows))]
+ {
+ // Ensure foo is running before the first `cargo uninstall` call
+ l.accept().unwrap().0.write_all(&[1]).unwrap();
+ cargo_process("uninstall foo")
+ .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
+ .run();
+ l.accept().unwrap().0.write_all(&[1]).unwrap();
+ t.join().unwrap();
+ };
+
+ assert_has_not_installed_exe(cargo_home(), "foo");
+ assert_tracker_noexistence(key);
+
+ cargo_process("install foo")
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/main.rs b/src/tools/cargo/tests/testsuite/main.rs
index 07f749e34..8e2d6dedf 100644
--- a/src/tools/cargo/tests/testsuite/main.rs
+++ b/src/tools/cargo/tests/testsuite/main.rs
@@ -1,6 +1,6 @@
-// See src/cargo/lib.rs for notes on these lint settings.
-#![warn(rust_2018_idioms)]
-#![allow(clippy::all)]
+#![allow(clippy::disallowed_methods)]
+#![allow(clippy::print_stderr)]
+#![allow(clippy::print_stdout)]
#[macro_use]
extern crate cargo_test_macro;
@@ -98,6 +98,7 @@ mod git_auth;
mod git_gc;
mod git_shallow;
mod glob_targets;
+mod global_cache_tracker;
mod help;
mod https;
mod inheritable_workspace_fields;
diff --git a/src/tools/cargo/tests/testsuite/messages.rs b/src/tools/cargo/tests/testsuite/messages.rs
index 2c534d8f0..fb92593bc 100644
--- a/src/tools/cargo/tests/testsuite/messages.rs
+++ b/src/tools/cargo/tests/testsuite/messages.rs
@@ -136,7 +136,7 @@ fn deduplicate_errors() {
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.1 [..]
-{}error: could not compile `foo` (lib) due to previous error
+{}error: could not compile `foo` (lib) due to 1 previous error
",
rustc_message
))
diff --git a/src/tools/cargo/tests/testsuite/metabuild.rs b/src/tools/cargo/tests/testsuite/metabuild.rs
index 022d0bff0..1c0196c98 100644
--- a/src/tools/cargo/tests/testsuite/metabuild.rs
+++ b/src/tools/cargo/tests/testsuite/metabuild.rs
@@ -740,6 +740,7 @@ fn metabuild_failed_build_json() {
r#"
{
"message": {
+ "$message_type": "diagnostic",
"children": "{...}",
"code": "{...}",
"level": "error",
diff --git a/src/tools/cargo/tests/testsuite/package.rs b/src/tools/cargo/tests/testsuite/package.rs
index 4ec4fc0d6..371157e4e 100644
--- a/src/tools/cargo/tests/testsuite/package.rs
+++ b/src/tools/cargo/tests/testsuite/package.rs
@@ -5,6 +5,7 @@ use cargo_test_support::publish::validate_crate_contents;
use cargo_test_support::registry::{self, Package};
use cargo_test_support::{
basic_manifest, cargo_process, git, path2url, paths, project, symlink_supported, t,
+ ProjectBuilder,
};
use flate2::read::GzDecoder;
use std::fs::{self, read_to_string, File};
@@ -54,7 +55,19 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
validate_crate_contents(
@@ -695,6 +708,7 @@ fn ignore_nested() {
authors = []
license = "MIT"
description = "foo"
+ homepage = "https://example.com/"
"#;
let main_rs = r#"
fn main() { println!("hello"); }
@@ -711,8 +725,6 @@ fn ignore_nested() {
p.cargo("package")
.with_stderr(
"\
-[WARNING] manifest has no documentation[..]
-See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
@@ -732,7 +744,17 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
validate_crate_contents(
@@ -2730,6 +2752,7 @@ fn basic_filesizes() {
exclude = ["*.txt"]
license = "MIT"
description = "foo"
+ homepage = "https://example.com/"
"#;
let main_rs_contents = r#"fn main() { println!("🦀"); }"#;
let cargo_toml_contents = format!(
@@ -2740,6 +2763,7 @@ version = "0.0.1"
authors = []
exclude = ["*.txt"]
description = "foo"
+homepage = "https://example.com/"
license = "MIT"
"#,
cargo::core::package::MANIFEST_PREAMBLE
@@ -2775,7 +2799,17 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 [..]
+[VERIFYING] foo v0.0.1 [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] 4 files[..]
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let compressed_size = f.metadata().unwrap().len();
@@ -2802,6 +2836,7 @@ fn larger_filesizes() {
authors = []
license = "MIT"
description = "foo"
+ documentation = "https://example.com/"
"#;
let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
@@ -2820,6 +2855,7 @@ name = "foo"
version = "0.0.1"
authors = []
description = "foo"
+documentation = "https://example.com/"
license = "MIT"
"#,
cargo::core::package::MANIFEST_PREAMBLE
@@ -2857,7 +2893,17 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 [..]
+[VERIFYING] foo v0.0.1 [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] 5 files, [..]
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let compressed_size = f.metadata().unwrap().len();
@@ -2895,6 +2941,7 @@ fn symlink_filesizes() {
authors = []
license = "MIT"
description = "foo"
+ homepage = "https://example.com/"
"#;
let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
@@ -2913,6 +2960,7 @@ name = "foo"
version = "0.0.1"
authors = []
description = "foo"
+homepage = "https://example.com/"
license = "MIT"
"#,
cargo::core::package::MANIFEST_PREAMBLE
@@ -2955,7 +3003,17 @@ src/main.rs.bak
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 [..]
+[VERIFYING] foo v0.0.1 [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] 7 files, [..]
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let compressed_size = f.metadata().unwrap().len();
@@ -3031,7 +3089,19 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
validate_crate_contents(
@@ -3085,7 +3155,19 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
validate_crate_contents(
@@ -3132,3 +3214,142 @@ See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for
&[],
);
}
+
+#[cargo_test]
+fn include_files_called_target_project() {
+ // https://github.com/rust-lang/cargo/issues/12790
+ // files and folders called "target" should be included, unless they're the actual target directory
+ let p = init_and_add_inner_target(project())
+ .file("target/foo.txt", "")
+ .build();
+
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+data/not_target
+data/target
+derp/not_target/foo.txt
+derp/target/foo.txt
+src/main.rs
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn include_files_called_target_git() {
+ // https://github.com/rust-lang/cargo/issues/12790
+ // files and folders called "target" should be included, unless they're the actual target directory
+ let (p, repo) = git::new_repo("foo", |p| init_and_add_inner_target(p));
+ // add target folder but not committed.
+ _ = fs::create_dir(p.build_dir()).unwrap();
+ _ = fs::write(p.build_dir().join("foo.txt"), "").unwrap();
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+data/not_target
+data/target
+derp/not_target/foo.txt
+derp/target/foo.txt
+src/main.rs
+",
+ )
+ .run();
+
+ // if target is committed, it should be included.
+ git::add(&repo);
+ git::commit(&repo);
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+.cargo_vcs_info.json
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+data/not_target
+data/target
+derp/not_target/foo.txt
+derp/target/foo.txt
+src/main.rs
+target/foo.txt
+",
+ )
+ .run();
+}
+
+fn init_and_add_inner_target(p: ProjectBuilder) -> ProjectBuilder {
+ p.file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ // file called target, should be included
+ .file("data/target", "")
+ .file("data/not_target", "")
+ // folder called target, should be included
+ .file("derp/target/foo.txt", "")
+ .file("derp/not_target/foo.txt", "")
+}
+
+#[cargo_test]
+fn build_script_outside_pkg_root() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ license = "MIT"
+ description = "foo"
+ authors = []
+ build = "../t_custom_build/custom_build.rs"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ let mut expect_msg = String::from("\
+warning: manifest has no documentation, homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+error: the source file of build script doesn't appear to exist.
+This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.
+Please update the `build` setting in the manifest at `[CWD]/Cargo.toml` and point to a path inside the root of the package.
+");
+ // custom_build.rs does not exist
+ p.cargo("package -l")
+ .with_status(101)
+ .with_stderr(&expect_msg)
+ .run();
+
+ // custom_build.rs outside the package root
+ let custom_build_root = paths::root().join("t_custom_build");
+ _ = fs::create_dir(&custom_build_root).unwrap();
+ _ = fs::write(&custom_build_root.join("custom_build.rs"), "fn main() {}");
+ expect_msg = format!(
+ "\
+warning: manifest has no documentation, homepage or repository.
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
+error: the source file of build script doesn't appear to be a path inside of the package.
+It is at `{}/t_custom_build/custom_build.rs`, whereas the root the package is `[CWD]`.
+This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.
+Please update the `build` setting in the manifest at `[CWD]/Cargo.toml` and point to a path inside the root of the package.
+", paths::root().display());
+ p.cargo("package -l")
+ .with_status(101)
+ .with_stderr(&expect_msg)
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/patch.rs b/src/tools/cargo/tests/testsuite/patch.rs
index a467f60b5..a4522e822 100644
--- a/src/tools/cargo/tests/testsuite/patch.rs
+++ b/src/tools/cargo/tests/testsuite/patch.rs
@@ -2700,3 +2700,86 @@ perhaps a crate was updated and forgotten to be re-vendored?"#,
)
.run();
}
+
+#[cargo_test]
+fn from_config_empty() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+ "#,
+ )
+ .file(
+ ".cargo/config.toml",
+ r#"
+ [patch.'']
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] [patch] entry `` should be a URL or registry name
+
+Caused by:
+ invalid url ``: relative URL without a base
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn from_manifest_empty() {
+ Package::new("bar", "0.1.0").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "0.1.0"
+
+ [patch.'']
+ bar = { path = 'bar' }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+ .file("bar/src/lib.rs", r#""#)
+ .build();
+
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ [patch] entry `` should be a URL or registry name
+
+Caused by:
+ invalid url ``: relative URL without a base
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/path.rs b/src/tools/cargo/tests/testsuite/path.rs
index ebbb72f9a..f458717cd 100644
--- a/src/tools/cargo/tests/testsuite/path.rs
+++ b/src/tools/cargo/tests/testsuite/path.rs
@@ -83,7 +83,12 @@ fn cargo_compile_with_nested_deps_shorthand() {
p.process(&p.bin("foo")).with_stdout("test passed\n").run();
println!("cleaning");
- p.cargo("clean -v").with_stdout("").run();
+ p.cargo("clean -v")
+ .with_stderr(
+ "[REMOVING] [CWD]/target\n\
+ [REMOVED] [..]",
+ )
+ .run();
println!("building baz");
p.cargo("build -p baz")
.with_stderr(
@@ -350,7 +355,7 @@ fn deep_dependencies_trigger_rebuild() {
in [..]\n",
)
.run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
// Make sure an update to baz triggers a rebuild of bar
//
@@ -437,7 +442,7 @@ fn no_rebuild_two_deps() {
)
.run();
assert!(p.bin("foo").is_file());
- p.cargo("build").with_stdout("").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
assert!(p.bin("foo").is_file());
}
diff --git a/src/tools/cargo/tests/testsuite/pkgid.rs b/src/tools/cargo/tests/testsuite/pkgid.rs
index 88d991e80..fee45b215 100644
--- a/src/tools/cargo/tests/testsuite/pkgid.rs
+++ b/src/tools/cargo/tests/testsuite/pkgid.rs
@@ -1,5 +1,7 @@
//! Tests for the `cargo pkgid` command.
+use cargo_test_support::basic_lib_manifest;
+use cargo_test_support::git;
use cargo_test_support::project;
use cargo_test_support::registry::Package;
@@ -34,7 +36,10 @@ fn local() {
p.cargo("generate-lockfile").run();
p.cargo("pkgid foo")
- .with_stdout(format!("file://[..]{}#0.1.0", p.root().to_str().unwrap()))
+ .with_stdout(format!(
+ "path+file://[..]{}#0.1.0",
+ p.root().to_str().unwrap()
+ ))
.run();
// Bad file URL.
@@ -89,7 +94,7 @@ fn registry() {
p.cargo("generate-lockfile").run();
p.cargo("pkgid crates-io")
- .with_stdout("https://github.com/rust-lang/crates.io-index#crates-io@0.1.0")
+ .with_stdout("registry+https://github.com/rust-lang/crates.io-index#crates-io@0.1.0")
.run();
// Bad URL.
@@ -143,7 +148,7 @@ fn multiple_versions() {
p.cargo("generate-lockfile").run();
p.cargo("pkgid two-ver:0.2.0")
- .with_stdout("https://github.com/rust-lang/crates.io-index#two-ver@0.2.0")
+ .with_stdout("registry+https://github.com/rust-lang/crates.io-index#two-ver@0.2.0")
.run();
// Incomplete version.
@@ -163,7 +168,7 @@ Please re-run this command with one of the following specifications:
p.cargo("pkgid two-ver@0.2")
.with_stdout(
"\
-https://github.com/rust-lang/crates.io-index#two-ver@0.2.0
+registry+https://github.com/rust-lang/crates.io-index#two-ver@0.2.0
",
)
.run();
@@ -195,3 +200,88 @@ Did you mean one of these?
)
.run();
}
+
+// Not for `cargo pkgid` but the `PackageIdSpec` format
+#[cargo_test]
+fn multiple_git_same_version() {
+ // Test what happens if different packages refer to the same git repo with
+ // different refs, and the package version is the same.
+ let (xyz_project, xyz_repo) = git::new_repo("xyz", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("xyz"))
+ .file("src/lib.rs", "fn example() {}")
+ });
+ let rev1 = xyz_repo.revparse_single("HEAD").unwrap().id();
+ xyz_project.change_file("src/lib.rs", "pub fn example() {}");
+ git::add(&xyz_repo);
+ let rev2 = git::commit(&xyz_repo);
+ // Both rev1 and rev2 point to version 0.5.0.
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dependencies]
+ bar = {{ path = "bar" }}
+ xyz = {{ git = "{}", rev = "{}" }}
+
+ "#,
+ xyz_project.url(),
+ rev1
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+
+ [dependencies]
+ xyz = {{ git = "{}", rev = "{}" }}
+ "#,
+ xyz_project.url(),
+ rev2
+ ),
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+ p.cargo("tree")
+ .with_stdout(&format!(
+ "\
+foo v0.1.0 ([..]/foo)
+├── bar v0.1.0 ([..]/foo/bar)
+│ └── xyz v0.5.0 (file://[..]/xyz?rev={}#{})
+└── xyz v0.5.0 (file://[..]/xyz?rev={}#{})
+",
+ rev2,
+ &rev2.to_string()[..8],
+ rev1,
+ &rev1.to_string()[..8]
+ ))
+ .run();
+ // FIXME: This fails since xyz is ambiguous, but the
+ // possible pkgids are also ambiguous.
+ p.cargo("pkgid xyz")
+ .with_status(101)
+ .with_stderr(
+ "\
+error: There are multiple `xyz` packages in your project, and the specification `xyz` is ambiguous.
+Please re-run this command with one of the following specifications:
+ git+file://[..]/xyz?rev=[..]#0.5.0
+ git+file://[..]/xyz?rev=[..]#0.5.0
+",
+ )
+ .run();
+ // TODO: what should the `-p` value be here?
+ //p.cargo("update -p")
+}
diff --git a/src/tools/cargo/tests/testsuite/profile_config.rs b/src/tools/cargo/tests/testsuite/profile_config.rs
index 710a0d8ef..f8a9ae744 100644
--- a/src/tools/cargo/tests/testsuite/profile_config.rs
+++ b/src/tools/cargo/tests/testsuite/profile_config.rs
@@ -1,6 +1,6 @@
//! Tests for profiles defined in config files.
-use cargo::util::toml::schema::TomlDebugInfo;
+use cargo::util_schemas::manifest::TomlDebugInfo;
use cargo_test_support::paths::CargoPathExt;
use cargo_test_support::registry::Package;
use cargo_test_support::{basic_lib_manifest, paths, project};
@@ -428,8 +428,8 @@ fn named_config_profile() {
let profiles = Profiles::new(&ws, profile_name).unwrap();
let crates_io = cargo::core::SourceId::crates_io(&config).unwrap();
- let a_pkg = PackageId::new("a", "0.1.0", crates_io).unwrap();
- let dep_pkg = PackageId::new("dep", "0.1.0", crates_io).unwrap();
+ let a_pkg = PackageId::try_new("a", "0.1.0", crates_io).unwrap();
+ let dep_pkg = PackageId::try_new("dep", "0.1.0", crates_io).unwrap();
// normal package
let kind = CompileKind::Host;
diff --git a/src/tools/cargo/tests/testsuite/profile_custom.rs b/src/tools/cargo/tests/testsuite/profile_custom.rs
index f7139e552..cf9828d37 100644
--- a/src/tools/cargo/tests/testsuite/profile_custom.rs
+++ b/src/tools/cargo/tests/testsuite/profile_custom.rs
@@ -86,6 +86,10 @@ fn invalid_profile_name() {
[ERROR] failed to parse manifest at [..]
Caused by:
+ TOML parse error at line 7, column 26
+ |
+ 7 | [profile.'.release-lto']
+ | ^^^^^^^^^^^^^^
invalid character `.` in profile name `.release-lto`
Allowed characters are letters, numbers, underscore, and hyphen.
",
@@ -626,6 +630,7 @@ See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configur
),
);
+ let highlight = "^".repeat(name.len());
p.cargo("build")
.with_status(101)
.with_stderr(&format!(
@@ -633,11 +638,14 @@ See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configur
error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
Caused by:
- profile name `{}` is reserved
+ TOML parse error at line 6, column 30
+ |
+ 6 | [profile.{name}]
+ | {highlight}
+ profile name `{name}` is reserved
Please choose a different name.
See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles.
",
- name
))
.run();
}
@@ -663,6 +671,10 @@ Caused by:
error: failed to parse manifest at `[ROOT]/foo/Cargo.toml`
Caused by:
+ TOML parse error at line 7, column 25
+ |
+ 7 | [profile.debug]
+ | ^^^^^
profile name `debug` is reserved
To configure the default development profile, use the name `dev` as in [profile.dev]
See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles.
diff --git a/src/tools/cargo/tests/testsuite/profile_trim_paths.rs b/src/tools/cargo/tests/testsuite/profile_trim_paths.rs
index 1d24c159b..8a883a004 100644
--- a/src/tools/cargo/tests/testsuite/profile_trim_paths.rs
+++ b/src/tools/cargo/tests/testsuite/profile_trim_paths.rs
@@ -83,8 +83,8 @@ fn release_profile_default_to_object() {
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] release [..]",
)
.run();
@@ -121,8 +121,8 @@ fn one_option() {
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope={option} \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]",
))
.run();
@@ -158,8 +158,8 @@ fn multiple_options() {
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=diagnostics,macro,object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]",
)
.run();
@@ -193,8 +193,8 @@ fn profile_merge_works() {
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=diagnostics \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] custom [..]",
)
.run();
@@ -238,13 +238,13 @@ fn registry_dependency() {
[COMPILING] bar v0.0.1
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix={pkg_remap} [..]
+ --remap-path-prefix={pkg_remap} \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]
[RUNNING] `target/debug/foo[EXE]`"
))
@@ -292,13 +292,13 @@ fn git_dependency() {
[COMPILING] bar v0.0.1 ({url}[..])
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix={pkg_remap} [..]
+ --remap-path-prefix={pkg_remap} \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]
[RUNNING] `target/debug/foo[EXE]`"
))
@@ -338,13 +338,13 @@ fn path_dependency() {
[COMPILING] bar v0.0.1 ([..]/cocktail-bar)
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]
[RUNNING] `target/debug/foo[EXE]`"
))
@@ -387,13 +387,13 @@ fn path_dependency_outside_workspace() {
[COMPILING] bar v0.0.1 ([..]/bar)
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix={bar_path}=bar-0.0.1 [..]
+ --remap-path-prefix={bar_path}=bar-0.0.1 \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]
[RUNNING] `target/debug/foo[EXE]`"
))
@@ -439,31 +439,81 @@ fn diagnostics_works() {
"\
[RUNNING] [..]rustc [..]\
-Zremap-path-scope=diagnostics \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix={pkg_remap} [..]",
+ --remap-path-prefix={pkg_remap} \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]",
))
.with_stderr_contains(
"\
[RUNNING] [..]rustc [..]\
-Zremap-path-scope=diagnostics \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]",
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]",
)
.run();
}
+#[cfg(target_os = "macos")]
+mod object_works {
+ use super::*;
+
+ fn inspect_debuginfo(path: &std::path::Path) -> Vec<u8> {
+ std::process::Command::new("nm")
+ .arg("-pa")
+ .arg(path)
+ .output()
+ .expect("nm works")
+ .stdout
+ }
+
+ #[cargo_test(requires_nm, nightly, reason = "-Zremap-path-scope is unstable")]
+ fn with_split_debuginfo_off() {
+ object_works_helper("off", inspect_debuginfo);
+ }
+
+ #[cargo_test(requires_nm, nightly, reason = "-Zremap-path-scope is unstable")]
+ fn with_split_debuginfo_packed() {
+ object_works_helper("packed", inspect_debuginfo);
+ }
+
+ #[cargo_test(requires_nm, nightly, reason = "-Zremap-path-scope is unstable")]
+ fn with_split_debuginfo_unpacked() {
+ object_works_helper("unpacked", inspect_debuginfo);
+ }
+}
+
#[cfg(target_os = "linux")]
-#[cargo_test(requires_readelf, nightly, reason = "-Zremap-path-scope is unstable")]
-fn object_works() {
- use std::os::unix::ffi::OsStrExt;
+mod object_works {
+ use super::*;
- let run_readelf = |path| {
+ fn inspect_debuginfo(path: &std::path::Path) -> Vec<u8> {
std::process::Command::new("readelf")
- .arg("-wi")
+ .arg("--debug-dump=info")
+ .arg("--debug-dump=no-follow-links") // older readelf versions don't recognize this option, but it's only a warning
.arg(path)
.output()
.expect("readelf works")
- };
+ .stdout
+ }
+
+ #[cargo_test(requires_readelf, nightly, reason = "-Zremap-path-scope is unstable")]
+ fn with_split_debuginfo_off() {
+ object_works_helper("off", inspect_debuginfo);
+ }
+
+ #[cargo_test(requires_readelf, nightly, reason = "-Zremap-path-scope is unstable")]
+ fn with_split_debuginfo_packed() {
+ object_works_helper("packed", inspect_debuginfo);
+ }
+
+ #[cargo_test(requires_readelf, nightly, reason = "-Zremap-path-scope is unstable")]
+ fn with_split_debuginfo_unpacked() {
+ object_works_helper("unpacked", inspect_debuginfo);
+ }
+}
+
+#[cfg(unix)]
+fn object_works_helper(split_debuginfo: &str, run: impl Fn(&std::path::Path) -> Vec<u8>) {
+ use std::os::unix::ffi::OsStrExt;
let registry_src = paths::home().join(".cargo/registry/src");
let pkg_remap = format!("{}/[..]/bar-0.0.1=bar-0.0.1", registry_src.display());
@@ -478,14 +528,19 @@ fn object_works() {
let p = project()
.file(
"Cargo.toml",
- r#"
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "0.0.1"
- "#,
+
+ [profile.dev]
+ split-debuginfo = "{split_debuginfo}"
+ "#
+ ),
)
.file("src/main.rs", "fn main() { bar::f(); }")
.build();
@@ -497,7 +552,7 @@ fn object_works() {
let bin_path = p.bin("foo");
assert!(bin_path.is_file());
- let stdout = run_readelf(bin_path).stdout;
+ let stdout = run(&bin_path);
// TODO: re-enable this check when rustc bootstrap disables remapping
// <https://github.com/rust-lang/cargo/pull/12625#discussion_r1371714791>
// assert!(memchr::memmem::find(&stdout, rust_src).is_some());
@@ -506,45 +561,69 @@ fn object_works() {
p.cargo("clean").run();
- p.change_file(
- "Cargo.toml",
- r#"
- [package]
- name = "foo"
- version = "0.0.1"
-
- [dependencies]
- bar = "0.0.1"
-
- [profile.dev]
- trim-paths = "object"
- "#,
- );
-
p.cargo("build --verbose -Ztrim-paths")
+ .arg("--config")
+ .arg(r#"profile.dev.trim-paths="object""#)
.masquerade_as_nightly_cargo(&["-Ztrim-paths"])
.with_stderr(&format!(
"\
[COMPILING] bar v0.0.1
-[RUNNING] `rustc [..]\
+[RUNNING] `rustc [..]-C split-debuginfo={split_debuginfo} [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix={pkg_remap} [..]
+ --remap-path-prefix={pkg_remap} \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[COMPILING] foo v0.0.1 ([CWD])
-[RUNNING] `rustc [..]\
+[RUNNING] `rustc [..]-C split-debuginfo={split_debuginfo} [..]\
-Zremap-path-scope=object \
- --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..] \
- --remap-path-prefix=[CWD]= [..]
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]
[FINISHED] dev [..]",
))
.run();
let bin_path = p.bin("foo");
assert!(bin_path.is_file());
- let stdout = run_readelf(bin_path).stdout;
+ let stdout = run(&bin_path);
assert!(memchr::memmem::find(&stdout, rust_src).is_none());
- assert!(memchr::memmem::find(&stdout, registry_src_bytes).is_none());
- assert!(memchr::memmem::find(&stdout, pkg_root).is_none());
+ for line in stdout.split(|c| c == &b'\n') {
+ let registry = memchr::memmem::find(line, registry_src_bytes).is_none();
+ let local = memchr::memmem::find(line, pkg_root).is_none();
+ if registry && local {
+ continue;
+ }
+
+ #[cfg(target_os = "macos")]
+ {
+ // `OSO` symbols can't be trimmed at this moment.
+ // See <https://github.com/rust-lang/rust/issues/116948#issuecomment-1793617018>
+ if memchr::memmem::find(line, b" OSO ").is_some() {
+ continue;
+ }
+
+ // on macOS `SO` symbols are embedded in final binaries and should be trimmed.
+ // See rust-lang/rust#117652.
+ if memchr::memmem::find(line, b" SO ").is_some() {
+ continue;
+ }
+ }
+
+ #[cfg(target_os = "linux")]
+ {
+ // There is a bug in rustc `-Zremap-path-scope`.
+ // See rust-lang/rust/pull/118518
+ if memchr::memmem::find(line, b"DW_AT_comp_dir").is_some() {
+ continue;
+ }
+ if memchr::memmem::find(line, b"DW_AT_GNU_dwo_name").is_some() {
+ continue;
+ }
+ }
+
+ panic!(
+ "unexpected untrimmed symbol: {}",
+ String::from_utf8(line.into()).unwrap()
+ );
+ }
}
// TODO: might want to move to test/testsuite/build_script.rs once stabilized.
@@ -612,3 +691,69 @@ fn custom_build_env_var_trim_paths() {
.run();
}
}
+
+#[cfg(unix)]
+#[cargo_test(requires_lldb, nightly, reason = "-Zremap-path-scope is unstable")]
+fn lldb_works_after_trimmed() {
+ use cargo_test_support::compare::match_contains;
+
+ let run_lldb = |path| {
+ std::process::Command::new("lldb")
+ .args(["-o", "breakpoint set --file src/main.rs --line 4"])
+ .args(["-o", "run"])
+ .args(["-o", "continue"])
+ .args(["-o", "exit"])
+ .arg("--no-use-colors")
+ .arg(path)
+ .output()
+ .expect("lldb works")
+ };
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [profile.dev]
+ trim-paths = "object"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let msg = "Hello, Ferris!";
+ println!("{msg}");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build --verbose -Ztrim-paths")
+ .masquerade_as_nightly_cargo(&["-Ztrim-paths"])
+ .with_stderr_contains(
+ "\
+[RUNNING] `rustc [..]\
+ -Zremap-path-scope=object \
+ --remap-path-prefix=[CWD]=. \
+ --remap-path-prefix=[..]/lib/rustlib/src/rust=/rustc/[..]",
+ )
+ .run();
+
+ let bin_path = p.bin("foo");
+ assert!(bin_path.is_file());
+ let stdout = String::from_utf8(run_lldb(bin_path).stdout).unwrap();
+ match_contains("[..]stopped[..]", &stdout, None).unwrap();
+ match_contains("[..]stop reason = breakpoint[..]", &stdout, None).unwrap();
+ match_contains(
+ "\
+(lldb) continue
+Hello, Ferris!",
+ &stdout,
+ None,
+ )
+ .unwrap();
+}
diff --git a/src/tools/cargo/tests/testsuite/pub_priv.rs b/src/tools/cargo/tests/testsuite/pub_priv.rs
index b2160e0fa..b3d87ce4c 100644
--- a/src/tools/cargo/tests/testsuite/pub_priv.rs
+++ b/src/tools/cargo/tests/testsuite/pub_priv.rs
@@ -199,7 +199,7 @@ Caused by:
}
#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
-fn workspace_dep_made_public() {
+fn workspace_pub_disallowed() {
Package::new("foo1", "0.1.0")
.file("src/lib.rs", "pub struct FromFoo;")
.publish();
@@ -244,5 +244,238 @@ fn workspace_dep_made_public() {
p.cargo("check")
.masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_status(101)
+ .with_stderr(
+ "\
+error: failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+ foo2 is public, but workspace dependencies cannot be public
+",
+ )
+ .run()
+}
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn allow_priv_in_tests() {
+ Package::new("priv_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPriv;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ priv_dep = {version = "0.1.0", public = false}
+ "#,
+ )
+ .file(
+ "tests/mod.rs",
+ "
+ extern crate priv_dep;
+ pub fn use_priv(_: priv_dep::FromPriv) {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --tests --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] priv_dep v0.1.0 ([..])
+[CHECKING] priv_dep v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run()
+}
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn allow_priv_in_benchs() {
+ Package::new("priv_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPriv;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ priv_dep = {version = "0.1.0", public = false}
+ "#,
+ )
+ .file(
+ "benches/mod.rs",
+ "
+ extern crate priv_dep;
+ pub fn use_priv(_: priv_dep::FromPriv) {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --benches --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] priv_dep v0.1.0 ([..])
+[CHECKING] priv_dep v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run()
+}
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn allow_priv_in_bins() {
+ Package::new("priv_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPriv;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ priv_dep = {version = "0.1.0", public = false}
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
+ extern crate priv_dep;
+ pub fn use_priv(_: priv_dep::FromPriv) {}
+ fn main() {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --bins --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] priv_dep v0.1.0 ([..])
+[CHECKING] priv_dep v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run()
+}
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn allow_priv_in_examples() {
+ Package::new("priv_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPriv;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ priv_dep = {version = "0.1.0", public = false}
+ "#,
+ )
+ .file(
+ "examples/lib.rs",
+ "
+ extern crate priv_dep;
+ pub fn use_priv(_: priv_dep::FromPriv) {}
+ fn main() {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --examples --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] priv_dep v0.1.0 ([..])
+[CHECKING] priv_dep v0.1.0
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
+ .run()
+}
+
+#[cargo_test(nightly, reason = "exported_private_dependencies lint is unstable")]
+fn allow_priv_in_custom_build() {
+ Package::new("priv_dep", "0.1.0")
+ .file("src/lib.rs", "pub struct FromPriv;")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["public-dependency"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [build-dependencies]
+ priv_dep = "0.1.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .file(
+ "build.rs",
+ "
+ extern crate priv_dep;
+ pub fn use_priv(_: priv_dep::FromPriv) {}
+ fn main() {}
+ ",
+ )
+ .build();
+
+ p.cargo("check --all-targets --message-format=short")
+ .masquerade_as_nightly_cargo(&["public-dependency"])
+ .with_stderr(
+ "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] priv_dep v0.1.0 ([..])
+[COMPILING] priv_dep v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ )
.run()
}
diff --git a/src/tools/cargo/tests/testsuite/publish_lockfile.rs b/src/tools/cargo/tests/testsuite/publish_lockfile.rs
index 35da5131f..0a7c23368 100644
--- a/src/tools/cargo/tests/testsuite/publish_lockfile.rs
+++ b/src/tools/cargo/tests/testsuite/publish_lockfile.rs
@@ -92,7 +92,17 @@ src/main.rs
",
)
.run();
- p.cargo("package").with_stdout("").run();
+ p.cargo("package")
+ .with_stderr(
+ "\
+[PACKAGING] foo v0.0.1 [..]
+[VERIFYING] foo v0.0.1 [..]
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] [..]
+[PACKAGED] 4 files, [..]
+",
+ )
+ .run();
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
validate_crate_contents(
diff --git a/src/tools/cargo/tests/testsuite/registry.rs b/src/tools/cargo/tests/testsuite/registry.rs
index b5dff2746..6107b6f59 100644
--- a/src/tools/cargo/tests/testsuite/registry.rs
+++ b/src/tools/cargo/tests/testsuite/registry.rs
@@ -549,7 +549,7 @@ fn lockfile_locks() {
p.root().move_into_the_past();
Package::new("bar", "0.0.2").publish();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -602,7 +602,7 @@ fn lockfile_locks_transitively() {
Package::new("baz", "0.0.2").publish();
Package::new("bar", "0.0.2").dep("baz", "*").publish();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -739,7 +739,7 @@ fn yanks_in_lockfiles_are_ok() {
Package::new("bar", "0.0.1").yanked(true).publish();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
p.cargo("update")
.with_status(101)
@@ -792,7 +792,7 @@ fn yanks_in_lockfiles_are_ok_for_other_update() {
Package::new("bar", "0.0.1").yanked(true).publish();
Package::new("baz", "0.0.1").publish();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
Package::new("baz", "0.0.2").publish();
@@ -868,7 +868,18 @@ fn yanks_in_lockfiles_are_ok_with_new_dep() {
"#,
);
- p.cargo("check").with_stdout("").run();
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.0.1 (registry `dummy-registry`)
+[CHECKING] baz v0.0.1
+[CHECKING] foo v0.0.1 [..]
+[FINISHED] [..]
+",
+ )
+ .run();
}
#[cargo_test]
@@ -1272,7 +1283,7 @@ fn git_and_registry_dep() {
p.root().move_into_the_past();
println!("second");
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
diff --git a/src/tools/cargo/tests/testsuite/replace.rs b/src/tools/cargo/tests/testsuite/replace.rs
index b9de51d2f..6c31a023e 100644
--- a/src/tools/cargo/tests/testsuite/replace.rs
+++ b/src/tools/cargo/tests/testsuite/replace.rs
@@ -305,7 +305,7 @@ fn transitive() {
)
.run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -354,7 +354,7 @@ fn persists_across_rebuilds() {
)
.run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -530,7 +530,7 @@ fn override_adds_some_deps() {
)
.run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
Package::new("baz", "0.1.2").publish();
p.cargo("update")
@@ -550,7 +550,7 @@ fn override_adds_some_deps() {
)
.run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -601,8 +601,8 @@ fn locked_means_locked_yes_no_seriously_i_mean_locked() {
p.cargo("check").run();
- p.cargo("check").with_stdout("").run();
- p.cargo("check").with_stdout("").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
+ p.cargo("check").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -1399,7 +1399,7 @@ fn override_respects_spec_metadata() {
[..]
[..]
[..]
-error: could not compile `foo` (lib) due to previous error
+error: could not compile `foo` (lib) due to 1 previous error
",
)
.with_status(101)
diff --git a/src/tools/cargo/tests/testsuite/rust_version.rs b/src/tools/cargo/tests/testsuite/rust_version.rs
index 21321b7c5..d0ea33c83 100644
--- a/src/tools/cargo/tests/testsuite/rust_version.rs
+++ b/src/tools/cargo/tests/testsuite/rust_version.rs
@@ -245,49 +245,81 @@ fn dependency_rust_version_newer_than_package() {
.file("src/main.rs", "fn main(){}")
.build();
+ p.cargo("check")
+ .arg("-Zmsrv-policy")
+ .masquerade_as_nightly_cargo(&["msrv-policy"])
+ .run();
p.cargo("check --ignore-rust-version")
.arg("-Zmsrv-policy")
.masquerade_as_nightly_cargo(&["msrv-policy"])
- // This shouldn't fail
- .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn dependency_rust_version_older_and_newer_than_package() {
+ Package::new("bar", "1.5.0")
+ .rust_version("1.55.0")
+ .file("src/lib.rs", "fn other_stuff() {}")
+ .publish();
+ Package::new("bar", "1.6.0")
+ .rust_version("1.65.0")
+ .file("src/lib.rs", "fn other_stuff() {}")
+ .publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ rust-version = "1.60.0"
+ [dependencies]
+ bar = "1.0.0"
+ "#,
+ )
+ .file("src/main.rs", "fn main(){}")
+ .build();
+
+ p.cargo("check --ignore-rust-version")
+ .arg("-Zmsrv-policy")
+ .masquerade_as_nightly_cargo(&["msrv-policy"])
+ // This should pick 1.6.0
.with_stderr(
"\
[UPDATING] `dummy-registry` index
-[ERROR] failed to select a version for the requirement `bar = \"^1.0.0\"`
-candidate versions found which didn't match: 1.6.0
-location searched: `dummy-registry` index (which is replacing registry `crates-io`)
-required by package `foo v0.0.1 ([CWD])`
-perhaps a crate was updated and forgotten to be re-vendored?
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v1.5.0 (registry `dummy-registry`)
+[CHECKING] bar v1.5.0
+[CHECKING] [..]
+[FINISHED] [..]
",
)
.run();
p.cargo("check")
.arg("-Zmsrv-policy")
.masquerade_as_nightly_cargo(&["msrv-policy"])
- .with_status(101)
- // This should have a better error message
.with_stderr(
"\
-[UPDATING] `dummy-registry` index
-[ERROR] failed to select a version for the requirement `bar = \"^1.0.0\"`
-candidate versions found which didn't match: 1.6.0
-location searched: `dummy-registry` index (which is replacing registry `crates-io`)
-required by package `foo v0.0.1 ([CWD])`
-perhaps a crate was updated and forgotten to be re-vendored?
+[FINISHED] [..]
",
)
.run();
}
#[cargo_test]
-fn dependency_rust_version_older_and_newer_than_package() {
- Package::new("bar", "1.5.0")
- .rust_version("1.55.0")
+fn dependency_rust_version_backtracking() {
+ Package::new("has-rust-version", "1.6.0")
+ .rust_version("1.65.0")
.file("src/lib.rs", "fn other_stuff() {}")
.publish();
- Package::new("bar", "1.6.0")
- .rust_version("1.65.0")
+ Package::new("no-rust-version", "2.1.0")
+ .file("src/lib.rs", "fn other_stuff() {}")
+ .publish();
+ Package::new("no-rust-version", "2.2.0")
.file("src/lib.rs", "fn other_stuff() {}")
+ .dep("has-rust-version", "1.6.0")
.publish();
let p = project()
@@ -300,7 +332,7 @@ fn dependency_rust_version_older_and_newer_than_package() {
authors = []
rust-version = "1.60.0"
[dependencies]
- bar = "1.0.0"
+ no-rust-version = "2"
"#,
)
.file("src/main.rs", "fn main(){}")
@@ -309,13 +341,14 @@ fn dependency_rust_version_older_and_newer_than_package() {
p.cargo("check --ignore-rust-version")
.arg("-Zmsrv-policy")
.masquerade_as_nightly_cargo(&["msrv-policy"])
- // This should pick 1.6.0
.with_stderr(
"\
[UPDATING] `dummy-registry` index
[DOWNLOADING] crates ...
-[DOWNLOADED] bar v1.5.0 (registry `dummy-registry`)
-[CHECKING] bar v1.5.0
+[DOWNLOADED] no-rust-version v2.2.0 (registry `dummy-registry`)
+[DOWNLOADED] has-rust-version v1.6.0 (registry `dummy-registry`)
+[CHECKING] has-rust-version v1.6.0
+[CHECKING] no-rust-version v2.2.0
[CHECKING] [..]
[FINISHED] [..]
",
diff --git a/src/tools/cargo/tests/testsuite/rustflags.rs b/src/tools/cargo/tests/testsuite/rustflags.rs
index 6677beb04..788889951 100644
--- a/src/tools/cargo/tests/testsuite/rustflags.rs
+++ b/src/tools/cargo/tests/testsuite/rustflags.rs
@@ -456,7 +456,7 @@ fn env_rustflags_no_recompile() {
p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
p.cargo("check")
.env("RUSTFLAGS", "--cfg foo")
- .with_stdout("")
+ .with_stderr("[FINISHED] [..]")
.run();
}
@@ -944,7 +944,7 @@ fn build_rustflags_no_recompile() {
p.cargo("check").env("RUSTFLAGS", "--cfg foo").run();
p.cargo("check")
.env("RUSTFLAGS", "--cfg foo")
- .with_stdout("")
+ .with_stderr("[FINISHED] [..]")
.run();
}
@@ -1658,7 +1658,7 @@ fn host_config_rustflags_with_target() {
// regression test for https://github.com/rust-lang/cargo/issues/10206
let p = project()
.file("src/lib.rs", "")
- .file("build.rs.rs", "fn main() { assert!(cfg!(foo)); }")
+ .file("build.rs", "fn main() { assert!(cfg!(foo)); }")
.file(".cargo/config.toml", "target-applies-to-host = false")
.build();
diff --git a/src/tools/cargo/tests/testsuite/test.rs b/src/tools/cargo/tests/testsuite/test.rs
index 5f6528109..6357b950c 100644
--- a/src/tools/cargo/tests/testsuite/test.rs
+++ b/src/tools/cargo/tests/testsuite/test.rs
@@ -1433,7 +1433,7 @@ fn test_then_build() {
.with_stdout_contains("running 0 tests")
.run();
- p.cargo("build").with_stdout("").run();
+ p.cargo("build").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -2423,7 +2423,7 @@ fn dylib_doctest2() {
)
.build();
- p.cargo("test").with_stdout("").run();
+ p.cargo("test").with_stderr("[FINISHED] [..]").run();
}
#[cargo_test]
@@ -3556,6 +3556,39 @@ fn cyclic_dev() {
}
#[cargo_test]
+fn cyclical_dep_with_missing_feature() {
+    // Checks for error handling when a cyclical dev-dependency specifies a
+    // feature that doesn't exist.
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+
+ [dev-dependencies]
+ foo = { path = ".", features = ["missing"] }
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ p.cargo("check")
+ .with_status(101)
+ .with_stderr(
+ "error: failed to select a version for `foo`.
+ ... required by package `foo v0.1.0 ([..]/foo)`
+versions that meet the requirements `*` are: 0.1.0
+
+the package `foo` depends on `foo`, with features: `missing` but `foo` does not have these features.
+
+
+failed to select a version for `foo` which could resolve this conflict",
+ )
+ .run();
+}
+
+#[cargo_test]
fn publish_a_crate_without_tests() {
Package::new("testless", "0.1.0")
.file(
diff --git a/src/tools/cargo/tests/testsuite/update.rs b/src/tools/cargo/tests/testsuite/update.rs
index e636435b0..40bc0b476 100644
--- a/src/tools/cargo/tests/testsuite/update.rs
+++ b/src/tools/cargo/tests/testsuite/update.rs
@@ -1091,11 +1091,8 @@ rustdns.workspace = true
p.cargo("update -p rootcrate")
.with_stderr(&format!(
"\
-[UPDATING] git repository `{}`
[UPDATING] rootcrate v2.29.8 ([CWD]/rootcrate) -> v2.29.81
-[UPDATING] rustdns v0.5.0 ([..]) -> [..]
[UPDATING] subcrate v2.29.8 ([CWD]/subcrate) -> v2.29.81",
- git_project.url(),
))
.run();
}
@@ -1182,11 +1179,96 @@ rustdns.workspace = true
p.cargo("update -p crate2")
.with_stderr(&format!(
"\
-[UPDATING] git repository `{}`
[UPDATING] crate1 v2.29.8 ([CWD]/crate1) -> v2.29.81
-[UPDATING] crate2 v2.29.8 ([CWD]/crate2) -> v2.29.81
-[UPDATING] rustdns v0.5.0 ([..]) -> [..]",
+[UPDATING] crate2 v2.29.8 ([CWD]/crate2) -> v2.29.81",
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn update_only_members_with_workspace() {
+ let git_project = git::new("rustdns", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("rustdns"))
+ .file("src/lib.rs", "pub fn bar() {}")
+ });
+
+ let workspace_toml = format!(
+ r#"
+[workspace.package]
+version = "2.29.8"
+edition = "2021"
+publish = false
+
+[workspace]
+members = [
+ "crate2",
+ "crate1",
+]
+resolver = "2"
+
+[workspace.dependencies]
+# Internal crates
+crate1 = {{ version = "*", path = "./crate1" }}
+
+# External dependencies
+rustdns = {{ version = "0.5.0", default-features = false, git = "{}" }}
+ "#,
+ git_project.url()
+ );
+ let p = project()
+ .file("Cargo.toml", &workspace_toml)
+ .file(
+ "crate2/Cargo.toml",
+ r#"
+[package]
+name = "crate2"
+version.workspace = true
+edition.workspace = true
+publish.workspace = true
+
+[dependencies]
+crate1.workspace = true
+"#,
+ )
+ .file("crate2/src/main.rs", "fn main() {}")
+ .file(
+ "crate1/Cargo.toml",
+ r#"
+[package]
+name = "crate1"
+version.workspace = true
+edition.workspace = true
+publish.workspace = true
+
+[dependencies]
+rustdns.workspace = true
+"#,
+ )
+    .file("crate1/src/lib.rs", "pub fn foo() {}")
+ .build();
+
+    // Generate the lockfile, which fetches the external git dependency
+ p.cargo("generate-lockfile")
+ .with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n",
git_project.url(),
))
.run();
+    // Modify the git dependency's source; the lock file pins its revision,
+    // so this change alone should not be picked up by `update --workspace`
+    git_project.change_file("src/lib.rs", r#"pub fn bar() { println!("hello!"); }"#);
+    // Commit the change so it is visible to cargo; the lock file still pins
+    // the old revision, so the git dependency should stay unchanged
+ let repo = git2::Repository::open(&git_project.root()).unwrap();
+ git::add(&repo);
+ git::commit(&repo);
+ p.change_file("Cargo.toml", &workspace_toml.replace("2.29.8", "2.29.81"));
+
+ p.cargo("update --workspace")
+ .with_stderr(
+ "\
+[UPDATING] crate1 v2.29.8 ([CWD]/crate1) -> v2.29.81
+[UPDATING] crate2 v2.29.8 ([CWD]/crate2) -> v2.29.81",
+ )
+ .run();
}
diff --git a/src/tools/cargo/tests/testsuite/workspaces.rs b/src/tools/cargo/tests/testsuite/workspaces.rs
index 94b5142f4..53ddc1616 100644
--- a/src/tools/cargo/tests/testsuite/workspaces.rs
+++ b/src/tools/cargo/tests/testsuite/workspaces.rs
@@ -2198,6 +2198,7 @@ fn ws_err_unused() {
"[features]",
"[target]",
"[badges]",
+ "[lints]",
] {
let p = project()
.file(
diff --git a/src/tools/cargo/triagebot.toml b/src/tools/cargo/triagebot.toml
index cdf1090a1..c344a0b4d 100644
--- a/src/tools/cargo/triagebot.toml
+++ b/src/tools/cargo/triagebot.toml
@@ -262,6 +262,7 @@ trigger_files = [
"src/bin/cargo/commands/fix.rs",
"src/cargo/ops/fix.rs",
"src/cargo/util/lockserver.rs",
+ "crates/rustfix/",
]
[autolabel."Command-generate-lockfile"]
diff --git a/src/tools/cargo/windows.manifest.xml b/src/tools/cargo/windows.manifest.xml
new file mode 100644
index 000000000..8a9d5d1b7
--- /dev/null
+++ b/src/tools/cargo/windows.manifest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+This is a Windows application manifest file.
+See: https://docs.microsoft.com/en-us/windows/win32/sbscs/application-manifests
+-->
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
+ <!-- Versions cargo supports as hosts -->
+ <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+ <application>
+ <!-- Windows 7 --><supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+ <!-- Windows 8 --><supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+ <!-- Windows 8.1 --><supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+ <!-- Windows 10 and 11 --><supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+ </application>
+ </compatibility>
+ <!-- Use UTF-8 code page -->
+ <asmv3:application>
+ <asmv3:windowsSettings xmlns="http://schemas.microsoft.com/SMI/2019/WindowsSettings">
+ <activeCodePage>UTF-8</activeCodePage>
+ </asmv3:windowsSettings>
+ </asmv3:application>
+ <!-- Remove (most) legacy path limits -->
+ <asmv3:application>
+ <asmv3:windowsSettings xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
+ <ws2:longPathAware>true</ws2:longPathAware>
+ </asmv3:windowsSettings>
+ </asmv3:application>
+</assembly>
diff --git a/src/tools/clippy/.github/workflows/clippy_bors.yml b/src/tools/clippy/.github/workflows/clippy_bors.yml
index f67233dec..73c255507 100644
--- a/src/tools/clippy/.github/workflows/clippy_bors.yml
+++ b/src/tools/clippy/.github/workflows/clippy_bors.yml
@@ -206,6 +206,7 @@ jobs:
max-parallel: 6
matrix:
integration:
+ - 'matthiaskrgr/clippy_ci_panic_test'
- 'rust-lang/cargo'
- 'rust-lang/chalk'
- 'rust-lang/rustfmt'
@@ -220,7 +221,6 @@ jobs:
- 'rust-itertools/itertools'
- 'rust-lang-nursery/failure'
- 'rust-lang/log'
- - 'matthiaskrgr/clippy_ci_panic_test'
runs-on: ubuntu-latest
diff --git a/src/tools/clippy/.github/workflows/deploy.yml b/src/tools/clippy/.github/workflows/deploy.yml
index f42928c2c..999ee7acf 100644
--- a/src/tools/clippy/.github/workflows/deploy.yml
+++ b/src/tools/clippy/.github/workflows/deploy.yml
@@ -52,7 +52,9 @@ jobs:
run: cargo generate-lockfile
- name: Cache
- uses: Swatinem/rust-cache@v1.3.0
+ uses: Swatinem/rust-cache@v2.7.0
+ with:
+ save-if: ${{ github.ref == 'refs/heads/master' }}
- name: cargo collect-metadata
run: cargo collect-metadata
diff --git a/src/tools/clippy/CHANGELOG.md b/src/tools/clippy/CHANGELOG.md
index 87a96bdeb..70aff7f60 100644
--- a/src/tools/clippy/CHANGELOG.md
+++ b/src/tools/clippy/CHANGELOG.md
@@ -6,11 +6,70 @@ document.
## Unreleased / Beta / In Rust Nightly
-[1e8fdf49...master](https://github.com/rust-lang/rust-clippy/compare/1e8fdf49...master)
+[7671c283...master](https://github.com/rust-lang/rust-clippy/compare/7671c283...master)
+
+## Rust 1.74
+
+Current stable, released 2023-11-16
+
+[View all 94 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-08-11T15%3A29%3A18Z..2023-09-25T08%3A48%3A22Z+base%3Amaster)
+
+### New Lints
+
+* [`redundant_as_str`]
+ [#11526](https://github.com/rust-lang/rust-clippy/pull/11526)
+* [`needless_borrows_for_generic_args`]
+ [#11511](https://github.com/rust-lang/rust-clippy/pull/11511)
+* [`path_ends_with_ext`]
+ [#11483](https://github.com/rust-lang/rust-clippy/pull/11483)
+* [`unnecessary_map_on_constructor`]
+ [#11413](https://github.com/rust-lang/rust-clippy/pull/11413)
+* [`missing_asserts_for_indexing`]
+ [#10692](https://github.com/rust-lang/rust-clippy/pull/10692)
+* [`iter_out_of_bounds`]
+ [#11396](https://github.com/rust-lang/rust-clippy/pull/11396)
+* [`implied_bounds_in_impls`]
+ [#11362](https://github.com/rust-lang/rust-clippy/pull/11362)
+* [`reserve_after_initialization`]
+ [#11373](https://github.com/rust-lang/rust-clippy/pull/11373)
+* [`should_panic_without_expect`]
+ [#11204](https://github.com/rust-lang/rust-clippy/pull/11204)
+
+### Moves and Deprecations
+
+* Renamed `incorrect_clone_impl_on_copy_type` to [`non_canonical_clone_impl`]
+ [#11358](https://github.com/rust-lang/rust-clippy/pull/11358)
+* Renamed `incorrect_partial_ord_impl_on_ord_type` to [`non_canonical_partial_ord_impl`]
+ [#11358](https://github.com/rust-lang/rust-clippy/pull/11358)
+* Moved [`non_canonical_clone_impl`] to `suspicious` (Now warn-by-default)
+ [#11358](https://github.com/rust-lang/rust-clippy/pull/11358)
+* Moved [`non_canonical_partial_ord_impl`] to `suspicious` (Now warn-by-default)
+ [#11358](https://github.com/rust-lang/rust-clippy/pull/11358)
+* Moved [`needless_pass_by_ref_mut`] to `nursery` (Now allow-by-default)
+ [#11596](https://github.com/rust-lang/rust-clippy/pull/11596)
+
+### Enhancements
+
+* [`undocumented_unsafe_blocks`]: The config values [`accept-comment-above-statement`] and
+  [`accept-comment-above-attributes`] now default to `true`
+ [#11170](https://github.com/rust-lang/rust-clippy/pull/11170)
+* [`explicit_iter_loop`]: Added [`enforce-iter-loop-reborrow`] to disable reborrow linting by default
+ [#11418](https://github.com/rust-lang/rust-clippy/pull/11418)
+
+### ICE Fixes
+
+* [`enum_variant_names`]: No longer crashes if the threshold is 0 and the enum has no variants
+ [#11552](https://github.com/rust-lang/rust-clippy/pull/11552)
+* [`cast_possible_truncation`]: No longer crashes on values larger than `u64::MAX`
+ [#11517](https://github.com/rust-lang/rust-clippy/pull/11517)
+* [`tuple_array_conversions`]: No longer crashes if the array length is not usize
+ [#11379](https://github.com/rust-lang/rust-clippy/pull/11379)
+* [`useless_conversion`]: No longer crashes, when the receiver is a non-fn item
+ [#11070](https://github.com/rust-lang/rust-clippy/pull/11070)
## Rust 1.73
-Current stable, released 2023-10-05
+Released 2023-10-05
[View all 103 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-07-02T12%3A24%3A40Z..2023-08-11T11%3A09%3A56Z+base%3Amaster)
@@ -4887,6 +4946,7 @@ Released 2018-09-13
[`blanket_clippy_restriction_lints`]: https://rust-lang.github.io/rust-clippy/master/index.html#blanket_clippy_restriction_lints
[`block_in_if_condition_expr`]: https://rust-lang.github.io/rust-clippy/master/index.html#block_in_if_condition_expr
[`block_in_if_condition_stmt`]: https://rust-lang.github.io/rust-clippy/master/index.html#block_in_if_condition_stmt
+[`blocks_in_conditions`]: https://rust-lang.github.io/rust-clippy/master/index.html#blocks_in_conditions
[`blocks_in_if_conditions`]: https://rust-lang.github.io/rust-clippy/master/index.html#blocks_in_if_conditions
[`bool_assert_comparison`]: https://rust-lang.github.io/rust-clippy/master/index.html#bool_assert_comparison
[`bool_comparison`]: https://rust-lang.github.io/rust-clippy/master/index.html#bool_comparison
@@ -5069,6 +5129,7 @@ Released 2018-09-13
[`if_then_some_else_none`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_then_some_else_none
[`ifs_same_cond`]: https://rust-lang.github.io/rust-clippy/master/index.html#ifs_same_cond
[`ignored_unit_patterns`]: https://rust-lang.github.io/rust-clippy/master/index.html#ignored_unit_patterns
+[`impl_hash_borrow_with_str_and_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#impl_hash_borrow_with_str_and_bytes
[`impl_trait_in_params`]: https://rust-lang.github.io/rust-clippy/master/index.html#impl_trait_in_params
[`implicit_clone`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_clone
[`implicit_hasher`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_hasher
@@ -5085,9 +5146,11 @@ Released 2018-09-13
[`index_refutable_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice
[`indexing_slicing`]: https://rust-lang.github.io/rust-clippy/master/index.html#indexing_slicing
[`ineffective_bit_mask`]: https://rust-lang.github.io/rust-clippy/master/index.html#ineffective_bit_mask
+[`ineffective_open_options`]: https://rust-lang.github.io/rust-clippy/master/index.html#ineffective_open_options
[`inefficient_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#inefficient_to_string
[`infallible_destructuring_match`]: https://rust-lang.github.io/rust-clippy/master/index.html#infallible_destructuring_match
[`infinite_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#infinite_iter
+[`infinite_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#infinite_loop
[`inherent_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#inherent_to_string
[`inherent_to_string_shadow_display`]: https://rust-lang.github.io/rust-clippy/master/index.html#inherent_to_string_shadow_display
[`init_numbered_fields`]: https://rust-lang.github.io/rust-clippy/master/index.html#init_numbered_fields
@@ -5123,12 +5186,14 @@ Released 2018-09-13
[`iter_on_empty_collections`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_on_empty_collections
[`iter_on_single_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_on_single_items
[`iter_out_of_bounds`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_out_of_bounds
+[`iter_over_hash_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_over_hash_type
[`iter_overeager_cloned`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_overeager_cloned
[`iter_skip_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_next
[`iter_skip_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_zero
[`iter_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_with_drain
[`iter_without_into_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_without_into_iter
[`iterator_step_by_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iterator_step_by_zero
+[`join_absolute_paths`]: https://rust-lang.github.io/rust-clippy/master/index.html#join_absolute_paths
[`just_underscores_and_digits`]: https://rust-lang.github.io/rust-clippy/master/index.html#just_underscores_and_digits
[`large_const_arrays`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_const_arrays
[`large_digit_groups`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_digit_groups
@@ -5320,6 +5385,7 @@ Released 2018-09-13
[`option_expect_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_expect_used
[`option_filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_filter_map
[`option_if_let_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_if_let_else
+[`option_map_or_err_ok`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_or_err_ok
[`option_map_or_none`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_or_none
[`option_map_unit_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_unit_fn
[`option_map_unwrap_or`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_unwrap_or
@@ -5399,6 +5465,7 @@ Released 2018-09-13
[`ref_patterns`]: https://rust-lang.github.io/rust-clippy/master/index.html#ref_patterns
[`regex_macro`]: https://rust-lang.github.io/rust-clippy/master/index.html#regex_macro
[`repeat_once`]: https://rust-lang.github.io/rust-clippy/master/index.html#repeat_once
+[`repeat_vec_with_capacity`]: https://rust-lang.github.io/rust-clippy/master/index.html#repeat_vec_with_capacity
[`replace_consts`]: https://rust-lang.github.io/rust-clippy/master/index.html#replace_consts
[`reserve_after_initialization`]: https://rust-lang.github.io/rust-clippy/master/index.html#reserve_after_initialization
[`rest_pat_in_fully_bound_structs`]: https://rust-lang.github.io/rust-clippy/master/index.html#rest_pat_in_fully_bound_structs
@@ -5482,6 +5549,7 @@ Released 2018-09-13
[`tabs_in_doc_comments`]: https://rust-lang.github.io/rust-clippy/master/index.html#tabs_in_doc_comments
[`temporary_assignment`]: https://rust-lang.github.io/rust-clippy/master/index.html#temporary_assignment
[`temporary_cstring_as_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#temporary_cstring_as_ptr
+[`test_attr_in_doctest`]: https://rust-lang.github.io/rust-clippy/master/index.html#test_attr_in_doctest
[`tests_outside_test_module`]: https://rust-lang.github.io/rust-clippy/master/index.html#tests_outside_test_module
[`to_digit_is_some`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_digit_is_some
[`to_string_in_display`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_string_in_display
@@ -5518,6 +5586,7 @@ Released 2018-09-13
[`undropped_manually_drops`]: https://rust-lang.github.io/rust-clippy/master/index.html#undropped_manually_drops
[`unicode_not_nfc`]: https://rust-lang.github.io/rust-clippy/master/index.html#unicode_not_nfc
[`unimplemented`]: https://rust-lang.github.io/rust-clippy/master/index.html#unimplemented
+[`uninhabited_references`]: https://rust-lang.github.io/rust-clippy/master/index.html#uninhabited_references
[`uninit_assumed_init`]: https://rust-lang.github.io/rust-clippy/master/index.html#uninit_assumed_init
[`uninit_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#uninit_vec
[`uninlined_format_args`]: https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
@@ -5678,4 +5747,5 @@ Released 2018-09-13
[`absolute-paths-allowed-crates`]: https://doc.rust-lang.org/clippy/lint_configuration.html#absolute-paths-allowed-crates
[`allowed-dotfiles`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allowed-dotfiles
[`enforce-iter-loop-reborrow`]: https://doc.rust-lang.org/clippy/lint_configuration.html#enforce-iter-loop-reborrow
+[`check-private-items`]: https://doc.rust-lang.org/clippy/lint_configuration.html#check-private-items
<!-- end autogenerated links to configuration documentation -->
diff --git a/src/tools/clippy/CONTRIBUTING.md b/src/tools/clippy/CONTRIBUTING.md
index 04af1b98b..b1a59238c 100644
--- a/src/tools/clippy/CONTRIBUTING.md
+++ b/src/tools/clippy/CONTRIBUTING.md
@@ -146,16 +146,10 @@ For example, the [`else_if_without_else`][else_if_without_else] lint is register
pub mod else_if_without_else;
// ...
-pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: &Conf) {
+pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) {
// ...
store.register_early_pass(|| Box::new(else_if_without_else::ElseIfWithoutElse));
// ...
-
- store.register_group(true, "clippy::restriction", Some("clippy_restriction"), vec![
- // ...
- LintId::of(&else_if_without_else::ELSE_IF_WITHOUT_ELSE),
- // ...
- ]);
}
```
diff --git a/src/tools/clippy/Cargo.toml b/src/tools/clippy/Cargo.toml
index 4b6688a76..f6084a462 100644
--- a/src/tools/clippy/Cargo.toml
+++ b/src/tools/clippy/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy"
-version = "0.1.75"
+version = "0.1.76"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
@@ -37,7 +37,7 @@ toml = "0.7.3"
walkdir = "2.3"
# This is used by the `collect-metadata` alias.
filetime = "0.2"
-itertools = "0.10.1"
+itertools = "0.11"
# UI test dependencies
clippy_utils = { path = "clippy_utils" }
diff --git a/src/tools/clippy/book/src/development/adding_lints.md b/src/tools/clippy/book/src/development/adding_lints.md
index 55c0e105b..e30a5f9fe 100644
--- a/src/tools/clippy/book/src/development/adding_lints.md
+++ b/src/tools/clippy/book/src/development/adding_lints.md
@@ -202,7 +202,7 @@ is. This file has already imported some initial things we will need:
```rust
use rustc_lint::{EarlyLintPass, EarlyContext};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_ast::ast::*;
```
@@ -270,7 +270,7 @@ When using `cargo dev new_lint`, the lint is automatically registered and
nothing more has to be done.
When declaring a new lint by hand and `cargo dev update_lints` is used, the lint
-pass may have to be registered manually in the `register_plugins` function in
+pass may have to be registered manually in the `register_lints` function in
`clippy_lints/src/lib.rs`:
```rust,ignore
@@ -436,7 +436,7 @@ need to ensure that the MSRV configured for the project is >= the MSRV of the
required Rust feature. If multiple features are required, just use the one with
a lower MSRV.
-First, add an MSRV alias for the required feature in [`clippy_utils::msrvs`].
+First, add an MSRV alias for the required feature in [`clippy_config::msrvs`].
This can be accessed later as `msrvs::STR_STRIP_PREFIX`, for example.
```rust
@@ -506,7 +506,7 @@ fn msrv_1_45() {
```
As a last step, the lint should be added to the lint documentation. This is done
-in `clippy_lints/src/utils/conf.rs`:
+in `clippy_config/src/conf.rs`:
```rust
define_Conf! {
@@ -516,7 +516,9 @@ define_Conf! {
}
```
-[`clippy_utils::msrvs`]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/msrvs/index.html
+[`clippy_config::msrvs`]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_config/msrvs/index.html
+
+Afterwards update the documentation for the book as described in [Adding configuration to a lint](#adding-configuration-to-a-lint).
## Author lint
@@ -657,7 +659,7 @@ Adding a configuration to a lint can be useful for
thresholds or to constrain some behavior that can be seen as a false positive
for some users. Adding a configuration is done in the following steps:
-1. Adding a new configuration entry to [`clippy_lints::utils::conf`] like this:
+1. Adding a new configuration entry to [`clippy_config::conf`] like this:
```rust,ignore
/// Lint: LINT_NAME.
@@ -736,7 +738,7 @@ for some users. Adding a configuration is done in the following steps:
Run `cargo collect-metadata` to generate documentation changes for the book.
-[`clippy_lints::utils::conf`]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/utils/conf.rs
+[`clippy_config::conf`]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_config/src/conf.rs
[`clippy_lints` lib file]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/lib.rs
[`tests/ui`]: https://github.com/rust-lang/rust-clippy/blob/master/tests/ui
[`tests/ui-toml`]: https://github.com/rust-lang/rust-clippy/blob/master/tests/ui-toml
diff --git a/src/tools/clippy/book/src/development/defining_lints.md b/src/tools/clippy/book/src/development/defining_lints.md
index 7c4aa5d45..54f77b001 100644
--- a/src/tools/clippy/book/src/development/defining_lints.md
+++ b/src/tools/clippy/book/src/development/defining_lints.md
@@ -186,7 +186,7 @@ However, sometimes we might want to declare a new lint by hand. In this case,
we'd use `cargo dev update_lints` command afterwards.
When a lint is manually declared, we might need to register the lint pass
-manually in the `register_plugins` function in `clippy_lints/src/lib.rs`:
+manually in the `register_lints` function in `clippy_lints/src/lib.rs`:
```rust
store.register_late_pass(|_| Box::new(foo_functions::FooFunctions));
diff --git a/src/tools/clippy/book/src/development/type_checking.md b/src/tools/clippy/book/src/development/type_checking.md
index d7c2775b8..a8c9660da 100644
--- a/src/tools/clippy/book/src/development/type_checking.md
+++ b/src/tools/clippy/book/src/development/type_checking.md
@@ -119,7 +119,7 @@ an `u32`. As far as `hir::Ty` is concerned those might be different types. But a
understands that they're the same type, in-depth lifetimes, etc...
To get from a `hir::Ty` to a `ty::Ty`, you can use the [`hir_ty_to_ty`][hir_ty_to_ty] function outside of bodies or
-outside of bodies the [`TypeckResults::node_type()`][node_type] method.
+the [`TypeckResults::node_type()`][node_type] method inside of bodies.
> **Warning**: Don't use `hir_ty_to_ty` inside of bodies, because this can cause ICEs.
diff --git a/src/tools/clippy/book/src/lint_configuration.md b/src/tools/clippy/book/src/lint_configuration.md
index 841a5b6d0..2bb89321c 100644
--- a/src/tools/clippy/book/src/lint_configuration.md
+++ b/src/tools/clippy/book/src/lint_configuration.md
@@ -150,6 +150,7 @@ The minimum rust version that the project supports. Defaults to the `rust-versio
* [`tuple_array_conversions`](https://rust-lang.github.io/rust-clippy/master/index.html#tuple_array_conversions)
* [`manual_try_fold`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_try_fold)
* [`manual_hash_one`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_hash_one)
+* [`iter_kv_map`](https://rust-lang.github.io/rust-clippy/master/index.html#iter_kv_map)
## `cognitive-complexity-threshold`
@@ -791,3 +792,16 @@ for _ in &mut *rmvec {}
* [`explicit_iter_loop`](https://rust-lang.github.io/rust-clippy/master/index.html#explicit_iter_loop)
+## `check-private-items`
+
+Whether to also run the listed lints on private items.
+**Default Value:** `false`
+
+---
+**Affected lints:**
+* [`missing_safety_doc`](https://rust-lang.github.io/rust-clippy/master/index.html#missing_safety_doc)
+* [`unnecessary_safety_doc`](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_safety_doc)
+* [`missing_panics_doc`](https://rust-lang.github.io/rust-clippy/master/index.html#missing_panics_doc)
+* [`missing_errors_doc`](https://rust-lang.github.io/rust-clippy/master/index.html#missing_errors_doc)
+
+
diff --git a/src/tools/clippy/clippy.toml b/src/tools/clippy/clippy.toml
index cda8d17ee..4a1805f75 100644
--- a/src/tools/clippy/clippy.toml
+++ b/src/tools/clippy/clippy.toml
@@ -1 +1,7 @@
avoid-breaking-exported-api = false
+
+# use the various `span_lint_*` methods instead, which also add a link to the docs
+disallowed-methods = [
+ "rustc_lint::context::LintContext::struct_span_lint",
+ "rustc_middle::ty::context::TyCtxt::struct_span_lint_hir"
+]
diff --git a/src/tools/clippy/clippy_config/Cargo.toml b/src/tools/clippy/clippy_config/Cargo.toml
index 2d41087b5..20f313201 100644
--- a/src/tools/clippy/clippy_config/Cargo.toml
+++ b/src/tools/clippy/clippy_config/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy_config"
-version = "0.1.75"
+version = "0.1.76"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/src/tools/clippy/clippy_config/src/conf.rs b/src/tools/clippy/clippy_config/src/conf.rs
index 472597769..88611eb70 100644
--- a/src/tools/clippy/clippy_config/src/conf.rs
+++ b/src/tools/clippy/clippy_config/src/conf.rs
@@ -249,7 +249,7 @@ define_Conf! {
///
/// Suppress lints whenever the suggested change would cause breakage for other crates.
(avoid_breaking_exported_api: bool = true),
- /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, OPTION_MAP_UNWRAP_OR, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID, MANUAL_RETAIN, TYPE_REPETITION_IN_BOUNDS, TUPLE_ARRAY_CONVERSIONS, MANUAL_TRY_FOLD, MANUAL_HASH_ONE.
+ /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, OPTION_MAP_UNWRAP_OR, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID, MANUAL_RETAIN, TYPE_REPETITION_IN_BOUNDS, TUPLE_ARRAY_CONVERSIONS, MANUAL_TRY_FOLD, MANUAL_HASH_ONE, ITER_KV_MAP.
///
/// The minimum rust version that the project supports. Defaults to the `rust-version` field in `Cargo.toml`
#[default_text = ""]
@@ -543,6 +543,10 @@ define_Conf! {
/// for _ in &mut *rmvec {}
/// ```
(enforce_iter_loop_reborrow: bool = false),
+ /// Lint: MISSING_SAFETY_DOC, UNNECESSARY_SAFETY_DOC, MISSING_PANICS_DOC, MISSING_ERRORS_DOC
+ ///
+ /// Whether to also run the listed lints on private items.
+ (check_private_items: bool = false),
}
/// Search for the configuration file.
diff --git a/src/tools/clippy/clippy_config/src/msrvs.rs b/src/tools/clippy/clippy_config/src/msrvs.rs
index 011d54629..b3ef666e3 100644
--- a/src/tools/clippy/clippy_config/src/msrvs.rs
+++ b/src/tools/clippy/clippy_config/src/msrvs.rs
@@ -23,6 +23,7 @@ msrv_aliases! {
1,62,0 { BOOL_THEN_SOME, DEFAULT_ENUM_ATTRIBUTE }
1,58,0 { FORMAT_ARGS_CAPTURE, PATTERN_TRAIT_CHAR_ARRAY }
1,55,0 { SEEK_REWIND }
+ 1,54,0 { INTO_KEYS }
1,53,0 { OR_PATTERNS, MANUAL_BITS, BTREE_MAP_RETAIN, BTREE_SET_RETAIN, ARRAY_INTO_ITERATOR }
1,52,0 { STR_SPLIT_ONCE, REM_EUCLID_CONST }
1,51,0 { BORROW_AS_PTR, SEEK_FROM_CURRENT, UNSIGNED_ABS }
diff --git a/src/tools/clippy/clippy_config/src/types.rs b/src/tools/clippy/clippy_config/src/types.rs
index e898221ff..df48cc3f5 100644
--- a/src/tools/clippy/clippy_config/src/types.rs
+++ b/src/tools/clippy/clippy_config/src/types.rs
@@ -1,7 +1,6 @@
use serde::de::{self, Deserializer, Visitor};
use serde::{ser, Deserialize, Serialize};
use std::fmt;
-use std::hash::{Hash, Hasher};
#[derive(Clone, Debug, Deserialize)]
pub struct Rename {
@@ -33,32 +32,19 @@ impl DisallowedPath {
}
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub enum MatchLintBehaviour {
AllTypes,
WellKnownTypes,
Never,
}
-#[derive(Clone, Debug)]
+#[derive(Debug)]
pub struct MacroMatcher {
pub name: String,
- pub braces: (String, String),
+ pub braces: (char, char),
}
-impl Hash for MacroMatcher {
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.name.hash(state);
- }
-}
-
-impl PartialEq for MacroMatcher {
- fn eq(&self, other: &Self) -> bool {
- self.name == other.name
- }
-}
-impl Eq for MacroMatcher {}
-
impl<'de> Deserialize<'de> for MacroMatcher {
fn deserialize<D>(deser: D) -> Result<Self, D::Error>
where
@@ -83,7 +69,7 @@ impl<'de> Deserialize<'de> for MacroMatcher {
V: de::MapAccess<'de>,
{
let mut name = None;
- let mut brace: Option<String> = None;
+ let mut brace: Option<char> = None;
while let Some(key) = map.next_key()? {
match key {
Field::Name => {
@@ -104,7 +90,7 @@ impl<'de> Deserialize<'de> for MacroMatcher {
let brace = brace.ok_or_else(|| de::Error::missing_field("brace"))?;
Ok(MacroMatcher {
name,
- braces: [("(", ")"), ("{", "}"), ("[", "]")]
+ braces: [('(', ')'), ('{', '}'), ('[', ']')]
.into_iter()
.find(|b| b.0 == brace)
.map(|(o, c)| (o.to_owned(), c.to_owned()))
diff --git a/src/tools/clippy/clippy_dev/Cargo.toml b/src/tools/clippy/clippy_dev/Cargo.toml
index c3f8a782d..ce738e3f4 100644
--- a/src/tools/clippy/clippy_dev/Cargo.toml
+++ b/src/tools/clippy/clippy_dev/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2021"
aho-corasick = "0.7"
clap = "4.1.4"
indoc = "1.0"
-itertools = "0.10.1"
+itertools = "0.11"
opener = "0.5"
shell-escape = "0.1"
walkdir = "2.3"
diff --git a/src/tools/clippy/clippy_dev/src/lint.rs b/src/tools/clippy/clippy_dev/src/lint.rs
index a19be1bca..906a97278 100644
--- a/src/tools/clippy/clippy_dev/src/lint.rs
+++ b/src/tools/clippy/clippy_dev/src/lint.rs
@@ -1,6 +1,6 @@
use crate::{cargo_clippy_path, exit_if_err};
-use std::fs;
use std::process::{self, Command};
+use std::{env, fs};
pub fn run<'a>(path: &str, args: impl Iterator<Item = &'a String>) {
let is_file = match fs::metadata(path) {
@@ -13,7 +13,7 @@ pub fn run<'a>(path: &str, args: impl Iterator<Item = &'a String>) {
if is_file {
exit_if_err(
- Command::new("cargo")
+ Command::new(env::var("CARGO").unwrap_or("cargo".into()))
.args(["run", "--bin", "clippy-driver", "--"])
.args(["-L", "./target/debug"])
.args(["-Z", "no-codegen"])
@@ -23,7 +23,11 @@ pub fn run<'a>(path: &str, args: impl Iterator<Item = &'a String>) {
.status(),
);
} else {
- exit_if_err(Command::new("cargo").arg("build").status());
+ exit_if_err(
+ Command::new(env::var("CARGO").unwrap_or("cargo".into()))
+ .arg("build")
+ .status(),
+ );
let status = Command::new(cargo_clippy_path())
.arg("clippy")
diff --git a/src/tools/clippy/clippy_dev/src/new_lint.rs b/src/tools/clippy/clippy_dev/src/new_lint.rs
index eeea53ce4..31a42734c 100644
--- a/src/tools/clippy/clippy_dev/src/new_lint.rs
+++ b/src/tools/clippy/clippy_dev/src/new_lint.rs
@@ -283,7 +283,7 @@ fn get_lint_file_contents(lint: &LintData<'_>, enable_msrv: bool) -> String {
use clippy_utils::msrvs::{{self, Msrv}};
{pass_import}
use rustc_lint::{{{context_import}, {pass_type}, LintContext}};
- use rustc_session::{{declare_tool_lint, impl_lint_pass}};
+ use rustc_session::impl_lint_pass;
"#
)
@@ -292,7 +292,7 @@ fn get_lint_file_contents(lint: &LintData<'_>, enable_msrv: bool) -> String {
r#"
{pass_import}
use rustc_lint::{{{context_import}, {pass_type}}};
- use rustc_session::{{declare_lint_pass, declare_tool_lint}};
+ use rustc_session::declare_lint_pass;
"#
)
@@ -320,8 +320,8 @@ fn get_lint_file_contents(lint: &LintData<'_>, enable_msrv: bool) -> String {
extract_msrv_attr!({context_import});
}}
- // TODO: Add MSRV level to `clippy_utils/src/msrvs.rs` if needed.
- // TODO: Update msrv config comment in `clippy_lints/src/utils/conf.rs`
+ // TODO: Add MSRV level to `clippy_config/src/msrvs.rs` if needed.
+ // TODO: Update msrv config comment in `clippy_config/src/conf.rs`
"#
)
} else {
diff --git a/src/tools/clippy/clippy_dev/src/serve.rs b/src/tools/clippy/clippy_dev/src/serve.rs
index 535c25e69..ea925f670 100644
--- a/src/tools/clippy/clippy_dev/src/serve.rs
+++ b/src/tools/clippy/clippy_dev/src/serve.rs
@@ -2,8 +2,8 @@ use std::ffi::OsStr;
use std::num::ParseIntError;
use std::path::Path;
use std::process::Command;
-use std::thread;
use std::time::{Duration, SystemTime};
+use std::{env, thread};
/// # Panics
///
@@ -16,7 +16,7 @@ pub fn run(port: u16, lint: Option<&String>) -> ! {
loop {
if mtime("util/gh-pages/lints.json") < mtime("clippy_lints/src") {
- Command::new("cargo")
+ Command::new(env::var("CARGO").unwrap_or("cargo".into()))
.arg("collect-metadata")
.spawn()
.unwrap()
diff --git a/src/tools/clippy/clippy_lints/Cargo.toml b/src/tools/clippy/clippy_lints/Cargo.toml
index 4bc27fd48..ad8b7ded4 100644
--- a/src/tools/clippy/clippy_lints/Cargo.toml
+++ b/src/tools/clippy/clippy_lints/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy_lints"
-version = "0.1.75"
+version = "0.1.76"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
@@ -14,10 +14,9 @@ cargo_metadata = "0.15.3"
clippy_config = { path = "../clippy_config" }
clippy_utils = { path = "../clippy_utils" }
declare_clippy_lint = { path = "../declare_clippy_lint" }
-if_chain = "1.0"
-itertools = "0.10.1"
+itertools = "0.11"
quine-mc_cluskey = "0.2"
-regex-syntax = "0.7"
+regex-syntax = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", optional = true }
tempfile = { version = "3.3.0", optional = true }
diff --git a/src/tools/clippy/clippy_lints/src/absolute_paths.rs b/src/tools/clippy/clippy_lints/src/absolute_paths.rs
index 582423603..3822b83b4 100644
--- a/src/tools/clippy/clippy_lints/src/absolute_paths.rs
+++ b/src/tools/clippy/clippy_lints/src/absolute_paths.rs
@@ -5,7 +5,7 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, CRATE_DEF_INDEX};
use rustc_hir::{HirId, ItemKind, Node, Path};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::kw;
declare_clippy_lint! {
@@ -62,7 +62,7 @@ impl LateLintPass<'_> for AbsolutePaths {
} = self;
if !path.span.from_expansion()
- && let Some(node) = cx.tcx.hir().find(hir_id)
+ && let Some(node) = cx.tcx.opt_hir_node(hir_id)
&& !matches!(node, Node::Item(item) if matches!(item.kind, ItemKind::Use(_, _)))
&& let [first, rest @ ..] = path.segments
// Handle `::std`
diff --git a/src/tools/clippy/clippy_lints/src/allow_attributes.rs b/src/tools/clippy/clippy_lints/src/allow_attributes.rs
index e3f4cf79d..39fc49dee 100644
--- a/src/tools/clippy/clippy_lints/src/allow_attributes.rs
+++ b/src/tools/clippy/clippy_lints/src/allow_attributes.rs
@@ -5,7 +5,7 @@ use rustc_ast as ast;
use rustc_errors::Applicability;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -52,24 +52,22 @@ declare_lint_pass!(AllowAttribute => [ALLOW_ATTRIBUTES]);
impl LateLintPass<'_> for AllowAttribute {
// Separate each crate's features.
fn check_attribute<'cx>(&mut self, cx: &LateContext<'cx>, attr: &'cx Attribute) {
- if_chain! {
- if !in_external_macro(cx.sess(), attr.span);
- if cx.tcx.features().lint_reasons;
- if let AttrStyle::Outer = attr.style;
- if let Some(ident) = attr.ident();
- if ident.name == rustc_span::symbol::sym::allow;
- if !is_from_proc_macro(cx, &attr);
- then {
- span_lint_and_sugg(
- cx,
- ALLOW_ATTRIBUTES,
- ident.span,
- "#[allow] attribute found",
- "replace it with",
- "expect".into(),
- Applicability::MachineApplicable,
- );
- }
+ if !in_external_macro(cx.sess(), attr.span)
+ && cx.tcx.features().lint_reasons
+ && let AttrStyle::Outer = attr.style
+ && let Some(ident) = attr.ident()
+ && ident.name == rustc_span::symbol::sym::allow
+ && !is_from_proc_macro(cx, &attr)
+ {
+ span_lint_and_sugg(
+ cx,
+ ALLOW_ATTRIBUTES,
+ ident.span,
+ "#[allow] attribute found",
+ "replace it with",
+ "expect".into(),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/almost_complete_range.rs b/src/tools/clippy/clippy_lints/src/almost_complete_range.rs
index e85878eb5..57a5cd8fb 100644
--- a/src/tools/clippy/clippy_lints/src/almost_complete_range.rs
+++ b/src/tools/clippy/clippy_lints/src/almost_complete_range.rs
@@ -5,7 +5,7 @@ use rustc_ast::ast::{Expr, ExprKind, LitKind, Pat, PatKind, RangeEnd, RangeLimit
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/approx_const.rs b/src/tools/clippy/clippy_lints/src/approx_const.rs
index b4f778f12..409ae0c85 100644
--- a/src/tools/clippy/clippy_lints/src/approx_const.rs
+++ b/src/tools/clippy/clippy_lints/src/approx_const.rs
@@ -4,7 +4,7 @@ use rustc_ast::ast::{FloatTy, LitFloatType, LitKind};
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_semver::RustcVersion;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol;
use std::f64::consts as f64;
diff --git a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs
index 192bc7d9d..657d52d0e 100644
--- a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs
+++ b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs
@@ -6,7 +6,7 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_middle::ty::print::with_forced_trimmed_paths;
use rustc_middle::ty::GenericArgKind;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -14,7 +14,9 @@ declare_clippy_lint! {
/// This lint warns when you use `Arc` with a type that does not implement `Send` or `Sync`.
///
/// ### Why is this bad?
- /// `Arc<T>` is only `Send`/`Sync` when `T` is [both `Send` and `Sync`](https://doc.rust-lang.org/std/sync/struct.Arc.html#impl-Send-for-Arc%3CT%3E),
+ /// `Arc<T>` is an atomically reference-counted `Rc<T>` and guarantees that updates to the
+ /// reference counter use atomic operations. This is useful in multithreaded scenarios. To send
+ /// an `Arc<T>` across threads and make use of the atomic ref counter, `T` must be [both `Send` and `Sync`](https://doc.rust-lang.org/std/sync/struct.Arc.html#impl-Send-for-Arc%3CT%3E),
/// either `T` should be made `Send + Sync` or an `Rc` should be used instead of an `Arc`
///
/// ### Example
@@ -34,7 +36,7 @@ declare_clippy_lint! {
#[clippy::version = "1.72.0"]
pub ARC_WITH_NON_SEND_SYNC,
suspicious,
- "using `Arc` with a type that does not implement `Send` or `Sync`"
+ "using `Arc` with a type that does not implement `Send` and `Sync`"
}
declare_lint_pass!(ArcWithNonSendSync => [ARC_WITH_NON_SEND_SYNC]);
@@ -61,19 +63,25 @@ impl<'tcx> LateLintPass<'tcx> for ArcWithNonSendSync {
cx,
ARC_WITH_NON_SEND_SYNC,
expr.span,
- "usage of an `Arc` that is not `Send` or `Sync`",
+ "usage of an `Arc` that is not `Send` and `Sync`",
|diag| {
with_forced_trimmed_paths!({
+ diag.note(format!("`Arc<{arg_ty}>` is not `Send` and `Sync` as:"));
+
if !is_send {
- diag.note(format!("the trait `Send` is not implemented for `{arg_ty}`"));
+ diag.note(format!("- the trait `Send` is not implemented for `{arg_ty}`"));
}
if !is_sync {
- diag.note(format!("the trait `Sync` is not implemented for `{arg_ty}`"));
+ diag.note(format!("- the trait `Sync` is not implemented for `{arg_ty}`"));
}
- diag.note(format!("required for `{ty}` to implement `Send` and `Sync`"));
+ diag.help("consider using an `Rc` instead. `Arc` does not provide benefits for non `Send` and `Sync` types");
+
+ diag.note("if you intend to use `Arc` with `Send` and `Sync` traits");
- diag.help("consider using an `Rc` instead or wrapping the inner type with a `Mutex`");
+ diag.note(format!(
+ "wrap the inner type with a `Mutex` or implement `Send` and `Sync` for `{arg_ty}`"
+ ));
});
},
);
diff --git a/src/tools/clippy/clippy_lints/src/as_conversions.rs b/src/tools/clippy/clippy_lints/src/as_conversions.rs
index 2de205d80..e3daf75c3 100644
--- a/src/tools/clippy/clippy_lints/src/as_conversions.rs
+++ b/src/tools/clippy/clippy_lints/src/as_conversions.rs
@@ -3,7 +3,7 @@ use clippy_utils::is_from_proc_macro;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/asm_syntax.rs b/src/tools/clippy/clippy_lints/src/asm_syntax.rs
index 9717aa9e9..feb6437ee 100644
--- a/src/tools/clippy/clippy_lints/src/asm_syntax.rs
+++ b/src/tools/clippy/clippy_lints/src/asm_syntax.rs
@@ -3,7 +3,7 @@ use std::fmt;
use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{Expr, ExprKind, InlineAsmOptions};
use rustc_lint::{EarlyContext, EarlyLintPass, Lint};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
#[derive(Clone, Copy, PartialEq, Eq)]
enum AsmStyle {
diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
index b90914e93..a15ec199a 100644
--- a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
+++ b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
@@ -3,7 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::macros::{find_assert_args, root_macro_call_first_node, PanicExpn};
use rustc_hir::Expr;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs b/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
index 71ec87a88..aec22965b 100644
--- a/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
+++ b/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
@@ -9,7 +9,7 @@ use rustc_hir::def::Res;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/async_yields_async.rs b/src/tools/clippy/clippy_lints/src/async_yields_async.rs
index ec2447dae..3e5a01c45 100644
--- a/src/tools/clippy/clippy_lints/src/async_yields_async.rs
+++ b/src/tools/clippy/clippy_lints/src/async_yields_async.rs
@@ -4,7 +4,7 @@ use clippy_utils::ty::implements_trait;
use rustc_errors::Applicability;
use rustc_hir::{Body, BodyId, CoroutineKind, CoroutineSource, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/attrs.rs b/src/tools/clippy/clippy_lints/src/attrs.rs
index 38364af27..da3842287 100644
--- a/src/tools/clippy/clippy_lints/src/attrs.rs
+++ b/src/tools/clippy/clippy_lints/src/attrs.rs
@@ -5,7 +5,6 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sug
use clippy_utils::is_from_proc_macro;
use clippy_utils::macros::{is_panic, macro_backtrace};
use clippy_utils::source::{first_line_of_span, is_present_in_source, snippet_opt, without_block_comments};
-use if_chain::if_chain;
use rustc_ast::token::{Token, TokenKind};
use rustc_ast::tokenstream::TokenTree;
use rustc_ast::{
@@ -18,9 +17,9 @@ use rustc_hir::{
use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, Level, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
+use rustc_session::{declare_lint_pass, impl_lint_pass};
use rustc_span::symbol::Symbol;
-use rustc_span::{sym, DUMMY_SP, Span};
+use rustc_span::{sym, Span, DUMMY_SP};
use semver::Version;
static UNIX_SYSTEMS: &[&str] = &[
@@ -121,7 +120,8 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
/// Checks for `#[deprecated]` annotations with a `since`
- /// field that is not a valid semantic version.
+ /// field that is not a valid semantic version. Also allows "TBD" to signal
+ /// future deprecation.
///
/// ### Why is this bad?
/// For checking the version of the deprecation, it must be
@@ -371,7 +371,7 @@ declare_clippy_lint! {
/// let _ = 1 / random();
/// }
/// ```
- #[clippy::version = "1.73.0"]
+ #[clippy::version = "1.74.0"]
pub SHOULD_PANIC_WITHOUT_EXPECT,
pedantic,
"ensures that all `should_panic` attributes specify its expected panic message"
@@ -406,20 +406,26 @@ declare_clippy_lint! {
/// Checks for `#[cfg(features = "...")]` and suggests to replace it with
/// `#[cfg(feature = "...")]`.
///
+ /// It also checks if `cfg(test)` was misspelled.
+ ///
/// ### Why is this bad?
- /// Misspelling `feature` as `features` can be sometimes hard to spot. It
+ /// Misspelling `feature` as `features` or `test` as `tests` can be sometimes hard to spot. It
/// may cause conditional compilation to silently not work.
///
/// ### Example
/// ```no_run
/// #[cfg(features = "some-feature")]
/// fn conditional() { }
+ /// #[cfg(tests)]
+ /// mod tests { }
/// ```
///
/// Use instead:
/// ```no_run
/// #[cfg(feature = "some-feature")]
/// fn conditional() { }
+ /// #[cfg(test)]
+ /// mod tests { }
/// ```
#[clippy::version = "1.69.0"]
pub MAYBE_MISUSED_CFG,
@@ -470,13 +476,11 @@ impl<'tcx> LateLintPass<'tcx> for Attributes {
return;
}
for item in items {
- if_chain! {
- if let NestedMetaItem::MetaItem(mi) = &item;
- if let MetaItemKind::NameValue(lit) = &mi.kind;
- if mi.has_name(sym::since);
- then {
- check_semver(cx, item.span(), lit);
- }
+ if let NestedMetaItem::MetaItem(mi) = &item
+ && let MetaItemKind::NameValue(lit) = &mi.kind
+ && mi.has_name(sym::since)
+ {
+ check_deprecated_since(cx, item.span(), lit);
}
}
}
@@ -579,15 +583,13 @@ impl<'tcx> LateLintPass<'tcx> for Attributes {
/// Returns the lint name if it is clippy lint.
fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<Symbol> {
- if_chain! {
- if let Some(meta_item) = lint.meta_item();
- if meta_item.path.segments.len() > 1;
- if let tool_name = meta_item.path.segments[0].ident;
- if tool_name.name == sym::clippy;
- then {
- let lint_name = meta_item.path.segments.last().unwrap().ident.name;
- return Some(lint_name);
- }
+ if let Some(meta_item) = lint.meta_item()
+ && meta_item.path.segments.len() > 1
+ && let tool_name = meta_item.path.segments[0].ident
+ && tool_name.name == sym::clippy
+ {
+ let lint_name = meta_item.path.segments.last().unwrap().ident.name;
+ return Some(lint_name);
}
None
}
@@ -759,9 +761,9 @@ fn check_attrs(cx: &LateContext<'_>, span: Span, name: Symbol, attrs: &[Attribut
}
}
-fn check_semver(cx: &LateContext<'_>, span: Span, lit: &MetaItemLit) {
+fn check_deprecated_since(cx: &LateContext<'_>, span: Span, lit: &MetaItemLit) {
if let LitKind::Str(is, _) = lit.kind {
- if Version::parse(is.as_str()).is_ok() {
+ if is.as_str() == "TBD" || Version::parse(is.as_str()).is_ok() {
return;
}
}
@@ -856,18 +858,17 @@ fn check_empty_line_after_outer_attr(cx: &EarlyContext<'_>, item: &rustc_ast::It
}
fn check_deprecated_cfg_attr(cx: &EarlyContext<'_>, attr: &Attribute, msrv: &Msrv) {
- if_chain! {
- if msrv.meets(msrvs::TOOL_ATTRIBUTES);
+ if msrv.meets(msrvs::TOOL_ATTRIBUTES)
// check cfg_attr
- if attr.has_name(sym::cfg_attr);
- if let Some(items) = attr.meta_item_list();
- if items.len() == 2;
+ && attr.has_name(sym::cfg_attr)
+ && let Some(items) = attr.meta_item_list()
+ && items.len() == 2
// check for `rustfmt`
- if let Some(feature_item) = items[0].meta_item();
- if feature_item.has_name(sym::rustfmt);
+ && let Some(feature_item) = items[0].meta_item()
+ && feature_item.has_name(sym::rustfmt)
// check for `rustfmt_skip` and `rustfmt::skip`
- if let Some(skip_item) = &items[1].meta_item();
- if skip_item.has_name(sym!(rustfmt_skip))
+ && let Some(skip_item) = &items[1].meta_item()
+ && (skip_item.has_name(sym!(rustfmt_skip))
|| skip_item
.path
.segments
@@ -875,21 +876,20 @@ fn check_deprecated_cfg_attr(cx: &EarlyContext<'_>, attr: &Attribute, msrv: &Msr
.expect("empty path in attribute")
.ident
.name
- == sym::skip;
+ == sym::skip)
// Only lint outer attributes, because custom inner attributes are unstable
// Tracking issue: https://github.com/rust-lang/rust/issues/54726
- if attr.style == AttrStyle::Outer;
- then {
- span_lint_and_sugg(
- cx,
- DEPRECATED_CFG_ATTR,
- attr.span,
- "`cfg_attr` is deprecated for rustfmt and got replaced by tool attributes",
- "use",
- "#[rustfmt::skip]".to_string(),
- Applicability::MachineApplicable,
- );
- }
+ && attr.style == AttrStyle::Outer
+ {
+ span_lint_and_sugg(
+ cx,
+ DEPRECATED_CFG_ATTR,
+ attr.span,
+ "`cfg_attr` is deprecated for rustfmt and got replaced by tool attributes",
+ "use",
+ "#[rustfmt::skip]".to_string(),
+ Applicability::MachineApplicable,
+ );
}
}
@@ -930,21 +930,35 @@ fn check_nested_cfg(cx: &EarlyContext<'_>, items: &[NestedMetaItem]) {
fn check_nested_misused_cfg(cx: &EarlyContext<'_>, items: &[NestedMetaItem]) {
for item in items {
if let NestedMetaItem::MetaItem(meta) = item {
- if meta.has_name(sym!(features))
+ if let Some(ident) = meta.ident()
+ && ident.name.as_str() == "features"
&& let Some(val) = meta.value_str()
{
span_lint_and_sugg(
cx,
MAYBE_MISUSED_CFG,
meta.span,
- "feature may misspelled as features",
- "use",
+ "'feature' may be misspelled as 'features'",
+ "did you mean",
format!("feature = \"{val}\""),
Applicability::MaybeIncorrect,
);
}
if let MetaItemKind::List(list) = &meta.kind {
check_nested_misused_cfg(cx, list);
+ // If this is not a list, then we check for `cfg(test)`.
+ } else if let Some(ident) = meta.ident()
+ && matches!(ident.name.as_str(), "tests" | "Test")
+ {
+ span_lint_and_sugg(
+ cx,
+ MAYBE_MISUSED_CFG,
+ meta.span,
+ &format!("'test' may be misspelled as '{}'", ident.name.as_str()),
+ "did you mean",
+ "test".to_string(),
+ Applicability::MaybeIncorrect,
+ );
}
}
}
@@ -989,12 +1003,10 @@ fn check_mismatched_target_os(cx: &EarlyContext<'_>, attr: &Attribute) {
mismatched.extend(find_mismatched_target_os(list));
},
MetaItemKind::Word => {
- if_chain! {
- if let Some(ident) = meta.ident();
- if let Some(os) = find_os(ident.name.as_str());
- then {
- mismatched.push((os, ident.span));
- }
+ if let Some(ident) = meta.ident()
+ && let Some(os) = find_os(ident.name.as_str())
+ {
+ mismatched.push((os, ident.span));
}
},
MetaItemKind::NameValue(..) => {},
@@ -1005,30 +1017,28 @@ fn check_mismatched_target_os(cx: &EarlyContext<'_>, attr: &Attribute) {
mismatched
}
- if_chain! {
- if attr.has_name(sym::cfg);
- if let Some(list) = attr.meta_item_list();
- let mismatched = find_mismatched_target_os(&list);
- if !mismatched.is_empty();
- then {
- let mess = "operating system used in target family position";
-
- span_lint_and_then(cx, MISMATCHED_TARGET_OS, attr.span, mess, |diag| {
- // Avoid showing the unix suggestion multiple times in case
- // we have more than one mismatch for unix-like systems
- let mut unix_suggested = false;
-
- for (os, span) in mismatched {
- let sugg = format!("target_os = \"{os}\"");
- diag.span_suggestion(span, "try", sugg, Applicability::MaybeIncorrect);
-
- if !unix_suggested && is_unix(os) {
- diag.help("did you mean `unix`?");
- unix_suggested = true;
- }
+ if attr.has_name(sym::cfg)
+ && let Some(list) = attr.meta_item_list()
+ && let mismatched = find_mismatched_target_os(&list)
+ && !mismatched.is_empty()
+ {
+ let mess = "operating system used in target family position";
+
+ span_lint_and_then(cx, MISMATCHED_TARGET_OS, attr.span, mess, |diag| {
+ // Avoid showing the unix suggestion multiple times in case
+ // we have more than one mismatch for unix-like systems
+ let mut unix_suggested = false;
+
+ for (os, span) in mismatched {
+ let sugg = format!("target_os = \"{os}\"");
+ diag.span_suggestion(span, "try", sugg, Applicability::MaybeIncorrect);
+
+ if !unix_suggested && is_unix(os) {
+ diag.help("did you mean `unix`?");
+ unix_suggested = true;
}
- });
- }
+ }
+ });
}
}
diff --git a/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs b/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs
index 06b74b972..9894a1639 100644
--- a/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs
+++ b/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs
@@ -6,7 +6,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::{Body, CoroutineKind, CoroutineSource};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::CoroutineLayout;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/blocks_in_conditions.rs b/src/tools/clippy/clippy_lints/src/blocks_in_conditions.rs
new file mode 100644
index 000000000..1417e230a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/blocks_in_conditions.rs
@@ -0,0 +1,137 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::source::snippet_block_with_applicability;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::visitors::{for_each_expr, Descend};
+use clippy_utils::{get_parent_expr, higher};
+use core::ops::ControlFlow;
+use rustc_errors::Applicability;
+use rustc_hir::{BlockCheckMode, Expr, ExprKind, MatchSource};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::declare_lint_pass;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `if` conditions that use blocks containing an
+ /// expression, statements or conditions that use closures with blocks.
+ ///
+ /// ### Why is this bad?
+ /// Style, using blocks in the condition makes it hard to read.
+ ///
+ /// ### Examples
+ /// ```no_run
+ /// # fn somefunc() -> bool { true };
+ /// if { true } { /* ... */ }
+ ///
+ /// if { let x = somefunc(); x } { /* ... */ }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```no_run
+ /// # fn somefunc() -> bool { true };
+ /// if true { /* ... */ }
+ ///
+ /// let res = { let x = somefunc(); x };
+ /// if res { /* ... */ }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub BLOCKS_IN_CONDITIONS,
+ style,
+ "useless or complex blocks that can be eliminated in conditions"
+}
+
+declare_lint_pass!(BlocksInConditions => [BLOCKS_IN_CONDITIONS]);
+
+const BRACED_EXPR_MESSAGE: &str = "omit braces around single expression condition";
+
+impl<'tcx> LateLintPass<'tcx> for BlocksInConditions {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ let Some((cond, keyword, desc)) = higher::If::hir(expr)
+ .map(|hif| (hif.cond, "if", "an `if` condition"))
+ .or(if let ExprKind::Match(match_ex, _, MatchSource::Normal) = expr.kind {
+ Some((match_ex, "match", "a `match` scrutinee"))
+ } else {
+ None
+ })
+ else {
+ return;
+ };
+ let complex_block_message = &format!(
+ "in {desc}, avoid complex blocks or closures with blocks; \
+ instead, move the block or closure higher and bind it with a `let`",
+ );
+
+ if let ExprKind::Block(block, _) = &cond.kind {
+ if block.rules == BlockCheckMode::DefaultBlock {
+ if block.stmts.is_empty() {
+ if let Some(ex) = &block.expr {
+ // don't dig into the expression here, just suggest that they remove
+ // the block
+ if expr.span.from_expansion() || ex.span.from_expansion() {
+ return;
+ }
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BLOCKS_IN_CONDITIONS,
+ cond.span,
+ BRACED_EXPR_MESSAGE,
+ "try",
+ snippet_block_with_applicability(cx, ex.span, "..", Some(expr.span), &mut applicability)
+ .to_string(),
+ applicability,
+ );
+ }
+ } else {
+ let span = block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span);
+ if span.from_expansion() || expr.span.from_expansion() {
+ return;
+ }
+ // move block higher
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BLOCKS_IN_CONDITIONS,
+ expr.span.with_hi(cond.span.hi()),
+ complex_block_message,
+ "try",
+ format!(
+ "let res = {}; {keyword} res",
+ snippet_block_with_applicability(cx, block.span, "..", Some(expr.span), &mut applicability),
+ ),
+ applicability,
+ );
+ }
+ }
+ } else {
+ let _: Option<!> = for_each_expr(cond, |e| {
+ if let ExprKind::Closure(closure) = e.kind {
+ // do not lint if the closure is called using an iterator (see #1141)
+ if let Some(parent) = get_parent_expr(cx, e)
+ && let ExprKind::MethodCall(_, self_arg, _, _) = &parent.kind
+ && let caller = cx.typeck_results().expr_ty(self_arg)
+ && let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
+ && implements_trait(cx, caller, iter_id, &[])
+ {
+ return ControlFlow::Continue(Descend::No);
+ }
+
+ let body = cx.tcx.hir().body(closure.body);
+ let ex = &body.value;
+ if let ExprKind::Block(block, _) = ex.kind {
+ if !body.value.span.from_expansion() && !block.stmts.is_empty() {
+ span_lint(cx, BLOCKS_IN_CONDITIONS, ex.span, complex_block_message);
+ return ControlFlow::Continue(Descend::No);
+ }
+ }
+ }
+ ControlFlow::Continue(Descend::Yes)
+ });
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
deleted file mode 100644
index 04bf541a5..000000000
--- a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
+++ /dev/null
@@ -1,142 +0,0 @@
-use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
-use clippy_utils::source::snippet_block_with_applicability;
-use clippy_utils::ty::implements_trait;
-use clippy_utils::visitors::{for_each_expr, Descend};
-use clippy_utils::{get_parent_expr, higher};
-use core::ops::ControlFlow;
-use if_chain::if_chain;
-use rustc_errors::Applicability;
-use rustc_hir::{BlockCheckMode, Expr, ExprKind};
-use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::sym;
-
-declare_clippy_lint! {
- /// ### What it does
- /// Checks for `if` conditions that use blocks containing an
- /// expression, statements or conditions that use closures with blocks.
- ///
- /// ### Why is this bad?
- /// Style, using blocks in the condition makes it hard to read.
- ///
- /// ### Examples
- /// ```no_run
- /// # fn somefunc() -> bool { true };
- /// if { true } { /* ... */ }
- ///
- /// if { let x = somefunc(); x } { /* ... */ }
- /// ```
- ///
- /// Use instead:
- /// ```no_run
- /// # fn somefunc() -> bool { true };
- /// if true { /* ... */ }
- ///
- /// let res = { let x = somefunc(); x };
- /// if res { /* ... */ }
- /// ```
- #[clippy::version = "1.45.0"]
- pub BLOCKS_IN_IF_CONDITIONS,
- style,
- "useless or complex blocks that can be eliminated in conditions"
-}
-
-declare_lint_pass!(BlocksInIfConditions => [BLOCKS_IN_IF_CONDITIONS]);
-
-const BRACED_EXPR_MESSAGE: &str = "omit braces around single expression condition";
-const COMPLEX_BLOCK_MESSAGE: &str = "in an `if` condition, avoid complex blocks or closures with blocks; \
- instead, move the block or closure higher and bind it with a `let`";
-
-impl<'tcx> LateLintPass<'tcx> for BlocksInIfConditions {
- fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if in_external_macro(cx.sess(), expr.span) {
- return;
- }
- if let Some(higher::If { cond, .. }) = higher::If::hir(expr) {
- if let ExprKind::Block(block, _) = &cond.kind {
- if block.rules == BlockCheckMode::DefaultBlock {
- if block.stmts.is_empty() {
- if let Some(ex) = &block.expr {
- // don't dig into the expression here, just suggest that they remove
- // the block
- if expr.span.from_expansion() || ex.span.from_expansion() {
- return;
- }
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- BLOCKS_IN_IF_CONDITIONS,
- cond.span,
- BRACED_EXPR_MESSAGE,
- "try",
- format!(
- "{}",
- snippet_block_with_applicability(
- cx,
- ex.span,
- "..",
- Some(expr.span),
- &mut applicability
- )
- ),
- applicability,
- );
- }
- } else {
- let span = block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span);
- if span.from_expansion() || expr.span.from_expansion() {
- return;
- }
- // move block higher
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- BLOCKS_IN_IF_CONDITIONS,
- expr.span.with_hi(cond.span.hi()),
- COMPLEX_BLOCK_MESSAGE,
- "try",
- format!(
- "let res = {}; if res",
- snippet_block_with_applicability(
- cx,
- block.span,
- "..",
- Some(expr.span),
- &mut applicability
- ),
- ),
- applicability,
- );
- }
- }
- } else {
- let _: Option<!> = for_each_expr(cond, |e| {
- if let ExprKind::Closure(closure) = e.kind {
- // do not lint if the closure is called using an iterator (see #1141)
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, e);
- if let ExprKind::MethodCall(_, self_arg, _, _) = &parent.kind;
- let caller = cx.typeck_results().expr_ty(self_arg);
- if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
- if implements_trait(cx, caller, iter_id, &[]);
- then {
- return ControlFlow::Continue(Descend::No);
- }
- }
-
- let body = cx.tcx.hir().body(closure.body);
- let ex = &body.value;
- if let ExprKind::Block(block, _) = ex.kind {
- if !body.value.span.from_expansion() && !block.stmts.is_empty() {
- span_lint(cx, BLOCKS_IN_IF_CONDITIONS, ex.span, COMPLEX_BLOCK_MESSAGE);
- return ControlFlow::Continue(Descend::No);
- }
- }
- }
- ControlFlow::Continue(Descend::Yes)
- });
- }
- }
- }
-}
diff --git a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
index 665dbd6f7..74201e9cc 100644
--- a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
+++ b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Lit};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Ident;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs
index 156cb34df..cfb76cab6 100644
--- a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs
+++ b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs
@@ -2,7 +2,7 @@ use clippy_utils::higher::If;
use rustc_ast::LitKind;
use rustc_hir::{Block, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::sugg::Sugg;
diff --git a/src/tools/clippy/clippy_lints/src/booleans.rs b/src/tools/clippy/clippy_lints/src/booleans.rs
index 0e0d229e8..e11f83f22 100644
--- a/src/tools/clippy/clippy_lints/src/booleans.rs
+++ b/src/tools/clippy/clippy_lints/src/booleans.rs
@@ -2,13 +2,12 @@ use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_hir_and_then};
use clippy_utils::eq_expr_value;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, UnOp};
use rustc_lint::{LateContext, LateLintPass, Level};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
@@ -151,17 +150,15 @@ impl<'a, 'tcx, 'v> Hir2Qmm<'a, 'tcx, 'v> {
return Ok(Bool::Term(n as u8));
}
- if_chain! {
- if let ExprKind::Binary(e_binop, e_lhs, e_rhs) = &e.kind;
- if implements_ord(self.cx, e_lhs);
- if let ExprKind::Binary(expr_binop, expr_lhs, expr_rhs) = &expr.kind;
- if negate(e_binop.node) == Some(expr_binop.node);
- if eq_expr_value(self.cx, e_lhs, expr_lhs);
- if eq_expr_value(self.cx, e_rhs, expr_rhs);
- then {
- #[expect(clippy::cast_possible_truncation)]
- return Ok(Bool::Not(Box::new(Bool::Term(n as u8))));
- }
+ if let ExprKind::Binary(e_binop, e_lhs, e_rhs) = &e.kind
+ && implements_ord(self.cx, e_lhs)
+ && let ExprKind::Binary(expr_binop, expr_lhs, expr_rhs) = &expr.kind
+ && negate(e_binop.node) == Some(expr_binop.node)
+ && eq_expr_value(self.cx, e_lhs, expr_lhs)
+ && eq_expr_value(self.cx, e_rhs, expr_rhs)
+ {
+ #[expect(clippy::cast_possible_truncation)]
+ return Ok(Bool::Not(Box::new(Bool::Term(n as u8))));
}
}
let n = self.terminals.len();
@@ -427,8 +424,9 @@ impl<'a, 'tcx> NonminimalBoolVisitor<'a, 'tcx> {
improvements.push(suggestion);
}
}
- let nonminimal_bool_lint = |suggestions: Vec<_>| {
+ let nonminimal_bool_lint = |mut suggestions: Vec<_>| {
if self.cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, e.hir_id).0 != Level::Allow {
+ suggestions.sort();
span_lint_hir_and_then(
self.cx,
NONMINIMAL_BOOL,
diff --git a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
index a38269083..0ca4a0e06 100644
--- a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
@@ -8,7 +8,7 @@ use rustc_hir::{ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::Mutability;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/box_default.rs b/src/tools/clippy/clippy_lints/src/box_default.rs
index 9c78c6e53..ef12fe344 100644
--- a/src/tools/clippy/clippy_lints/src/box_default.rs
+++ b/src/tools/clippy/clippy_lints/src/box_default.rs
@@ -10,7 +10,7 @@ use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::print::with_forced_trimmed_paths;
use rustc_middle::ty::IsSuggestable;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -45,7 +45,7 @@ impl LateLintPass<'_> for BoxDefault {
&& let ExprKind::Path(QPath::TypeRelative(ty, seg)) = box_new.kind
&& let ExprKind::Call(arg_path, ..) = arg.kind
&& !in_external_macro(cx.sess(), expr.span)
- && (expr.span.eq_ctxt(arg.span) || is_vec_expn(cx, arg))
+ && (expr.span.eq_ctxt(arg.span) || is_local_vec_expn(cx, arg, expr))
&& seg.ident.name == sym::new
&& path_def_id(cx, ty).map_or(false, |id| Some(id) == cx.tcx.lang_items().owned_box())
&& is_default_equivalent(cx, arg)
@@ -81,10 +81,10 @@ fn is_plain_default(cx: &LateContext<'_>, arg_path: &Expr<'_>) -> bool {
}
}
-fn is_vec_expn(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- macro_backtrace(expr.span)
- .next()
- .map_or(false, |call| cx.tcx.is_diagnostic_item(sym::vec_macro, call.def_id))
+fn is_local_vec_expn(cx: &LateContext<'_>, expr: &Expr<'_>, ref_expr: &Expr<'_>) -> bool {
+ macro_backtrace(expr.span).next().map_or(false, |call| {
+ cx.tcx.is_diagnostic_item(sym::vec_macro, call.def_id) && call.span.eq_ctxt(ref_expr.span)
+ })
}
#[derive(Default)]
diff --git a/src/tools/clippy/clippy_lints/src/cargo/mod.rs b/src/tools/clippy/clippy_lints/src/cargo/mod.rs
index 3a872e54c..fea6924d8 100644
--- a/src/tools/clippy/clippy_lints/src/cargo/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/cargo/mod.rs
@@ -8,7 +8,7 @@ use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_lint_allowed;
use rustc_hir::hir_id::CRATE_HIR_ID;
use rustc_lint::{LateContext, LateLintPass, Lint};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::DUMMY_SP;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs b/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs
index f7a5b1857..ec681adb7 100644
--- a/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs
+++ b/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs
@@ -2,7 +2,6 @@
use cargo_metadata::{DependencyKind, Metadata, Node, Package, PackageId};
use clippy_utils::diagnostics::span_lint;
-use if_chain::if_chain;
use itertools::Itertools;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_lint::LateContext;
@@ -15,31 +14,33 @@ pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) {
let mut packages = metadata.packages.clone();
packages.sort_by(|a, b| a.name.cmp(&b.name));
- if_chain! {
- if let Some(resolve) = &metadata.resolve;
- if let Some(local_id) = packages
- .iter()
- .find_map(|p| if p.name == local_name.as_str() { Some(&p.id) } else { None });
- then {
- for (name, group) in &packages.iter().group_by(|p| p.name.clone()) {
- let group: Vec<&Package> = group.collect();
-
- if group.len() <= 1 {
- continue;
- }
-
- if group.iter().all(|p| is_normal_dep(&resolve.nodes, local_id, &p.id)) {
- let mut versions: Vec<_> = group.into_iter().map(|p| &p.version).collect();
- versions.sort();
- let versions = versions.iter().join(", ");
-
- span_lint(
- cx,
- MULTIPLE_CRATE_VERSIONS,
- DUMMY_SP,
- &format!("multiple versions for dependency `{name}`: {versions}"),
- );
- }
+ if let Some(resolve) = &metadata.resolve
+ && let Some(local_id) = packages.iter().find_map(|p| {
+ if p.name == local_name.as_str() {
+ Some(&p.id)
+ } else {
+ None
+ }
+ })
+ {
+ for (name, group) in &packages.iter().group_by(|p| p.name.clone()) {
+ let group: Vec<&Package> = group.collect();
+
+ if group.len() <= 1 {
+ continue;
+ }
+
+ if group.iter().all(|p| is_normal_dep(&resolve.nodes, local_id, &p.id)) {
+ let mut versions: Vec<_> = group.into_iter().map(|p| &p.version).collect();
+ versions.sort();
+ let versions = versions.iter().join(", ");
+
+ span_lint(
+ cx,
+ MULTIPLE_CRATE_VERSIONS,
+ DUMMY_SP,
+ &format!("multiple versions for dependency `{name}`: {versions}"),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs b/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs
index 4dcc9cbe3..244e98eb6 100644
--- a/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs
+++ b/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs
@@ -1,6 +1,5 @@
use cargo_metadata::Metadata;
use clippy_utils::diagnostics::span_lint;
-use if_chain::if_chain;
use rustc_lint::LateContext;
use rustc_span::DUMMY_SP;
@@ -9,19 +8,17 @@ use super::WILDCARD_DEPENDENCIES;
pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) {
for dep in &metadata.packages[0].dependencies {
// VersionReq::any() does not work
- if_chain! {
- if let Ok(wildcard_ver) = semver::VersionReq::parse("*");
- if let Some(ref source) = dep.source;
- if !source.starts_with("git");
- if dep.req == wildcard_ver;
- then {
- span_lint(
- cx,
- WILDCARD_DEPENDENCIES,
- DUMMY_SP,
- &format!("wildcard dependency for `{}`", dep.name),
- );
- }
+ if let Ok(wildcard_ver) = semver::VersionReq::parse("*")
+ && let Some(ref source) = dep.source
+ && !source.starts_with("git")
+ && dep.req == wildcard_ver
+ {
+ span_lint(
+ cx,
+ WILDCARD_DEPENDENCIES,
+ DUMMY_SP,
+ &format!("wildcard dependency for `{}`", dep.name),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs b/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs
index 55294f5f3..8bfb7383f 100644
--- a/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs
@@ -9,11 +9,10 @@ use rustc_middle::ty::{self, Ty, TypeAndMut};
use super::AS_PTR_CAST_MUT;
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_to: Ty<'_>) {
- if let ty::RawPtr(
- ptrty @ TypeAndMut {
- mutbl: Mutability::Mut, ..
- },
- ) = cast_to.kind()
+ if let ty::RawPtr(TypeAndMut {
+ mutbl: Mutability::Mut,
+ ty: ptrty,
+ }) = cast_to.kind()
&& let ty::RawPtr(TypeAndMut {
mutbl: Mutability::Not, ..
}) = cx.typeck_results().node_type(cast_expr.hir_id).kind()
@@ -34,7 +33,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>,
cx,
AS_PTR_CAST_MUT,
expr.span,
- &format!("casting the result of `as_ptr` to *{ptrty}"),
+ &format!("casting the result of `as_ptr` to *mut {ptrty}"),
"replace with",
format!("{recv}.as_mut_ptr()"),
applicability,
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs b/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs
index ffa571abb..2ddb0f00e 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs
@@ -1,5 +1,6 @@
+use clippy_utils::diagnostics::span_lint_and_then;
use rustc_hir::Expr;
-use rustc_lint::{LateContext, LintContext};
+use rustc_lint::LateContext;
use rustc_middle::ty::Ty;
use super::{utils, CAST_POSSIBLE_WRAP};
@@ -78,13 +79,11 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, ca
),
};
- cx.struct_span_lint(CAST_POSSIBLE_WRAP, expr.span, message, |diag| {
+ span_lint_and_then(cx, CAST_POSSIBLE_WRAP, expr.span, &message, |diag| {
if let EmitState::LintOnPtrSize(16) = should_lint {
diag
- .note("`usize` and `isize` may be as small as 16 bits on some platforms")
- .note("for more information see https://doc.rust-lang.org/reference/types/numeric.html#machine-dependent-integer-types")
- } else {
- diag
- }
+ .note("`usize` and `isize` may be as small as 16 bits on some platforms")
+ .note("for more information see https://doc.rust-lang.org/reference/types/numeric.html#machine-dependent-integer-types");
+ };
});
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs b/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs
index a83dfd94d..bd12ee406 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs
@@ -1,7 +1,6 @@
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::{method_chain_args, sext};
-use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
@@ -28,13 +27,11 @@ fn should_lint(cx: &LateContext<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast
// Don't lint for positive constants.
let const_val = constant(cx, cx.typeck_results(), cast_op);
- if_chain! {
- if let Some(Constant::Int(n)) = const_val;
- if let ty::Int(ity) = *cast_from.kind();
- if sext(cx.tcx, n, ity) >= 0;
- then {
- return false;
- }
+ if let Some(Constant::Int(n)) = const_val
+ && let ty::Int(ity) = *cast_from.kind()
+ && sext(cx.tcx, n, ity) >= 0
+ {
+ return false;
}
// Don't lint for the result of methods that always return non-negative values.
@@ -42,13 +39,11 @@ fn should_lint(cx: &LateContext<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast
let mut method_name = path.ident.name.as_str();
let allowed_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];
- if_chain! {
- if method_name == "unwrap";
- if let Some(arglist) = method_chain_args(cast_op, &["unwrap"]);
- if let ExprKind::MethodCall(inner_path, ..) = &arglist[0].0.kind;
- then {
- method_name = inner_path.ident.name.as_str();
- }
+ if method_name == "unwrap"
+ && let Some(arglist) = method_chain_args(cast_op, &["unwrap"])
+ && let ExprKind::MethodCall(inner_path, ..) = &arglist[0].0.kind
+ {
+ method_name = inner_path.ident.name.as_str();
}
if allowed_methods.iter().any(|&name| method_name == name) {
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
index d14104029..91bad8256 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
@@ -1,7 +1,6 @@
use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source;
-use if_chain::if_chain;
use rustc_ast::Mutability;
use rustc_hir::{Expr, ExprKind, Node};
use rustc_lint::LateContext;
@@ -69,26 +68,24 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, msrv: &Msrv
fn is_child_of_cast(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
let map = cx.tcx.hir();
- if_chain! {
- if let Some(parent_id) = map.opt_parent_id(expr.hir_id);
- if let Some(parent) = map.find(parent_id);
- then {
- let expr = match parent {
- Node::Block(block) => {
- if let Some(parent_expr) = block.expr {
- parent_expr
- } else {
- return false;
- }
- },
- Node::Expr(expr) => expr,
- _ => return false,
- };
+ if let Some(parent_id) = map.opt_parent_id(expr.hir_id)
+ && let Some(parent) = cx.tcx.opt_hir_node(parent_id)
+ {
+ let expr = match parent {
+ Node::Block(block) => {
+ if let Some(parent_expr) = block.expr {
+ parent_expr
+ } else {
+ return false;
+ }
+ },
+ Node::Expr(expr) => expr,
+ _ => return false,
+ };
- matches!(expr.kind, ExprKind::Cast(..))
- } else {
- false
- }
+ matches!(expr.kind, ExprKind::Cast(..))
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
index badadf2c9..3db1e3e6d 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
@@ -1,7 +1,6 @@
use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind};
@@ -25,34 +24,32 @@ fn raw_parts_kind(cx: &LateContext<'_>, did: DefId) -> Option<RawPartsKind> {
}
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_to: Ty<'_>, msrv: &Msrv) {
- if_chain! {
- if msrv.meets(msrvs::PTR_SLICE_RAW_PARTS);
- if let ty::RawPtr(ptrty) = cast_to.kind();
- if let ty::Slice(_) = ptrty.ty.kind();
- if let ExprKind::Call(fun, [ptr_arg, len_arg]) = cast_expr.peel_blocks().kind;
- if let ExprKind::Path(ref qpath) = fun.kind;
- if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
- if let Some(rpk) = raw_parts_kind(cx, fun_def_id);
- let ctxt = expr.span.ctxt();
- if cast_expr.span.ctxt() == ctxt;
- then {
- let func = match rpk {
- RawPartsKind::Immutable => "from_raw_parts",
- RawPartsKind::Mutable => "from_raw_parts_mut"
- };
- let span = expr.span;
- let mut applicability = Applicability::MachineApplicable;
- let ptr = snippet_with_context(cx, ptr_arg.span, ctxt, "ptr", &mut applicability).0;
- let len = snippet_with_context(cx, len_arg.span, ctxt, "len", &mut applicability).0;
- span_lint_and_sugg(
- cx,
- CAST_SLICE_FROM_RAW_PARTS,
- span,
- &format!("casting the result of `{func}` to {cast_to}"),
- "replace with",
- format!("core::ptr::slice_{func}({ptr}, {len})"),
- applicability
- );
- }
+ if msrv.meets(msrvs::PTR_SLICE_RAW_PARTS)
+ && let ty::RawPtr(ptrty) = cast_to.kind()
+ && let ty::Slice(_) = ptrty.ty.kind()
+ && let ExprKind::Call(fun, [ptr_arg, len_arg]) = cast_expr.peel_blocks().kind
+ && let ExprKind::Path(ref qpath) = fun.kind
+ && let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id()
+ && let Some(rpk) = raw_parts_kind(cx, fun_def_id)
+ && let ctxt = expr.span.ctxt()
+ && cast_expr.span.ctxt() == ctxt
+ {
+ let func = match rpk {
+ RawPartsKind::Immutable => "from_raw_parts",
+ RawPartsKind::Mutable => "from_raw_parts_mut",
+ };
+ let span = expr.span;
+ let mut applicability = Applicability::MachineApplicable;
+ let ptr = snippet_with_context(cx, ptr_arg.span, ctxt, "ptr", &mut applicability).0;
+ let len = snippet_with_context(cx, len_arg.span, ctxt, "len", &mut applicability).0;
+ span_lint_and_sugg(
+ cx,
+ CAST_SLICE_FROM_RAW_PARTS,
+ span,
+ &format!("casting the result of `{func}` to {cast_to}"),
+ "replace with",
+ format!("core::ptr::slice_{func}({ptr}, {len})"),
+ applicability,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs b/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs
index 82e07c98a..a7d3868f7 100644
--- a/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_with_applicability;
-use if_chain::if_chain;
use rustc_ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
@@ -10,32 +9,31 @@ use rustc_middle::ty::{self, UintTy};
use super::CHAR_LIT_AS_U8;
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::Cast(e, _) = &expr.kind;
- if let ExprKind::Lit(l) = &e.kind;
- if let LitKind::Char(c) = l.node;
- if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(expr).kind();
- then {
- let mut applicability = Applicability::MachineApplicable;
- let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
+ if let ExprKind::Cast(e, _) = &expr.kind
+ && let ExprKind::Lit(l) = &e.kind
+ && let LitKind::Char(c) = l.node
+ && ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(expr).kind()
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
- span_lint_and_then(
- cx,
- CHAR_LIT_AS_U8,
- expr.span,
- "casting a character literal to `u8` truncates",
- |diag| {
- diag.note("`char` is four bytes wide, but `u8` is a single byte");
+ span_lint_and_then(
+ cx,
+ CHAR_LIT_AS_U8,
+ expr.span,
+ "casting a character literal to `u8` truncates",
+ |diag| {
+ diag.note("`char` is four bytes wide, but `u8` is a single byte");
- if c.is_ascii() {
- diag.span_suggestion(
- expr.span,
- "use a byte literal instead",
- format!("b{snippet}"),
- applicability,
- );
- }
- });
- }
+ if c.is_ascii() {
+ diag.span_suggestion(
+ expr.span,
+ "use a byte literal instead",
+ format!("b{snippet}"),
+ applicability,
+ );
+ }
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/mod.rs b/src/tools/clippy/clippy_lints/src/casts/mod.rs
index 49a90a2f3..e05b8f66d 100644
--- a/src/tools/clippy/clippy_lints/src/casts/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/mod.rs
@@ -27,7 +27,7 @@ use clippy_utils::is_hir_ty_cfg_dependant;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
index 0c555c1ac..35e36e9ef 100644
--- a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
@@ -3,12 +3,29 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::sugg::Sugg;
use rustc_errors::Applicability;
-use rustc_hir::{Expr, ExprKind, Mutability, TyKind};
+use rustc_hir::{Expr, ExprKind, Mutability, QPath, TyKind};
+use rustc_hir_pretty::qpath_to_string;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, TypeAndMut};
+use rustc_span::sym;
use super::PTR_AS_PTR;
+enum OmitFollowedCastReason<'a> {
+ None,
+ Null(&'a QPath<'a>),
+ NullMut(&'a QPath<'a>),
+}
+
+impl OmitFollowedCastReason<'_> {
+ fn corresponding_item(&self) -> Option<&QPath<'_>> {
+ match self {
+ OmitFollowedCastReason::None => None,
+ OmitFollowedCastReason::Null(x) | OmitFollowedCastReason::NullMut(x) => Some(*x),
+ }
+ }
+}
+
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: &Msrv) {
if !msrv.meets(msrvs::POINTER_CAST) {
return;
@@ -25,7 +42,6 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: &Msrv) {
&& to_pointee_ty.is_sized(cx.tcx, cx.param_env)
{
let mut app = Applicability::MachineApplicable;
- let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut app);
let turbofish = match &cast_to_hir_ty.kind {
TyKind::Infer => String::new(),
TyKind::Ptr(mut_ty) => {
@@ -41,13 +57,44 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: &Msrv) {
_ => return,
};
+ // following `cast` does not compile because it fails to infer what type is expected
+ // as type argument to `std::ptr::ptr_null` or `std::ptr::ptr_null_mut`, so
+ // we omit following `cast`:
+ let omit_cast = if let ExprKind::Call(func, []) = cast_expr.kind
+ && let ExprKind::Path(ref qpath @ QPath::Resolved(None, path)) = func.kind
+ {
+ let method_defid = path.res.def_id();
+ if cx.tcx.is_diagnostic_item(sym::ptr_null, method_defid) {
+ OmitFollowedCastReason::Null(qpath)
+ } else if cx.tcx.is_diagnostic_item(sym::ptr_null_mut, method_defid) {
+ OmitFollowedCastReason::NullMut(qpath)
+ } else {
+ OmitFollowedCastReason::None
+ }
+ } else {
+ OmitFollowedCastReason::None
+ };
+
+ let (help, final_suggestion) = if let Some(method) = omit_cast.corresponding_item() {
+ // don't force absolute path
+ let method = qpath_to_string(method);
+ ("try call directly", format!("{method}{turbofish}()"))
+ } else {
+ let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut app);
+
+ (
+ "try `pointer::cast`, a safer alternative",
+ format!("{}.cast{turbofish}()", cast_expr_sugg.maybe_par()),
+ )
+ };
+
span_lint_and_sugg(
cx,
PTR_AS_PTR,
expr.span,
"`as` casting between raw pointers without changing its mutability",
- "try `pointer::cast`, a safer alternative",
- format!("{}.cast{turbofish}()", cast_expr_sugg.maybe_par()),
+ help,
+ final_suggestion,
app,
);
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs
index 0172e9336..ff069860a 100644
--- a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs
@@ -1,7 +1,6 @@
use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::sugg::Sugg;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, Mutability};
use rustc_lint::LateContext;
@@ -17,29 +16,35 @@ pub(super) fn check<'tcx>(
cast_to: Ty<'tcx>,
msrv: &Msrv,
) {
- if_chain! {
- if msrv.meets(msrvs::POINTER_CAST_CONSTNESS);
- if let ty::RawPtr(TypeAndMut { mutbl: from_mutbl, ty: from_ty }) = cast_from.kind();
- if let ty::RawPtr(TypeAndMut { mutbl: to_mutbl, ty: to_ty }) = cast_to.kind();
- if matches!((from_mutbl, to_mutbl),
- (Mutability::Not, Mutability::Mut) | (Mutability::Mut, Mutability::Not));
- if from_ty == to_ty;
- then {
- let sugg = Sugg::hir(cx, cast_expr, "_");
- let constness = match *to_mutbl {
- Mutability::Not => "const",
- Mutability::Mut => "mut",
- };
+ if msrv.meets(msrvs::POINTER_CAST_CONSTNESS)
+ && let ty::RawPtr(TypeAndMut {
+ mutbl: from_mutbl,
+ ty: from_ty,
+ }) = cast_from.kind()
+ && let ty::RawPtr(TypeAndMut {
+ mutbl: to_mutbl,
+ ty: to_ty,
+ }) = cast_to.kind()
+ && matches!(
+ (from_mutbl, to_mutbl),
+ (Mutability::Not, Mutability::Mut) | (Mutability::Mut, Mutability::Not)
+ )
+ && from_ty == to_ty
+ {
+ let sugg = Sugg::hir(cx, cast_expr, "_");
+ let constness = match *to_mutbl {
+ Mutability::Not => "const",
+ Mutability::Mut => "mut",
+ };
- span_lint_and_sugg(
- cx,
- PTR_CAST_CONSTNESS,
- expr.span,
- "`as` casting between raw pointers while changing only its constness",
- &format!("try `pointer::cast_{constness}`, a safer alternative"),
- format!("{}.cast_{constness}()", sugg.maybe_par()),
- Applicability::MachineApplicable,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ PTR_CAST_CONSTNESS,
+ expr.span,
+ "`as` casting between raw pointers while changing only its constness",
+ &format!("try `pointer::cast_{constness}`, a safer alternative"),
+ format!("{}.cast_{constness}()", sugg.maybe_par()),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
index 61bfce07e..849920bb7 100644
--- a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
@@ -3,7 +3,6 @@ use clippy_utils::numeric_literal::NumericLiteral;
use clippy_utils::source::snippet_opt;
use clippy_utils::visitors::{for_each_expr, Visitable};
use clippy_utils::{get_parent_expr, get_parent_node, is_hir_ty_cfg_dependant, is_ty_alias, path_to_local};
-use if_chain::if_chain;
use rustc_ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
@@ -25,40 +24,40 @@ pub(super) fn check<'tcx>(
) -> bool {
let cast_str = snippet_opt(cx, cast_expr.span).unwrap_or_default();
- if_chain! {
- if let ty::RawPtr(..) = cast_from.kind();
+ if let ty::RawPtr(..) = cast_from.kind()
// check both mutability and type are the same
- if cast_from.kind() == cast_to.kind();
- if let ExprKind::Cast(_, cast_to_hir) = expr.kind;
+ && cast_from.kind() == cast_to.kind()
+ && let ExprKind::Cast(_, cast_to_hir) = expr.kind
// Ignore casts to e.g. type aliases and infer types
// - p as pointer_alias
// - p as _
- if let TyKind::Ptr(to_pointee) = cast_to_hir.kind;
- then {
- match to_pointee.ty.kind {
- // Ignore casts to pointers that are aliases or cfg dependant, e.g.
- // - p as *const std::ffi::c_char (alias)
- // - p as *const std::os::raw::c_char (cfg dependant)
- TyKind::Path(qpath) => {
- if is_ty_alias(&qpath) || is_hir_ty_cfg_dependant(cx, to_pointee.ty) {
- return false;
- }
- },
- // Ignore `p as *const _`
- TyKind::Infer => return false,
- _ => {},
- }
-
- span_lint_and_sugg(
- cx,
- UNNECESSARY_CAST,
- expr.span,
- &format!("casting raw pointers to the same type and constness is unnecessary (`{cast_from}` -> `{cast_to}`)"),
- "try",
- cast_str.clone(),
- Applicability::MaybeIncorrect,
- );
+ && let TyKind::Ptr(to_pointee) = cast_to_hir.kind
+ {
+ match to_pointee.ty.kind {
+ // Ignore casts to pointers that are aliases or cfg dependant, e.g.
+ // - p as *const std::ffi::c_char (alias)
+ // - p as *const std::os::raw::c_char (cfg dependant)
+ TyKind::Path(qpath) => {
+ if is_ty_alias(&qpath) || is_hir_ty_cfg_dependant(cx, to_pointee.ty) {
+ return false;
+ }
+ },
+ // Ignore `p as *const _`
+ TyKind::Infer => return false,
+ _ => {},
}
+
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_CAST,
+ expr.span,
+ &format!(
+ "casting raw pointers to the same type and constness is unnecessary (`{cast_from}` -> `{cast_to}`)"
+ ),
+ "try",
+ cast_str.clone(),
+ Applicability::MaybeIncorrect,
+ );
}
// skip cast of local that is a type alias
@@ -86,14 +85,12 @@ pub(super) fn check<'tcx>(
}
// skip cast to non-primitive type
- if_chain! {
- if let ExprKind::Cast(_, cast_to) = expr.kind;
- if let TyKind::Path(QPath::Resolved(_, path)) = &cast_to.kind;
- if let Res::PrimTy(_) = path.res;
- then {}
- else {
- return false;
- }
+ if let ExprKind::Cast(_, cast_to) = expr.kind
+ && let TyKind::Path(QPath::Resolved(_, path)) = &cast_to.kind
+ && let Res::PrimTy(_) = path.res
+ {
+ } else {
+ return false;
}
// skip cast of fn call that returns type alias
@@ -106,18 +103,19 @@ pub(super) fn check<'tcx>(
if let Some(lit) = get_numeric_literal(cast_expr) {
let literal_str = &cast_str;
- if_chain! {
- if let LitKind::Int(n, _) = lit.node;
- if let Some(src) = snippet_opt(cx, cast_expr.span);
- if cast_to.is_floating_point();
- if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node);
- let from_nbits = 128 - n.leading_zeros();
- let to_nbits = fp_ty_mantissa_nbits(cast_to);
- if from_nbits != 0 && to_nbits != 0 && from_nbits <= to_nbits && num_lit.is_decimal();
- then {
- lint_unnecessary_cast(cx, expr, num_lit.integer, cast_from, cast_to);
- return true
- }
+ if let LitKind::Int(n, _) = lit.node
+ && let Some(src) = snippet_opt(cx, cast_expr.span)
+ && cast_to.is_floating_point()
+ && let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node)
+ && let from_nbits = 128 - n.leading_zeros()
+ && let to_nbits = fp_ty_mantissa_nbits(cast_to)
+ && from_nbits != 0
+ && to_nbits != 0
+ && from_nbits <= to_nbits
+ && num_lit.is_decimal()
+ {
+ lint_unnecessary_cast(cx, expr, num_lit.integer, cast_from, cast_to);
+ return true;
}
match lit.node {
diff --git a/src/tools/clippy/clippy_lints/src/checked_conversions.rs b/src/tools/clippy/clippy_lints/src/checked_conversions.rs
index d31c2268a..92810ea2a 100644
--- a/src/tools/clippy/clippy_lints/src/checked_conversions.rs
+++ b/src/tools/clippy/clippy_lints/src/checked_conversions.rs
@@ -4,12 +4,11 @@ use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{in_constant, is_integer_literal, SpanlessEq};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOp, BinOpKind, Expr, ExprKind, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -55,20 +54,17 @@ impl<'tcx> LateLintPass<'tcx> for CheckedConversions {
return;
}
- let result = if_chain! {
- if !in_constant(cx, item.hir_id);
- if !in_external_macro(cx.sess(), item.span);
- if let ExprKind::Binary(op, left, right) = &item.kind;
-
- then {
- match op.node {
- BinOpKind::Ge | BinOpKind::Le => single_check(item),
- BinOpKind::And => double_check(cx, left, right),
- _ => None,
- }
- } else {
- None
+ let result = if !in_constant(cx, item.hir_id)
+ && !in_external_macro(cx.sess(), item.span)
+ && let ExprKind::Binary(op, left, right) = &item.kind
+ {
+ match op.node {
+ BinOpKind::Ge | BinOpKind::Le => single_check(item),
+ BinOpKind::And => double_check(cx, left, right),
+ _ => None,
}
+ } else {
+ None
};
if let Some(cv) = result {
@@ -193,16 +189,13 @@ impl ConversionType {
/// Check for `expr <= (to_type::MAX as from_type)`
fn check_upper_bound<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
- if_chain! {
- if let ExprKind::Binary(ref op, left, right) = &expr.kind;
- if let Some((candidate, check)) = normalize_le_ge(op, left, right);
- if let Some((from, to)) = get_types_from_cast(check, INTS, "max_value", "MAX");
-
- then {
- Conversion::try_new(candidate, from, to)
- } else {
- None
- }
+ if let ExprKind::Binary(ref op, left, right) = &expr.kind
+ && let Some((candidate, check)) = normalize_le_ge(op, left, right)
+ && let Some((from, to)) = get_types_from_cast(check, INTS, "max_value", "MAX")
+ {
+ Conversion::try_new(candidate, from, to)
+ } else {
+ None
}
}
@@ -243,33 +236,27 @@ fn get_types_from_cast<'a>(
) -> Option<(&'a str, &'a str)> {
// `to_type::max_value() as from_type`
// or `to_type::MAX as from_type`
- let call_from_cast: Option<(&Expr<'_>, &str)> = if_chain! {
+ let call_from_cast: Option<(&Expr<'_>, &str)> = if let ExprKind::Cast(limit, from_type) = &expr.kind
// to_type::max_value(), from_type
- if let ExprKind::Cast(limit, from_type) = &expr.kind;
- if let TyKind::Path(ref from_type_path) = &from_type.kind;
- if let Some(from_sym) = int_ty_to_sym(from_type_path);
-
- then {
- Some((limit, from_sym))
- } else {
- None
- }
+ && let TyKind::Path(ref from_type_path) = &from_type.kind
+ && let Some(from_sym) = int_ty_to_sym(from_type_path)
+ {
+ Some((limit, from_sym))
+ } else {
+ None
};
// `from_type::from(to_type::max_value())`
let limit_from: Option<(&Expr<'_>, &str)> = call_from_cast.or_else(|| {
- if_chain! {
+ if let ExprKind::Call(from_func, [limit]) = &expr.kind
// `from_type::from, to_type::max_value()`
- if let ExprKind::Call(from_func, [limit]) = &expr.kind;
// `from_type::from`
- if let ExprKind::Path(ref path) = &from_func.kind;
- if let Some(from_sym) = get_implementing_type(path, INTS, "from");
-
- then {
- Some((limit, from_sym))
- } else {
- None
- }
+ && let ExprKind::Path(ref path) = &from_func.kind
+ && let Some(from_sym) = get_implementing_type(path, INTS, "from")
+ {
+ Some((limit, from_sym))
+ } else {
+ None
}
});
@@ -298,31 +285,27 @@ fn get_types_from_cast<'a>(
/// Gets the type which implements the called function
fn get_implementing_type<'a>(path: &QPath<'_>, candidates: &'a [&str], function: &str) -> Option<&'a str> {
- if_chain! {
- if let QPath::TypeRelative(ty, path) = &path;
- if path.ident.name.as_str() == function;
- if let TyKind::Path(QPath::Resolved(None, tp)) = &ty.kind;
- if let [int] = tp.segments;
- then {
- let name = int.ident.name.as_str();
- candidates.iter().find(|c| &name == *c).copied()
- } else {
- None
- }
+ if let QPath::TypeRelative(ty, path) = &path
+ && path.ident.name.as_str() == function
+ && let TyKind::Path(QPath::Resolved(None, tp)) = &ty.kind
+ && let [int] = tp.segments
+ {
+ let name = int.ident.name.as_str();
+ candidates.iter().find(|c| &name == *c).copied()
+ } else {
+ None
}
}
/// Gets the type as a string, if it is a supported integer
fn int_ty_to_sym<'tcx>(path: &QPath<'_>) -> Option<&'tcx str> {
- if_chain! {
- if let QPath::Resolved(_, path) = *path;
- if let [ty] = path.segments;
- then {
- let name = ty.ident.name.as_str();
- INTS.iter().find(|c| &name == *c).copied()
- } else {
- None
- }
+ if let QPath::Resolved(_, path) = *path
+ && let [ty] = path.segments
+ {
+ let name = ty.ident.name.as_str();
+ INTS.iter().find(|c| &name == *c).copied()
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
index 74ecaa60c..60f436dc5 100644
--- a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
+++ b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
@@ -10,7 +10,7 @@ use rustc_ast::ast::Attribute;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, Expr, ExprKind, FnDecl};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, BytePos, Span};
diff --git a/src/tools/clippy/clippy_lints/src/collapsible_if.rs b/src/tools/clippy/clippy_lints/src/collapsible_if.rs
index d21ef195d..07b02c98d 100644
--- a/src/tools/clippy/clippy_lints/src/collapsible_if.rs
+++ b/src/tools/clippy/clippy_lints/src/collapsible_if.rs
@@ -15,11 +15,10 @@
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::{snippet, snippet_block, snippet_block_with_applicability};
use clippy_utils::sugg::Sugg;
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -121,49 +120,55 @@ fn block_starts_with_comment(cx: &EarlyContext<'_>, expr: &ast::Block) -> bool {
}
fn check_collapsible_maybe_if_let(cx: &EarlyContext<'_>, then_span: Span, else_: &ast::Expr) {
- if_chain! {
- if let ast::ExprKind::Block(ref block, _) = else_.kind;
- if !block_starts_with_comment(cx, block);
- if let Some(else_) = expr_block(block);
- if else_.attrs.is_empty();
- if !else_.span.from_expansion();
- if let ast::ExprKind::If(..) = else_.kind;
- then {
- // Prevent "elseif"
- // Check that the "else" is followed by whitespace
- let up_to_else = then_span.between(block.span);
- let requires_space = if let Some(c) = snippet(cx, up_to_else, "..").chars().last() { !c.is_whitespace() } else { false };
+ if let ast::ExprKind::Block(ref block, _) = else_.kind
+ && !block_starts_with_comment(cx, block)
+ && let Some(else_) = expr_block(block)
+ && else_.attrs.is_empty()
+ && !else_.span.from_expansion()
+ && let ast::ExprKind::If(..) = else_.kind
+ {
+ // Prevent "elseif"
+ // Check that the "else" is followed by whitespace
+ let up_to_else = then_span.between(block.span);
+ let requires_space = if let Some(c) = snippet(cx, up_to_else, "..").chars().last() {
+ !c.is_whitespace()
+ } else {
+ false
+ };
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- COLLAPSIBLE_ELSE_IF,
- block.span,
- "this `else { if .. }` block can be collapsed",
- "collapse nested if block",
- format!(
- "{}{}",
- if requires_space { " " } else { "" },
- snippet_block_with_applicability(cx, else_.span, "..", Some(block.span), &mut applicability)
- ),
- applicability,
- );
- }
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ COLLAPSIBLE_ELSE_IF,
+ block.span,
+ "this `else { if .. }` block can be collapsed",
+ "collapse nested if block",
+ format!(
+ "{}{}",
+ if requires_space { " " } else { "" },
+ snippet_block_with_applicability(cx, else_.span, "..", Some(block.span), &mut applicability)
+ ),
+ applicability,
+ );
}
}
fn check_collapsible_no_if_let(cx: &EarlyContext<'_>, expr: &ast::Expr, check: &ast::Expr, then: &ast::Block) {
- if_chain! {
- if !block_starts_with_comment(cx, then);
- if let Some(inner) = expr_block(then);
- if inner.attrs.is_empty();
- if let ast::ExprKind::If(ref check_inner, ref content, None) = inner.kind;
+ if !block_starts_with_comment(cx, then)
+ && let Some(inner) = expr_block(then)
+ && inner.attrs.is_empty()
+ && let ast::ExprKind::If(ref check_inner, ref content, None) = inner.kind
// Prevent triggering on `if c { if let a = b { .. } }`.
- if !matches!(check_inner.kind, ast::ExprKind::Let(..));
- let ctxt = expr.span.ctxt();
- if inner.span.ctxt() == ctxt;
- then {
- span_lint_and_then(cx, COLLAPSIBLE_IF, expr.span, "this `if` statement can be collapsed", |diag| {
+ && !matches!(check_inner.kind, ast::ExprKind::Let(..))
+ && let ctxt = expr.span.ctxt()
+ && inner.span.ctxt() == ctxt
+ {
+ span_lint_and_then(
+ cx,
+ COLLAPSIBLE_IF,
+ expr.span,
+ "this `if` statement can be collapsed",
+ |diag| {
let mut app = Applicability::MachineApplicable;
let lhs = Sugg::ast(cx, check, "..", ctxt, &mut app);
let rhs = Sugg::ast(cx, check_inner, "..", ctxt, &mut app);
@@ -177,8 +182,8 @@ fn check_collapsible_no_if_let(cx: &EarlyContext<'_>, expr: &ast::Expr, check: &
),
app, // snippet
);
- });
- }
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs b/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs
index 1dfc2e251..d0c989cff 100644
--- a/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs
+++ b/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs
@@ -5,7 +5,7 @@ use clippy_utils::{get_enclosing_block, get_parent_node, path_to_local_id};
use core::ops::ControlFlow;
use rustc_hir::{Block, ExprKind, HirId, LangItem, Local, Node, PatKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
use rustc_span::Symbol;
diff --git a/src/tools/clippy/clippy_lints/src/comparison_chain.rs b/src/tools/clippy/clippy_lints/src/comparison_chain.rs
index 0fe973b49..2c23c0b4f 100644
--- a/src/tools/clippy/clippy_lints/src/comparison_chain.rs
+++ b/src/tools/clippy/clippy_lints/src/comparison_chain.rs
@@ -3,7 +3,7 @@ use clippy_utils::ty::implements_trait;
use clippy_utils::{if_sequence, in_constant, is_else_clause, SpanlessEq};
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs
index e3a09636e..d91af76f5 100644
--- a/src/tools/clippy/clippy_lints/src/copies.rs
+++ b/src/tools/clippy/clippy_lints/src/copies.rs
@@ -13,7 +13,7 @@ use rustc_hir::def_id::DefIdSet;
use rustc_hir::{intravisit, BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::query::Key;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::hygiene::walk_chain;
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, Span, Symbol};
@@ -117,7 +117,7 @@ declare_clippy_lint! {
/// ```
#[clippy::version = "pre 1.29.0"]
pub IF_SAME_THEN_ELSE,
- correctness,
+ style,
"`if` with the same `then` and `else` blocks"
}
diff --git a/src/tools/clippy/clippy_lints/src/copy_iterator.rs b/src/tools/clippy/clippy_lints/src/copy_iterator.rs
index 5d04ad011..50fd76a3a 100644
--- a/src/tools/clippy/clippy_lints/src/copy_iterator.rs
+++ b/src/tools/clippy/clippy_lints/src/copy_iterator.rs
@@ -2,11 +2,9 @@ use clippy_utils::diagnostics::span_lint_and_note;
use clippy_utils::ty::is_copy;
use rustc_hir::{Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
-use if_chain::if_chain;
-
declare_clippy_lint! {
/// ### What it does
/// Checks for types that implement `Copy` as well as
@@ -38,25 +36,23 @@ declare_lint_pass!(CopyIterator => [COPY_ITERATOR]);
impl<'tcx> LateLintPass<'tcx> for CopyIterator {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
- if_chain! {
- if let ItemKind::Impl(Impl {
- of_trait: Some(ref trait_ref),
- ..
- }) = item.kind;
- let ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
- if is_copy(cx, ty);
- if let Some(trait_id) = trait_ref.trait_def_id();
- if cx.tcx.is_diagnostic_item(sym::Iterator, trait_id);
- then {
- span_lint_and_note(
- cx,
- COPY_ITERATOR,
- item.span,
- "you are implementing `Iterator` on a `Copy` type",
- None,
- "consider implementing `IntoIterator` instead",
- );
- }
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ ..
+ }) = item.kind
+ && let ty = cx.tcx.type_of(item.owner_id).instantiate_identity()
+ && is_copy(cx, ty)
+ && let Some(trait_id) = trait_ref.trait_def_id()
+ && cx.tcx.is_diagnostic_item(sym::Iterator, trait_id)
+ {
+ span_lint_and_note(
+ cx,
+ COPY_ITERATOR,
+ item.span,
+ "you are implementing `Iterator` on a `Copy` type",
+ None,
+ "consider implementing `IntoIterator` instead",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
index a2005638d..b1aa472aa 100644
--- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -4,7 +4,7 @@ use rustc_ast::token::{Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -53,35 +53,31 @@ declare_lint_pass!(CrateInMacroDef => [CRATE_IN_MACRO_DEF]);
impl EarlyLintPass for CrateInMacroDef {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
- if_chain! {
- if item.attrs.iter().any(is_macro_export);
- if let ItemKind::MacroDef(macro_def) = &item.kind;
- let tts = macro_def.body.tokens.clone();
- if let Some(span) = contains_unhygienic_crate_reference(&tts);
- then {
- span_lint_and_sugg(
- cx,
- CRATE_IN_MACRO_DEF,
- span,
- "`crate` references the macro call's crate",
- "to reference the macro definition's crate, use",
- String::from("$crate"),
- Applicability::MachineApplicable,
- );
- }
+ if item.attrs.iter().any(is_macro_export)
+ && let ItemKind::MacroDef(macro_def) = &item.kind
+ && let tts = macro_def.body.tokens.clone()
+ && let Some(span) = contains_unhygienic_crate_reference(&tts)
+ {
+ span_lint_and_sugg(
+ cx,
+ CRATE_IN_MACRO_DEF,
+ span,
+ "`crate` references the macro call's crate",
+ "to reference the macro definition's crate, use",
+ String::from("$crate"),
+ Applicability::MachineApplicable,
+ );
}
}
}
fn is_macro_export(attr: &Attribute) -> bool {
- if_chain! {
- if let AttrKind::Normal(normal) = &attr.kind;
- if let [segment] = normal.item.path.segments.as_slice();
- then {
- segment.ident.name == sym::macro_export
- } else {
- false
- }
+ if let AttrKind::Normal(normal) = &attr.kind
+ && let [segment] = normal.item.path.segments.as_slice()
+ {
+ segment.ident.name == sym::macro_export
+ } else {
+ false
}
}
@@ -89,16 +85,14 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
let mut prev_is_dollar = false;
let mut cursor = tts.trees();
while let Some(curr) = cursor.next() {
- if_chain! {
- if !prev_is_dollar;
- if let Some(span) = is_crate_keyword(curr);
- if let Some(next) = cursor.look_ahead(0);
- if is_token(next, &TokenKind::ModSep);
- then {
- return Some(span);
- }
+ if !prev_is_dollar
+ && let Some(span) = is_crate_keyword(curr)
+ && let Some(next) = cursor.look_ahead(0)
+ && is_token(next, &TokenKind::ModSep)
+ {
+ return Some(span);
}
- if let TokenTree::Delimited(_, _, tts) = &curr {
+ if let TokenTree::Delimited(.., tts) = &curr {
let span = contains_unhygienic_crate_reference(tts);
if span.is_some() {
return span;
@@ -110,10 +104,18 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
}
fn is_crate_keyword(tt: &TokenTree) -> Option<Span> {
- if_chain! {
- if let TokenTree::Token(Token { kind: TokenKind::Ident(symbol, _), span }, _) = tt;
- if symbol.as_str() == "crate";
- then { Some(*span) } else { None }
+ if let TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(symbol, _),
+ span,
+ },
+ _,
+ ) = tt
+ && symbol.as_str() == "crate"
+ {
+ Some(*span)
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/create_dir.rs b/src/tools/clippy/clippy_lints/src/create_dir.rs
index 2bca695c4..7a3d5a070 100644
--- a/src/tools/clippy/clippy_lints/src/create_dir.rs
+++ b/src/tools/clippy/clippy_lints/src/create_dir.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -33,22 +32,20 @@ declare_lint_pass!(CreateDir => [CREATE_DIR]);
impl LateLintPass<'_> for CreateDir {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(func, [arg, ..]) = expr.kind;
- if let ExprKind::Path(ref path) = func.kind;
- if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::fs_create_dir, def_id);
- then {
- span_lint_and_sugg(
- cx,
- CREATE_DIR,
- expr.span,
- "calling `std::fs::create_dir` where there may be a better way",
- "consider calling `std::fs::create_dir_all` instead",
- format!("create_dir_all({})", snippet(cx, arg.span, "..")),
- Applicability::MaybeIncorrect,
- )
- }
+ if let ExprKind::Call(func, [arg, ..]) = expr.kind
+ && let ExprKind::Path(ref path) = func.kind
+ && let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::fs_create_dir, def_id)
+ {
+ span_lint_and_sugg(
+ cx,
+ CREATE_DIR,
+ expr.span,
+ "calling `std::fs::create_dir` where there may be a better way",
+ "consider calling `std::fs::create_dir_all` instead",
+ format!("create_dir_all({})", snippet(cx, arg.span, "..")),
+ Applicability::MaybeIncorrect,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/dbg_macro.rs b/src/tools/clippy/clippy_lints/src/dbg_macro.rs
index 49452136d..9424a9103 100644
--- a/src/tools/clippy/clippy_lints/src/dbg_macro.rs
+++ b/src/tools/clippy/clippy_lints/src/dbg_macro.rs
@@ -5,8 +5,8 @@ use clippy_utils::{is_in_cfg_test, is_in_test_function};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Node};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{sym, BytePos, Pos, Span};
+use rustc_session::impl_lint_pass;
+use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
@@ -31,31 +31,6 @@ declare_clippy_lint! {
"`dbg!` macro is intended as a debugging tool"
}
-/// Gets the span of the statement up to the next semicolon, if and only if the next
-/// non-whitespace character actually is a semicolon.
-/// E.g.
-/// ```rust,ignore
-///
-/// dbg!();
-/// ^^^^^^^ this span is returned
-///
-/// foo!(dbg!());
-/// no span is returned
-/// ```
-fn span_including_semi(cx: &LateContext<'_>, span: Span) -> Option<Span> {
- let sm = cx.sess().source_map();
- let sf = sm.lookup_source_file(span.hi());
- let src = sf.src.as_ref()?.get(span.hi().to_usize()..)?;
- let first_non_whitespace = src.find(|c: char| !c.is_whitespace())?;
-
- if src.as_bytes()[first_non_whitespace] == b';' {
- let hi = span.hi() + BytePos::from_usize(first_non_whitespace + 1);
- Some(span.with_hi(hi))
- } else {
- None
- }
-}
-
#[derive(Copy, Clone)]
pub struct DbgMacro {
allow_dbg_in_tests: bool,
@@ -88,10 +63,10 @@ impl LateLintPass<'_> for DbgMacro {
ExprKind::Block(..) => {
// If the `dbg!` macro is a "free" statement and not contained within other expressions,
// remove the whole statement.
- if let Some(Node::Stmt(stmt)) = cx.tcx.hir().find_parent(expr.hir_id)
- && let Some(span) = span_including_semi(cx, stmt.span.source_callsite())
+ if let Some(Node::Stmt(_)) = cx.tcx.hir().find_parent(expr.hir_id)
+ && let Some(semi_span) = cx.sess().source_map().mac_call_stmt_semi_span(macro_call.span)
{
- (span, String::new())
+ (macro_call.span.to(semi_span), String::new())
} else {
(macro_call.span, String::from("()"))
}
diff --git a/src/tools/clippy/clippy_lints/src/declared_lints.rs b/src/tools/clippy/clippy_lints/src/declared_lints.rs
index 1a646ba38..1220eb890 100644
--- a/src/tools/clippy/clippy_lints/src/declared_lints.rs
+++ b/src/tools/clippy/clippy_lints/src/declared_lints.rs
@@ -10,8 +10,6 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
#[cfg(feature = "internal")]
crate::utils::internal_lints::compiler_lint_functions::COMPILER_LINT_FUNCTIONS_INFO,
#[cfg(feature = "internal")]
- crate::utils::internal_lints::if_chain_style::IF_CHAIN_STYLE_INFO,
- #[cfg(feature = "internal")]
crate::utils::internal_lints::interning_defined_symbol::INTERNING_DEFINED_SYMBOL_INFO,
#[cfg(feature = "internal")]
crate::utils::internal_lints::interning_defined_symbol::UNNECESSARY_SYMBOL_STR_INFO,
@@ -65,7 +63,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::await_holding_invalid::AWAIT_HOLDING_INVALID_TYPE_INFO,
crate::await_holding_invalid::AWAIT_HOLDING_LOCK_INFO,
crate::await_holding_invalid::AWAIT_HOLDING_REFCELL_REF_INFO,
- crate::blocks_in_if_conditions::BLOCKS_IN_IF_CONDITIONS_INFO,
+ crate::blocks_in_conditions::BLOCKS_IN_CONDITIONS_INFO,
crate::bool_assert_comparison::BOOL_ASSERT_COMPARISON_INFO,
crate::bool_to_int_with_if::BOOL_TO_INT_WITH_IF_INFO,
crate::booleans::NONMINIMAL_BOOL_INFO,
@@ -141,6 +139,8 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::doc::MISSING_PANICS_DOC_INFO,
crate::doc::MISSING_SAFETY_DOC_INFO,
crate::doc::NEEDLESS_DOCTEST_MAIN_INFO,
+ crate::doc::SUSPICIOUS_DOC_COMMENTS_INFO,
+ crate::doc::TEST_ATTR_IN_DOCTEST_INFO,
crate::doc::UNNECESSARY_SAFETY_DOC_INFO,
crate::double_parens::DOUBLE_PARENS_INFO,
crate::drop_forget_ref::DROP_NON_DROP_INFO,
@@ -205,6 +205,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::if_not_else::IF_NOT_ELSE_INFO,
crate::if_then_some_else_none::IF_THEN_SOME_ELSE_NONE_INFO,
crate::ignored_unit_patterns::IGNORED_UNIT_PATTERNS_INFO,
+ crate::impl_hash_with_borrow_str_and_bytes::IMPL_HASH_BORROW_WITH_STR_AND_BYTES_INFO,
crate::implicit_hasher::IMPLICIT_HASHER_INFO,
crate::implicit_return::IMPLICIT_RETURN_INFO,
crate::implicit_saturating_add::IMPLICIT_SATURATING_ADD_INFO,
@@ -214,6 +215,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::index_refutable_slice::INDEX_REFUTABLE_SLICE_INFO,
crate::indexing_slicing::INDEXING_SLICING_INFO,
crate::indexing_slicing::OUT_OF_BOUNDS_INDEXING_INFO,
+ crate::ineffective_open_options::INEFFECTIVE_OPEN_OPTIONS_INFO,
crate::infinite_iter::INFINITE_ITER_INFO,
crate::infinite_iter::MAYBE_INFINITE_ITER_INFO,
crate::inherent_impl::MULTIPLE_INHERENT_IMPL_INFO,
@@ -232,6 +234,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::items_after_statements::ITEMS_AFTER_STATEMENTS_INFO,
crate::items_after_test_module::ITEMS_AFTER_TEST_MODULE_INFO,
crate::iter_not_returning_iterator::ITER_NOT_RETURNING_ITERATOR_INFO,
+ crate::iter_over_hash_type::ITER_OVER_HASH_TYPE_INFO,
crate::iter_without_into_iter::INTO_ITER_WITHOUT_ITER_INFO,
crate::iter_without_into_iter::ITER_WITHOUT_INTO_ITER_INFO,
crate::large_const_arrays::LARGE_CONST_ARRAYS_INFO,
@@ -263,6 +266,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::loops::EXPLICIT_INTO_ITER_LOOP_INFO,
crate::loops::EXPLICIT_ITER_LOOP_INFO,
crate::loops::FOR_KV_MAP_INFO,
+ crate::loops::INFINITE_LOOP_INFO,
crate::loops::ITER_NEXT_LOOP_INFO,
crate::loops::MANUAL_FIND_INFO,
crate::loops::MANUAL_FLATTEN_INFO,
@@ -376,6 +380,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::ITER_SKIP_NEXT_INFO,
crate::methods::ITER_SKIP_ZERO_INFO,
crate::methods::ITER_WITH_DRAIN_INFO,
+ crate::methods::JOIN_ABSOLUTE_PATHS_INFO,
crate::methods::MANUAL_FILTER_MAP_INFO,
crate::methods::MANUAL_FIND_MAP_INFO,
crate::methods::MANUAL_NEXT_BACK_INFO,
@@ -403,6 +408,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::OK_EXPECT_INFO,
crate::methods::OPTION_AS_REF_DEREF_INFO,
crate::methods::OPTION_FILTER_MAP_INFO,
+ crate::methods::OPTION_MAP_OR_ERR_OK_INFO,
crate::methods::OPTION_MAP_OR_NONE_INFO,
crate::methods::OR_FUN_CALL_INFO,
crate::methods::OR_THEN_UNWRAP_INFO,
@@ -594,6 +600,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::reference::DEREF_ADDROF_INFO,
crate::regex::INVALID_REGEX_INFO,
crate::regex::TRIVIAL_REGEX_INFO,
+ crate::repeat_vec_with_capacity::REPEAT_VEC_WITH_CAPACITY_INFO,
crate::reserve_after_initialization::RESERVE_AFTER_INITIALIZATION_INFO,
crate::return_self_not_must_use::RETURN_SELF_NOT_MUST_USE_INFO,
crate::returns::LET_AND_RETURN_INFO,
@@ -628,7 +635,6 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::strings::STR_TO_STRING_INFO,
crate::strings::TRIM_SPLIT_WHITESPACE_INFO,
crate::strlen_on_c_strings::STRLEN_ON_C_STRINGS_INFO,
- crate::suspicious_doc_comments::SUSPICIOUS_DOC_COMMENTS_INFO,
crate::suspicious_operation_groupings::SUSPICIOUS_OPERATION_GROUPINGS_INFO,
crate::suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL_INFO,
crate::suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL_INFO,
@@ -675,13 +681,13 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::unicode::INVISIBLE_CHARACTERS_INFO,
crate::unicode::NON_ASCII_LITERAL_INFO,
crate::unicode::UNICODE_NOT_NFC_INFO,
+ crate::uninhabited_references::UNINHABITED_REFERENCES_INFO,
crate::uninit_vec::UNINIT_VEC_INFO,
crate::unit_return_expecting_ord::UNIT_RETURN_EXPECTING_ORD_INFO,
crate::unit_types::LET_UNIT_VALUE_INFO,
crate::unit_types::UNIT_ARG_INFO,
crate::unit_types::UNIT_CMP_INFO,
crate::unnamed_address::FN_ADDRESS_COMPARISONS_INFO,
- crate::unnamed_address::VTABLE_ADDRESS_COMPARISONS_INFO,
crate::unnecessary_box_returns::UNNECESSARY_BOX_RETURNS_INFO,
crate::unnecessary_map_on_constructor::UNNECESSARY_MAP_ON_CONSTRUCTOR_INFO,
crate::unnecessary_owned_empty_strings::UNNECESSARY_OWNED_EMPTY_STRINGS_INFO,
diff --git a/src/tools/clippy/clippy_lints/src/default.rs b/src/tools/clippy/clippy_lints/src/default.rs
index c74b2b883..d8a070b78 100644
--- a/src/tools/clippy/clippy_lints/src/default.rs
+++ b/src/tools/clippy/clippy_lints/src/default.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::{span_lint_and_note, span_lint_and_sugg};
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{has_drop, is_copy};
use clippy_utils::{any_parent_is_automatically_derived, contains_name, get_parent_expr, is_from_proc_macro};
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
@@ -10,7 +9,7 @@ use rustc_hir::{Block, Expr, ExprKind, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_middle::ty::print::with_forced_trimmed_paths;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{sym, Span};
@@ -81,33 +80,31 @@ impl_lint_pass!(Default => [DEFAULT_TRAIT_ACCESS, FIELD_REASSIGN_WITH_DEFAULT]);
impl<'tcx> LateLintPass<'tcx> for Default {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
- if !expr.span.from_expansion();
+ if !expr.span.from_expansion()
// Avoid cases already linted by `field_reassign_with_default`
- if !self.reassigned_linted.contains(&expr.span);
- if let ExprKind::Call(path, ..) = expr.kind;
- if !any_parent_is_automatically_derived(cx.tcx, expr.hir_id);
- if let ExprKind::Path(ref qpath) = path.kind;
- if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::default_fn, def_id);
- if !is_update_syntax_base(cx, expr);
+ && !self.reassigned_linted.contains(&expr.span)
+ && let ExprKind::Call(path, ..) = expr.kind
+ && !any_parent_is_automatically_derived(cx.tcx, expr.hir_id)
+ && let ExprKind::Path(ref qpath) = path.kind
+ && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::default_fn, def_id)
+ && !is_update_syntax_base(cx, expr)
// Detect and ignore <Foo as Default>::default() because these calls do explicitly name the type.
- if let QPath::Resolved(None, _path) = qpath;
- let expr_ty = cx.typeck_results().expr_ty(expr);
- if let ty::Adt(def, ..) = expr_ty.kind();
- if !is_from_proc_macro(cx, expr);
- then {
- let replacement = with_forced_trimmed_paths!(format!("{}::default()", cx.tcx.def_path_str(def.did())));
- span_lint_and_sugg(
- cx,
- DEFAULT_TRAIT_ACCESS,
- expr.span,
- &format!("calling `{replacement}` is more clear than this expression"),
- "try",
- replacement,
- Applicability::Unspecified, // First resolve the TODO above
- );
- }
+ && let QPath::Resolved(None, _path) = qpath
+ && let expr_ty = cx.typeck_results().expr_ty(expr)
+ && let ty::Adt(def, ..) = expr_ty.kind()
+ && !is_from_proc_macro(cx, expr)
+ {
+ let replacement = with_forced_trimmed_paths!(format!("{}::default()", cx.tcx.def_path_str(def.did())));
+ span_lint_and_sugg(
+ cx,
+ DEFAULT_TRAIT_ACCESS,
+ expr.span,
+ &format!("calling `{replacement}` is more clear than this expression"),
+ "try",
+ replacement,
+ Applicability::Unspecified, // First resolve the TODO above
+ );
}
}
@@ -124,38 +121,36 @@ impl<'tcx> LateLintPass<'tcx> for Default {
// find all binding statements like `let mut _ = T::default()` where `T::default()` is the
// `default` method of the `Default` trait, and store statement index in current block being
// checked and the name of the bound variable
- let (local, variant, binding_name, binding_type, span) = if_chain! {
+ let (local, variant, binding_name, binding_type, span) = if let StmtKind::Local(local) = stmt.kind
// only take `let ...` statements
- if let StmtKind::Local(local) = stmt.kind;
- if let Some(expr) = local.init;
- if !any_parent_is_automatically_derived(cx.tcx, expr.hir_id);
- if !expr.span.from_expansion();
+ && let Some(expr) = local.init
+ && !any_parent_is_automatically_derived(cx.tcx, expr.hir_id)
+ && !expr.span.from_expansion()
// only take bindings to identifiers
- if let PatKind::Binding(_, binding_id, ident, _) = local.pat.kind;
+ && let PatKind::Binding(_, binding_id, ident, _) = local.pat.kind
// only when assigning `... = Default::default()`
- if is_expr_default(expr, cx);
- let binding_type = cx.typeck_results().node_type(binding_id);
- if let Some(adt) = binding_type.ty_adt_def();
- if adt.is_struct();
- let variant = adt.non_enum_variant();
- if adt.did().is_local() || !variant.is_field_list_non_exhaustive();
- let module_did = cx.tcx.parent_module(stmt.hir_id);
- if variant
+ && is_expr_default(expr, cx)
+ && let binding_type = cx.typeck_results().node_type(binding_id)
+ && let Some(adt) = binding_type.ty_adt_def()
+ && adt.is_struct()
+ && let variant = adt.non_enum_variant()
+ && (adt.did().is_local() || !variant.is_field_list_non_exhaustive())
+ && let module_did = cx.tcx.parent_module(stmt.hir_id)
+ && variant
.fields
.iter()
- .all(|field| field.vis.is_accessible_from(module_did, cx.tcx));
- let all_fields_are_copy = variant
+ .all(|field| field.vis.is_accessible_from(module_did, cx.tcx))
+ && let all_fields_are_copy = variant
.fields
.iter()
.all(|field| {
is_copy(cx, cx.tcx.type_of(field.did).instantiate_identity())
- });
- if !has_drop(cx, binding_type) || all_fields_are_copy;
- then {
- (local, variant, ident.name, binding_type, expr.span)
- } else {
- continue;
- }
+ })
+ && (!has_drop(cx, binding_type) || all_fields_are_copy)
+ {
+ (local, variant, ident.name, binding_type, expr.span)
+ } else {
+ continue;
};
let init_ctxt = local.span.ctxt();
@@ -216,21 +211,19 @@ impl<'tcx> LateLintPass<'tcx> for Default {
.join(", ");
// give correct suggestion if generics are involved (see #6944)
- let binding_type = if_chain! {
- if let ty::Adt(adt_def, args) = binding_type.kind();
- if !args.is_empty();
- then {
- let adt_def_ty_name = cx.tcx.item_name(adt_def.did());
- let generic_args = args.iter().collect::<Vec<_>>();
- let tys_str = generic_args
- .iter()
- .map(ToString::to_string)
- .collect::<Vec<_>>()
- .join(", ");
- format!("{adt_def_ty_name}::<{}>", &tys_str)
- } else {
- binding_type.to_string()
- }
+ let binding_type = if let ty::Adt(adt_def, args) = binding_type.kind()
+ && !args.is_empty()
+ {
+ let adt_def_ty_name = cx.tcx.item_name(adt_def.did());
+ let generic_args = args.iter().collect::<Vec<_>>();
+ let tys_str = generic_args
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ .join(", ");
+ format!("{adt_def_ty_name}::<{}>", &tys_str)
+ } else {
+ binding_type.to_string()
};
let sugg = if ext_with_default {
@@ -260,48 +253,42 @@ impl<'tcx> LateLintPass<'tcx> for Default {
/// Checks if the given expression is the `default` method belonging to the `Default` trait.
fn is_expr_default<'tcx>(expr: &'tcx Expr<'tcx>, cx: &LateContext<'tcx>) -> bool {
- if_chain! {
- if let ExprKind::Call(fn_expr, _) = &expr.kind;
- if let ExprKind::Path(qpath) = &fn_expr.kind;
- if let Res::Def(_, def_id) = cx.qpath_res(qpath, fn_expr.hir_id);
- then {
- // right hand side of assignment is `Default::default`
- cx.tcx.is_diagnostic_item(sym::default_fn, def_id)
- } else {
- false
- }
+ if let ExprKind::Call(fn_expr, _) = &expr.kind
+ && let ExprKind::Path(qpath) = &fn_expr.kind
+ && let Res::Def(_, def_id) = cx.qpath_res(qpath, fn_expr.hir_id)
+ {
+ // right hand side of assignment is `Default::default`
+ cx.tcx.is_diagnostic_item(sym::default_fn, def_id)
+ } else {
+ false
}
}
/// Returns the reassigned field and the assigning expression (right-hand side of assign).
fn field_reassigned_by_stmt<'tcx>(this: &Stmt<'tcx>, binding_name: Symbol) -> Option<(Ident, &'tcx Expr<'tcx>)> {
- if_chain! {
+ if let StmtKind::Semi(later_expr) = this.kind
// only take assignments
- if let StmtKind::Semi(later_expr) = this.kind;
- if let ExprKind::Assign(assign_lhs, assign_rhs, _) = later_expr.kind;
+ && let ExprKind::Assign(assign_lhs, assign_rhs, _) = later_expr.kind
// only take assignments to fields where the left-hand side field is a field of
// the same binding as the previous statement
- if let ExprKind::Field(binding, field_ident) = assign_lhs.kind;
- if let ExprKind::Path(QPath::Resolved(_, path)) = binding.kind;
- if let Some(second_binding_name) = path.segments.last();
- if second_binding_name.ident.name == binding_name;
- then {
- Some((field_ident, assign_rhs))
- } else {
- None
- }
+ && let ExprKind::Field(binding, field_ident) = assign_lhs.kind
+ && let ExprKind::Path(QPath::Resolved(_, path)) = binding.kind
+ && let Some(second_binding_name) = path.segments.last()
+ && second_binding_name.ident.name == binding_name
+ {
+ Some((field_ident, assign_rhs))
+ } else {
+ None
}
}
/// Returns whether `expr` is the update syntax base: `Foo { a: 1, .. base }`
fn is_update_syntax_base<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, expr);
- if let ExprKind::Struct(_, _, Some(base)) = parent.kind;
- then {
- base.hir_id == expr.hir_id
- } else {
- false
- }
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let ExprKind::Struct(_, _, Some(base)) = parent.kind
+ {
+ base.hir_id == expr.hir_id
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs
index bf070432e..9ce5acfbc 100644
--- a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs
+++ b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs
@@ -6,7 +6,7 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -56,32 +56,30 @@ fn is_alias(ty: hir::Ty<'_>) -> bool {
impl LateLintPass<'_> for DefaultConstructedUnitStructs {
fn check_expr<'tcx>(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) {
- if_chain!(
+ if let hir::ExprKind::Call(fn_expr, &[]) = expr.kind
// make sure we have a call to `Default::default`
- if let hir::ExprKind::Call(fn_expr, &[]) = expr.kind;
- if let ExprKind::Path(ref qpath @ hir::QPath::TypeRelative(base, _)) = fn_expr.kind;
+ && let ExprKind::Path(ref qpath @ hir::QPath::TypeRelative(base, _)) = fn_expr.kind
// make sure this isn't a type alias:
// `<Foo as Bar>::Assoc` cannot be used as a constructor
- if !is_alias(*base);
- if let Res::Def(_, def_id) = cx.qpath_res(qpath, fn_expr.hir_id);
- if cx.tcx.is_diagnostic_item(sym::default_fn, def_id);
+ && !is_alias(*base)
+ && let Res::Def(_, def_id) = cx.qpath_res(qpath, fn_expr.hir_id)
+ && cx.tcx.is_diagnostic_item(sym::default_fn, def_id)
// make sure we have a struct with no fields (unit struct)
- if let ty::Adt(def, ..) = cx.typeck_results().expr_ty(expr).kind();
- if def.is_struct();
- if let var @ ty::VariantDef { ctor: Some((hir::def::CtorKind::Const, _)), .. } = def.non_enum_variant();
- if !var.is_field_list_non_exhaustive();
- if !expr.span.from_expansion() && !qpath.span().from_expansion();
- then {
- span_lint_and_sugg(
- cx,
- DEFAULT_CONSTRUCTED_UNIT_STRUCTS,
- expr.span.with_lo(qpath.qself_span().hi()),
- "use of `default` to create a unit struct",
- "remove this call to `default`",
- String::new(),
- Applicability::MachineApplicable,
- )
- }
- );
+ && let ty::Adt(def, ..) = cx.typeck_results().expr_ty(expr).kind()
+ && def.is_struct()
+ && let var @ ty::VariantDef { ctor: Some((hir::def::CtorKind::Const, _)), .. } = def.non_enum_variant()
+ && !var.is_field_list_non_exhaustive()
+ && !expr.span.from_expansion() && !qpath.span().from_expansion()
+ {
+ span_lint_and_sugg(
+ cx,
+ DEFAULT_CONSTRUCTED_UNIT_STRUCTS,
+ expr.span.with_lo(qpath.qself_span().hi()),
+ "use of `default` to create a unit struct",
+ "remove this call to `default`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ };
}
}
diff --git a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
index 553b670fd..2472e2ee7 100644
--- a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
+++ b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
@@ -4,7 +4,7 @@ use clippy_utils::source::snippet_with_context;
use rustc_errors::Applicability;
use rustc_hir::{def, Expr, ExprKind, GenericArg, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, SyntaxContext};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
index b296ea20f..64a924a77 100644
--- a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
+++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::{get_parent_node, numeric_literal};
-use if_chain::if_chain;
use rustc_ast::ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_stmt, Visitor};
@@ -9,7 +8,7 @@ use rustc_hir::{Body, Expr, ExprKind, HirId, ItemKind, Lit, Node, Stmt, StmtKind
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, FloatTy, IntTy, PolyFnSig, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::iter;
declare_clippy_lint! {
@@ -82,40 +81,40 @@ impl<'a, 'tcx> NumericFallbackVisitor<'a, 'tcx> {
/// Check whether a passed literal has potential to cause fallback or not.
fn check_lit(&self, lit: &Lit, lit_ty: Ty<'tcx>, emit_hir_id: HirId) {
- if_chain! {
- if !in_external_macro(self.cx.sess(), lit.span);
- if matches!(self.ty_bounds.last(), Some(ExplicitTyBound(false)));
- if matches!(lit.node,
- LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed));
- then {
- let (suffix, is_float) = match lit_ty.kind() {
- ty::Int(IntTy::I32) => ("i32", false),
- ty::Float(FloatTy::F64) => ("f64", true),
- // Default numeric fallback never results in other types.
- _ => return,
- };
-
- let src = if let Some(src) = snippet_opt(self.cx, lit.span) {
- src
- } else {
- match lit.node {
- LitKind::Int(src, _) => format!("{src}"),
- LitKind::Float(src, _) => format!("{src}"),
- _ => return,
- }
- };
- let sugg = numeric_literal::format(&src, Some(suffix), is_float);
- span_lint_hir_and_then(
- self.cx,
- DEFAULT_NUMERIC_FALLBACK,
- emit_hir_id,
- lit.span,
- "default numeric fallback might occur",
- |diag| {
- diag.span_suggestion(lit.span, "consider adding suffix", sugg, Applicability::MaybeIncorrect);
- }
- );
+ if !in_external_macro(self.cx.sess(), lit.span)
+ && matches!(self.ty_bounds.last(), Some(ExplicitTyBound(false)))
+ && matches!(
+ lit.node,
+ LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed)
+ )
+ {
+ let (suffix, is_float) = match lit_ty.kind() {
+ ty::Int(IntTy::I32) => ("i32", false),
+ ty::Float(FloatTy::F64) => ("f64", true),
+ // Default numeric fallback never results in other types.
+ _ => return,
+ };
+
+ let src = if let Some(src) = snippet_opt(self.cx, lit.span) {
+ src
+ } else {
+ match lit.node {
+ LitKind::Int(src, _) => format!("{src}"),
+ LitKind::Float(src, _) => format!("{src}"),
+ _ => return,
}
+ };
+ let sugg = numeric_literal::format(&src, Some(suffix), is_float);
+ span_lint_hir_and_then(
+ self.cx,
+ DEFAULT_NUMERIC_FALLBACK,
+ emit_hir_id,
+ lit.span,
+ "default numeric fallback might occur",
+ |diag| {
+ diag.span_suggestion(lit.span, "consider adding suffix", sugg, Applicability::MaybeIncorrect);
+ },
+ );
}
}
}
@@ -149,36 +148,33 @@ impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> {
ExprKind::Struct(_, fields, base) => {
let ty = self.cx.typeck_results().expr_ty(expr);
- if_chain! {
- if let Some(adt_def) = ty.ty_adt_def();
- if adt_def.is_struct();
- if let Some(variant) = adt_def.variants().iter().next();
- then {
- let fields_def = &variant.fields;
-
- // Push field type then visit each field expr.
- for field in *fields {
- let bound =
- fields_def
- .iter()
- .find_map(|f_def| {
- if f_def.ident(self.cx.tcx) == field.ident
- { Some(self.cx.tcx.type_of(f_def.did).instantiate_identity()) }
- else { None }
- });
- self.ty_bounds.push(bound.into());
- self.visit_expr(field.expr);
- self.ty_bounds.pop();
- }
-
- // Visit base with no bound.
- if let Some(base) = base {
- self.ty_bounds.push(ExplicitTyBound(false));
- self.visit_expr(base);
- self.ty_bounds.pop();
- }
- return;
+ if let Some(adt_def) = ty.ty_adt_def()
+ && adt_def.is_struct()
+ && let Some(variant) = adt_def.variants().iter().next()
+ {
+ let fields_def = &variant.fields;
+
+ // Push field type then visit each field expr.
+ for field in *fields {
+ let bound = fields_def.iter().find_map(|f_def| {
+ if f_def.ident(self.cx.tcx) == field.ident {
+ Some(self.cx.tcx.type_of(f_def.did).instantiate_identity())
+ } else {
+ None
+ }
+ });
+ self.ty_bounds.push(bound.into());
+ self.visit_expr(field.expr);
+ self.ty_bounds.pop();
+ }
+
+ // Visit base with no bound.
+ if let Some(base) = base {
+ self.ty_bounds.push(ExplicitTyBound(false));
+ self.visit_expr(base);
+ self.ty_bounds.pop();
}
+ return;
}
},
diff --git a/src/tools/clippy/clippy_lints/src/default_union_representation.rs b/src/tools/clippy/clippy_lints/src/default_union_representation.rs
index 8c6749a95..db01ff2cd 100644
--- a/src/tools/clippy/clippy_lints/src/default_union_representation.rs
+++ b/src/tools/clippy/clippy_lints/src/default_union_representation.rs
@@ -3,7 +3,7 @@ use rustc_hir::{HirId, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, FieldDef, GenericArg, List};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs
index 6c109a51f..aaef163ad 100644
--- a/src/tools/clippy/clippy_lints/src/dereference.rs
+++ b/src/tools/clippy/clippy_lints/src/dereference.rs
@@ -1,10 +1,11 @@
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_hir_and_then};
use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
use clippy_utils::sugg::has_enclosing_paren;
-use clippy_utils::ty::{implements_trait, peel_mid_ty_refs};
+use clippy_utils::ty::{implements_trait, is_manually_drop, peel_mid_ty_refs};
use clippy_utils::{
expr_use_ctxt, get_parent_expr, get_parent_node, is_lint_allowed, path_to_local, DefinedTy, ExprUseNode,
};
+use core::mem;
use rustc_ast::util::parser::{PREC_POSTFIX, PREC_PREFIX};
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::Applicability;
@@ -16,7 +17,7 @@ use rustc_hir::{
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeVisitableExt, TypeckResults};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
use rustc_span::{Span, Symbol};
@@ -170,9 +171,7 @@ pub struct Dereferencing<'tcx> {
#[derive(Debug)]
struct StateData<'tcx> {
- /// Span of the top level expression
- span: Span,
- hir_id: HirId,
+ first_expr: &'tcx Expr<'tcx>,
adjusted_ty: Ty<'tcx>,
}
@@ -198,6 +197,7 @@ enum State {
},
ExplicitDerefField {
name: Symbol,
+ derefs_manually_drop: bool,
},
Reborrow {
mutability: Mutability,
@@ -242,7 +242,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
// Stop processing sub expressions when a macro call is seen
if expr.span.from_expansion() {
if let Some((state, data)) = self.state.take() {
- report(cx, expr, state, data);
+ report(cx, expr, state, data, cx.typeck_results());
}
return;
}
@@ -251,7 +251,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
let Some((kind, sub_expr)) = try_parse_ref_op(cx.tcx, typeck, expr) else {
// The whole chain of reference operations has been seen
if let Some((state, data)) = self.state.take() {
- report(cx, expr, state, data);
+ report(cx, expr, state, data, typeck);
}
return;
};
@@ -272,14 +272,16 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
(Some(use_cx), RefOp::Deref) => {
let sub_ty = typeck.expr_ty(sub_expr);
if let ExprUseNode::FieldAccess(name) = use_cx.node
- && adjusted_ty.ty_adt_def().map_or(true, |adt| !adt.is_union())
+ && !use_cx.moved_before_use
&& !ty_contains_field(sub_ty, name.name)
{
self.state = Some((
- State::ExplicitDerefField { name: name.name },
+ State::ExplicitDerefField {
+ name: name.name,
+ derefs_manually_drop: is_manually_drop(sub_ty),
+ },
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty,
},
));
@@ -293,8 +295,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
self.state = Some((
State::ExplicitDeref { mutability: None },
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty,
},
));
@@ -313,8 +314,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
mutbl,
},
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty,
},
));
@@ -342,8 +342,18 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
TyCoercionStability::for_defined_ty(cx, ty, use_cx.node.is_return())
});
let can_auto_borrow = match use_cx.node {
- ExprUseNode::Callee => true,
- ExprUseNode::FieldAccess(_) => adjusted_ty.ty_adt_def().map_or(true, |adt| !adt.is_union()),
+ ExprUseNode::FieldAccess(_)
+ if !use_cx.moved_before_use && matches!(sub_expr.kind, ExprKind::Field(..)) =>
+ {
+ // `DerefMut` will not be automatically applied to `ManuallyDrop<_>`
+ // field expressions when the base type is a union and the parent
+ // expression is also a field access.
+ //
+ // e.g. `&mut x.y.z` where `x` is a union, and accessing `z` requires a
+ // deref through `ManuallyDrop<_>` will not compile.
+ !adjust_derefs_manually_drop(use_cx.adjustments, expr_ty)
+ },
+ ExprUseNode::Callee | ExprUseNode::FieldAccess(_) => true,
ExprUseNode::MethodArg(hir_id, _, 0) if !use_cx.moved_before_use => {
// Check for calls to trait methods where the trait is implemented
// on a reference.
@@ -357,11 +367,8 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
.tcx
.erase_regions(use_cx.adjustments.last().map_or(expr_ty, |a| a.target))
&& let ty::Ref(_, sub_ty, _) = *arg_ty.kind()
- && let args = cx
- .typeck_results()
- .node_args_opt(hir_id)
- .map(|args| &args[1..])
- .unwrap_or_default()
+ && let args =
+ typeck.node_args_opt(hir_id).map(|args| &args[1..]).unwrap_or_default()
&& let impl_ty =
if cx.tcx.fn_sig(fn_id).instantiate_identity().skip_binder().inputs()[0]
.is_ref()
@@ -436,14 +443,16 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
count: deref_count - required_refs,
msg,
stability,
- for_field_access: match use_cx.node {
- ExprUseNode::FieldAccess(name) => Some(name.name),
- _ => None,
+ for_field_access: if let ExprUseNode::FieldAccess(name) = use_cx.node
+ && !use_cx.moved_before_use
+ {
+ Some(name.name)
+ } else {
+ None
},
}),
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty: use_cx.adjustments.last().map_or(expr_ty, |a| a.target),
},
));
@@ -455,8 +464,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
self.state = Some((
State::Borrow { mutability },
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty: use_cx.adjustments.last().map_or(expr_ty, |a| a.target),
},
));
@@ -501,13 +509,12 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
(Some((State::DerefedBorrow(state), data)), RefOp::AddrOf(mutability)) => {
let adjusted_ty = data.adjusted_ty;
let stability = state.stability;
- report(cx, expr, State::DerefedBorrow(state), data);
+ report(cx, expr, State::DerefedBorrow(state), data, typeck);
if stability.is_deref_stable() {
self.state = Some((
State::Borrow { mutability },
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty,
},
));
@@ -517,15 +524,18 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
let adjusted_ty = data.adjusted_ty;
let stability = state.stability;
let for_field_access = state.for_field_access;
- report(cx, expr, State::DerefedBorrow(state), data);
+ report(cx, expr, State::DerefedBorrow(state), data, typeck);
if let Some(name) = for_field_access
- && !ty_contains_field(typeck.expr_ty(sub_expr), name)
+ && let sub_expr_ty = typeck.expr_ty(sub_expr)
+ && !ty_contains_field(sub_expr_ty, name)
{
self.state = Some((
- State::ExplicitDerefField { name },
+ State::ExplicitDerefField {
+ name,
+ derefs_manually_drop: is_manually_drop(sub_expr_ty),
+ },
StateData {
- span: expr.span,
- hir_id: expr.hir_id,
+ first_expr: expr,
adjusted_ty,
},
));
@@ -535,8 +545,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
self.state = Some((
State::ExplicitDeref { mutability: None },
StateData {
- span: parent.span,
- hir_id: parent.hir_id,
+ first_expr: parent,
adjusted_ty,
},
));
@@ -566,13 +575,28 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
(state @ Some((State::ExplicitDeref { .. }, _)), RefOp::Deref) => {
self.state = state;
},
- (Some((State::ExplicitDerefField { name }, data)), RefOp::Deref)
- if !ty_contains_field(typeck.expr_ty(sub_expr), name) =>
+ (
+ Some((
+ State::ExplicitDerefField {
+ name,
+ derefs_manually_drop,
+ },
+ data,
+ )),
+ RefOp::Deref,
+ ) if let sub_expr_ty = typeck.expr_ty(sub_expr)
+ && !ty_contains_field(sub_expr_ty, name) =>
{
- self.state = Some((State::ExplicitDerefField { name }, data));
+ self.state = Some((
+ State::ExplicitDerefField {
+ name,
+ derefs_manually_drop: derefs_manually_drop || is_manually_drop(sub_expr_ty),
+ },
+ data,
+ ));
},
- (Some((state, data)), _) => report(cx, expr, state, data),
+ (Some((state, data)), _) => report(cx, expr, state, data, typeck),
}
}
@@ -597,26 +621,24 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
return;
}
- if_chain! {
- if !pat.span.from_expansion();
- if let ty::Ref(_, tam, _) = *cx.typeck_results().pat_ty(pat).kind();
+ if !pat.span.from_expansion()
+ && let ty::Ref(_, tam, _) = *cx.typeck_results().pat_ty(pat).kind()
// only lint immutable refs, because borrowed `&mut T` cannot be moved out
- if let ty::Ref(_, _, Mutability::Not) = *tam.kind();
- then {
- let mut app = Applicability::MachineApplicable;
- let snip = snippet_with_context(cx, name.span, pat.span.ctxt(), "..", &mut app).0;
- self.current_body = self.current_body.or(cx.enclosing_body);
- self.ref_locals.insert(
- id,
- Some(RefPat {
- always_deref: true,
- spans: vec![pat.span],
- app,
- replacements: vec![(pat.span, snip.into())],
- hir_id: pat.hir_id,
- }),
- );
- }
+ && let ty::Ref(_, _, Mutability::Not) = *tam.kind()
+ {
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_context(cx, name.span, pat.span.ctxt(), "..", &mut app).0;
+ self.current_body = self.current_body.or(cx.enclosing_body);
+ self.ref_locals.insert(
+ id,
+ Some(RefPat {
+ always_deref: true,
+ spans: vec![pat.span],
+ app,
+ replacements: vec![(pat.span, snip.into())],
+ hir_id: pat.hir_id,
+ }),
+ );
}
}
}
@@ -689,6 +711,14 @@ fn try_parse_ref_op<'tcx>(
}
}
+// Checks if the adjustments contain a deref of `ManuallyDrop<_>`
+fn adjust_derefs_manually_drop<'tcx>(adjustments: &'tcx [Adjustment<'tcx>], mut ty: Ty<'tcx>) -> bool {
+ adjustments.iter().any(|a| {
+ let ty = mem::replace(&mut ty, a.target);
+ matches!(a.kind, Adjust::Deref(Some(ref op)) if op.mutbl == Mutability::Mut) && is_manually_drop(ty)
+ })
+}
+
// Checks whether the type for a deref call actually changed the type, not just the mutability of
// the reference.
fn deref_method_same_type<'tcx>(result_ty: Ty<'tcx>, arg_ty: Ty<'tcx>) -> bool {
@@ -741,7 +771,7 @@ impl TyCoercionStability {
DefinedTy::Mir(ty) => Self::for_mir_ty(
cx.tcx,
ty.param_env,
- cx.tcx.erase_late_bound_regions(ty.value),
+ cx.tcx.instantiate_bound_regions_with_erased(ty.value),
for_return,
),
}
@@ -898,7 +928,13 @@ fn ty_contains_field(ty: Ty<'_>, name: Symbol) -> bool {
}
#[expect(clippy::needless_pass_by_value, clippy::too_many_lines)]
-fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data: StateData<'tcx>) {
+fn report<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ state: State,
+ data: StateData<'tcx>,
+ typeck: &'tcx TypeckResults<'tcx>,
+) {
match state {
State::DerefMethod {
ty_changed_count,
@@ -906,8 +942,9 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
mutbl,
} => {
let mut app = Applicability::MachineApplicable;
- let (expr_str, _expr_is_macro_call) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
- let ty = cx.typeck_results().expr_ty(expr);
+ let (expr_str, _expr_is_macro_call) =
+ snippet_with_context(cx, expr.span, data.first_expr.span.ctxt(), "..", &mut app);
+ let ty = typeck.expr_ty(expr);
let (_, ref_count) = peel_mid_ty_refs(ty);
let deref_str = if ty_changed_count >= ref_count && ref_count != 0 {
// a deref call changing &T -> &U requires two deref operators the first time
@@ -947,7 +984,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
span_lint_and_sugg(
cx,
EXPLICIT_DEREF_METHODS,
- data.span,
+ data.first_expr.span,
match mutbl {
Mutability::Not => "explicit `deref` method call",
Mutability::Mut => "explicit `deref_mut` method call",
@@ -959,26 +996,34 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
},
State::DerefedBorrow(state) => {
let mut app = Applicability::MachineApplicable;
- let (snip, snip_is_macro) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
- span_lint_hir_and_then(cx, NEEDLESS_BORROW, data.hir_id, data.span, state.msg, |diag| {
- let (precedence, calls_field) = match get_parent_node(cx.tcx, data.hir_id) {
- Some(Node::Expr(e)) => match e.kind {
- ExprKind::Call(callee, _) if callee.hir_id != data.hir_id => (0, false),
- ExprKind::Call(..) => (PREC_POSTFIX, matches!(expr.kind, ExprKind::Field(..))),
- _ => (e.precedence().order(), false),
- },
- _ => (0, false),
- };
- let sugg = if !snip_is_macro
- && (calls_field || expr.precedence().order() < precedence)
- && !has_enclosing_paren(&snip)
- {
- format!("({snip})")
- } else {
- snip.into()
- };
- diag.span_suggestion(data.span, "change this to", sugg, app);
- });
+ let (snip, snip_is_macro) =
+ snippet_with_context(cx, expr.span, data.first_expr.span.ctxt(), "..", &mut app);
+ span_lint_hir_and_then(
+ cx,
+ NEEDLESS_BORROW,
+ data.first_expr.hir_id,
+ data.first_expr.span,
+ state.msg,
+ |diag| {
+ let (precedence, calls_field) = match get_parent_node(cx.tcx, data.first_expr.hir_id) {
+ Some(Node::Expr(e)) => match e.kind {
+ ExprKind::Call(callee, _) if callee.hir_id != data.first_expr.hir_id => (0, false),
+ ExprKind::Call(..) => (PREC_POSTFIX, matches!(expr.kind, ExprKind::Field(..))),
+ _ => (e.precedence().order(), false),
+ },
+ _ => (0, false),
+ };
+ let sugg = if !snip_is_macro
+ && (calls_field || expr.precedence().order() < precedence)
+ && !has_enclosing_paren(&snip)
+ {
+ format!("({snip})")
+ } else {
+ snip.into()
+ };
+ diag.span_suggestion(data.first_expr.span, "change this to", sugg, app);
+ },
+ );
},
State::ExplicitDeref { mutability } => {
if matches!(
@@ -996,7 +1041,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
}
let (prefix, precedence) = if let Some(mutability) = mutability
- && !cx.typeck_results().expr_ty(expr).is_ref()
+ && !typeck.expr_ty(expr).is_ref()
{
let prefix = match mutability {
Mutability::Not => "&",
@@ -1009,53 +1054,61 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
span_lint_hir_and_then(
cx,
EXPLICIT_AUTO_DEREF,
- data.hir_id,
- data.span,
+ data.first_expr.hir_id,
+ data.first_expr.span,
"deref which would be done by auto-deref",
|diag| {
let mut app = Applicability::MachineApplicable;
- let (snip, snip_is_macro) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
+ let (snip, snip_is_macro) =
+ snippet_with_context(cx, expr.span, data.first_expr.span.ctxt(), "..", &mut app);
let sugg =
if !snip_is_macro && expr.precedence().order() < precedence && !has_enclosing_paren(&snip) {
format!("{prefix}({snip})")
} else {
format!("{prefix}{snip}")
};
- diag.span_suggestion(data.span, "try", sugg, app);
+ diag.span_suggestion(data.first_expr.span, "try", sugg, app);
},
);
},
- State::ExplicitDerefField { .. } => {
- if matches!(
- expr.kind,
- ExprKind::Block(..)
- | ExprKind::ConstBlock(_)
- | ExprKind::If(..)
- | ExprKind::Loop(..)
- | ExprKind::Match(..)
- ) && data.adjusted_ty.is_sized(cx.tcx, cx.param_env)
- {
- // Rustc bug: auto deref doesn't work on block expression when targeting sized types.
- return;
- }
-
- if let ExprKind::Field(parent_expr, _) = expr.kind
- && let ty::Adt(adt, _) = cx.typeck_results().expr_ty(parent_expr).kind()
- && adt.is_union()
- {
- // Auto deref does not apply on union field
- return;
- }
+ State::ExplicitDerefField {
+ derefs_manually_drop, ..
+ } => {
+ let (snip_span, needs_parens) = if matches!(expr.kind, ExprKind::Field(..))
+ && (derefs_manually_drop
+ || adjust_derefs_manually_drop(
+ typeck.expr_adjustments(data.first_expr),
+ typeck.expr_ty(data.first_expr),
+ )) {
+ // `DerefMut` will not be automatically applied to `ManuallyDrop<_>`
+ // field expressions when the base type is a union and the parent
+ // expression is also a field access.
+ //
+ // e.g. `&mut x.y.z` where `x` is a union, and accessing `z` requires a
+ // deref through `ManuallyDrop<_>` will not compile.
+ let parent_id = cx.tcx.hir().parent_id(expr.hir_id);
+ if parent_id == data.first_expr.hir_id {
+ return;
+ }
+ (cx.tcx.hir_node(parent_id).expect_expr().span, true)
+ } else {
+ (expr.span, false)
+ };
span_lint_hir_and_then(
cx,
EXPLICIT_AUTO_DEREF,
- data.hir_id,
- data.span,
+ data.first_expr.hir_id,
+ data.first_expr.span,
"deref which would be done by auto-deref",
|diag| {
let mut app = Applicability::MachineApplicable;
- let snip = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app).0;
- diag.span_suggestion(data.span, "try", snip.into_owned(), app);
+ let snip = snippet_with_context(cx, snip_span, data.first_expr.span.ctxt(), "..", &mut app).0;
+ let sugg = if needs_parens {
+ format!("({snip})")
+ } else {
+ snip.into_owned()
+ };
+ diag.span_suggestion(data.first_expr.span, "try", sugg, app);
},
);
},
diff --git a/src/tools/clippy/clippy_lints/src/derivable_impls.rs b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
index a450becc6..6b0423200 100644
--- a/src/tools/clippy/clippy_lints/src/derivable_impls.rs
+++ b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
@@ -10,7 +10,7 @@ use rustc_hir::{
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::adjustment::{Adjust, PointerCoercion};
use rustc_middle::ty::{self, Adt, AdtDef, GenericArgsRef, Ty, TypeckResults};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -148,83 +148,65 @@ fn check_struct<'tcx>(
}
fn check_enum<'tcx>(cx: &LateContext<'tcx>, item: &'tcx Item<'_>, func_expr: &Expr<'_>, adt_def: AdtDef<'_>) {
- if_chain! {
- if let ExprKind::Path(QPath::Resolved(None, p)) = &peel_blocks(func_expr).kind;
- if let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), id) = p.res;
- if let variant_id = cx.tcx.parent(id);
- if let Some(variant_def) = adt_def.variants().iter().find(|v| v.def_id == variant_id);
- if variant_def.fields.is_empty();
- if !variant_def.is_field_list_non_exhaustive();
-
- then {
- let enum_span = cx.tcx.def_span(adt_def.did());
- let indent_enum = indent_of(cx, enum_span).unwrap_or(0);
- let variant_span = cx.tcx.def_span(variant_def.def_id);
- let indent_variant = indent_of(cx, variant_span).unwrap_or(0);
- span_lint_and_then(
- cx,
- DERIVABLE_IMPLS,
+ if let ExprKind::Path(QPath::Resolved(None, p)) = &peel_blocks(func_expr).kind
+ && let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), id) = p.res
+ && let variant_id = cx.tcx.parent(id)
+ && let Some(variant_def) = adt_def.variants().iter().find(|v| v.def_id == variant_id)
+ && variant_def.fields.is_empty()
+ && !variant_def.is_field_list_non_exhaustive()
+ {
+ let enum_span = cx.tcx.def_span(adt_def.did());
+ let indent_enum = indent_of(cx, enum_span).unwrap_or(0);
+ let variant_span = cx.tcx.def_span(variant_def.def_id);
+ let indent_variant = indent_of(cx, variant_span).unwrap_or(0);
+ span_lint_and_then(cx, DERIVABLE_IMPLS, item.span, "this `impl` can be derived", |diag| {
+ diag.span_suggestion_hidden(
item.span,
- "this `impl` can be derived",
- |diag| {
- diag.span_suggestion_hidden(
- item.span,
- "remove the manual implementation...",
- String::new(),
- Applicability::MachineApplicable
- );
- diag.span_suggestion(
- enum_span.shrink_to_lo(),
- "...and instead derive it...",
- format!(
- "#[derive(Default)]\n{indent}",
- indent = " ".repeat(indent_enum),
- ),
- Applicability::MachineApplicable
- );
- diag.span_suggestion(
- variant_span.shrink_to_lo(),
- "...and mark the default variant",
- format!(
- "#[default]\n{indent}",
- indent = " ".repeat(indent_variant),
- ),
- Applicability::MachineApplicable
- );
- }
+ "remove the manual implementation...",
+ String::new(),
+ Applicability::MachineApplicable,
);
- }
+ diag.span_suggestion(
+ enum_span.shrink_to_lo(),
+ "...and instead derive it...",
+ format!("#[derive(Default)]\n{indent}", indent = " ".repeat(indent_enum),),
+ Applicability::MachineApplicable,
+ );
+ diag.span_suggestion(
+ variant_span.shrink_to_lo(),
+ "...and mark the default variant",
+ format!("#[default]\n{indent}", indent = " ".repeat(indent_variant),),
+ Applicability::MachineApplicable,
+ );
+ });
}
}
impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
- if_chain! {
- if let ItemKind::Impl(Impl {
- of_trait: Some(ref trait_ref),
- items: [child],
- self_ty,
- ..
- }) = item.kind;
- if !cx.tcx.has_attr(item.owner_id, sym::automatically_derived);
- if !item.span.from_expansion();
- if let Some(def_id) = trait_ref.trait_def_id();
- if cx.tcx.is_diagnostic_item(sym::Default, def_id);
- if let impl_item_hir = child.id.hir_id();
- if let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir);
- if let ImplItemKind::Fn(_, b) = &impl_item.kind;
- if let Body { value: func_expr, .. } = cx.tcx.hir().body(*b);
- if let &Adt(adt_def, args) = cx.tcx.type_of(item.owner_id).instantiate_identity().kind();
- if let attrs = cx.tcx.hir().attrs(item.hir_id());
- if !attrs.iter().any(|attr| attr.doc_str().is_some());
- if cx.tcx.hir().attrs(impl_item_hir).is_empty();
-
- then {
- if adt_def.is_struct() {
- check_struct(cx, item, self_ty, func_expr, adt_def, args, cx.tcx.typeck_body(*b));
- } else if adt_def.is_enum() && self.msrv.meets(msrvs::DEFAULT_ENUM_ATTRIBUTE) {
- check_enum(cx, item, func_expr, adt_def);
- }
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ items: [child],
+ self_ty,
+ ..
+ }) = item.kind
+ && !cx.tcx.has_attr(item.owner_id, sym::automatically_derived)
+ && !item.span.from_expansion()
+ && let Some(def_id) = trait_ref.trait_def_id()
+ && cx.tcx.is_diagnostic_item(sym::Default, def_id)
+ && let impl_item_hir = child.id.hir_id()
+ && let Some(Node::ImplItem(impl_item)) = cx.tcx.opt_hir_node(impl_item_hir)
+ && let ImplItemKind::Fn(_, b) = &impl_item.kind
+ && let Body { value: func_expr, .. } = cx.tcx.hir().body(*b)
+ && let &Adt(adt_def, args) = cx.tcx.type_of(item.owner_id).instantiate_identity().kind()
+ && let attrs = cx.tcx.hir().attrs(item.hir_id())
+ && !attrs.iter().any(|attr| attr.doc_str().is_some())
+ && cx.tcx.hir().attrs(impl_item_hir).is_empty()
+ {
+ if adt_def.is_struct() {
+ check_struct(cx, item, self_ty, func_expr, adt_def, args, cx.tcx.typeck_body(*b));
+ } else if adt_def.is_enum() && self.msrv.meets(msrvs::DEFAULT_ENUM_ATTRIBUTE) {
+ check_enum(cx, item, func_expr, adt_def);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs
index 3a331564d..d8abe4110 100644
--- a/src/tools/clippy/clippy_lints/src/derive.rs
+++ b/src/tools/clippy/clippy_lints/src/derive.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then};
use clippy_utils::ty::{implements_trait, implements_trait_with_env, is_copy};
use clippy_utils::{is_lint_allowed, match_def_path, paths};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_expr, walk_fn, walk_item, FnKind, Visitor};
@@ -15,7 +14,7 @@ use rustc_middle::ty::{
self, ClauseKind, GenericArgKind, GenericParamDefKind, ImplPolarity, ParamEnv, ToPredicate, TraitPredicate, Ty,
TyCtxt,
};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
@@ -232,42 +231,37 @@ fn check_hash_peq<'tcx>(
ty: Ty<'tcx>,
hash_is_automatically_derived: bool,
) {
- if_chain! {
- if let Some(peq_trait_def_id) = cx.tcx.lang_items().eq_trait();
- if let Some(def_id) = trait_ref.trait_def_id();
- if cx.tcx.is_diagnostic_item(sym::Hash, def_id);
- then {
- // Look for the PartialEq implementations for `ty`
- cx.tcx.for_each_relevant_impl(peq_trait_def_id, ty, |impl_id| {
- let peq_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived);
-
- if !hash_is_automatically_derived || peq_is_automatically_derived {
- return;
- }
-
- let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
-
- // Only care about `impl PartialEq<Foo> for Foo`
- // For `impl PartialEq<B> for A, input_types is [A, B]
- if trait_ref.instantiate_identity().args.type_at(1) == ty {
- span_lint_and_then(
- cx,
- DERIVED_HASH_WITH_MANUAL_EQ,
- span,
- "you are deriving `Hash` but have implemented `PartialEq` explicitly",
- |diag| {
- if let Some(local_def_id) = impl_id.as_local() {
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
- diag.span_note(
- cx.tcx.hir().span(hir_id),
- "`PartialEq` implemented here"
- );
- }
+ if let Some(peq_trait_def_id) = cx.tcx.lang_items().eq_trait()
+ && let Some(def_id) = trait_ref.trait_def_id()
+ && cx.tcx.is_diagnostic_item(sym::Hash, def_id)
+ {
+ // Look for the PartialEq implementations for `ty`
+ cx.tcx.for_each_relevant_impl(peq_trait_def_id, ty, |impl_id| {
+ let peq_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived);
+
+ if !hash_is_automatically_derived || peq_is_automatically_derived {
+ return;
+ }
+
+ let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
+
+ // Only care about `impl PartialEq<Foo> for Foo`
+ // For `impl PartialEq<B> for A`, input_types is [A, B]
+ if trait_ref.instantiate_identity().args.type_at(1) == ty {
+ span_lint_and_then(
+ cx,
+ DERIVED_HASH_WITH_MANUAL_EQ,
+ span,
+ "you are deriving `Hash` but have implemented `PartialEq` explicitly",
+ |diag| {
+ if let Some(local_def_id) = impl_id.as_local() {
+ let hir_id = cx.tcx.local_def_id_to_hir_id(local_def_id);
+ diag.span_note(cx.tcx.hir().span(hir_id), "`PartialEq` implemented here");
}
- );
- }
- });
- }
+ },
+ );
+ }
+ });
}
}
@@ -279,49 +273,38 @@ fn check_ord_partial_ord<'tcx>(
ty: Ty<'tcx>,
ord_is_automatically_derived: bool,
) {
- if_chain! {
- if let Some(ord_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Ord);
- if let Some(partial_ord_trait_def_id) = cx.tcx.lang_items().partial_ord_trait();
- if let Some(def_id) = &trait_ref.trait_def_id();
- if *def_id == ord_trait_def_id;
- then {
- // Look for the PartialOrd implementations for `ty`
- cx.tcx.for_each_relevant_impl(partial_ord_trait_def_id, ty, |impl_id| {
- let partial_ord_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived);
-
- if partial_ord_is_automatically_derived == ord_is_automatically_derived {
- return;
- }
-
- let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
-
- // Only care about `impl PartialOrd<Foo> for Foo`
- // For `impl PartialOrd<B> for A, input_types is [A, B]
- if trait_ref.instantiate_identity().args.type_at(1) == ty {
- let mess = if partial_ord_is_automatically_derived {
- "you are implementing `Ord` explicitly but have derived `PartialOrd`"
- } else {
- "you are deriving `Ord` but have implemented `PartialOrd` explicitly"
- };
-
- span_lint_and_then(
- cx,
- DERIVE_ORD_XOR_PARTIAL_ORD,
- span,
- mess,
- |diag| {
- if let Some(local_def_id) = impl_id.as_local() {
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
- diag.span_note(
- cx.tcx.hir().span(hir_id),
- "`PartialOrd` implemented here"
- );
- }
- }
- );
- }
- });
- }
+ if let Some(ord_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Ord)
+ && let Some(partial_ord_trait_def_id) = cx.tcx.lang_items().partial_ord_trait()
+ && let Some(def_id) = &trait_ref.trait_def_id()
+ && *def_id == ord_trait_def_id
+ {
+ // Look for the PartialOrd implementations for `ty`
+ cx.tcx.for_each_relevant_impl(partial_ord_trait_def_id, ty, |impl_id| {
+ let partial_ord_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived);
+
+ if partial_ord_is_automatically_derived == ord_is_automatically_derived {
+ return;
+ }
+
+ let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
+
+ // Only care about `impl PartialOrd<Foo> for Foo`
+ // For `impl PartialOrd<B> for A`, input_types is [A, B]
+ if trait_ref.instantiate_identity().args.type_at(1) == ty {
+ let mess = if partial_ord_is_automatically_derived {
+ "you are implementing `Ord` explicitly but have derived `PartialOrd`"
+ } else {
+ "you are deriving `Ord` but have implemented `PartialOrd` explicitly"
+ };
+
+ span_lint_and_then(cx, DERIVE_ORD_XOR_PARTIAL_ORD, span, mess, |diag| {
+ if let Some(local_def_id) = impl_id.as_local() {
+ let hir_id = cx.tcx.local_def_id_to_hir_id(local_def_id);
+ diag.span_note(cx.tcx.hir().span(hir_id), "`PartialOrd` implemented here");
+ }
+ });
+ }
+ });
}
}
@@ -394,27 +377,27 @@ fn check_unsafe_derive_deserialize<'tcx>(
visitor.has_unsafe
}
- if_chain! {
- if let Some(trait_def_id) = trait_ref.trait_def_id();
- if match_def_path(cx, trait_def_id, &paths::SERDE_DESERIALIZE);
- if let ty::Adt(def, _) = ty.kind();
- if let Some(local_def_id) = def.did().as_local();
- let adt_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
- if !is_lint_allowed(cx, UNSAFE_DERIVE_DESERIALIZE, adt_hir_id);
- if cx.tcx.inherent_impls(def.did())
+ if let Some(trait_def_id) = trait_ref.trait_def_id()
+ && match_def_path(cx, trait_def_id, &paths::SERDE_DESERIALIZE)
+ && let ty::Adt(def, _) = ty.kind()
+ && let Some(local_def_id) = def.did().as_local()
+ && let adt_hir_id = cx.tcx.local_def_id_to_hir_id(local_def_id)
+ && !is_lint_allowed(cx, UNSAFE_DERIVE_DESERIALIZE, adt_hir_id)
+ && cx
+ .tcx
+ .inherent_impls(def.did())
.iter()
.map(|imp_did| cx.tcx.hir().expect_item(imp_did.expect_local()))
- .any(|imp| has_unsafe(cx, imp));
- then {
- span_lint_and_help(
- cx,
- UNSAFE_DERIVE_DESERIALIZE,
- item.span,
- "you are deriving `serde::Deserialize` on a type that has methods using `unsafe`",
- None,
- "consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html"
- );
- }
+ .any(|imp| has_unsafe(cx, imp))
+ {
+ span_lint_and_help(
+ cx,
+ UNSAFE_DERIVE_DESERIALIZE,
+ item.span,
+ "you are deriving `serde::Deserialize` on a type that has methods using `unsafe`",
+ None,
+ "consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html",
+ );
}
}
@@ -431,12 +414,10 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> {
return;
}
- if_chain! {
- if let Some(header) = kind.header();
- if header.unsafety == Unsafety::Unsafe;
- then {
- self.has_unsafe = true;
- }
+ if let Some(header) = kind.header()
+ && header.unsafety == Unsafety::Unsafe
+ {
+ self.has_unsafe = true;
}
walk_fn(self, kind, decl, body_id, id);
@@ -463,30 +444,28 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> {
/// Implementation of the `DERIVE_PARTIAL_EQ_WITHOUT_EQ` lint.
fn check_partial_eq_without_eq<'tcx>(cx: &LateContext<'tcx>, span: Span, trait_ref: &hir::TraitRef<'_>, ty: Ty<'tcx>) {
- if_chain! {
- if let ty::Adt(adt, args) = ty.kind();
- if cx.tcx.visibility(adt.did()).is_public();
- if let Some(eq_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Eq);
- if let Some(def_id) = trait_ref.trait_def_id();
- if cx.tcx.is_diagnostic_item(sym::PartialEq, def_id);
- let param_env = param_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id);
- if !implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, &[]);
+ if let ty::Adt(adt, args) = ty.kind()
+ && cx.tcx.visibility(adt.did()).is_public()
+ && let Some(eq_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Eq)
+ && let Some(def_id) = trait_ref.trait_def_id()
+ && cx.tcx.is_diagnostic_item(sym::PartialEq, def_id)
+ && let param_env = param_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id)
+ && !implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, adt.did(), &[])
// If all of our fields implement `Eq`, we can implement `Eq` too
- if adt
+ && adt
.all_fields()
.map(|f| f.ty(cx.tcx, args))
- .all(|ty| implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, &[]));
- then {
- span_lint_and_sugg(
- cx,
- DERIVE_PARTIAL_EQ_WITHOUT_EQ,
- span.ctxt().outer_expn_data().call_site,
- "you are deriving `PartialEq` and can implement `Eq`",
- "consider deriving `Eq` as well",
- "PartialEq, Eq".to_string(),
- Applicability::MachineApplicable,
- )
- }
+ .all(|ty| implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, adt.did(), &[]))
+ {
+ span_lint_and_sugg(
+ cx,
+ DERIVE_PARTIAL_EQ_WITHOUT_EQ,
+ span.ctxt().outer_expn_data().call_site,
+ "you are deriving `PartialEq` and can implement `Eq`",
+ "consider deriving `Eq` as well",
+ "PartialEq, Eq".to_string(),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs
index 324b5e079..656b3d9bf 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs
@@ -6,7 +6,7 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def_id::DefIdMap;
use rustc_hir::{Expr, ExprKind, ForeignItem, HirId, ImplItem, Item, Pat, Path, Stmt, TraitItem, Ty};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{ExpnId, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
index d23aeebb5..1868d3cd3 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
@@ -4,7 +4,7 @@ use clippy_utils::{fn_def_id, get_parent_expr, path_def_id};
use rustc_hir::def_id::DefIdMap;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_names.rs b/src/tools/clippy/clippy_lints/src/disallowed_names.rs
index 5e46b29b6..09dad5554 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_names.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_names.rs
@@ -3,7 +3,7 @@ use clippy_utils::is_test_module_or_function;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::{Item, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -31,9 +31,9 @@ pub struct DisallowedNames {
}
impl DisallowedNames {
- pub fn new(disallow: FxHashSet<String>) -> Self {
+ pub fn new(disallowed_names: &[String]) -> Self {
Self {
- disallow,
+ disallow: disallowed_names.iter().cloned().collect(),
test_modules_deep: 0,
}
}
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
index 96a7f0e4f..d5205e65c 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast;
use rustc_data_structures::fx::FxHashSet;
use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use unicode_script::{Script, UnicodeScript};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_types.rs b/src/tools/clippy/clippy_lints/src/disallowed_types.rs
index 3578fb640..130f56b69 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_types.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_types.rs
@@ -5,7 +5,7 @@ use rustc_hir::def::Res;
use rustc_hir::def_id::DefId;
use rustc_hir::{Item, ItemKind, PolyTraitRef, PrimTy, Ty, TyKind, UseKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/doc/link_with_quotes.rs b/src/tools/clippy/clippy_lints/src/doc/link_with_quotes.rs
new file mode 100644
index 000000000..01191e811
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc/link_with_quotes.rs
@@ -0,0 +1,20 @@
+use std::ops::Range;
+
+use clippy_utils::diagnostics::span_lint;
+use rustc_lint::LateContext;
+
+use super::{Fragments, DOC_LINK_WITH_QUOTES};
+
+pub fn check(cx: &LateContext<'_>, trimmed_text: &str, range: Range<usize>, fragments: Fragments<'_>) {
+ if ((trimmed_text.starts_with('\'') && trimmed_text.ends_with('\''))
+ || (trimmed_text.starts_with('"') && trimmed_text.ends_with('"')))
+ && let Some(span) = fragments.span(cx, range)
+ {
+ span_lint(
+ cx,
+ DOC_LINK_WITH_QUOTES,
+ span,
+ "possible intra-doc link using quotes instead of backticks",
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/doc/markdown.rs b/src/tools/clippy/clippy_lints/src/doc/markdown.rs
new file mode 100644
index 000000000..a58219c29
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc/markdown.rs
@@ -0,0 +1,119 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
+use clippy_utils::source::snippet_with_applicability;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::{Applicability, SuggestionStyle};
+use rustc_lint::LateContext;
+use rustc_span::{BytePos, Pos, Span};
+use url::Url;
+
+use crate::doc::DOC_MARKDOWN;
+
+pub fn check(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, text: &str, span: Span) {
+ for orig_word in text.split(|c: char| c.is_whitespace() || c == '\'') {
+ // Trim punctuation as in `some comment (see foo::bar).`
+ // ^^
+ // Or even as in `_foo bar_` which is emphasized. Also preserve `::` as a prefix/suffix.
+ let trim_pattern = |c: char| !c.is_alphanumeric() && c != ':';
+ let mut word = orig_word.trim_end_matches(trim_pattern);
+
+ // If word is immediately followed by `()`, claw it back.
+ if let Some(tmp_word) = orig_word.get(..word.len() + 2)
+ && tmp_word.ends_with("()")
+ {
+ word = tmp_word;
+ }
+
+ word = word.trim_start_matches(trim_pattern);
+
+ // Remove leading or trailing single `:` which may be part of a sentence.
+ if word.starts_with(':') && !word.starts_with("::") {
+ word = word.trim_start_matches(':');
+ }
+ if word.ends_with(':') && !word.ends_with("::") {
+ word = word.trim_end_matches(':');
+ }
+
+ if valid_idents.contains(word) || word.chars().all(|c| c == ':') {
+ continue;
+ }
+
+ // Adjust for the current word
+ let offset = word.as_ptr() as usize - text.as_ptr() as usize;
+ let span = Span::new(
+ span.lo() + BytePos::from_usize(offset),
+ span.lo() + BytePos::from_usize(offset + word.len()),
+ span.ctxt(),
+ span.parent(),
+ );
+
+ check_word(cx, word, span);
+ }
+}
+
+fn check_word(cx: &LateContext<'_>, word: &str, span: Span) {
+ /// Checks if a string is upper-camel-case, i.e., starts with an uppercase and
+ /// contains at least two uppercase letters (`Clippy` is ok) and one lower-case
+ /// letter (`NASA` is ok).
+ /// Plurals are also excluded (`IDs` is ok).
+ fn is_camel_case(s: &str) -> bool {
+ if s.starts_with(|c: char| c.is_ascii_digit() | c.is_ascii_lowercase()) {
+ return false;
+ }
+
+ let s = s.strip_suffix('s').unwrap_or(s);
+
+ s.chars().all(char::is_alphanumeric)
+ && s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1
+ && s.chars().filter(|&c| c.is_lowercase()).take(1).count() > 0
+ }
+
+ fn has_underscore(s: &str) -> bool {
+ s != "_" && !s.contains("\\_") && s.contains('_')
+ }
+
+ fn has_hyphen(s: &str) -> bool {
+ s != "-" && s.contains('-')
+ }
+
+ if let Ok(url) = Url::parse(word) {
+ // try to get around the fact that `foo::bar` parses as a valid URL
+ if !url.cannot_be_a_base() {
+ span_lint(
+ cx,
+ DOC_MARKDOWN,
+ span,
+ "you should put bare URLs between `<`/`>` or make a proper Markdown link",
+ );
+
+ return;
+ }
+ }
+
+ // We assume that mixed-case words are not meant to be put inside backticks. (Issue #2343)
+ if has_underscore(word) && has_hyphen(word) {
+ return;
+ }
+
+ if has_underscore(word) || word.contains("::") || is_camel_case(word) || word.ends_with("()") {
+ let mut applicability = Applicability::MachineApplicable;
+
+ span_lint_and_then(
+ cx,
+ DOC_MARKDOWN,
+ span,
+ "item in documentation is missing backticks",
+ |diag| {
+ let snippet = snippet_with_applicability(cx, span, "..", &mut applicability);
+ diag.span_suggestion_with_style(
+ span,
+ "try",
+ format!("`{snippet}`"),
+ applicability,
+ // always show the suggestion in a separate line, since the
+ // inline presentation adds another pair of backticks
+ SuggestionStyle::ShowAlways,
+ );
+ },
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs b/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs
new file mode 100644
index 000000000..4cbfa97a8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs
@@ -0,0 +1,86 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_note};
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use clippy_utils::{is_doc_hidden, return_ty};
+use rustc_hir::{BodyId, FnSig, OwnerId, Unsafety};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::{sym, Span};
+
+use super::{DocHeaders, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, MISSING_SAFETY_DOC, UNNECESSARY_SAFETY_DOC};
+
+pub fn check(
+ cx: &LateContext<'_>,
+ owner_id: OwnerId,
+ sig: &FnSig<'_>,
+ headers: DocHeaders,
+ body_id: Option<BodyId>,
+ panic_span: Option<Span>,
+ check_private_items: bool,
+) {
+ if !check_private_items && !cx.effective_visibilities.is_exported(owner_id.def_id) {
+ return; // Private functions do not require doc comments
+ }
+
+ // do not lint if any parent has `#[doc(hidden)]` attribute (#7347)
+ if !check_private_items
+ && cx
+ .tcx
+ .hir()
+ .parent_iter(owner_id.into())
+ .any(|(id, _node)| is_doc_hidden(cx.tcx.hir().attrs(id)))
+ {
+ return;
+ }
+
+ let span = cx.tcx.def_span(owner_id);
+ match (headers.safety, sig.header.unsafety) {
+ (false, Unsafety::Unsafe) => span_lint(
+ cx,
+ MISSING_SAFETY_DOC,
+ span,
+ "unsafe function's docs miss `# Safety` section",
+ ),
+ (true, Unsafety::Normal) => span_lint(
+ cx,
+ UNNECESSARY_SAFETY_DOC,
+ span,
+ "safe function's docs have unnecessary `# Safety` section",
+ ),
+ _ => (),
+ }
+ if !headers.panics && panic_span.is_some() {
+ span_lint_and_note(
+ cx,
+ MISSING_PANICS_DOC,
+ span,
+ "docs for function which may panic missing `# Panics` section",
+ panic_span,
+ "first possible panic found here",
+ );
+ }
+ if !headers.errors {
+ if is_type_diagnostic_item(cx, return_ty(cx, owner_id), sym::Result) {
+ span_lint(
+ cx,
+ MISSING_ERRORS_DOC,
+ span,
+ "docs for function returning `Result` missing `# Errors` section",
+ );
+ } else if let Some(body_id) = body_id
+ && let Some(future) = cx.tcx.lang_items().future_trait()
+ && let typeck = cx.tcx.typeck_body(body_id)
+ && let body = cx.tcx.hir().body(body_id)
+ && let ret_ty = typeck.expr_ty(body.value)
+ && implements_trait(cx, ret_ty, future, &[])
+ && let ty::Coroutine(_, subs, _) = ret_ty.kind()
+ && is_type_diagnostic_item(cx, subs.as_coroutine().return_ty(), sym::Result)
+ {
+ span_lint(
+ cx,
+ MISSING_ERRORS_DOC,
+ span,
+ "docs for function returning `Result` missing `# Errors` section",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/doc.rs b/src/tools/clippy/clippy_lints/src/doc/mod.rs
index 8982fca6e..ba4527750 100644
--- a/src/tools/clippy/clippy_lints/src/doc.rs
+++ b/src/tools/clippy/clippy_lints/src/doc/mod.rs
@@ -1,20 +1,16 @@
use clippy_utils::attrs::is_doc_hidden;
-use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_note, span_lint_and_then};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::macros::{is_panic, root_macro_call_first_node};
-use clippy_utils::source::snippet_with_applicability;
-use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
-use clippy_utils::{is_entrypoint_fn, method_chain_args, return_ty};
-use if_chain::if_chain;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::visitors::Visitable;
+use clippy_utils::{is_entrypoint_fn, method_chain_args};
use pulldown_cmark::Event::{
Code, End, FootnoteReference, HardBreak, Html, Rule, SoftBreak, Start, TaskListMarker, Text,
};
use pulldown_cmark::Tag::{CodeBlock, Heading, Item, Link, Paragraph};
use pulldown_cmark::{BrokenLink, CodeBlockKind, CowStr, Options};
-use rustc_ast::ast::{Async, Attribute, Fn, FnRetTy, ItemKind};
+use rustc_ast::ast::Attribute;
use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::sync::Lrc;
-use rustc_errors::emitter::EmitterWriter;
-use rustc_errors::{Applicability, Handler, SuggestionStyle};
use rustc_hir as hir;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{AnonConst, Expr};
@@ -22,20 +18,21 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_parse::maybe_new_parser_from_source_str;
-use rustc_parse::parser::ForceCollect;
use rustc_resolve::rustdoc::{
add_doc_fragment, attrs_to_doc_fragments, main_body_opts, source_span_for_markdown_range, DocFragment,
};
-use rustc_session::parse::ParseSess;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::edition::Edition;
-use rustc_span::{sym, BytePos, FileName, Pos, Span};
-use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::{sym, Span};
use std::ops::Range;
-use std::{io, thread};
use url::Url;
+mod link_with_quotes;
+mod markdown;
+mod missing_headers;
+mod needless_doctest_main;
+mod suspicious_doc_comments;
+
declare_clippy_lint! {
/// ### What it does
/// Checks for the presence of `_`, `::` or camel-case words
@@ -204,6 +201,39 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
+ /// Checks for `#[test]` in doctests unless they are marked with
+ /// either `ignore`, `no_run` or `compile_fail`.
+ ///
+ /// ### Why is this bad?
+ /// Code in examples marked as `#[test]` will somewhat
+ /// surprisingly not be run by `cargo test`. If you really want
+ /// to show how to test stuff in an example, mark it `no_run` to
+ /// make the intent clear.
+ ///
+ /// ### Examples
+ /// ```no_run
+ /// /// An example of a doctest with a `main()` function
+ /// ///
+ /// /// # Examples
+ /// ///
+ /// /// ```
+ /// /// #[test]
+ /// /// fn equality_works() {
+ /// /// assert_eq!(1_u8, 1);
+ /// /// }
+ /// /// ```
+ /// fn test_attr_in_doctest() {
+ /// unimplemented!();
+ /// }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub TEST_ATTR_IN_DOCTEST,
+ suspicious,
+ "presence of `#[test]` in code examples"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
/// Detects the syntax `['foo']` in documentation comments (notice quotes instead of backticks)
/// outside of code blocks
/// ### Why is this bad?
@@ -261,33 +291,83 @@ declare_clippy_lint! {
"`pub fn` or `pub trait` with `# Safety` docs"
}
-#[expect(clippy::module_name_repetitions)]
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects the use of outer doc comments (`///`, `/**`) followed by a bang (`!`): `///!`
+ ///
+ /// ### Why is this bad?
+ /// Triple-slash comments (known as "outer doc comments") apply to items that follow it.
+ /// An outer doc comment followed by a bang (i.e. `///!`) has no specific meaning.
+ ///
+ /// The user most likely meant to write an inner doc comment (`//!`, `/*!`), which
+ /// applies to the parent item (i.e. the item that the comment is contained in,
+ /// usually a module or crate).
+ ///
+ /// ### Known problems
+ /// Inner doc comments can only appear before items, so there are certain cases where the suggestion
+ /// made by this lint is not valid code. For example:
+ /// ```rs
+ /// fn foo() {}
+ /// ///!
+ /// fn bar() {}
+ /// ```
+ /// This lint detects the doc comment and suggests changing it to `//!`, but an inner doc comment
+ /// is not valid at that position.
+ ///
+ /// ### Example
+ /// In this example, the doc comment is attached to the *function*, rather than the *module*.
+ /// ```no_run
+ /// pub mod util {
+ /// ///! This module contains utility functions.
+ ///
+ /// pub fn dummy() {}
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```no_run
+ /// pub mod util {
+ /// //! This module contains utility functions.
+ ///
+ /// pub fn dummy() {}
+ /// }
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub SUSPICIOUS_DOC_COMMENTS,
+ suspicious,
+ "suspicious usage of (outer) doc comments"
+}
+
#[derive(Clone)]
-pub struct DocMarkdown {
+pub struct Documentation {
valid_idents: FxHashSet<String>,
in_trait_impl: bool,
+ check_private_items: bool,
}
-impl DocMarkdown {
- pub fn new(valid_idents: FxHashSet<String>) -> Self {
+impl Documentation {
+ pub fn new(valid_idents: &[String], check_private_items: bool) -> Self {
Self {
- valid_idents,
+ valid_idents: valid_idents.iter().cloned().collect(),
in_trait_impl: false,
+ check_private_items,
}
}
}
-impl_lint_pass!(DocMarkdown => [
+impl_lint_pass!(Documentation => [
DOC_LINK_WITH_QUOTES,
DOC_MARKDOWN,
MISSING_SAFETY_DOC,
MISSING_ERRORS_DOC,
MISSING_PANICS_DOC,
NEEDLESS_DOCTEST_MAIN,
+ TEST_ATTR_IN_DOCTEST,
UNNECESSARY_SAFETY_DOC,
+ SUSPICIOUS_DOC_COMMENTS
]);
-impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
+impl<'tcx> LateLintPass<'tcx> for Documentation {
fn check_crate(&mut self, cx: &LateContext<'tcx>) {
let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID);
check_attrs(cx, &self.valid_idents, attrs);
@@ -302,13 +382,17 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
hir::ItemKind::Fn(ref sig, _, body_id) => {
if !(is_entrypoint_fn(cx, item.owner_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) {
let body = cx.tcx.hir().body(body_id);
- let mut fpu = FindPanicUnwrap {
+
+ let panic_span = FindPanicUnwrap::find_span(cx, cx.tcx.typeck(item.owner_id), body.value);
+ missing_headers::check(
cx,
- typeck_results: cx.tcx.typeck(item.owner_id.def_id),
- panic_span: None,
- };
- fpu.visit_expr(body.value);
- lint_for_missing_headers(cx, item.owner_id, sig, headers, Some(body_id), fpu.panic_span);
+ item.owner_id,
+ sig,
+ headers,
+ Some(body_id),
+ panic_span,
+ self.check_private_items,
+ );
}
},
hir::ItemKind::Impl(impl_) => {
@@ -346,7 +430,7 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
};
if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
if !in_external_macro(cx.tcx.sess, item.span) {
- lint_for_missing_headers(cx, item.owner_id, sig, headers, None, None);
+ missing_headers::check(cx, item.owner_id, sig, headers, None, None, self.check_private_items);
}
}
}
@@ -361,92 +445,17 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
}
if let hir::ImplItemKind::Fn(ref sig, body_id) = item.kind {
let body = cx.tcx.hir().body(body_id);
- let mut fpu = FindPanicUnwrap {
- cx,
- typeck_results: cx.tcx.typeck(item.owner_id.def_id),
- panic_span: None,
- };
- fpu.visit_expr(body.value);
- lint_for_missing_headers(cx, item.owner_id, sig, headers, Some(body_id), fpu.panic_span);
- }
- }
-}
-
-fn lint_for_missing_headers(
- cx: &LateContext<'_>,
- owner_id: hir::OwnerId,
- sig: &hir::FnSig<'_>,
- headers: DocHeaders,
- body_id: Option<hir::BodyId>,
- panic_span: Option<Span>,
-) {
- if !cx.effective_visibilities.is_exported(owner_id.def_id) {
- return; // Private functions do not require doc comments
- }
-
- // do not lint if any parent has `#[doc(hidden)]` attribute (#7347)
- if cx
- .tcx
- .hir()
- .parent_iter(owner_id.into())
- .any(|(id, _node)| is_doc_hidden(cx.tcx.hir().attrs(id)))
- {
- return;
- }
- let span = cx.tcx.def_span(owner_id);
- match (headers.safety, sig.header.unsafety) {
- (false, hir::Unsafety::Unsafe) => span_lint(
- cx,
- MISSING_SAFETY_DOC,
- span,
- "unsafe function's docs miss `# Safety` section",
- ),
- (true, hir::Unsafety::Normal) => span_lint(
- cx,
- UNNECESSARY_SAFETY_DOC,
- span,
- "safe function's docs have unnecessary `# Safety` section",
- ),
- _ => (),
- }
- if !headers.panics && panic_span.is_some() {
- span_lint_and_note(
- cx,
- MISSING_PANICS_DOC,
- span,
- "docs for function which may panic missing `# Panics` section",
- panic_span,
- "first possible panic found here",
- );
- }
- if !headers.errors {
- if is_type_diagnostic_item(cx, return_ty(cx, owner_id), sym::Result) {
- span_lint(
+ let panic_span = FindPanicUnwrap::find_span(cx, cx.tcx.typeck(item.owner_id), body.value);
+ missing_headers::check(
cx,
- MISSING_ERRORS_DOC,
- span,
- "docs for function returning `Result` missing `# Errors` section",
+ item.owner_id,
+ sig,
+ headers,
+ Some(body_id),
+ panic_span,
+ self.check_private_items,
);
- } else {
- if_chain! {
- if let Some(body_id) = body_id;
- if let Some(future) = cx.tcx.lang_items().future_trait();
- let typeck = cx.tcx.typeck_body(body_id);
- let body = cx.tcx.hir().body(body_id);
- let ret_ty = typeck.expr_ty(body.value);
- if implements_trait(cx, ret_ty, future, &[]);
- if let ty::Coroutine(_, subs, _) = ret_ty.kind();
- if is_type_diagnostic_item(cx, subs.as_coroutine().return_ty(), sym::Result);
- then {
- span_lint(
- cx,
- MISSING_ERRORS_DOC,
- span,
- "docs for function returning `Result` missing `# Errors` section",
- );
- }
- }
}
}
}
@@ -470,6 +479,13 @@ struct DocHeaders {
panics: bool,
}
+/// Does some pre-processing on raw, desugared `#[doc]` attributes such as parsing them and
+/// then delegates to `check_doc`.
+/// Some lints are already checked here if they can work with attributes directly and don't need
+/// to work with markdown.
+/// Others are checked elsewhere, e.g. in `check_doc` if they need access to markdown, or
+/// back in the various late lint pass methods if they need the final doc headers, like "Safety" or
+/// "Panics" sections.
fn check_attrs(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &[Attribute]) -> Option<DocHeaders> {
/// We don't want the parser to choke on intra doc links. Since we don't
/// actually care about rendering them, just pretend that all broken links
@@ -483,6 +499,8 @@ fn check_attrs(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &[
return None;
}
+ suspicious_doc_comments::check(cx, attrs);
+
let (fragments, _) = attrs_to_doc_fragments(attrs.iter().map(|attr| (attr, None)), true);
let mut doc = String::new();
for fragment in &fragments {
@@ -513,6 +531,10 @@ fn check_attrs(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &[
const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail"];
+/// Checks parsed documentation.
+/// This walks the "events" (think sections of markdown) produced by `pulldown_cmark`,
+/// so lints here will generally access that information.
+/// Returns documentation headers -- whether a "Safety", "Errors", or "Panics" section was found
#[allow(clippy::too_many_lines)] // Only a big match statement
fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize>)>>(
cx: &LateContext<'_>,
@@ -527,6 +549,7 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize
let mut in_heading = false;
let mut is_rust = false;
let mut no_test = false;
+ let mut ignore = false;
let mut edition = None;
let mut ticks_unbalanced = false;
let mut text_to_check: Vec<(CowStr<'_>, Range<usize>)> = Vec::new();
@@ -542,6 +565,8 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize
break;
} else if item == "no_test" {
no_test = true;
+ } else if item == "no_run" || item == "compile_fail" {
+ ignore = true;
}
if let Some(stripped) = item.strip_prefix("edition") {
is_rust = true;
@@ -555,6 +580,7 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize
End(CodeBlock(_)) => {
in_code = false;
is_rust = false;
+ ignore = false;
},
Start(Link(_, url, _)) => in_link = Some(url),
End(Link(..)) => in_link = None,
@@ -581,7 +607,7 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize
} else {
for (text, range) in text_to_check {
if let Some(span) = fragments.span(cx, range) {
- check_text(cx, valid_idents, &text, span);
+ markdown::check(cx, valid_idents, &text, span);
}
}
}
@@ -608,11 +634,11 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize
if in_code {
if is_rust && !no_test {
let edition = edition.unwrap_or_else(|| cx.tcx.sess.edition());
- check_code(cx, &text, edition, range.clone(), fragments);
+ needless_doctest_main::check(cx, &text, edition, range.clone(), fragments, ignore);
}
} else {
if in_link.is_some() {
- check_link_quotes(cx, trimmed_text, range.clone(), fragments);
+ link_with_quotes::check(cx, trimmed_text, range.clone(), fragments);
}
if let Some(link) = in_link.as_ref()
&& let Ok(url) = Url::parse(link)
@@ -629,207 +655,28 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize
headers
}
-fn check_link_quotes(cx: &LateContext<'_>, trimmed_text: &str, range: Range<usize>, fragments: Fragments<'_>) {
- if trimmed_text.starts_with('\'')
- && trimmed_text.ends_with('\'')
- && let Some(span) = fragments.span(cx, range)
- {
- span_lint(
- cx,
- DOC_LINK_WITH_QUOTES,
- span,
- "possible intra-doc link using quotes instead of backticks",
- );
- }
-}
-
-fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, range: Range<usize>, fragments: Fragments<'_>) {
- fn has_needless_main(code: String, edition: Edition) -> bool {
- rustc_driver::catch_fatal_errors(|| {
- rustc_span::create_session_globals_then(edition, || {
- let filename = FileName::anon_source_code(&code);
-
- let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
- let fallback_bundle =
- rustc_errors::fallback_fluent_bundle(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), false);
- let emitter = EmitterWriter::new(Box::new(io::sink()), fallback_bundle);
- let handler = Handler::with_emitter(Box::new(emitter)).disable_warnings();
- let sess = ParseSess::with_span_handler(handler, sm);
-
- let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
- Ok(p) => p,
- Err(errs) => {
- drop(errs);
- return false;
- },
- };
-
- let mut relevant_main_found = false;
- loop {
- match parser.parse_item(ForceCollect::No) {
- Ok(Some(item)) => match &item.kind {
- ItemKind::Fn(box Fn {
- sig, body: Some(block), ..
- }) if item.ident.name == sym::main => {
- let is_async = matches!(sig.header.asyncness, Async::Yes { .. });
- let returns_nothing = match &sig.decl.output {
- FnRetTy::Default(..) => true,
- FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
- FnRetTy::Ty(_) => false,
- };
-
- if returns_nothing && !is_async && !block.stmts.is_empty() {
- // This main function should be linted, but only if there are no other functions
- relevant_main_found = true;
- } else {
- // This main function should not be linted, we're done
- return false;
- }
- },
- // Tests with one of these items are ignored
- ItemKind::Static(..)
- | ItemKind::Const(..)
- | ItemKind::ExternCrate(..)
- | ItemKind::ForeignMod(..)
- // Another function was found; this case is ignored
- | ItemKind::Fn(..) => return false,
- _ => {},
- },
- Ok(None) => break,
- Err(e) => {
- e.cancel();
- return false;
- },
- }
- }
-
- relevant_main_found
- })
- })
- .ok()
- .unwrap_or_default()
- }
-
- let trailing_whitespace = text.len() - text.trim_end().len();
-
- // Because of the global session, we need to create a new session in a different thread with
- // the edition we need.
- let text = text.to_owned();
- if thread::spawn(move || has_needless_main(text, edition))
- .join()
- .expect("thread::spawn failed")
- && let Some(span) = fragments.span(cx, range.start..range.end - trailing_whitespace)
- {
- span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
- }
-}
-
-fn check_text(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, text: &str, span: Span) {
- for word in text.split(|c: char| c.is_whitespace() || c == '\'') {
- // Trim punctuation as in `some comment (see foo::bar).`
- // ^^
- // Or even as in `_foo bar_` which is emphasized. Also preserve `::` as a prefix/suffix.
- let mut word = word.trim_matches(|c: char| !c.is_alphanumeric() && c != ':');
-
- // Remove leading or trailing single `:` which may be part of a sentence.
- if word.starts_with(':') && !word.starts_with("::") {
- word = word.trim_start_matches(':');
- }
- if word.ends_with(':') && !word.ends_with("::") {
- word = word.trim_end_matches(':');
- }
-
- if valid_idents.contains(word) || word.chars().all(|c| c == ':') {
- continue;
- }
-
- // Adjust for the current word
- let offset = word.as_ptr() as usize - text.as_ptr() as usize;
- let span = Span::new(
- span.lo() + BytePos::from_usize(offset),
- span.lo() + BytePos::from_usize(offset + word.len()),
- span.ctxt(),
- span.parent(),
- );
-
- check_word(cx, word, span);
- }
-}
-
-fn check_word(cx: &LateContext<'_>, word: &str, span: Span) {
- /// Checks if a string is upper-camel-case, i.e., starts with an uppercase and
- /// contains at least two uppercase letters (`Clippy` is ok) and one lower-case
- /// letter (`NASA` is ok).
- /// Plurals are also excluded (`IDs` is ok).
- fn is_camel_case(s: &str) -> bool {
- if s.starts_with(|c: char| c.is_ascii_digit() | c.is_ascii_lowercase()) {
- return false;
- }
-
- let s = s.strip_suffix('s').unwrap_or(s);
-
- s.chars().all(char::is_alphanumeric)
- && s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1
- && s.chars().filter(|&c| c.is_lowercase()).take(1).count() > 0
- }
-
- fn has_underscore(s: &str) -> bool {
- s != "_" && !s.contains("\\_") && s.contains('_')
- }
-
- fn has_hyphen(s: &str) -> bool {
- s != "-" && s.contains('-')
- }
-
- if let Ok(url) = Url::parse(word) {
- // try to get around the fact that `foo::bar` parses as a valid URL
- if !url.cannot_be_a_base() {
- span_lint(
- cx,
- DOC_MARKDOWN,
- span,
- "you should put bare URLs between `<`/`>` or make a proper Markdown link",
- );
-
- return;
- }
- }
-
- // We assume that mixed-case words are not meant to be put inside backticks. (Issue #2343)
- if has_underscore(word) && has_hyphen(word) {
- return;
- }
-
- if has_underscore(word) || word.contains("::") || is_camel_case(word) {
- let mut applicability = Applicability::MachineApplicable;
-
- span_lint_and_then(
- cx,
- DOC_MARKDOWN,
- span,
- "item in documentation is missing backticks",
- |diag| {
- let snippet = snippet_with_applicability(cx, span, "..", &mut applicability);
- diag.span_suggestion_with_style(
- span,
- "try",
- format!("`{snippet}`"),
- applicability,
- // always show the suggestion in a separate line, since the
- // inline presentation adds another pair of backticks
- SuggestionStyle::ShowAlways,
- );
- },
- );
- }
-}
-
struct FindPanicUnwrap<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
panic_span: Option<Span>,
typeck_results: &'tcx ty::TypeckResults<'tcx>,
}
+impl<'a, 'tcx> FindPanicUnwrap<'a, 'tcx> {
+ pub fn find_span(
+ cx: &'a LateContext<'tcx>,
+ typeck_results: &'tcx ty::TypeckResults<'tcx>,
+ body: impl Visitable<'tcx>,
+ ) -> Option<Span> {
+ let mut vis = Self {
+ cx,
+ panic_span: None,
+ typeck_results,
+ };
+ body.visit(&mut vis);
+ vis.panic_span
+ }
+}
+
impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> {
type NestedFilter = nested_filter::OnlyBodies;
diff --git a/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs b/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs
new file mode 100644
index 000000000..c639813a3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs
@@ -0,0 +1,135 @@
+use std::ops::Range;
+use std::{io, thread};
+
+use crate::doc::{NEEDLESS_DOCTEST_MAIN, TEST_ATTR_IN_DOCTEST};
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::{CoroutineKind, Fn, FnRetTy, Item, ItemKind};
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::emitter::EmitterWriter;
+use rustc_errors::DiagCtxt;
+use rustc_lint::LateContext;
+use rustc_parse::maybe_new_parser_from_source_str;
+use rustc_parse::parser::ForceCollect;
+use rustc_session::parse::ParseSess;
+use rustc_span::edition::Edition;
+use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::{sym, FileName, Pos};
+
+use super::Fragments;
+
+fn get_test_spans(item: &Item, test_attr_spans: &mut Vec<Range<usize>>) {
+ test_attr_spans.extend(
+ item.attrs
+ .iter()
+ .find(|attr| attr.has_name(sym::test))
+ .map(|attr| attr.span.lo().to_usize()..item.ident.span.hi().to_usize()),
+ );
+}
+
+pub fn check(
+ cx: &LateContext<'_>,
+ text: &str,
+ edition: Edition,
+ range: Range<usize>,
+ fragments: Fragments<'_>,
+ ignore: bool,
+) {
+ // return whether the code contains a needless `fn main` plus a vector of byte position ranges
+    // of all `#[test]` attributes in non-ignored code examples
+ fn check_code_sample(code: String, edition: Edition, ignore: bool) -> (bool, Vec<Range<usize>>) {
+ rustc_driver::catch_fatal_errors(|| {
+ rustc_span::create_session_globals_then(edition, || {
+ let mut test_attr_spans = vec![];
+ let filename = FileName::anon_source_code(&code);
+
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), false);
+ let emitter = EmitterWriter::new(Box::new(io::sink()), fallback_bundle);
+ let dcx = DiagCtxt::with_emitter(Box::new(emitter)).disable_warnings();
+ #[expect(clippy::arc_with_non_send_sync)] // `Lrc` is expected by with_dcx
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let sess = ParseSess::with_dcx(dcx, sm);
+
+ let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
+ Ok(p) => p,
+ Err(errs) => {
+ drop(errs);
+ return (false, test_attr_spans);
+ },
+ };
+
+ let mut relevant_main_found = false;
+ let mut eligible = true;
+ loop {
+ match parser.parse_item(ForceCollect::No) {
+ Ok(Some(item)) => match &item.kind {
+ ItemKind::Fn(box Fn {
+ sig, body: Some(block), ..
+ }) if item.ident.name == sym::main => {
+ if !ignore {
+ get_test_spans(&item, &mut test_attr_spans);
+ }
+ let is_async = matches!(sig.header.coroutine_kind, Some(CoroutineKind::Async { .. }));
+ let returns_nothing = match &sig.decl.output {
+ FnRetTy::Default(..) => true,
+ FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
+ FnRetTy::Ty(_) => false,
+ };
+
+ if returns_nothing && !is_async && !block.stmts.is_empty() {
+ // This main function should be linted, but only if there are no other functions
+ relevant_main_found = true;
+ } else {
+ // This main function should not be linted, we're done
+ eligible = false;
+ }
+ },
+ // Another function was found; this case is ignored for needless_doctest_main
+ ItemKind::Fn(box Fn { .. }) => {
+ eligible = false;
+ if !ignore {
+ get_test_spans(&item, &mut test_attr_spans);
+ }
+ },
+ // Tests with one of these items are ignored
+ ItemKind::Static(..)
+ | ItemKind::Const(..)
+ | ItemKind::ExternCrate(..)
+ | ItemKind::ForeignMod(..) => {
+ eligible = false;
+ },
+ _ => {},
+ },
+ Ok(None) => break,
+ Err(e) => {
+ e.cancel();
+ return (false, test_attr_spans);
+ },
+ }
+ }
+
+ (relevant_main_found & eligible, test_attr_spans)
+ })
+ })
+ .ok()
+ .unwrap_or_default()
+ }
+
+ let trailing_whitespace = text.len() - text.trim_end().len();
+
+ // Because of the global session, we need to create a new session in a different thread with
+ // the edition we need.
+ let text = text.to_owned();
+ let (has_main, test_attr_spans) = thread::spawn(move || check_code_sample(text, edition, ignore))
+ .join()
+ .expect("thread::spawn failed");
+ if has_main && let Some(span) = fragments.span(cx, range.start..range.end - trailing_whitespace) {
+ span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
+ }
+ for span in test_attr_spans {
+ let span = (range.start + span.start)..(range.start + span.end);
+ if let Some(span) = fragments.span(cx, span) {
+ span_lint(cx, TEST_ATTR_IN_DOCTEST, span, "unit tests in doctest are not executed");
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs b/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs
new file mode 100644
index 000000000..d7ad30efe
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs
@@ -0,0 +1,48 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use rustc_ast::token::CommentKind;
+use rustc_ast::{AttrKind, AttrStyle, Attribute};
+use rustc_errors::Applicability;
+use rustc_lint::LateContext;
+use rustc_span::Span;
+
+use super::SUSPICIOUS_DOC_COMMENTS;
+
+pub fn check(cx: &LateContext<'_>, attrs: &[Attribute]) {
+ let replacements: Vec<_> = collect_doc_replacements(attrs);
+
+ if let Some((&(lo_span, _), &(hi_span, _))) = replacements.first().zip(replacements.last()) {
+ span_lint_and_then(
+ cx,
+ SUSPICIOUS_DOC_COMMENTS,
+ lo_span.to(hi_span),
+ "this is an outer doc comment and does not apply to the parent module or crate",
+ |diag| {
+ diag.multipart_suggestion(
+ "use an inner doc comment to document the parent module or crate",
+ replacements,
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+}
+
+fn collect_doc_replacements(attrs: &[Attribute]) -> Vec<(Span, String)> {
+ attrs
+ .iter()
+ .filter_map(|attr| {
+ if let AttrKind::DocComment(com_kind, sym) = attr.kind
+ && let AttrStyle::Outer = attr.style
+ && let Some(com) = sym.as_str().strip_prefix('!')
+ {
+ let sugg = match com_kind {
+ CommentKind::Line => format!("//!{com}"),
+ CommentKind::Block => format!("/*!{com}*/"),
+ };
+ Some((attr.span, sugg))
+ } else {
+ None
+ }
+ })
+ .collect()
+}
diff --git a/src/tools/clippy/clippy_lints/src/double_parens.rs b/src/tools/clippy/clippy_lints/src/double_parens.rs
index 63f32173b..b51bb7951 100644
--- a/src/tools/clippy/clippy_lints/src/double_parens.rs
+++ b/src/tools/clippy/clippy_lints/src/double_parens.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::{Expr, ExprKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
index 177e04dfa..124d78fc4 100644
--- a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
@@ -3,7 +3,7 @@ use clippy_utils::ty::{is_copy, is_must_use_ty, is_type_lang_item};
use clippy_utils::{get_parent_node, is_must_use_func_call};
use rustc_hir::{Arm, Expr, ExprKind, LangItem, Node};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
use std::borrow::Cow;
diff --git a/src/tools/clippy/clippy_lints/src/duplicate_mod.rs b/src/tools/clippy/clippy_lints/src/duplicate_mod.rs
index 7ff7068f0..471335c09 100644
--- a/src/tools/clippy/clippy_lints/src/duplicate_mod.rs
+++ b/src/tools/clippy/clippy_lints/src/duplicate_mod.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{Crate, Inline, Item, ItemKind, ModKind};
use rustc_errors::MultiSpan;
use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{FileName, Span};
use std::collections::BTreeMap;
use std::path::PathBuf;
diff --git a/src/tools/clippy/clippy_lints/src/else_if_without_else.rs b/src/tools/clippy/clippy_lints/src/else_if_without_else.rs
index 61db1c1ab..47780cab9 100644
--- a/src/tools/clippy/clippy_lints/src/else_if_without_else.rs
+++ b/src/tools/clippy/clippy_lints/src/else_if_without_else.rs
@@ -4,7 +4,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{Expr, ExprKind};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/empty_drop.rs b/src/tools/clippy/clippy_lints/src/empty_drop.rs
index 5fcdca7cf..1d2b907b9 100644
--- a/src/tools/clippy/clippy_lints/src/empty_drop.rs
+++ b/src/tools/clippy/clippy_lints/src/empty_drop.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::peel_blocks;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Body, ExprKind, Impl, ImplItemKind, Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -36,31 +35,30 @@ declare_lint_pass!(EmptyDrop => [EMPTY_DROP]);
impl LateLintPass<'_> for EmptyDrop {
fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
- if_chain! {
- if let ItemKind::Impl(Impl {
- of_trait: Some(ref trait_ref),
- items: [child],
- ..
- }) = item.kind;
- if trait_ref.trait_def_id() == cx.tcx.lang_items().drop_trait();
- if let impl_item_hir = child.id.hir_id();
- if let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir);
- if let ImplItemKind::Fn(_, b) = &impl_item.kind;
- if let Body { value: func_expr, .. } = cx.tcx.hir().body(*b);
- let func_expr = peel_blocks(func_expr);
- if let ExprKind::Block(block, _) = func_expr.kind;
- if block.stmts.is_empty() && block.expr.is_none();
- then {
- span_lint_and_sugg(
- cx,
- EMPTY_DROP,
- item.span,
- "empty drop implementation",
- "try removing this impl",
- String::new(),
- Applicability::MaybeIncorrect
- );
- }
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ items: [child],
+ ..
+ }) = item.kind
+ && trait_ref.trait_def_id() == cx.tcx.lang_items().drop_trait()
+ && let impl_item_hir = child.id.hir_id()
+ && let Some(Node::ImplItem(impl_item)) = cx.tcx.opt_hir_node(impl_item_hir)
+ && let ImplItemKind::Fn(_, b) = &impl_item.kind
+ && let Body { value: func_expr, .. } = cx.tcx.hir().body(*b)
+ && let func_expr = peel_blocks(func_expr)
+ && let ExprKind::Block(block, _) = func_expr.kind
+ && block.stmts.is_empty()
+ && block.expr.is_none()
+ {
+ span_lint_and_sugg(
+ cx,
+ EMPTY_DROP,
+ item.span,
+ "empty drop implementation",
+ "try removing this impl",
+ String::new(),
+ Applicability::MaybeIncorrect,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/empty_enum.rs b/src/tools/clippy/clippy_lints/src/empty_enum.rs
index a5699727b..420888b6c 100644
--- a/src/tools/clippy/clippy_lints/src/empty_enum.rs
+++ b/src/tools/clippy/clippy_lints/src/empty_enum.rs
@@ -3,7 +3,7 @@
use clippy_utils::diagnostics::span_lint_and_help;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
index 4e2a8b73c..3cf67b3ec 100644
--- a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
+++ b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
@@ -4,7 +4,7 @@ use rustc_ast::ast::{Item, ItemKind, VariantData};
use rustc_errors::Applicability;
use rustc_lexer::TokenKind;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/endian_bytes.rs b/src/tools/clippy/clippy_lints/src/endian_bytes.rs
index affd08221..b8a817e21 100644
--- a/src/tools/clippy/clippy_lints/src/endian_bytes.rs
+++ b/src/tools/clippy/clippy_lints/src/endian_bytes.rs
@@ -5,7 +5,7 @@ use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::Ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Symbol;
use std::borrow::Cow;
@@ -114,27 +114,23 @@ impl LateLintPass<'_> for EndianBytes {
return;
}
- if_chain! {
- if let ExprKind::MethodCall(method_name, receiver, args, ..) = expr.kind;
- if args.is_empty();
- let ty = cx.typeck_results().expr_ty(receiver);
- if ty.is_primitive_ty();
- if maybe_lint_endian_bytes(cx, expr, Prefix::To, method_name.ident.name, ty);
- then {
- return;
- }
+ if let ExprKind::MethodCall(method_name, receiver, args, ..) = expr.kind
+ && args.is_empty()
+ && let ty = cx.typeck_results().expr_ty(receiver)
+ && ty.is_primitive_ty()
+ && maybe_lint_endian_bytes(cx, expr, Prefix::To, method_name.ident.name, ty)
+ {
+ return;
}
- if_chain! {
- if let ExprKind::Call(function, ..) = expr.kind;
- if let ExprKind::Path(qpath) = function.kind;
- if let Some(def_id) = cx.qpath_res(&qpath, function.hir_id).opt_def_id();
- if let Some(function_name) = cx.get_def_path(def_id).last();
- let ty = cx.typeck_results().expr_ty(expr);
- if ty.is_primitive_ty();
- then {
- maybe_lint_endian_bytes(cx, expr, Prefix::From, *function_name, ty);
- }
+ if let ExprKind::Call(function, ..) = expr.kind
+ && let ExprKind::Path(qpath) = function.kind
+ && let Some(def_id) = cx.qpath_res(&qpath, function.hir_id).opt_def_id()
+ && let Some(function_name) = cx.get_def_path(def_id).last()
+ && let ty = cx.typeck_results().expr_ty(expr)
+ && ty.is_primitive_ty()
+ {
+ maybe_lint_endian_bytes(cx, expr, Prefix::From, *function_name, ty);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/entry.rs b/src/tools/clippy/clippy_lints/src/entry.rs
index 3e3c62e85..ce0a1dfdc 100644
--- a/src/tools/clippy/clippy_lints/src/entry.rs
+++ b/src/tools/clippy/clippy_lints/src/entry.rs
@@ -10,7 +10,7 @@ use rustc_hir::hir_id::HirIdSet;
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{Block, Expr, ExprKind, Guard, HirId, Let, Pat, Stmt, StmtKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{Span, SyntaxContext, DUMMY_SP};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/enum_clike.rs b/src/tools/clippy/clippy_lints/src/enum_clike.rs
index 003b5fc72..30eb643c4 100644
--- a/src/tools/clippy/clippy_lints/src/enum_clike.rs
+++ b/src/tools/clippy/clippy_lints/src/enum_clike.rs
@@ -7,7 +7,7 @@ use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::util::IntTypeExt;
use rustc_middle::ty::{self, IntTy, UintTy};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/equatable_if_let.rs b/src/tools/clippy/clippy_lints/src/equatable_if_let.rs
index 575fead5b..3c4352942 100644
--- a/src/tools/clippy/clippy_lints/src/equatable_if_let.rs
+++ b/src/tools/clippy/clippy_lints/src/equatable_if_let.rs
@@ -6,7 +6,7 @@ use rustc_hir::{Expr, ExprKind, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::Ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -46,9 +46,12 @@ fn unary_pattern(pat: &Pat<'_>) -> bool {
pats.iter().all(unary_pattern)
}
match &pat.kind {
- PatKind::Slice(_, _, _) | PatKind::Range(_, _, _) | PatKind::Binding(..) | PatKind::Wild | PatKind::Or(_) => {
- false
- },
+ PatKind::Slice(_, _, _)
+ | PatKind::Range(_, _, _)
+ | PatKind::Binding(..)
+ | PatKind::Wild
+ | PatKind::Never
+ | PatKind::Or(_) => false,
PatKind::Struct(_, a, etc) => !etc && a.iter().all(|x| unary_pattern(x.pat)),
PatKind::Tuple(a, etc) | PatKind::TupleStruct(_, a, etc) => etc.as_opt_usize().is_none() && array_rec(a),
PatKind::Ref(x, _) | PatKind::Box(x) => unary_pattern(x),
diff --git a/src/tools/clippy/clippy_lints/src/error_impl_error.rs b/src/tools/clippy/clippy_lints/src/error_impl_error.rs
index bc878555c..8dbb47fad 100644
--- a/src/tools/clippy/clippy_lints/src/error_impl_error.rs
+++ b/src/tools/clippy/clippy_lints/src/error_impl_error.rs
@@ -5,7 +5,7 @@ use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Visibility;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -58,7 +58,7 @@ impl<'tcx> LateLintPass<'tcx> for ErrorImplError {
if let Some(trait_def_id) = imp.of_trait.and_then(|t| t.trait_def_id())
&& error_def_id == trait_def_id
&& let Some(def_id) = path_res(cx, imp.self_ty).opt_def_id().and_then(DefId::as_local)
- && let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id)
+ && let hir_id = cx.tcx.local_def_id_to_hir_id(def_id)
&& let Some(ident) = cx.tcx.opt_item_ident(def_id.to_def_id())
&& ident.name == sym::Error
&& is_visible_outside_module(cx, def_id) =>
diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs
index 3d0ddca19..b77762630 100644
--- a/src/tools/clippy/clippy_lints/src/escape.rs
+++ b/src/tools/clippy/clippy_lints/src/escape.rs
@@ -1,15 +1,15 @@
use clippy_utils::diagnostics::span_lint_hir;
-use rustc_hir::{self, intravisit, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind};
+use rustc_hir::{intravisit, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind};
use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::layout::LayoutOf;
-use rustc_middle::ty::{self, TraitRef, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_middle::ty::{self, TraitRef, Ty, TyCtxt};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
-use rustc_span::Span;
use rustc_span::symbol::kw;
+use rustc_span::Span;
use rustc_target::spec::abi::Abi;
#[derive(Copy, Clone)]
@@ -74,9 +74,9 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
let parent_id = cx
.tcx
.hir()
- .get_parent_item(cx.tcx.hir().local_def_id_to_hir_id(fn_def_id))
+ .get_parent_item(cx.tcx.local_def_id_to_hir_id(fn_def_id))
.def_id;
- let parent_node = cx.tcx.hir().find_by_def_id(parent_id);
+ let parent_node = cx.tcx.opt_hir_node_by_def_id(parent_id);
let mut trait_self_ty = None;
if let Some(Node::Item(item)) = parent_node {
@@ -122,8 +122,8 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
}
// TODO: Replace with Map::is_argument(..) when it's fixed
-fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
- match map.find(id) {
+fn is_argument(tcx: TyCtxt<'_>, id: HirId) -> bool {
+ match tcx.opt_hir_node(id) {
Some(Node::Pat(Pat {
kind: PatKind::Binding(..),
..
@@ -131,7 +131,7 @@ fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
_ => return false,
}
- matches!(map.find_parent(id), Some(Node::Param(_)))
+ matches!(tcx.hir().find_parent(id), Some(Node::Param(_)))
}
impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
@@ -154,7 +154,7 @@ impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
if cmt.place.projections.is_empty() {
let map = &self.cx.tcx.hir();
- if is_argument(*map, cmt.hir_id) {
+ if is_argument(self.cx.tcx, cmt.hir_id) {
// Skip closure arguments
let parent_id = map.parent_id(cmt.hir_id);
if let Some(Node::Expr(..)) = map.find_parent(parent_id) {
diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs
index fad8fbf04..450cee400 100644
--- a/src/tools/clippy/clippy_lints/src/eta_reduction.rs
+++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs
@@ -13,7 +13,7 @@ use rustc_middle::ty::{
self, Binder, ClosureArgs, ClosureKind, EarlyBinder, FnSig, GenericArg, GenericArgKind, GenericArgsRef,
ImplPolarity, List, Region, RegionKind, Ty, TypeVisitableExt, TypeckResults,
};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
@@ -220,7 +220,8 @@ fn check_inputs(
params.len() == self_arg.map_or(0, |_| 1) + args.len()
&& params.iter().zip(self_arg.into_iter().chain(args)).all(|(p, arg)| {
matches!(
- p.pat.kind,PatKind::Binding(BindingAnnotation::NONE, id, _, None)
+ p.pat.kind,
+ PatKind::Binding(BindingAnnotation::NONE, id, _, None)
if path_to_local_id(arg, id)
)
// Only allow adjustments which change regions (i.e. re-borrowing).
@@ -247,8 +248,7 @@ fn check_sig<'tcx>(cx: &LateContext<'tcx>, closure: ClosureArgs<'tcx>, call_sig:
/// This is needed because rustc is unable to late bind early-bound regions in a function signature.
fn has_late_bound_to_non_late_bound_regions(from_sig: FnSig<'_>, to_sig: FnSig<'_>) -> bool {
fn check_region(from_region: Region<'_>, to_region: Region<'_>) -> bool {
- matches!(from_region.kind(), RegionKind::ReLateBound(..))
- && !matches!(to_region.kind(), RegionKind::ReLateBound(..))
+ matches!(from_region.kind(), RegionKind::ReBound(..)) && !matches!(to_region.kind(), RegionKind::ReBound(..))
}
fn check_subs(from_subs: &[GenericArg<'_>], to_subs: &[GenericArg<'_>]) -> bool {
@@ -290,7 +290,7 @@ fn has_late_bound_to_non_late_bound_regions(from_sig: FnSig<'_>, to_sig: FnSig<'
.zip(to_tys)
.any(|(from_ty, to_ty)| check_ty(from_ty, to_ty))
},
- _ => from_ty.has_late_bound_regions(),
+ _ => from_ty.has_bound_regions(),
}
}
diff --git a/src/tools/clippy/clippy_lints/src/excessive_bools.rs b/src/tools/clippy/clippy_lints/src/excessive_bools.rs
index 1d18e194d..c5f7212c4 100644
--- a/src/tools/clippy/clippy_lints/src/excessive_bools.rs
+++ b/src/tools/clippy/clippy_lints/src/excessive_bools.rs
@@ -3,7 +3,7 @@ use clippy_utils::{get_parent_as_impl, has_repr_attr, is_bool};
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, FnDecl, Item, ItemKind, TraitFn, TraitItem, TraitItemKind, Ty};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
use rustc_target::spec::abi::Abi;
@@ -171,7 +171,7 @@ impl<'tcx> LateLintPass<'tcx> for ExcessiveBools {
span: Span,
def_id: LocalDefId,
) {
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
if let Some(fn_header) = fn_kind.header()
&& fn_header.abi == Abi::Rust
&& get_parent_as_impl(cx.tcx, hir_id).map_or(true, |impl_item| impl_item.of_trait.is_none())
diff --git a/src/tools/clippy/clippy_lints/src/excessive_nesting.rs b/src/tools/clippy/clippy_lints/src/excessive_nesting.rs
index 83480fc5e..4b0d11c5d 100644
--- a/src/tools/clippy/clippy_lints/src/excessive_nesting.rs
+++ b/src/tools/clippy/clippy_lints/src/excessive_nesting.rs
@@ -5,7 +5,7 @@ use rustc_ast::visit::{walk_block, walk_item, Visitor};
use rustc_ast::{Block, Crate, Inline, Item, ItemKind, ModKind, NodeId};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/exhaustive_items.rs b/src/tools/clippy/clippy_lints/src/exhaustive_items.rs
index f976cfd3f..3a621d967 100644
--- a/src/tools/clippy/clippy_lints/src/exhaustive_items.rs
+++ b/src/tools/clippy/clippy_lints/src/exhaustive_items.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::indent_of;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -71,40 +70,31 @@ declare_lint_pass!(ExhaustiveItems => [EXHAUSTIVE_ENUMS, EXHAUSTIVE_STRUCTS]);
impl LateLintPass<'_> for ExhaustiveItems {
fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
- if_chain! {
- if let ItemKind::Enum(..) | ItemKind::Struct(..) = item.kind;
- if cx.effective_visibilities.is_exported(item.owner_id.def_id);
- let attrs = cx.tcx.hir().attrs(item.hir_id());
- if !attrs.iter().any(|a| a.has_name(sym::non_exhaustive));
- then {
- let (lint, msg) = if let ItemKind::Struct(ref v, ..) = item.kind {
- if v.fields().iter().any(|f| {
- !cx.tcx.visibility(f.def_id).is_public()
- }) {
- // skip structs with private fields
- return;
- }
- (EXHAUSTIVE_STRUCTS, "exported structs should not be exhaustive")
- } else {
- (EXHAUSTIVE_ENUMS, "exported enums should not be exhaustive")
- };
- let suggestion_span = item.span.shrink_to_lo();
- let indent = " ".repeat(indent_of(cx, item.span).unwrap_or(0));
- span_lint_and_then(
- cx,
- lint,
- item.span,
- msg,
- |diag| {
- let sugg = format!("#[non_exhaustive]\n{indent}");
- diag.span_suggestion(suggestion_span,
- "try adding #[non_exhaustive]",
- sugg,
- Applicability::MaybeIncorrect);
- }
+ if let ItemKind::Enum(..) | ItemKind::Struct(..) = item.kind
+ && cx.effective_visibilities.is_exported(item.owner_id.def_id)
+ && let attrs = cx.tcx.hir().attrs(item.hir_id())
+ && !attrs.iter().any(|a| a.has_name(sym::non_exhaustive))
+ {
+ let (lint, msg) = if let ItemKind::Struct(ref v, ..) = item.kind {
+ if v.fields().iter().any(|f| !cx.tcx.visibility(f.def_id).is_public()) {
+ // skip structs with private fields
+ return;
+ }
+ (EXHAUSTIVE_STRUCTS, "exported structs should not be exhaustive")
+ } else {
+ (EXHAUSTIVE_ENUMS, "exported enums should not be exhaustive")
+ };
+ let suggestion_span = item.span.shrink_to_lo();
+ let indent = " ".repeat(indent_of(cx, item.span).unwrap_or(0));
+ span_lint_and_then(cx, lint, item.span, msg, |diag| {
+ let sugg = format!("#[non_exhaustive]\n{indent}");
+ diag.span_suggestion(
+ suggestion_span,
+ "try adding #[non_exhaustive]",
+ sugg,
+ Applicability::MaybeIncorrect,
);
-
- }
+ });
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/exit.rs b/src/tools/clippy/clippy_lints/src/exit.rs
index e14b1c556..6603512c7 100644
--- a/src/tools/clippy/clippy_lints/src/exit.rs
+++ b/src/tools/clippy/clippy_lints/src/exit.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_entrypoint_fn;
-use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -42,19 +41,17 @@ declare_lint_pass!(Exit => [EXIT]);
impl<'tcx> LateLintPass<'tcx> for Exit {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(path_expr, _args) = e.kind;
- if let ExprKind::Path(ref path) = path_expr.kind;
- if let Some(def_id) = cx.qpath_res(path, path_expr.hir_id).opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::process_exit, def_id);
- let parent = cx.tcx.hir().get_parent_item(e.hir_id).def_id;
- if let Some(Node::Item(Item{kind: ItemKind::Fn(..), ..})) = cx.tcx.hir().find_by_def_id(parent);
+ if let ExprKind::Call(path_expr, _args) = e.kind
+ && let ExprKind::Path(ref path) = path_expr.kind
+ && let Some(def_id) = cx.qpath_res(path, path_expr.hir_id).opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::process_exit, def_id)
+ && let parent = cx.tcx.hir().get_parent_item(e.hir_id).def_id
+ && let Some(Node::Item(Item{kind: ItemKind::Fn(..), ..})) = cx.tcx.opt_hir_node_by_def_id(parent)
// If the next item up is a function we check if it is an entry point
// and only then emit a linter warning
- if !is_entrypoint_fn(cx, parent.to_def_id());
- then {
- span_lint(cx, EXIT, e.span, "usage of `process::exit`");
- }
+ && !is_entrypoint_fn(cx, parent.to_def_id())
+ {
+ span_lint(cx, EXIT, e.span, "usage of `process::exit`");
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/explicit_write.rs b/src/tools/clippy/clippy_lints/src/explicit_write.rs
index 4b5bcb06a..e8c1e5db3 100644
--- a/src/tools/clippy/clippy_lints/src/explicit_write.rs
+++ b/src/tools/clippy/clippy_lints/src/explicit_write.rs
@@ -2,12 +2,11 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::macros::{find_format_args, format_args_inputs_span};
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{is_expn_of, path_def_id};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::{BindingAnnotation, Block, BlockCheckMode, Expr, ExprKind, Node, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, ExpnId};
declare_clippy_lint! {
@@ -16,7 +15,7 @@ declare_clippy_lint! {
/// replaced with `(e)print!()` / `(e)println!()`
///
/// ### Why is this bad?
- /// Using `(e)println! is clearer and more concise
+ /// Using `(e)println!` is clearer and more concise
///
/// ### Example
/// ```no_run
@@ -101,30 +100,28 @@ impl<'tcx> LateLintPass<'tcx> for ExplicitWrite {
/// If `kind` is a block that looks like `{ let result = $expr; result }` then
/// returns $expr. Otherwise returns `kind`.
fn look_in_block<'tcx, 'hir>(cx: &LateContext<'tcx>, kind: &'tcx ExprKind<'hir>) -> &'tcx ExprKind<'hir> {
- if_chain! {
- if let ExprKind::Block(block, _label @ None) = kind;
- if let Block {
+ if let ExprKind::Block(block, _label @ None) = kind
+ && let Block {
stmts: [Stmt { kind: StmtKind::Local(local), .. }],
expr: Some(expr_end_of_block),
rules: BlockCheckMode::DefaultBlock,
..
- } = block;
+ } = block
// Find id of the local that expr_end_of_block resolves to
- if let ExprKind::Path(QPath::Resolved(None, expr_path)) = expr_end_of_block.kind;
- if let Res::Local(expr_res) = expr_path.res;
- if let Some(Node::Pat(res_pat)) = cx.tcx.hir().find(expr_res);
+ && let ExprKind::Path(QPath::Resolved(None, expr_path)) = expr_end_of_block.kind
+ && let Res::Local(expr_res) = expr_path.res
+ && let Some(Node::Pat(res_pat)) = cx.tcx.opt_hir_node(expr_res)
// Find id of the local we found in the block
- if let PatKind::Binding(BindingAnnotation::NONE, local_hir_id, _ident, None) = local.pat.kind;
+ && let PatKind::Binding(BindingAnnotation::NONE, local_hir_id, _ident, None) = local.pat.kind
// If those two are the same hir id
- if res_pat.hir_id == local_hir_id;
+ && res_pat.hir_id == local_hir_id
- if let Some(init) = local.init;
- then {
- return &init.kind;
- }
+ && let Some(init) = local.init
+ {
+ return &init.kind;
}
kind
}
diff --git a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
index d6c746901..538d29eb4 100644
--- a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
+++ b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
@@ -10,7 +10,7 @@ use rustc_hir::{
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs
index efb69476b..044694332 100644
--- a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs
+++ b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs
@@ -2,11 +2,10 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::macros::{is_panic, root_macro_call_first_node};
use clippy_utils::method_chain_args;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
@@ -53,13 +52,13 @@ declare_lint_pass!(FallibleImplFrom => [FALLIBLE_IMPL_FROM]);
impl<'tcx> LateLintPass<'tcx> for FallibleImplFrom {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
// check for `impl From<???> for ..`
- if_chain! {
- if let hir::ItemKind::Impl(impl_) = &item.kind;
- if let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id);
- if cx.tcx.is_diagnostic_item(sym::From, impl_trait_ref.skip_binder().def_id);
- then {
- lint_impl_body(cx, item.span, impl_.items);
- }
+ if let hir::ItemKind::Impl(impl_) = &item.kind
+ && let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id)
+ && cx
+ .tcx
+ .is_diagnostic_item(sym::From, impl_trait_ref.skip_binder().def_id)
+ {
+ lint_impl_body(cx, item.span, impl_.items);
}
}
}
@@ -98,34 +97,33 @@ fn lint_impl_body(cx: &LateContext<'_>, impl_span: Span, impl_items: &[hir::Impl
}
for impl_item in impl_items {
- if_chain! {
- if impl_item.ident.name == sym::from;
- if let ImplItemKind::Fn(_, body_id) =
- cx.tcx.hir().impl_item(impl_item.id).kind;
- then {
- // check the body for `begin_panic` or `unwrap`
- let body = cx.tcx.hir().body(body_id);
- let mut fpu = FindPanicUnwrap {
- lcx: cx,
- typeck_results: cx.tcx.typeck(impl_item.id.owner_id.def_id),
- result: Vec::new(),
- };
- fpu.visit_expr(body.value);
+ if impl_item.ident.name == sym::from
+ && let ImplItemKind::Fn(_, body_id) = cx.tcx.hir().impl_item(impl_item.id).kind
+ {
+ // check the body for `begin_panic` or `unwrap`
+ let body = cx.tcx.hir().body(body_id);
+ let mut fpu = FindPanicUnwrap {
+ lcx: cx,
+ typeck_results: cx.tcx.typeck(impl_item.id.owner_id.def_id),
+ result: Vec::new(),
+ };
+ fpu.visit_expr(body.value);
- // if we've found one, lint
- if !fpu.result.is_empty() {
- span_lint_and_then(
- cx,
- FALLIBLE_IMPL_FROM,
- impl_span,
- "consider implementing `TryFrom` instead",
- move |diag| {
- diag.help(
- "`From` is intended for infallible conversions only. \
- Use `TryFrom` if there's a possibility for the conversion to fail");
- diag.span_note(fpu.result, "potential failure(s)");
- });
- }
+ // if we've found one, lint
+ if !fpu.result.is_empty() {
+ span_lint_and_then(
+ cx,
+ FALLIBLE_IMPL_FROM,
+ impl_span,
+ "consider implementing `TryFrom` instead",
+ move |diag| {
+ diag.help(
+ "`From` is intended for infallible conversions only. \
+ Use `TryFrom` if there's a possibility for the conversion to fail",
+ );
+ diag.span_note(fpu.result, "potential failure(s)");
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/float_literal.rs b/src/tools/clippy/clippy_lints/src/float_literal.rs
index 506a11917..38a16c5c8 100644
--- a/src/tools/clippy/clippy_lints/src/float_literal.rs
+++ b/src/tools/clippy/clippy_lints/src/float_literal.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::numeric_literal;
-use if_chain::if_chain;
use rustc_ast::ast::{self, LitFloatType, LitKind};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, FloatTy};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::fmt;
declare_clippy_lint! {
@@ -64,73 +63,70 @@ declare_lint_pass!(FloatLiteral => [EXCESSIVE_PRECISION, LOSSY_FLOAT_LITERAL]);
impl<'tcx> LateLintPass<'tcx> for FloatLiteral {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
let ty = cx.typeck_results().expr_ty(expr);
- if_chain! {
- if let ty::Float(fty) = *ty.kind();
- if let hir::ExprKind::Lit(lit) = expr.kind;
- if let LitKind::Float(sym, lit_float_ty) = lit.node;
- then {
- let sym_str = sym.as_str();
- let formatter = FloatFormat::new(sym_str);
- // Try to bail out if the float is for sure fine.
- // If its within the 2 decimal digits of being out of precision we
- // check if the parsed representation is the same as the string
- // since we'll need the truncated string anyway.
- let digits = count_digits(sym_str);
- let max = max_digits(fty);
- let type_suffix = match lit_float_ty {
- LitFloatType::Suffixed(ast::FloatTy::F32) => Some("f32"),
- LitFloatType::Suffixed(ast::FloatTy::F64) => Some("f64"),
- LitFloatType::Unsuffixed => None
- };
- let (is_whole, is_inf, mut float_str) = match fty {
- FloatTy::F32 => {
- let value = sym_str.parse::<f32>().unwrap();
+ if let ty::Float(fty) = *ty.kind()
+ && let hir::ExprKind::Lit(lit) = expr.kind
+ && let LitKind::Float(sym, lit_float_ty) = lit.node
+ {
+ let sym_str = sym.as_str();
+ let formatter = FloatFormat::new(sym_str);
+ // Try to bail out if the float is for sure fine.
+ // If its within the 2 decimal digits of being out of precision we
+ // check if the parsed representation is the same as the string
+ // since we'll need the truncated string anyway.
+ let digits = count_digits(sym_str);
+ let max = max_digits(fty);
+ let type_suffix = match lit_float_ty {
+ LitFloatType::Suffixed(ast::FloatTy::F32) => Some("f32"),
+ LitFloatType::Suffixed(ast::FloatTy::F64) => Some("f64"),
+ LitFloatType::Unsuffixed => None,
+ };
+ let (is_whole, is_inf, mut float_str) = match fty {
+ FloatTy::F32 => {
+ let value = sym_str.parse::<f32>().unwrap();
- (value.fract() == 0.0, value.is_infinite(), formatter.format(value))
- },
- FloatTy::F64 => {
- let value = sym_str.parse::<f64>().unwrap();
+ (value.fract() == 0.0, value.is_infinite(), formatter.format(value))
+ },
+ FloatTy::F64 => {
+ let value = sym_str.parse::<f64>().unwrap();
+ (value.fract() == 0.0, value.is_infinite(), formatter.format(value))
+ },
+ };
- (value.fract() == 0.0, value.is_infinite(), formatter.format(value))
- },
- };
-
- if is_inf {
- return;
- }
-
- if is_whole && !sym_str.contains(|c| c == 'e' || c == 'E') {
- // Normalize the literal by stripping the fractional portion
- if sym_str.split('.').next().unwrap() != float_str {
- // If the type suffix is missing the suggestion would be
- // incorrectly interpreted as an integer so adding a `.0`
- // suffix to prevent that.
- if type_suffix.is_none() {
- float_str.push_str(".0");
- }
+ if is_inf {
+ return;
+ }
- span_lint_and_sugg(
- cx,
- LOSSY_FLOAT_LITERAL,
- expr.span,
- "literal cannot be represented as the underlying type without loss of precision",
- "consider changing the type or replacing it with",
- numeric_literal::format(&float_str, type_suffix, true),
- Applicability::MachineApplicable,
- );
+ if is_whole && !sym_str.contains(|c| c == 'e' || c == 'E') {
+ // Normalize the literal by stripping the fractional portion
+ if sym_str.split('.').next().unwrap() != float_str {
+ // If the type suffix is missing the suggestion would be
+ // incorrectly interpreted as an integer so adding a `.0`
+ // suffix to prevent that.
+ if type_suffix.is_none() {
+ float_str.push_str(".0");
}
- } else if digits > max as usize && float_str.len() < sym_str.len() {
+
span_lint_and_sugg(
cx,
- EXCESSIVE_PRECISION,
+ LOSSY_FLOAT_LITERAL,
expr.span,
- "float has excessive precision",
- "consider changing the type or truncating it to",
+ "literal cannot be represented as the underlying type without loss of precision",
+ "consider changing the type or replacing it with",
numeric_literal::format(&float_str, type_suffix, true),
Applicability::MachineApplicable,
);
}
+ } else if digits > max as usize && float_str.len() < sym_str.len() {
+ span_lint_and_sugg(
+ cx,
+ EXCESSIVE_PRECISION,
+ expr.span,
+ "float has excessive precision",
+ "consider changing the type or truncating it to",
+ numeric_literal::format(&float_str, type_suffix, true),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
index 09a9d9924..c8b87e510 100644
--- a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
+++ b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
@@ -4,12 +4,11 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::{
eq_expr_value, get_parent_expr, higher, in_constant, is_no_std_crate, numeric_literal, peel_blocks, sugg,
};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, PathSegment, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_ast::ast;
@@ -133,30 +132,25 @@ fn prepare_receiver_sugg<'a>(cx: &LateContext<'_>, mut expr: &'a Expr<'a>) -> Su
expr = inner_expr;
}
- if_chain! {
+ if let ty::Float(float_ty) = cx.typeck_results().expr_ty(expr).kind()
// if the expression is a float literal and it is unsuffixed then
// add a suffix so the suggestion is valid and unambiguous
- if let ty::Float(float_ty) = cx.typeck_results().expr_ty(expr).kind();
- if let ExprKind::Lit(lit) = &expr.kind;
- if let ast::LitKind::Float(sym, ast::LitFloatType::Unsuffixed) = lit.node;
- then {
- let op = format!(
- "{suggestion}{}{}",
- // Check for float literals without numbers following the decimal
- // separator such as `2.` and adds a trailing zero
- if sym.as_str().ends_with('.') {
- "0"
- } else {
- ""
- },
- float_ty.name_str()
- ).into();
-
- suggestion = match suggestion {
- Sugg::MaybeParen(_) => Sugg::MaybeParen(op),
- _ => Sugg::NonParen(op)
- };
- }
+ && let ExprKind::Lit(lit) = &expr.kind
+ && let ast::LitKind::Float(sym, ast::LitFloatType::Unsuffixed) = lit.node
+ {
+ let op = format!(
+ "{suggestion}{}{}",
+ // Check for float literals without numbers following the decimal
+ // separator such as `2.` and adds a trailing zero
+ if sym.as_str().ends_with('.') { "0" } else { "" },
+ float_ty.name_str()
+ )
+ .into();
+
+ suggestion = match suggestion {
+ Sugg::MaybeParen(_) => Sugg::MaybeParen(op),
+ _ => Sugg::NonParen(op),
+ };
}
suggestion.maybe_par()
@@ -359,35 +353,59 @@ fn detect_hypot(cx: &LateContext<'_>, receiver: &Expr<'_>) -> Option<String> {
) = receiver.kind
{
// check if expression of the form x * x + y * y
- if_chain! {
- if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, lmul_lhs, lmul_rhs) = add_lhs.kind;
- if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, rmul_lhs, rmul_rhs) = add_rhs.kind;
- if eq_expr_value(cx, lmul_lhs, lmul_rhs);
- if eq_expr_value(cx, rmul_lhs, rmul_rhs);
- then {
- return Some(format!("{}.hypot({})", Sugg::hir(cx, lmul_lhs, "..").maybe_par(), Sugg::hir(cx, rmul_lhs, "..")));
- }
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Mul, ..
+ },
+ lmul_lhs,
+ lmul_rhs,
+ ) = add_lhs.kind
+ && let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Mul, ..
+ },
+ rmul_lhs,
+ rmul_rhs,
+ ) = add_rhs.kind
+ && eq_expr_value(cx, lmul_lhs, lmul_rhs)
+ && eq_expr_value(cx, rmul_lhs, rmul_rhs)
+ {
+ return Some(format!(
+ "{}.hypot({})",
+ Sugg::hir(cx, lmul_lhs, "..").maybe_par(),
+ Sugg::hir(cx, rmul_lhs, "..")
+ ));
}
// check if expression of the form x.powi(2) + y.powi(2)
- if_chain! {
- if let ExprKind::MethodCall(
- PathSegment { ident: lmethod_name, .. },
- largs_0, [largs_1, ..],
- _
- ) = &add_lhs.kind;
- if let ExprKind::MethodCall(
- PathSegment { ident: rmethod_name, .. },
- rargs_0, [rargs_1, ..],
- _
- ) = &add_rhs.kind;
- if lmethod_name.as_str() == "powi" && rmethod_name.as_str() == "powi";
- if let Some(lvalue) = constant(cx, cx.typeck_results(), largs_1);
- if let Some(rvalue) = constant(cx, cx.typeck_results(), rargs_1);
- if Int(2) == lvalue && Int(2) == rvalue;
- then {
- return Some(format!("{}.hypot({})", Sugg::hir(cx, largs_0, "..").maybe_par(), Sugg::hir(cx, rargs_0, "..")));
- }
+ if let ExprKind::MethodCall(
+ PathSegment {
+ ident: lmethod_name, ..
+ },
+ largs_0,
+ [largs_1, ..],
+ _,
+ ) = &add_lhs.kind
+ && let ExprKind::MethodCall(
+ PathSegment {
+ ident: rmethod_name, ..
+ },
+ rargs_0,
+ [rargs_1, ..],
+ _,
+ ) = &add_rhs.kind
+ && lmethod_name.as_str() == "powi"
+ && rmethod_name.as_str() == "powi"
+ && let Some(lvalue) = constant(cx, cx.typeck_results(), largs_1)
+ && let Some(rvalue) = constant(cx, cx.typeck_results(), rargs_1)
+ && Int(2) == lvalue
+ && Int(2) == rvalue
+ {
+ return Some(format!(
+ "{}.hypot({})",
+ Sugg::hir(cx, largs_0, "..").maybe_par(),
+ Sugg::hir(cx, rargs_0, "..")
+ ));
}
}
@@ -411,39 +429,44 @@ fn check_hypot(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>) {
// TODO: Lint expressions of the form `x.exp() - y` where y > 1
// and suggest usage of `x.exp_m1() - (y - 1)` instead
fn check_expm1(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::Binary(Spanned { node: BinOpKind::Sub, .. }, lhs, rhs) = expr.kind;
- if cx.typeck_results().expr_ty(lhs).is_floating_point();
- if let Some(value) = constant(cx, cx.typeck_results(), rhs);
- if F32(1.0) == value || F64(1.0) == value;
- if let ExprKind::MethodCall(path, self_arg, ..) = &lhs.kind;
- if cx.typeck_results().expr_ty(self_arg).is_floating_point();
- if path.ident.name.as_str() == "exp";
- then {
- span_lint_and_sugg(
- cx,
- IMPRECISE_FLOPS,
- expr.span,
- "(e.pow(x) - 1) can be computed more accurately",
- "consider using",
- format!(
- "{}.exp_m1()",
- Sugg::hir(cx, self_arg, "..").maybe_par()
- ),
- Applicability::MachineApplicable,
- );
- }
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Sub, ..
+ },
+ lhs,
+ rhs,
+ ) = expr.kind
+ && cx.typeck_results().expr_ty(lhs).is_floating_point()
+ && let Some(value) = constant(cx, cx.typeck_results(), rhs)
+ && (F32(1.0) == value || F64(1.0) == value)
+ && let ExprKind::MethodCall(path, self_arg, ..) = &lhs.kind
+ && cx.typeck_results().expr_ty(self_arg).is_floating_point()
+ && path.ident.name.as_str() == "exp"
+ {
+ span_lint_and_sugg(
+ cx,
+ IMPRECISE_FLOPS,
+ expr.span,
+ "(e.pow(x) - 1) can be computed more accurately",
+ "consider using",
+ format!("{}.exp_m1()", Sugg::hir(cx, self_arg, "..").maybe_par()),
+ Applicability::MachineApplicable,
+ );
}
}
fn is_float_mul_expr<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<(&'a Expr<'a>, &'a Expr<'a>)> {
- if_chain! {
- if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, lhs, rhs) = &expr.kind;
- if cx.typeck_results().expr_ty(lhs).is_floating_point();
- if cx.typeck_results().expr_ty(rhs).is_floating_point();
- then {
- return Some((lhs, rhs));
- }
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Mul, ..
+ },
+ lhs,
+ rhs,
+ ) = &expr.kind
+ && cx.typeck_results().expr_ty(lhs).is_floating_point()
+ && cx.typeck_results().expr_ty(rhs).is_floating_point()
+ {
+ return Some((lhs, rhs));
}
None
@@ -473,9 +496,13 @@ fn check_mul_add(cx: &LateContext<'_>, expr: &Expr<'_>) {
if let BinOpKind::Sub = op { -sugg } else { sugg }
};
- let (recv, arg1, arg2) = if let Some((inner_lhs, inner_rhs)) = is_float_mul_expr(cx, lhs) {
+ let (recv, arg1, arg2) = if let Some((inner_lhs, inner_rhs)) = is_float_mul_expr(cx, lhs)
+ && cx.typeck_results().expr_ty(rhs).is_floating_point()
+ {
(inner_lhs, Sugg::hir(cx, inner_rhs, ".."), maybe_neg_sugg(rhs))
- } else if let Some((inner_lhs, inner_rhs)) = is_float_mul_expr(cx, rhs) {
+ } else if let Some((inner_lhs, inner_rhs)) = is_float_mul_expr(cx, rhs)
+ && cx.typeck_results().expr_ty(lhs).is_floating_point()
+ {
(inner_lhs, maybe_neg_sugg(inner_rhs), Sugg::hir(cx, lhs, ".."))
} else {
return;
@@ -553,60 +580,72 @@ fn are_negated<'a>(cx: &LateContext<'_>, expr1: &'a Expr<'a>, expr2: &'a Expr<'a
}
fn check_custom_abs(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if let Some(higher::If { cond, then, r#else: Some(r#else) }) = higher::If::hir(expr);
- let if_body_expr = peel_blocks(then);
- let else_body_expr = peel_blocks(r#else);
- if let Some((if_expr_positive, body)) = are_negated(cx, if_body_expr, else_body_expr);
- then {
- let positive_abs_sugg = (
- "manual implementation of `abs` method",
- format!("{}.abs()", Sugg::hir(cx, body, "..").maybe_par()),
- );
- let negative_abs_sugg = (
- "manual implementation of negation of `abs` method",
- format!("-{}.abs()", Sugg::hir(cx, body, "..").maybe_par()),
- );
- let sugg = if is_testing_positive(cx, cond, body) {
- if if_expr_positive {
- positive_abs_sugg
- } else {
- negative_abs_sugg
- }
- } else if is_testing_negative(cx, cond, body) {
- if if_expr_positive {
- negative_abs_sugg
- } else {
- positive_abs_sugg
- }
+ if let Some(higher::If {
+ cond,
+ then,
+ r#else: Some(r#else),
+ }) = higher::If::hir(expr)
+ && let if_body_expr = peel_blocks(then)
+ && let else_body_expr = peel_blocks(r#else)
+ && let Some((if_expr_positive, body)) = are_negated(cx, if_body_expr, else_body_expr)
+ {
+ let positive_abs_sugg = (
+ "manual implementation of `abs` method",
+ format!("{}.abs()", Sugg::hir(cx, body, "..").maybe_par()),
+ );
+ let negative_abs_sugg = (
+ "manual implementation of negation of `abs` method",
+ format!("-{}.abs()", Sugg::hir(cx, body, "..").maybe_par()),
+ );
+ let sugg = if is_testing_positive(cx, cond, body) {
+ if if_expr_positive {
+ positive_abs_sugg
} else {
- return;
- };
- span_lint_and_sugg(
- cx,
- SUBOPTIMAL_FLOPS,
- expr.span,
- sugg.0,
- "try",
- sugg.1,
- Applicability::MachineApplicable,
- );
- }
+ negative_abs_sugg
+ }
+ } else if is_testing_negative(cx, cond, body) {
+ if if_expr_positive {
+ negative_abs_sugg
+ } else {
+ positive_abs_sugg
+ }
+ } else {
+ return;
+ };
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ sugg.0,
+ "try",
+ sugg.1,
+ Applicability::MachineApplicable,
+ );
}
}
fn are_same_base_logs(cx: &LateContext<'_>, expr_a: &Expr<'_>, expr_b: &Expr<'_>) -> bool {
- if_chain! {
- if let ExprKind::MethodCall(PathSegment { ident: method_name_a, .. }, _, args_a, _) = expr_a.kind;
- if let ExprKind::MethodCall(PathSegment { ident: method_name_b, .. }, _, args_b, _) = expr_b.kind;
- then {
- return method_name_a.as_str() == method_name_b.as_str() &&
- args_a.len() == args_b.len() &&
- (
- ["ln", "log2", "log10"].contains(&method_name_a.as_str()) ||
- method_name_a.as_str() == "log" && args_a.len() == 1 && eq_expr_value(cx, &args_a[0], &args_b[0])
- );
- }
+ if let ExprKind::MethodCall(
+ PathSegment {
+ ident: method_name_a, ..
+ },
+ _,
+ args_a,
+ _,
+ ) = expr_a.kind
+ && let ExprKind::MethodCall(
+ PathSegment {
+ ident: method_name_b, ..
+ },
+ _,
+ args_b,
+ _,
+ ) = expr_b.kind
+ {
+ return method_name_a.as_str() == method_name_b.as_str()
+ && args_a.len() == args_b.len()
+ && (["ln", "log2", "log10"].contains(&method_name_a.as_str())
+ || method_name_a.as_str() == "log" && args_a.len() == 1 && eq_expr_value(cx, &args_a[0], &args_b[0]));
}
false
@@ -614,103 +653,98 @@ fn are_same_base_logs(cx: &LateContext<'_>, expr_a: &Expr<'_>, expr_b: &Expr<'_>
fn check_log_division(cx: &LateContext<'_>, expr: &Expr<'_>) {
// check if expression of the form x.logN() / y.logN()
- if_chain! {
- if let ExprKind::Binary(
- Spanned {
- node: BinOpKind::Div, ..
- },
- lhs,
- rhs,
- ) = &expr.kind;
- if are_same_base_logs(cx, lhs, rhs);
- if let ExprKind::MethodCall(_, largs_self, ..) = &lhs.kind;
- if let ExprKind::MethodCall(_, rargs_self, ..) = &rhs.kind;
- then {
- span_lint_and_sugg(
- cx,
- SUBOPTIMAL_FLOPS,
- expr.span,
- "log base can be expressed more clearly",
- "consider using",
- format!("{}.log({})", Sugg::hir(cx, largs_self, "..").maybe_par(), Sugg::hir(cx, rargs_self, ".."),),
- Applicability::MachineApplicable,
- );
- }
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Div, ..
+ },
+ lhs,
+ rhs,
+ ) = &expr.kind
+ && are_same_base_logs(cx, lhs, rhs)
+ && let ExprKind::MethodCall(_, largs_self, ..) = &lhs.kind
+ && let ExprKind::MethodCall(_, rargs_self, ..) = &rhs.kind
+ {
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "log base can be expressed more clearly",
+ "consider using",
+ format!(
+ "{}.log({})",
+ Sugg::hir(cx, largs_self, "..").maybe_par(),
+ Sugg::hir(cx, rargs_self, ".."),
+ ),
+ Applicability::MachineApplicable,
+ );
}
}
fn check_radians(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::Binary(
- Spanned {
- node: BinOpKind::Div, ..
- },
- div_lhs,
- div_rhs,
- ) = &expr.kind;
- if let ExprKind::Binary(
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Div, ..
+ },
+ div_lhs,
+ div_rhs,
+ ) = &expr.kind
+ && let ExprKind::Binary(
Spanned {
node: BinOpKind::Mul, ..
},
mul_lhs,
mul_rhs,
- ) = &div_lhs.kind;
- if let Some(rvalue) = constant(cx, cx.typeck_results(), div_rhs);
- if let Some(lvalue) = constant(cx, cx.typeck_results(), mul_rhs);
- then {
- // TODO: also check for constant values near PI/180 or 180/PI
- if (F32(f32_consts::PI) == rvalue || F64(f64_consts::PI) == rvalue) &&
- (F32(180_f32) == lvalue || F64(180_f64) == lvalue)
+ ) = &div_lhs.kind
+ && let Some(rvalue) = constant(cx, cx.typeck_results(), div_rhs)
+ && let Some(lvalue) = constant(cx, cx.typeck_results(), mul_rhs)
+ {
+ // TODO: also check for constant values near PI/180 or 180/PI
+ if (F32(f32_consts::PI) == rvalue || F64(f64_consts::PI) == rvalue)
+ && (F32(180_f32) == lvalue || F64(180_f64) == lvalue)
+ {
+ let mut proposal = format!("{}.to_degrees()", Sugg::hir(cx, mul_lhs, "..").maybe_par());
+ if let ExprKind::Lit(literal) = mul_lhs.kind
+ && let ast::LitKind::Float(ref value, float_type) = literal.node
+ && float_type == ast::LitFloatType::Unsuffixed
{
- let mut proposal = format!("{}.to_degrees()", Sugg::hir(cx, mul_lhs, "..").maybe_par());
- if_chain! {
- if let ExprKind::Lit(literal) = mul_lhs.kind;
- if let ast::LitKind::Float(ref value, float_type) = literal.node;
- if float_type == ast::LitFloatType::Unsuffixed;
- then {
- if value.as_str().ends_with('.') {
- proposal = format!("{}0_f64.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
- } else {
- proposal = format!("{}_f64.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
- }
- }
+ if value.as_str().ends_with('.') {
+ proposal = format!("{}0_f64.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
+ } else {
+ proposal = format!("{}_f64.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
}
- span_lint_and_sugg(
- cx,
- SUBOPTIMAL_FLOPS,
- expr.span,
- "conversion to degrees can be done more accurately",
- "consider using",
- proposal,
- Applicability::MachineApplicable,
- );
- } else if
- (F32(180_f32) == rvalue || F64(180_f64) == rvalue) &&
- (F32(f32_consts::PI) == lvalue || F64(f64_consts::PI) == lvalue)
+ }
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "conversion to degrees can be done more accurately",
+ "consider using",
+ proposal,
+ Applicability::MachineApplicable,
+ );
+ } else if (F32(180_f32) == rvalue || F64(180_f64) == rvalue)
+ && (F32(f32_consts::PI) == lvalue || F64(f64_consts::PI) == lvalue)
+ {
+ let mut proposal = format!("{}.to_radians()", Sugg::hir(cx, mul_lhs, "..").maybe_par());
+ if let ExprKind::Lit(literal) = mul_lhs.kind
+ && let ast::LitKind::Float(ref value, float_type) = literal.node
+ && float_type == ast::LitFloatType::Unsuffixed
{
- let mut proposal = format!("{}.to_radians()", Sugg::hir(cx, mul_lhs, "..").maybe_par());
- if_chain! {
- if let ExprKind::Lit(literal) = mul_lhs.kind;
- if let ast::LitKind::Float(ref value, float_type) = literal.node;
- if float_type == ast::LitFloatType::Unsuffixed;
- then {
- if value.as_str().ends_with('.') {
- proposal = format!("{}0_f64.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
- } else {
- proposal = format!("{}_f64.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
- }
- }
+ if value.as_str().ends_with('.') {
+ proposal = format!("{}0_f64.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
+ } else {
+ proposal = format!("{}_f64.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
}
- span_lint_and_sugg(
- cx,
- SUBOPTIMAL_FLOPS,
- expr.span,
- "conversion to radians can be done more accurately",
- "consider using",
- proposal,
- Applicability::MachineApplicable,
- );
}
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "conversion to radians can be done more accurately",
+ "consider using",
+ proposal,
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/format.rs b/src/tools/clippy/clippy_lints/src/format.rs
index 18ed05c1c..8a0cd155d 100644
--- a/src/tools/clippy/clippy_lints/src/format.rs
+++ b/src/tools/clippy/clippy_lints/src/format.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs
index 3c1f2d9d5..8af321e4d 100644
--- a/src/tools/clippy/clippy_lints/src/format_args.rs
+++ b/src/tools/clippy/clippy_lints/src/format_args.rs
@@ -8,7 +8,6 @@ use clippy_utils::macros::{
};
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::{implements_trait, is_type_lang_item};
-use if_chain::if_chain;
use itertools::Itertools;
use rustc_ast::{
FormatArgPosition, FormatArgPositionKind, FormatArgsPiece, FormatArgumentKind, FormatCount, FormatOptions,
@@ -20,7 +19,7 @@ use rustc_hir::{Expr, ExprKind, LangItem};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::adjustment::{Adjust, Adjustment};
use rustc_middle::ty::Ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::DefId;
use rustc_span::edition::Edition::Edition2021;
use rustc_span::{sym, Span, Symbol};
@@ -404,49 +403,43 @@ fn check_format_in_format_args(cx: &LateContext<'_>, call_site: Span, name: Symb
}
fn check_to_string_in_format_args(cx: &LateContext<'_>, name: Symbol, value: &Expr<'_>) {
- if_chain! {
- if !value.span.from_expansion();
- if let ExprKind::MethodCall(_, receiver, [], to_string_span) = value.kind;
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(value.hir_id);
- if is_diag_trait_item(cx, method_def_id, sym::ToString);
- let receiver_ty = cx.typeck_results().expr_ty(receiver);
- if let Some(display_trait_id) = cx.tcx.get_diagnostic_item(sym::Display);
- let (n_needed_derefs, target) =
- count_needed_derefs(receiver_ty, cx.typeck_results().expr_adjustments(receiver).iter());
- if implements_trait(cx, target, display_trait_id, &[]);
- if let Some(sized_trait_id) = cx.tcx.lang_items().sized_trait();
- if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
- then {
- let needs_ref = !implements_trait(cx, receiver_ty, sized_trait_id, &[]);
- if n_needed_derefs == 0 && !needs_ref {
- span_lint_and_sugg(
- cx,
- TO_STRING_IN_FORMAT_ARGS,
- to_string_span.with_lo(receiver.span.hi()),
- &format!(
- "`to_string` applied to a type that implements `Display` in `{name}!` args"
- ),
- "remove this",
- String::new(),
- Applicability::MachineApplicable,
- );
- } else {
- span_lint_and_sugg(
- cx,
- TO_STRING_IN_FORMAT_ARGS,
- value.span,
- &format!(
- "`to_string` applied to a type that implements `Display` in `{name}!` args"
- ),
- "use this",
- format!(
- "{}{:*>n_needed_derefs$}{receiver_snippet}",
- if needs_ref { "&" } else { "" },
- ""
- ),
- Applicability::MachineApplicable,
- );
- }
+ if !value.span.from_expansion()
+ && let ExprKind::MethodCall(_, receiver, [], to_string_span) = value.kind
+ && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(value.hir_id)
+ && is_diag_trait_item(cx, method_def_id, sym::ToString)
+ && let receiver_ty = cx.typeck_results().expr_ty(receiver)
+ && let Some(display_trait_id) = cx.tcx.get_diagnostic_item(sym::Display)
+ && let (n_needed_derefs, target) =
+ count_needed_derefs(receiver_ty, cx.typeck_results().expr_adjustments(receiver).iter())
+ && implements_trait(cx, target, display_trait_id, &[])
+ && let Some(sized_trait_id) = cx.tcx.lang_items().sized_trait()
+ && let Some(receiver_snippet) = snippet_opt(cx, receiver.span)
+ {
+ let needs_ref = !implements_trait(cx, receiver_ty, sized_trait_id, &[]);
+ if n_needed_derefs == 0 && !needs_ref {
+ span_lint_and_sugg(
+ cx,
+ TO_STRING_IN_FORMAT_ARGS,
+ to_string_span.with_lo(receiver.span.hi()),
+ &format!("`to_string` applied to a type that implements `Display` in `{name}!` args"),
+ "remove this",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint_and_sugg(
+ cx,
+ TO_STRING_IN_FORMAT_ARGS,
+ value.span,
+ &format!("`to_string` applied to a type that implements `Display` in `{name}!` args"),
+ "use this",
+ format!(
+ "{}{:*>n_needed_derefs$}{receiver_snippet}",
+ if needs_ref { "&" } else { "" },
+ ""
+ ),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/format_impl.rs b/src/tools/clippy/clippy_lints/src/format_impl.rs
index 08ee7032c..9360eb1fa 100644
--- a/src/tools/clippy/clippy_lints/src/format_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/format_impl.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
use clippy_utils::macros::{find_format_arg_expr, find_format_args, is_format_macro, root_macro_call_first_node};
use clippy_utils::{get_parent_as_impl, is_diag_trait_item, path_to_local, peel_ref_operators};
-use if_chain::if_chain;
use rustc_ast::{FormatArgsPiece, FormatTrait};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Impl, ImplItem, ImplItemKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::kw;
use rustc_span::{sym, Span, Symbol};
@@ -141,27 +140,25 @@ impl<'tcx> LateLintPass<'tcx> for FormatImpl {
}
fn check_to_string_in_display(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
+ if let ExprKind::MethodCall(path, self_arg, ..) = expr.kind
// Get the hir_id of the object we are calling the method on
- if let ExprKind::MethodCall(path, self_arg, ..) = expr.kind;
// Is the method to_string() ?
- if path.ident.name == sym::to_string;
+ && path.ident.name == sym::to_string
// Is the method a part of the ToString trait? (i.e. not to_string() implemented
// separately)
- if let Some(expr_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if is_diag_trait_item(cx, expr_def_id, sym::ToString);
+ && let Some(expr_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && is_diag_trait_item(cx, expr_def_id, sym::ToString)
// Is the method is called on self
- if let ExprKind::Path(QPath::Resolved(_, path)) = self_arg.kind;
- if let [segment] = path.segments;
- if segment.ident.name == kw::SelfLower;
- then {
- span_lint(
- cx,
- RECURSIVE_FORMAT_IMPL,
- expr.span,
- "using `self.to_string` in `fmt::Display` implementation will cause infinite recursion",
- );
- }
+ && let ExprKind::Path(QPath::Resolved(_, path)) = self_arg.kind
+ && let [segment] = path.segments
+ && segment.ident.name == kw::SelfLower
+ {
+ span_lint(
+ cx,
+ RECURSIVE_FORMAT_IMPL,
+ expr.span,
+ "using `self.to_string` in `fmt::Display` implementation will cause infinite recursion",
+ );
}
}
@@ -215,55 +212,53 @@ fn check_format_arg_self(cx: &LateContext<'_>, span: Span, arg: &Expr<'_>, impl_
}
fn check_print_in_format_impl(cx: &LateContext<'_>, expr: &Expr<'_>, impl_trait: FormatTraitNames) {
- if_chain! {
- if let Some(macro_call) = root_macro_call_first_node(cx, expr);
- if let Some(name) = cx.tcx.get_diagnostic_name(macro_call.def_id);
- then {
- let replacement = match name {
- sym::print_macro | sym::eprint_macro => "write",
- sym::println_macro | sym::eprintln_macro => "writeln",
- _ => return,
- };
+ if let Some(macro_call) = root_macro_call_first_node(cx, expr)
+ && let Some(name) = cx.tcx.get_diagnostic_name(macro_call.def_id)
+ {
+ let replacement = match name {
+ sym::print_macro | sym::eprint_macro => "write",
+ sym::println_macro | sym::eprintln_macro => "writeln",
+ _ => return,
+ };
- let name = name.as_str().strip_suffix("_macro").unwrap();
+ let name = name.as_str().strip_suffix("_macro").unwrap();
- span_lint_and_sugg(
- cx,
- PRINT_IN_FORMAT_IMPL,
- macro_call.span,
- &format!("use of `{name}!` in `{}` impl", impl_trait.name),
- "replace with",
- if let Some(formatter_name) = impl_trait.formatter_name {
- format!("{replacement}!({formatter_name}, ..)")
- } else {
- format!("{replacement}!(..)")
- },
- Applicability::HasPlaceholders,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ PRINT_IN_FORMAT_IMPL,
+ macro_call.span,
+ &format!("use of `{name}!` in `{}` impl", impl_trait.name),
+ "replace with",
+ if let Some(formatter_name) = impl_trait.formatter_name {
+ format!("{replacement}!({formatter_name}, ..)")
+ } else {
+ format!("{replacement}!(..)")
+ },
+ Applicability::HasPlaceholders,
+ );
}
}
fn is_format_trait_impl(cx: &LateContext<'_>, impl_item: &ImplItem<'_>) -> Option<FormatTraitNames> {
- if_chain! {
- if impl_item.ident.name == sym::fmt;
- if let ImplItemKind::Fn(_, body_id) = impl_item.kind;
- if let Some(Impl { of_trait: Some(trait_ref),..}) = get_parent_as_impl(cx.tcx, impl_item.hir_id());
- if let Some(did) = trait_ref.trait_def_id();
- if let Some(name) = cx.tcx.get_diagnostic_name(did);
- if matches!(name, sym::Debug | sym::Display);
- then {
- let body = cx.tcx.hir().body(body_id);
- let formatter_name = body.params.get(1)
- .and_then(|param| param.pat.simple_ident())
- .map(|ident| ident.name);
+ if impl_item.ident.name == sym::fmt
+ && let ImplItemKind::Fn(_, body_id) = impl_item.kind
+ && let Some(Impl {
+ of_trait: Some(trait_ref),
+ ..
+ }) = get_parent_as_impl(cx.tcx, impl_item.hir_id())
+ && let Some(did) = trait_ref.trait_def_id()
+ && let Some(name) = cx.tcx.get_diagnostic_name(did)
+ && matches!(name, sym::Debug | sym::Display)
+ {
+ let body = cx.tcx.hir().body(body_id);
+ let formatter_name = body
+ .params
+ .get(1)
+ .and_then(|param| param.pat.simple_ident())
+ .map(|ident| ident.name);
- Some(FormatTraitNames {
- name,
- formatter_name,
- })
- } else {
- None
- }
+ Some(FormatTraitNames { name, formatter_name })
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/format_push_string.rs b/src/tools/clippy/clippy_lints/src/format_push_string.rs
index ac45f5aed..3901dd984 100644
--- a/src/tools/clippy/clippy_lints/src/format_push_string.rs
+++ b/src/tools/clippy/clippy_lints/src/format_push_string.rs
@@ -3,7 +3,7 @@ use clippy_utils::ty::is_type_lang_item;
use clippy_utils::{higher, match_def_path, paths};
use rustc_hir::{BinOpKind, Expr, ExprKind, LangItem, MatchSource};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/formatting.rs b/src/tools/clippy/clippy_lints/src/formatting.rs
index 2c9c43d3e..c3ef6f180 100644
--- a/src/tools/clippy/clippy_lints/src/formatting.rs
+++ b/src/tools/clippy/clippy_lints/src/formatting.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note};
use clippy_utils::is_span_if;
use clippy_utils::source::snippet_opt;
-use if_chain::if_chain;
-use rustc_ast::ast::{BinOpKind, Block, Expr, ExprKind, StmtKind, UnOp};
+use rustc_ast::ast::{BinOpKind, Block, Expr, ExprKind, StmtKind};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -145,7 +144,7 @@ fn check_assign(cx: &EarlyContext<'_>, expr: &Expr) {
let eq_span = lhs.span.between(rhs.span);
if let ExprKind::Unary(op, ref sub_rhs) = rhs.kind {
if let Some(eq_snippet) = snippet_opt(cx, eq_span) {
- let op = UnOp::to_string(op);
+ let op = op.as_str();
let eqop_span = lhs.span.between(sub_rhs.span);
if eq_snippet.ends_with('=') {
span_lint_and_note(
@@ -168,93 +167,84 @@ fn check_assign(cx: &EarlyContext<'_>, expr: &Expr) {
/// Implementation of the `SUSPICIOUS_UNARY_OP_FORMATTING` lint.
fn check_unop(cx: &EarlyContext<'_>, expr: &Expr) {
- if_chain! {
- if let ExprKind::Binary(ref binop, ref lhs, ref rhs) = expr.kind;
- if !lhs.span.from_expansion() && !rhs.span.from_expansion();
+ if let ExprKind::Binary(ref binop, ref lhs, ref rhs) = expr.kind
+ && !lhs.span.from_expansion() && !rhs.span.from_expansion()
// span between BinOp LHS and RHS
- let binop_span = lhs.span.between(rhs.span);
+ && let binop_span = lhs.span.between(rhs.span)
// if RHS is an UnOp
- if let ExprKind::Unary(op, ref un_rhs) = rhs.kind;
+ && let ExprKind::Unary(op, ref un_rhs) = rhs.kind
// from UnOp operator to UnOp operand
- let unop_operand_span = rhs.span.until(un_rhs.span);
- if let Some(binop_snippet) = snippet_opt(cx, binop_span);
- if let Some(unop_operand_snippet) = snippet_opt(cx, unop_operand_span);
- let binop_str = BinOpKind::to_string(&binop.node);
+ && let unop_operand_span = rhs.span.until(un_rhs.span)
+ && let Some(binop_snippet) = snippet_opt(cx, binop_span)
+ && let Some(unop_operand_snippet) = snippet_opt(cx, unop_operand_span)
+ && let binop_str = binop.node.as_str()
// no space after BinOp operator and space after UnOp operator
- if binop_snippet.ends_with(binop_str) && unop_operand_snippet.ends_with(' ');
- then {
- let unop_str = UnOp::to_string(op);
- let eqop_span = lhs.span.between(un_rhs.span);
- span_lint_and_help(
- cx,
- SUSPICIOUS_UNARY_OP_FORMATTING,
- eqop_span,
- &format!(
- "by not having a space between `{binop_str}` and `{unop_str}` it looks like \
- `{binop_str}{unop_str}` is a single operator"
- ),
- None,
- &format!(
- "put a space between `{binop_str}` and `{unop_str}` and remove the space after `{unop_str}`"
- ),
- );
- }
+ && binop_snippet.ends_with(binop_str) && unop_operand_snippet.ends_with(' ')
+ {
+ let unop_str = op.as_str();
+ let eqop_span = lhs.span.between(un_rhs.span);
+ span_lint_and_help(
+ cx,
+ SUSPICIOUS_UNARY_OP_FORMATTING,
+ eqop_span,
+ &format!(
+ "by not having a space between `{binop_str}` and `{unop_str}` it looks like \
+ `{binop_str}{unop_str}` is a single operator"
+ ),
+ None,
+ &format!("put a space between `{binop_str}` and `{unop_str}` and remove the space after `{unop_str}`"),
+ );
}
}
/// Implementation of the `SUSPICIOUS_ELSE_FORMATTING` lint for weird `else`.
fn check_else(cx: &EarlyContext<'_>, expr: &Expr) {
- if_chain! {
- if let ExprKind::If(_, then, Some(else_)) = &expr.kind;
- if is_block(else_) || is_if(else_);
- if !then.span.from_expansion() && !else_.span.from_expansion();
- if !in_external_macro(cx.sess(), expr.span);
+ if let ExprKind::If(_, then, Some(else_)) = &expr.kind
+ && (is_block(else_) || is_if(else_))
+ && !then.span.from_expansion() && !else_.span.from_expansion()
+ && !in_external_macro(cx.sess(), expr.span)
// workaround for rust-lang/rust#43081
- if expr.span.lo().0 != 0 && expr.span.hi().0 != 0;
+ && expr.span.lo().0 != 0 && expr.span.hi().0 != 0
// this will be a span from the closing ‘}’ of the “then” block (excluding) to
// the “if” of the “else if” block (excluding)
- let else_span = then.span.between(else_.span);
+ && let else_span = then.span.between(else_.span)
// the snippet should look like " else \n " with maybe comments anywhere
// it’s bad when there is a ‘\n’ after the “else”
- if let Some(else_snippet) = snippet_opt(cx, else_span);
- if let Some((pre_else, post_else)) = else_snippet.split_once("else");
- if let Some((_, post_else_post_eol)) = post_else.split_once('\n');
-
- then {
- // Allow allman style braces `} \n else \n {`
- if_chain! {
- if is_block(else_);
- if let Some((_, pre_else_post_eol)) = pre_else.split_once('\n');
- // Exactly one eol before and after the else
- if !pre_else_post_eol.contains('\n');
- if !post_else_post_eol.contains('\n');
- then {
- return;
- }
- }
-
- // Don't warn if the only thing inside post_else_post_eol is a comment block.
- let trimmed_post_else_post_eol = post_else_post_eol.trim();
- if trimmed_post_else_post_eol.starts_with("/*") && trimmed_post_else_post_eol.ends_with("*/") {
- return
- }
+ && let Some(else_snippet) = snippet_opt(cx, else_span)
+ && let Some((pre_else, post_else)) = else_snippet.split_once("else")
+ && let Some((_, post_else_post_eol)) = post_else.split_once('\n')
+ {
+ // Allow allman style braces `} \n else \n {`
+ if is_block(else_)
+ && let Some((_, pre_else_post_eol)) = pre_else.split_once('\n')
+ // Exactly one eol before and after the else
+ && !pre_else_post_eol.contains('\n')
+ && !post_else_post_eol.contains('\n')
+ {
+ return;
+ }
- let else_desc = if is_if(else_) { "if" } else { "{..}" };
- span_lint_and_note(
- cx,
- SUSPICIOUS_ELSE_FORMATTING,
- else_span,
- &format!("this is an `else {else_desc}` but the formatting might hide it"),
- None,
- &format!(
- "to remove this lint, remove the `else` or remove the new line between \
- `else` and `{else_desc}`",
- ),
- );
+ // Don't warn if the only thing inside post_else_post_eol is a comment block.
+ let trimmed_post_else_post_eol = post_else_post_eol.trim();
+ if trimmed_post_else_post_eol.starts_with("/*") && trimmed_post_else_post_eol.ends_with("*/") {
+ return;
}
+
+ let else_desc = if is_if(else_) { "if" } else { "{..}" };
+ span_lint_and_note(
+ cx,
+ SUSPICIOUS_ELSE_FORMATTING,
+ else_span,
+ &format!("this is an `else {else_desc}` but the formatting might hide it"),
+ None,
+ &format!(
+ "to remove this lint, remove the `else` or remove the new line between \
+ `else` and `{else_desc}`",
+ ),
+ );
}
}
@@ -272,61 +262,56 @@ fn indentation(cx: &EarlyContext<'_>, span: Span) -> usize {
fn check_array(cx: &EarlyContext<'_>, expr: &Expr) {
if let ExprKind::Array(ref array) = expr.kind {
for element in array {
- if_chain! {
- if let ExprKind::Binary(ref op, ref lhs, _) = element.kind;
- if has_unary_equivalent(op.node) && lhs.span.eq_ctxt(op.span);
- let space_span = lhs.span.between(op.span);
- if let Some(space_snippet) = snippet_opt(cx, space_span);
- let lint_span = lhs.span.with_lo(lhs.span.hi());
- if space_snippet.contains('\n');
- if indentation(cx, op.span) <= indentation(cx, lhs.span);
- then {
- span_lint_and_note(
- cx,
- POSSIBLE_MISSING_COMMA,
- lint_span,
- "possibly missing a comma here",
- None,
- "to remove this lint, add a comma or write the expr in a single line",
- );
- }
+ if let ExprKind::Binary(ref op, ref lhs, _) = element.kind
+ && has_unary_equivalent(op.node)
+ && lhs.span.eq_ctxt(op.span)
+ && let space_span = lhs.span.between(op.span)
+ && let Some(space_snippet) = snippet_opt(cx, space_span)
+ && let lint_span = lhs.span.with_lo(lhs.span.hi())
+ && space_snippet.contains('\n')
+ && indentation(cx, op.span) <= indentation(cx, lhs.span)
+ {
+ span_lint_and_note(
+ cx,
+ POSSIBLE_MISSING_COMMA,
+ lint_span,
+ "possibly missing a comma here",
+ None,
+ "to remove this lint, add a comma or write the expr in a single line",
+ );
}
}
}
}
fn check_missing_else(cx: &EarlyContext<'_>, first: &Expr, second: &Expr) {
- if_chain! {
- if !first.span.from_expansion() && !second.span.from_expansion();
- if matches!(first.kind, ExprKind::If(..));
- if is_block(second) || is_if(second);
+ if !first.span.from_expansion() && !second.span.from_expansion()
+ && matches!(first.kind, ExprKind::If(..))
+ && (is_block(second) || is_if(second))
// Proc-macros can give weird spans. Make sure this is actually an `if`.
- if is_span_if(cx, first.span);
+ && is_span_if(cx, first.span)
// If there is a line break between the two expressions, don't lint.
// If there is a non-whitespace character, this span came from a proc-macro.
- let else_span = first.span.between(second.span);
- if let Some(else_snippet) = snippet_opt(cx, else_span);
- if !else_snippet.chars().any(|c| c == '\n' || !c.is_whitespace());
- then {
- let (looks_like, next_thing) = if is_if(second) {
- ("an `else if`", "the second `if`")
- } else {
- ("an `else {..}`", "the next block")
- };
+ && let else_span = first.span.between(second.span)
+ && let Some(else_snippet) = snippet_opt(cx, else_span)
+ && !else_snippet.chars().any(|c| c == '\n' || !c.is_whitespace())
+ {
+ let (looks_like, next_thing) = if is_if(second) {
+ ("an `else if`", "the second `if`")
+ } else {
+ ("an `else {..}`", "the next block")
+ };
- span_lint_and_note(
- cx,
- SUSPICIOUS_ELSE_FORMATTING,
- else_span,
- &format!("this looks like {looks_like} but the `else` is missing"),
- None,
- &format!(
- "to remove this lint, add the missing `else` or add a new line before {next_thing}",
- ),
- );
- }
+ span_lint_and_note(
+ cx,
+ SUSPICIOUS_ELSE_FORMATTING,
+ else_span,
+ &format!("this looks like {looks_like} but the `else` is missing"),
+ None,
+ &format!("to remove this lint, add the missing `else` or add a new line before {next_thing}",),
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs b/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs
index 69bc0b726..0599e08e6 100644
--- a/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs
+++ b/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_then;
use rustc_errors::Applicability;
use rustc_hir::Item;
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/from_over_into.rs b/src/tools/clippy/clippy_lints/src/from_over_into.rs
index 5477532bb..fa1f98ba0 100644
--- a/src/tools/clippy/clippy_lints/src/from_over_into.rs
+++ b/src/tools/clippy/clippy_lints/src/from_over_into.rs
@@ -12,7 +12,7 @@ use rustc_hir::{
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::{kw, sym};
use rustc_span::{Span, Symbol};
diff --git a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
index d9138d48b..c8d10dc4b 100644
--- a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
@@ -5,7 +5,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{RawPtr, TypeAndMut};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs b/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs
index 74a60b6a0..633ed96d6 100644
--- a/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs
+++ b/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs
@@ -2,12 +2,11 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_integer_literal;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{def, Expr, ExprKind, LangItem, PrimTy, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -46,52 +45,41 @@ declare_lint_pass!(FromStrRadix10 => [FROM_STR_RADIX_10]);
impl<'tcx> LateLintPass<'tcx> for FromStrRadix10 {
fn check_expr(&mut self, cx: &LateContext<'tcx>, exp: &Expr<'tcx>) {
- if_chain! {
- if let ExprKind::Call(maybe_path, [src, radix]) = &exp.kind;
- if let ExprKind::Path(QPath::TypeRelative(ty, pathseg)) = &maybe_path.kind;
+ if let ExprKind::Call(maybe_path, [src, radix]) = &exp.kind
+ && let ExprKind::Path(QPath::TypeRelative(ty, pathseg)) = &maybe_path.kind
// check if the first part of the path is some integer primitive
- if let TyKind::Path(ty_qpath) = &ty.kind;
- let ty_res = cx.qpath_res(ty_qpath, ty.hir_id);
- if let def::Res::PrimTy(prim_ty) = ty_res;
- if matches!(prim_ty, PrimTy::Int(_) | PrimTy::Uint(_));
+ && let TyKind::Path(ty_qpath) = &ty.kind
+ && let ty_res = cx.qpath_res(ty_qpath, ty.hir_id)
+ && let def::Res::PrimTy(prim_ty) = ty_res
+ && matches!(prim_ty, PrimTy::Int(_) | PrimTy::Uint(_))
// check if the second part of the path indeed calls the associated
// function `from_str_radix`
- if pathseg.ident.name.as_str() == "from_str_radix";
+ && pathseg.ident.name.as_str() == "from_str_radix"
// check if the second argument is a primitive `10`
- if is_integer_literal(radix, 10);
+ && is_integer_literal(radix, 10)
+ {
+ let expr = if let ExprKind::AddrOf(_, _, expr) = &src.kind {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if is_ty_stringish(cx, ty) { expr } else { &src }
+ } else {
+ &src
+ };
- then {
- let expr = if let ExprKind::AddrOf(_, _, expr) = &src.kind {
- let ty = cx.typeck_results().expr_ty(expr);
- if is_ty_stringish(cx, ty) {
- expr
- } else {
- &src
- }
- } else {
- &src
- };
+ let sugg =
+ Sugg::hir_with_applicability(cx, expr, "<string>", &mut Applicability::MachineApplicable).maybe_par();
- let sugg = Sugg::hir_with_applicability(
- cx,
- expr,
- "<string>",
- &mut Applicability::MachineApplicable
- ).maybe_par();
-
- span_lint_and_sugg(
- cx,
- FROM_STR_RADIX_10,
- exp.span,
- "this call to `from_str_radix` can be replaced with a call to `str::parse`",
- "try",
- format!("{sugg}.parse::<{}>()", prim_ty.name_str()),
- Applicability::MaybeIncorrect
- );
- }
+ span_lint_and_sugg(
+ cx,
+ FROM_STR_RADIX_10,
+ exp.span,
+ "this call to `from_str_radix` can be replaced with a call to `str::parse`",
+ "try",
+ format!("{sugg}.parse::<{}>()", prim_ty.name_str()),
+ Applicability::MaybeIncorrect,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
index ee66c841e..8fba41c0e 100644
--- a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
@@ -5,18 +5,10 @@ use rustc_hir as hir;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, GenericParam, Generics, HirId, ImplItem, ImplItemKind, TraitItem, TraitItemKind};
use rustc_lint::LateContext;
-use rustc_span::symbol::Ident;
-use rustc_span::{BytePos, Span};
use super::IMPL_TRAIT_IN_PARAMS;
-fn report(
- cx: &LateContext<'_>,
- param: &GenericParam<'_>,
- ident: &Ident,
- generics: &Generics<'_>,
- first_param_span: Span,
-) {
+fn report(cx: &LateContext<'_>, param: &GenericParam<'_>, generics: &Generics<'_>) {
// No generics with nested generics, and no generics like FnMut(x)
span_lint_and_then(
cx,
@@ -35,12 +27,7 @@ fn report(
);
} else {
diag.span_suggestion_with_style(
- Span::new(
- first_param_span.lo() - rustc_span::BytePos(1),
- ident.span.hi(),
- ident.span.ctxt(),
- ident.span.parent(),
- ),
+ generics.span,
"add a type parameter",
format!("<{{ /* Generic name */ }}: {}>", &param.name.ident().as_str()[5..]),
rustc_errors::Applicability::HasPlaceholders,
@@ -52,54 +39,47 @@ fn report(
}
pub(super) fn check_fn<'tcx>(cx: &LateContext<'_>, kind: &'tcx FnKind<'_>, body: &'tcx Body<'_>, hir_id: HirId) {
- if_chain! {
- if let FnKind::ItemFn(ident, generics, _) = kind;
- if cx.tcx.visibility(cx.tcx.hir().body_owner_def_id(body.id())).is_public();
- if !is_in_test_function(cx.tcx, hir_id);
- then {
- for param in generics.params {
- if param.is_impl_trait() {
- report(cx, param, ident, generics, body.params[0].span);
- };
- }
+ if let FnKind::ItemFn(_, generics, _) = kind
+ && cx.tcx.visibility(cx.tcx.hir().body_owner_def_id(body.id())).is_public()
+ && !is_in_test_function(cx.tcx, hir_id)
+ {
+ for param in generics.params {
+ if param.is_impl_trait() {
+ report(cx, param, generics);
+ };
}
}
}
pub(super) fn check_impl_item(cx: &LateContext<'_>, impl_item: &ImplItem<'_>) {
- if_chain! {
- if let ImplItemKind::Fn(_, body_id) = impl_item.kind;
- if let hir::Node::Item(item) = cx.tcx.hir().get_parent(impl_item.hir_id());
- if let hir::ItemKind::Impl(impl_) = item.kind;
- if let hir::Impl { of_trait, .. } = *impl_;
- if of_trait.is_none();
- let body = cx.tcx.hir().body(body_id);
- if cx.tcx.visibility(cx.tcx.hir().body_owner_def_id(body.id())).is_public();
- if !is_in_test_function(cx.tcx, impl_item.hir_id());
- then {
- for param in impl_item.generics.params {
- if param.is_impl_trait() {
- report(cx, param, &impl_item.ident, impl_item.generics, body.params[0].span);
- }
+ if let ImplItemKind::Fn(_, body_id) = impl_item.kind
+ && let hir::Node::Item(item) = cx.tcx.hir().get_parent(impl_item.hir_id())
+ && let hir::ItemKind::Impl(impl_) = item.kind
+ && let hir::Impl { of_trait, .. } = *impl_
+ && of_trait.is_none()
+ && let body = cx.tcx.hir().body(body_id)
+ && cx.tcx.visibility(cx.tcx.hir().body_owner_def_id(body.id())).is_public()
+ && !is_in_test_function(cx.tcx, impl_item.hir_id())
+ {
+ for param in impl_item.generics.params {
+ if param.is_impl_trait() {
+ report(cx, param, impl_item.generics);
}
}
}
}
pub(super) fn check_trait_item(cx: &LateContext<'_>, trait_item: &TraitItem<'_>, avoid_breaking_exported_api: bool) {
- if_chain! {
- if !avoid_breaking_exported_api;
- if let TraitItemKind::Fn(_, _) = trait_item.kind;
- if let hir::Node::Item(item) = cx.tcx.hir().get_parent(trait_item.hir_id());
+ if !avoid_breaking_exported_api
+ && let TraitItemKind::Fn(_, _) = trait_item.kind
+ && let hir::Node::Item(item) = cx.tcx.hir().get_parent(trait_item.hir_id())
// ^^ (Will always be a trait)
- if !item.vis_span.is_empty(); // Is public
- if !is_in_test_function(cx.tcx, trait_item.hir_id());
- then {
- for param in trait_item.generics.params {
- if param.is_impl_trait() {
- let sp = trait_item.ident.span.with_hi(trait_item.ident.span.hi() + BytePos(1));
- report(cx, param, &trait_item.ident, trait_item.generics, sp.shrink_to_hi());
- }
+ && !item.vis_span.is_empty() // Is public
+ && !is_in_test_function(cx.tcx, trait_item.hir_id())
+ {
+ for param in trait_item.generics.params {
+ if param.is_impl_trait() {
+ report(cx, param, trait_item.generics);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
index 18f7368da..bf96c0d62 100644
--- a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
@@ -43,15 +43,13 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body:
// Body must be &(mut) <self_data>.name
// self_data is not necessarily self, to also lint sub-getters, etc…
- let block_expr = if_chain! {
- if let ExprKind::Block(block,_) = body.value.kind;
- if block.stmts.is_empty();
- if let Some(block_expr) = block.expr;
- then {
- block_expr
- } else {
- return;
- }
+ let block_expr = if let ExprKind::Block(block, _) = body.value.kind
+ && block.stmts.is_empty()
+ && let Some(block_expr) = block.expr
+ {
+ block_expr
+ } else {
+ return;
};
let expr_span = block_expr.span;
@@ -61,14 +59,12 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body:
} else {
block_expr
};
- let (self_data, used_ident) = if_chain! {
- if let ExprKind::Field(self_data, ident) = expr.kind;
- if ident.name.as_str() != name;
- then {
- (self_data, ident)
- } else {
- return;
- }
+ let (self_data, used_ident) = if let ExprKind::Field(self_data, ident) = expr.kind
+ && ident.name.as_str() != name
+ {
+ (self_data, ident)
+ } else {
+ return;
};
let mut used_field = None;
diff --git a/src/tools/clippy/clippy_lints/src/functions/mod.rs b/src/tools/clippy/clippy_lints/src/functions/mod.rs
index 3f5cceec7..96da2ec2a 100644
--- a/src/tools/clippy/clippy_lints/src/functions/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/mod.rs
@@ -9,7 +9,7 @@ mod too_many_lines;
use rustc_hir as hir;
use rustc_hir::intravisit;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
@@ -407,7 +407,7 @@ impl<'tcx> LateLintPass<'tcx> for Functions {
span: Span,
def_id: LocalDefId,
) {
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
too_many_arguments::check_fn(cx, kind, decl, span, hir_id, self.too_many_arguments_threshold);
too_many_lines::check_fn(cx, kind, span, body, self.too_many_lines_threshold);
not_unsafe_ptr_arg_deref::check_fn(cx, kind, decl, body, def_id);
diff --git a/src/tools/clippy/clippy_lints/src/functions/result.rs b/src/tools/clippy/clippy_lints/src/functions/result.rs
index 485235514..f1200c2ed 100644
--- a/src/tools/clippy/clippy_lints/src/functions/result.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/result.rs
@@ -23,7 +23,7 @@ fn result_err_ty<'tcx>(
&& let hir::FnRetTy::Return(hir_ty) = decl.output
&& let ty = cx
.tcx
- .erase_late_bound_regions(cx.tcx.fn_sig(id).instantiate_identity().output())
+ .instantiate_bound_regions_with_erased(cx.tcx.fn_sig(id).instantiate_identity().output())
&& is_type_diagnostic_item(cx, ty, sym::Result)
&& let ty::Adt(_, args) = ty.kind()
{
@@ -86,59 +86,60 @@ fn check_result_unit_err(cx: &LateContext<'_>, err_ty: Ty<'_>, fn_header_span: S
}
fn check_result_large_err<'tcx>(cx: &LateContext<'tcx>, err_ty: Ty<'tcx>, hir_ty_span: Span, large_err_threshold: u64) {
- if_chain! {
- if let Adt(adt, subst) = err_ty.kind();
- if let Some(local_def_id) = err_ty.ty_adt_def().expect("already checked this is adt").did().as_local();
- if let Some(hir::Node::Item(item)) = cx
- .tcx
- .hir()
- .find_by_def_id(local_def_id);
- if let hir::ItemKind::Enum(ref def, _) = item.kind;
- then {
- let variants_size = AdtVariantInfo::new(cx, *adt, subst);
- if let Some((first_variant, variants)) = variants_size.split_first()
- && first_variant.size >= large_err_threshold
- {
- span_lint_and_then(
- cx,
- RESULT_LARGE_ERR,
- hir_ty_span,
- "the `Err`-variant returned from this function is very large",
- |diag| {
- diag.span_label(
- def.variants[first_variant.ind].span,
- format!("the largest variant contains at least {} bytes", variants_size[0].size),
- );
+ if let Adt(adt, subst) = err_ty.kind()
+ && let Some(local_def_id) = err_ty
+ .ty_adt_def()
+ .expect("already checked this is adt")
+ .did()
+ .as_local()
+ && let Some(hir::Node::Item(item)) = cx.tcx.opt_hir_node_by_def_id(local_def_id)
+ && let hir::ItemKind::Enum(ref def, _) = item.kind
+ {
+ let variants_size = AdtVariantInfo::new(cx, *adt, subst);
+ if let Some((first_variant, variants)) = variants_size.split_first()
+ && first_variant.size >= large_err_threshold
+ {
+ span_lint_and_then(
+ cx,
+ RESULT_LARGE_ERR,
+ hir_ty_span,
+ "the `Err`-variant returned from this function is very large",
+ |diag| {
+ diag.span_label(
+ def.variants[first_variant.ind].span,
+ format!("the largest variant contains at least {} bytes", variants_size[0].size),
+ );
- for variant in variants {
- if variant.size >= large_err_threshold {
- let variant_def = &def.variants[variant.ind];
- diag.span_label(
- variant_def.span,
- format!("the variant `{}` contains at least {} bytes", variant_def.ident, variant.size),
- );
- }
+ for variant in variants {
+ if variant.size >= large_err_threshold {
+ let variant_def = &def.variants[variant.ind];
+ diag.span_label(
+ variant_def.span,
+ format!(
+ "the variant `{}` contains at least {} bytes",
+ variant_def.ident, variant.size
+ ),
+ );
}
-
- diag.help(format!("try reducing the size of `{err_ty}`, for example by boxing large elements or replacing it with `Box<{err_ty}>`"));
}
- );
- }
+
+ diag.help(format!("try reducing the size of `{err_ty}`, for example by boxing large elements or replacing it with `Box<{err_ty}>`"));
+ },
+ );
}
- else {
- let ty_size = approx_ty_size(cx, err_ty);
- if ty_size >= large_err_threshold {
- span_lint_and_then(
- cx,
- RESULT_LARGE_ERR,
- hir_ty_span,
- "the `Err`-variant returned from this function is very large",
- |diag: &mut Diagnostic| {
- diag.span_label(hir_ty_span, format!("the `Err`-variant is at least {ty_size} bytes"));
- diag.help(format!("try reducing the size of `{err_ty}`, for example by boxing large elements or replacing it with `Box<{err_ty}>`"));
- },
- );
- }
+ } else {
+ let ty_size = approx_ty_size(cx, err_ty);
+ if ty_size >= large_err_threshold {
+ span_lint_and_then(
+ cx,
+ RESULT_LARGE_ERR,
+ hir_ty_span,
+ "the `Err`-variant returned from this function is very large",
+ |diag: &mut Diagnostic| {
+ diag.span_label(hir_ty_span, format!("the `Err`-variant is at least {ty_size} bytes"));
+ diag.help(format!("try reducing the size of `{err_ty}`, for example by boxing large elements or replacing it with `Box<{err_ty}>`"));
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/future_not_send.rs b/src/tools/clippy/clippy_lints/src/future_not_send.rs
index eee5b7540..9fb59a320 100644
--- a/src/tools/clippy/clippy_lints/src/future_not_send.rs
+++ b/src/tools/clippy/clippy_lints/src/future_not_send.rs
@@ -5,7 +5,7 @@ use rustc_hir::{Body, FnDecl};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, AliasTy, ClauseKind, PredicateKind};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt;
@@ -62,7 +62,7 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend {
if let FnKind::Closure = kind {
return;
}
- let ret_ty = return_ty(cx, cx.tcx.hir().local_def_id_to_hir_id(fn_def_id).expect_owner());
+ let ret_ty = return_ty(cx, cx.tcx.local_def_id_to_hir_id(fn_def_id).expect_owner());
if let ty::Alias(ty::Opaque, AliasTy { def_id, args, .. }) = *ret_ty.kind() {
let preds = cx.tcx.explicit_item_bounds(def_id);
let mut is_future = false;
diff --git a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
index e614a8f69..5e354209c 100644
--- a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
+++ b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{higher, SpanlessEq};
-use if_chain::if_chain;
use rustc_errors::Diagnostic;
use rustc_hir::intravisit::{self as visit, Visitor};
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -127,15 +126,13 @@ impl<'tcx, 'l> ArmVisitor<'tcx, 'l> {
}
fn is_mutex_lock_call<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
- if_chain! {
- if let ExprKind::MethodCall(path, self_arg, ..) = &expr.kind;
- if path.ident.as_str() == "lock";
- let ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
- if is_type_diagnostic_item(cx, ty, sym::Mutex);
- then {
- Some(self_arg)
- } else {
- None
- }
+ if let ExprKind::MethodCall(path, self_arg, ..) = &expr.kind
+ && path.ident.as_str() == "lock"
+ && let ty = cx.typeck_results().expr_ty(self_arg).peel_refs()
+ && is_type_diagnostic_item(cx, ty, sym::Mutex)
+ {
+ Some(self_arg)
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/if_not_else.rs b/src/tools/clippy/clippy_lints/src/if_not_else.rs
index cae561f78..4dc1ff837 100644
--- a/src/tools/clippy/clippy_lints/src/if_not_else.rs
+++ b/src/tools/clippy/clippy_lints/src/if_not_else.rs
@@ -6,7 +6,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::is_else_clause;
use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
index 66c10ab22..cd6c46a71 100644
--- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
+++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
@@ -9,7 +9,7 @@ use rustc_hir::LangItem::{OptionNone, OptionSome};
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs b/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs
index 76bdfb94e..0a2fd0c66 100644
--- a/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs
+++ b/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs
@@ -3,7 +3,7 @@ use hir::{Node, PatKind};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/impl_hash_with_borrow_str_and_bytes.rs b/src/tools/clippy/clippy_lints/src/impl_hash_with_borrow_str_and_bytes.rs
new file mode 100644
index 000000000..940adbae4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/impl_hash_with_borrow_str_and_bytes.rs
@@ -0,0 +1,106 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::implements_trait;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Item, ItemKind, Path, TraitRef};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::Ty;
+use rustc_session::declare_lint_pass;
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// This lint is concerned with the semantics of `Borrow` and `Hash` for a
+ /// type that implements all three of `Hash`, `Borrow<str>` and `Borrow<[u8]>`
+ /// as it is impossible to satisfy the semantics of Borrow and `Hash` for
+ /// both `Borrow<str>` and `Borrow<[u8]>`.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// When providing implementations for `Borrow<T>`, one should consider whether the different
+ /// implementations should act as facets or representations of the underlying type. Generic code
+ /// typically uses `Borrow<T>` when it relies on the identical behavior of these additional trait
+ /// implementations. These traits will likely appear as additional trait bounds.
+ ///
+ /// In particular `Eq`, `Ord` and `Hash` must be equivalent for borrowed and owned values:
+ /// `x.borrow() == y.borrow()` should give the same result as `x == y`.
+ /// It follows then that the following equivalence must hold:
+ /// `hash(x) == hash((x as Borrow<[u8]>).borrow()) == hash((x as Borrow<str>).borrow())`
+ ///
+ /// Unfortunately it doesn't hold as `hash("abc") != hash("abc".as_bytes())`.
+ /// This happens because the `Hash` impl for str passes an additional `0xFF` byte to
+ /// the hasher to avoid collisions. For example, given the tuples `("a", "bc")`, and `("ab", "c")`,
+ /// the two tuples would have the same hash value if the `0xFF` byte was not added.
+ ///
+ /// ### Example
+ ///
+ /// ```
+ /// use std::borrow::Borrow;
+ /// use std::hash::{Hash, Hasher};
+ ///
+ /// struct ExampleType {
+ /// data: String
+ /// }
+ ///
+ /// impl Hash for ExampleType {
+ /// fn hash<H: Hasher>(&self, state: &mut H) {
+ /// self.data.hash(state);
+ /// }
+ /// }
+ ///
+ /// impl Borrow<str> for ExampleType {
+ /// fn borrow(&self) -> &str {
+ /// &self.data
+ /// }
+ /// }
+ ///
+ /// impl Borrow<[u8]> for ExampleType {
+ /// fn borrow(&self) -> &[u8] {
+ /// self.data.as_bytes()
+ /// }
+ /// }
+ /// ```
+ /// As a consequence, hashing a `&ExampleType` and hashing the result of the two
+ /// borrows will result in different values.
+ ///
+ #[clippy::version = "1.76.0"]
+ pub IMPL_HASH_BORROW_WITH_STR_AND_BYTES,
+ correctness,
+ "ensures that the semantics of `Borrow` for `Hash` are satisfied when `Borrow<str>` and `Borrow<[u8]>` are implemented"
+}
+
+declare_lint_pass!(ImplHashWithBorrowStrBytes => [IMPL_HASH_BORROW_WITH_STR_AND_BYTES]);
+
+impl LateLintPass<'_> for ImplHashWithBorrowStrBytes {
+ /// We are emitting this lint at the Hash impl of a type that implements all
+ /// three of `Hash`, `Borrow<str>` and `Borrow<[u8]>`.
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if let ItemKind::Impl(imp) = item.kind
+ && let Some(TraitRef {path: Path {span, res, ..}, ..}) = imp.of_trait
+ && let ty = cx.tcx.type_of(item.owner_id).instantiate_identity()
+ && let Some(hash_id) = cx.tcx.get_diagnostic_item(sym::Hash)
+ && Res::Def(DefKind::Trait, hash_id) == *res
+ && let Some(borrow_id) = cx.tcx.get_diagnostic_item(sym::Borrow)
+ // since we are in the `Hash` impl, we don't need to check for that.
+ // we need only to check for `Borrow<str>` and `Borrow<[u8]>`
+ && implements_trait(cx, ty, borrow_id, &[cx.tcx.types.str_.into()])
+ && implements_trait(cx, ty, borrow_id, &[Ty::new_slice(cx.tcx, cx.tcx.types.u8).into()])
+ {
+ span_lint_and_then(
+ cx,
+ IMPL_HASH_BORROW_WITH_STR_AND_BYTES,
+ *span,
+ "the semantics of `Borrow<T>` around `Hash` can't be satisfied when both `Borrow<str>` and `Borrow<[u8]>` are implemented",
+ |diag| {
+ diag.note("the `Borrow` semantics require that `Hash` must behave the same for all implementations of Borrow<T>");
+ diag.note(
+ "however, the hash implementations of a string (`str`) and the bytes of a string `[u8]` do not behave the same ..."
+ );
+ diag.note("... as (`hash(\"abc\") != hash(\"abc\".as_bytes())`");
+ diag.help("consider either removing one of the `Borrow` implementations (`Borrow<str>` or `Borrow<[u8]>`) ...");
+ diag.help("... or not implementing `Hash` for this type");
+ },
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/implicit_hasher.rs b/src/tools/clippy/clippy_lints/src/implicit_hasher.rs
index eaf80de38..43eb6a9b8 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_hasher.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_hasher.rs
@@ -9,11 +9,9 @@ use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{Ty, TypeckResults};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::Span;
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
-
-use if_chain::if_chain;
+use rustc_span::Span;
use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::source::{snippet, snippet_opt};
@@ -337,42 +335,38 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'a, 'b, 't
}
fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(fun, args) = e.kind;
- if let ExprKind::Path(QPath::TypeRelative(ty, method)) = fun.kind;
- if let TyKind::Path(QPath::Resolved(None, ty_path)) = ty.kind;
- if let Some(ty_did) = ty_path.res.opt_def_id();
- then {
- if self.target.ty() != self.maybe_typeck_results.unwrap().expr_ty(e) {
- return;
- }
+ if let ExprKind::Call(fun, args) = e.kind
+ && let ExprKind::Path(QPath::TypeRelative(ty, method)) = fun.kind
+ && let TyKind::Path(QPath::Resolved(None, ty_path)) = ty.kind
+ && let Some(ty_did) = ty_path.res.opt_def_id()
+ {
+ if self.target.ty() != self.maybe_typeck_results.unwrap().expr_ty(e) {
+ return;
+ }
- if self.cx.tcx.is_diagnostic_item(sym::HashMap, ty_did) {
- if method.ident.name == sym::new {
- self.suggestions
- .insert(e.span, "HashMap::default()".to_string());
- } else if method.ident.name == sym!(with_capacity) {
- self.suggestions.insert(
- e.span,
- format!(
- "HashMap::with_capacity_and_hasher({}, Default::default())",
- snippet(self.cx, args[0].span, "capacity"),
- ),
- );
- }
- } else if self.cx.tcx.is_diagnostic_item(sym::HashSet, ty_did) {
- if method.ident.name == sym::new {
- self.suggestions
- .insert(e.span, "HashSet::default()".to_string());
- } else if method.ident.name == sym!(with_capacity) {
- self.suggestions.insert(
- e.span,
- format!(
- "HashSet::with_capacity_and_hasher({}, Default::default())",
- snippet(self.cx, args[0].span, "capacity"),
- ),
- );
- }
+ if self.cx.tcx.is_diagnostic_item(sym::HashMap, ty_did) {
+ if method.ident.name == sym::new {
+ self.suggestions.insert(e.span, "HashMap::default()".to_string());
+ } else if method.ident.name == sym!(with_capacity) {
+ self.suggestions.insert(
+ e.span,
+ format!(
+ "HashMap::with_capacity_and_hasher({}, Default::default())",
+ snippet(self.cx, args[0].span, "capacity"),
+ ),
+ );
+ }
+ } else if self.cx.tcx.is_diagnostic_item(sym::HashSet, ty_did) {
+ if method.ident.name == sym::new {
+ self.suggestions.insert(e.span, "HashSet::default()".to_string());
+ } else if method.ident.name == sym!(with_capacity) {
+ self.suggestions.insert(
+ e.span,
+ format!(
+ "HashSet::with_capacity_and_hasher({}, Default::default())",
+ snippet(self.cx, args[0].span, "capacity"),
+ ),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/implicit_return.rs b/src/tools/clippy/clippy_lints/src/implicit_return.rs
index c6bcf3ba4..d68c5c4ba 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_return.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_return.rs
@@ -8,7 +8,7 @@ use rustc_hir::intravisit::FnKind;
use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, FnRetTy, HirId};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{Span, SyntaxContext};
diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs
index 24f62490f..cc74844f2 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs
@@ -2,13 +2,12 @@ use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::get_parent_expr;
use clippy_utils::source::snippet_with_context;
-use if_chain::if_chain;
use rustc_ast::ast::{LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{Int, IntTy, Ty, Uint, UintTy};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -40,42 +39,58 @@ declare_lint_pass!(ImplicitSaturatingAdd => [IMPLICIT_SATURATING_ADD]);
impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingAdd {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
- if_chain! {
- if let ExprKind::If(cond, then, None) = expr.kind;
- if let ExprKind::DropTemps(expr1) = cond.kind;
- if let Some((c, op_node, l)) = get_const(cx, expr1);
- if let BinOpKind::Ne | BinOpKind::Lt = op_node;
- if let ExprKind::Block(block, None) = then.kind;
- if let Block {
+ if let ExprKind::If(cond, then, None) = expr.kind
+ && let ExprKind::DropTemps(expr1) = cond.kind
+ && let Some((c, op_node, l)) = get_const(cx, expr1)
+ && let BinOpKind::Ne | BinOpKind::Lt = op_node
+ && let ExprKind::Block(block, None) = then.kind
+ && let Block {
stmts:
- [Stmt
- { kind: StmtKind::Expr(ex) | StmtKind::Semi(ex), .. }],
- expr: None, ..} |
- Block { stmts: [], expr: Some(ex), ..} = block;
- if let ExprKind::AssignOp(op1, target, value) = ex.kind;
- let ty = cx.typeck_results().expr_ty(target);
- if Some(c) == get_int_max(ty);
- let ctxt = expr.span.ctxt();
- if ex.span.ctxt() == ctxt;
- if expr1.span.ctxt() == ctxt;
- if clippy_utils::SpanlessEq::new(cx).eq_expr(l, target);
- if BinOpKind::Add == op1.node;
- if let ExprKind::Lit(lit) = value.kind;
- if let LitKind::Int(1, LitIntType::Unsuffixed) = lit.node;
- if block.expr.is_none();
- then {
- let mut app = Applicability::MachineApplicable;
- let code = snippet_with_context(cx, target.span, ctxt, "_", &mut app).0;
- let sugg = if let Some(parent) = get_parent_expr(cx, expr)
- && let ExprKind::If(_cond, _then, Some(else_)) = parent.kind
- && else_.hir_id == expr.hir_id
- {
- format!("{{{code} = {code}.saturating_add(1); }}")
- } else {
- format!("{code} = {code}.saturating_add(1);")
- };
- span_lint_and_sugg(cx, IMPLICIT_SATURATING_ADD, expr.span, "manual saturating add detected", "use instead", sugg, app);
+ [
+ Stmt {
+ kind: StmtKind::Expr(ex) | StmtKind::Semi(ex),
+ ..
+ },
+ ],
+ expr: None,
+ ..
}
+ | Block {
+ stmts: [],
+ expr: Some(ex),
+ ..
+ } = block
+ && let ExprKind::AssignOp(op1, target, value) = ex.kind
+ && let ty = cx.typeck_results().expr_ty(target)
+ && Some(c) == get_int_max(ty)
+ && let ctxt = expr.span.ctxt()
+ && ex.span.ctxt() == ctxt
+ && expr1.span.ctxt() == ctxt
+ && clippy_utils::SpanlessEq::new(cx).eq_expr(l, target)
+ && BinOpKind::Add == op1.node
+ && let ExprKind::Lit(lit) = value.kind
+ && let LitKind::Int(1, LitIntType::Unsuffixed) = lit.node
+ && block.expr.is_none()
+ {
+ let mut app = Applicability::MachineApplicable;
+ let code = snippet_with_context(cx, target.span, ctxt, "_", &mut app).0;
+ let sugg = if let Some(parent) = get_parent_expr(cx, expr)
+ && let ExprKind::If(_cond, _then, Some(else_)) = parent.kind
+ && else_.hir_id == expr.hir_id
+ {
+ format!("{{{code} = {code}.saturating_add(1); }}")
+ } else {
+ format!("{code} = {code}.saturating_add(1);")
+ };
+ span_lint_and_sugg(
+ cx,
+ IMPLICIT_SATURATING_ADD,
+ expr.span,
+ "manual saturating add detected",
+ "use instead",
+ sugg,
+ app,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
index 859404289..81df1a889 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::{higher, is_integer_literal, peel_blocks_with_stmt, SpanlessEq};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -46,83 +45,76 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingSub {
if expr.span.from_expansion() {
return;
}
- if_chain! {
- if let Some(higher::If { cond, then, r#else: None }) = higher::If::hir(expr);
+ if let Some(higher::If { cond, then, r#else: None }) = higher::If::hir(expr)
// Check if the conditional expression is a binary operation
- if let ExprKind::Binary(ref cond_op, cond_left, cond_right) = cond.kind;
+ && let ExprKind::Binary(ref cond_op, cond_left, cond_right) = cond.kind
// Ensure that the binary operator is >, !=, or <
- if BinOpKind::Ne == cond_op.node || BinOpKind::Gt == cond_op.node || BinOpKind::Lt == cond_op.node;
+ && (BinOpKind::Ne == cond_op.node || BinOpKind::Gt == cond_op.node || BinOpKind::Lt == cond_op.node)
// Check if assign operation is done
- if let Some(target) = subtracts_one(cx, then);
+ && let Some(target) = subtracts_one(cx, then)
// Extracting out the variable name
- if let ExprKind::Path(QPath::Resolved(_, ares_path)) = target.kind;
-
- then {
- // Handle symmetric conditions in the if statement
- let (cond_var, cond_num_val) = if SpanlessEq::new(cx).eq_expr(cond_left, target) {
- if BinOpKind::Gt == cond_op.node || BinOpKind::Ne == cond_op.node {
- (cond_left, cond_right)
- } else {
- return;
- }
- } else if SpanlessEq::new(cx).eq_expr(cond_right, target) {
- if BinOpKind::Lt == cond_op.node || BinOpKind::Ne == cond_op.node {
- (cond_right, cond_left)
- } else {
- return;
- }
+ && let ExprKind::Path(QPath::Resolved(_, ares_path)) = target.kind
+ {
+ // Handle symmetric conditions in the if statement
+ let (cond_var, cond_num_val) = if SpanlessEq::new(cx).eq_expr(cond_left, target) {
+ if BinOpKind::Gt == cond_op.node || BinOpKind::Ne == cond_op.node {
+ (cond_left, cond_right)
} else {
return;
- };
-
- // Check if the variable in the condition statement is an integer
- if !cx.typeck_results().expr_ty(cond_var).is_integral() {
+ }
+ } else if SpanlessEq::new(cx).eq_expr(cond_right, target) {
+ if BinOpKind::Lt == cond_op.node || BinOpKind::Ne == cond_op.node {
+ (cond_right, cond_left)
+ } else {
return;
}
+ } else {
+ return;
+ };
- // Get the variable name
- let var_name = ares_path.segments[0].ident.name.as_str();
- match cond_num_val.kind {
- ExprKind::Lit(cond_lit) => {
- // Check if the constant is zero
- if let LitKind::Int(0, _) = cond_lit.node {
- if cx.typeck_results().expr_ty(cond_left).is_signed() {
- } else {
- print_lint_and_sugg(cx, var_name, expr);
- };
- }
- },
- ExprKind::Path(QPath::TypeRelative(_, name)) => {
- if_chain! {
- if name.ident.as_str() == "MIN";
- if let Some(const_id) = cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(const_id);
- if let None = cx.tcx.impl_trait_ref(impl_id); // An inherent impl
- if cx.tcx.type_of(impl_id).instantiate_identity().is_integral();
- then {
- print_lint_and_sugg(cx, var_name, expr)
- }
- }
- },
- ExprKind::Call(func, []) => {
- if_chain! {
- if let ExprKind::Path(QPath::TypeRelative(_, name)) = func.kind;
- if name.ident.as_str() == "min_value";
- if let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(func_id);
- if let None = cx.tcx.impl_trait_ref(impl_id); // An inherent impl
- if cx.tcx.type_of(impl_id).instantiate_identity().is_integral();
- then {
- print_lint_and_sugg(cx, var_name, expr)
- }
- }
- },
- _ => (),
- }
+ // Check if the variable in the condition statement is an integer
+ if !cx.typeck_results().expr_ty(cond_var).is_integral() {
+ return;
+ }
+
+ // Get the variable name
+ let var_name = ares_path.segments[0].ident.name.as_str();
+ match cond_num_val.kind {
+ ExprKind::Lit(cond_lit) => {
+ // Check if the constant is zero
+ if let LitKind::Int(0, _) = cond_lit.node {
+ if cx.typeck_results().expr_ty(cond_left).is_signed() {
+ } else {
+ print_lint_and_sugg(cx, var_name, expr);
+ };
+ }
+ },
+ ExprKind::Path(QPath::TypeRelative(_, name)) => {
+ if name.ident.as_str() == "MIN"
+ && let Some(const_id) = cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(const_id)
+ && let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl
+ && cx.tcx.type_of(impl_id).instantiate_identity().is_integral()
+ {
+ print_lint_and_sugg(cx, var_name, expr);
+ }
+ },
+ ExprKind::Call(func, []) => {
+ if let ExprKind::Path(QPath::TypeRelative(_, name)) = func.kind
+ && name.ident.as_str() == "min_value"
+ && let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(func_id)
+ && let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl
+ && cx.tcx.type_of(impl_id).instantiate_identity().is_integral()
+ {
+ print_lint_and_sugg(cx, var_name, expr);
+ }
+ },
+ _ => (),
}
}
}
@@ -135,18 +127,14 @@ fn subtracts_one<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<&'a Exp
(BinOpKind::Sub == op1.node && is_integer_literal(value, 1)).then_some(target)
},
ExprKind::Assign(target, value, _) => {
- if_chain! {
- if let ExprKind::Binary(ref op1, left1, right1) = value.kind;
- if BinOpKind::Sub == op1.node;
-
- if SpanlessEq::new(cx).eq_expr(left1, target);
-
- if is_integer_literal(right1, 1);
- then {
- Some(target)
- } else {
- None
- }
+ if let ExprKind::Binary(ref op1, left1, right1) = value.kind
+ && BinOpKind::Sub == op1.node
+ && SpanlessEq::new(cx).eq_expr(left1, target)
+ && is_integer_literal(right1, 1)
+ {
+ Some(target)
+ } else {
+ None
}
},
_ => None,
diff --git a/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs b/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs
index ff27a5d66..0f5a9ea5d 100644
--- a/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs
+++ b/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs
@@ -10,7 +10,7 @@ use rustc_hir::{
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, ClauseKind, Generics, Ty, TyCtxt};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -43,9 +43,9 @@ declare_clippy_lint! {
/// Box::new(123)
/// }
/// ```
- #[clippy::version = "1.73.0"]
+ #[clippy::version = "1.74.0"]
pub IMPLIED_BOUNDS_IN_IMPLS,
- nursery,
+ complexity,
"specifying bounds that are implied by other bounds in `impl Trait` type"
}
declare_lint_pass!(ImpliedBoundsInImpls => [IMPLIED_BOUNDS_IN_IMPLS]);
@@ -194,6 +194,15 @@ fn is_same_generics<'tcx>(
.enumerate()
.skip(1) // skip `Self` implicit arg
.all(|(arg_index, arg)| {
+ if [
+ implied_by_generics.host_effect_index,
+ implied_generics.host_effect_index,
+ ]
+ .contains(&Some(arg_index))
+ {
+ // skip host effect params in determining whether generics are same
+ return true;
+ }
if let Some(ty) = arg.as_type() {
if let &ty::Param(ty::ParamTy { index, .. }) = ty.kind()
// `index == 0` means that it's referring to `Self`,
diff --git a/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs b/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs
index a84f7351a..1075975f0 100644
--- a/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs
+++ b/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::Applicability;
use rustc_hir::{self as hir, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Symbol;
use std::fmt::{self, Write as _};
@@ -66,54 +65,53 @@ declare_lint_pass!(InconsistentStructConstructor => [INCONSISTENT_STRUCT_CONSTRU
impl<'tcx> LateLintPass<'tcx> for InconsistentStructConstructor {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
- if_chain! {
- if !expr.span.from_expansion();
- if let ExprKind::Struct(qpath, fields, base) = expr.kind;
- let ty = cx.typeck_results().expr_ty(expr);
- if let Some(adt_def) = ty.ty_adt_def();
- if adt_def.is_struct();
- if let Some(variant) = adt_def.variants().iter().next();
- if fields.iter().all(|f| f.is_shorthand);
- then {
- let mut def_order_map = FxHashMap::default();
- for (idx, field) in variant.fields.iter().enumerate() {
- def_order_map.insert(field.name, idx);
- }
+ if !expr.span.from_expansion()
+ && let ExprKind::Struct(qpath, fields, base) = expr.kind
+ && let ty = cx.typeck_results().expr_ty(expr)
+ && let Some(adt_def) = ty.ty_adt_def()
+ && adt_def.is_struct()
+ && let Some(variant) = adt_def.variants().iter().next()
+ && fields.iter().all(|f| f.is_shorthand)
+ {
+ let mut def_order_map = FxHashMap::default();
+ for (idx, field) in variant.fields.iter().enumerate() {
+ def_order_map.insert(field.name, idx);
+ }
- if is_consistent_order(fields, &def_order_map) {
- return;
- }
+ if is_consistent_order(fields, &def_order_map) {
+ return;
+ }
- let mut ordered_fields: Vec<_> = fields.iter().map(|f| f.ident.name).collect();
- ordered_fields.sort_unstable_by_key(|id| def_order_map[id]);
+ let mut ordered_fields: Vec<_> = fields.iter().map(|f| f.ident.name).collect();
+ ordered_fields.sort_unstable_by_key(|id| def_order_map[id]);
- let mut fields_snippet = String::new();
- let (last_ident, idents) = ordered_fields.split_last().unwrap();
- for ident in idents {
- let _: fmt::Result = write!(fields_snippet, "{ident}, ");
- }
- fields_snippet.push_str(&last_ident.to_string());
+ let mut fields_snippet = String::new();
+ let (last_ident, idents) = ordered_fields.split_last().unwrap();
+ for ident in idents {
+ let _: fmt::Result = write!(fields_snippet, "{ident}, ");
+ }
+ fields_snippet.push_str(&last_ident.to_string());
- let base_snippet = if let Some(base) = base {
- format!(", ..{}", snippet(cx, base.span, ".."))
- } else {
- String::new()
- };
+ let base_snippet = if let Some(base) = base {
+ format!(", ..{}", snippet(cx, base.span, ".."))
+ } else {
+ String::new()
+ };
- let sugg = format!("{} {{ {fields_snippet}{base_snippet} }}",
- snippet(cx, qpath.span(), ".."),
- );
+ let sugg = format!(
+ "{} {{ {fields_snippet}{base_snippet} }}",
+ snippet(cx, qpath.span(), ".."),
+ );
- span_lint_and_sugg(
- cx,
- INCONSISTENT_STRUCT_CONSTRUCTOR,
- expr.span,
- "struct constructor field order is inconsistent with struct definition field order",
- "try",
- sugg,
- Applicability::MachineApplicable,
- )
- }
+ span_lint_and_sugg(
+ cx,
+ INCONSISTENT_STRUCT_CONSTRUCTOR,
+ expr.span,
+ "struct constructor field order is inconsistent with struct definition field order",
+ "try",
+ sugg,
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
index c2f1f18e3..5417c13d0 100644
--- a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
+++ b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
@@ -4,7 +4,6 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher::IfLet;
use clippy_utils::ty::is_copy;
use clippy_utils::{is_expn_of, is_lint_allowed, path_to_local};
-use if_chain::if_chain;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -12,7 +11,7 @@ use rustc_hir::intravisit::{self, Visitor};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::Ident;
use rustc_span::Span;
@@ -70,20 +69,17 @@ impl_lint_pass!(IndexRefutableSlice => [INDEX_REFUTABLE_SLICE]);
impl<'tcx> LateLintPass<'tcx> for IndexRefutableSlice {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
- if_chain! {
- if !expr.span.from_expansion() || is_expn_of(expr.span, "if_chain").is_some();
- if let Some(IfLet {let_pat, if_then, ..}) = IfLet::hir(cx, expr);
- if !is_lint_allowed(cx, INDEX_REFUTABLE_SLICE, expr.hir_id);
- if self.msrv.meets(msrvs::SLICE_PATTERNS);
-
- let found_slices = find_slice_values(cx, let_pat);
- if !found_slices.is_empty();
- let filtered_slices = filter_lintable_slices(cx, found_slices, self.max_suggested_slice, if_then);
- if !filtered_slices.is_empty();
- then {
- for slice in filtered_slices.values() {
- lint_slice(cx, slice);
- }
+ if (!expr.span.from_expansion() || is_expn_of(expr.span, "if_chain").is_some())
+ && let Some(IfLet { let_pat, if_then, .. }) = IfLet::hir(cx, expr)
+ && !is_lint_allowed(cx, INDEX_REFUTABLE_SLICE, expr.hir_id)
+ && self.msrv.meets(msrvs::SLICE_PATTERNS)
+ && let found_slices = find_slice_values(cx, let_pat)
+ && !found_slices.is_empty()
+ && let filtered_slices = filter_lintable_slices(cx, found_slices, self.max_suggested_slice, if_then)
+ && !filtered_slices.is_empty()
+ {
+ for slice in filtered_slices.values() {
+ lint_slice(cx, slice);
}
}
}
@@ -245,28 +241,26 @@ impl<'a, 'tcx> Visitor<'tcx> for SliceIndexLintingVisitor<'a, 'tcx> {
max_suggested_slice,
} = *self;
- if_chain! {
+ if let Some(use_info) = slice_lint_info.get_mut(&local_id)
// Check if this is even a local we're interested in
- if let Some(use_info) = slice_lint_info.get_mut(&local_id);
- let map = cx.tcx.hir();
+ && let map = cx.tcx.hir()
// Checking for slice indexing
- let parent_id = map.parent_id(expr.hir_id);
- if let Some(hir::Node::Expr(parent_expr)) = map.find(parent_id);
- if let hir::ExprKind::Index(_, index_expr, _) = parent_expr.kind;
- if let Some(Constant::Int(index_value)) = constant(cx, cx.typeck_results(), index_expr);
- if let Ok(index_value) = index_value.try_into();
- if index_value < max_suggested_slice;
+ && let parent_id = map.parent_id(expr.hir_id)
+ && let Some(hir::Node::Expr(parent_expr)) = cx.tcx.opt_hir_node(parent_id)
+ && let hir::ExprKind::Index(_, index_expr, _) = parent_expr.kind
+ && let Some(Constant::Int(index_value)) = constant(cx, cx.typeck_results(), index_expr)
+ && let Ok(index_value) = index_value.try_into()
+ && index_value < max_suggested_slice
// Make sure that this slice index is read only
- let maybe_addrof_id = map.parent_id(parent_id);
- if let Some(hir::Node::Expr(maybe_addrof_expr)) = map.find(maybe_addrof_id);
- if let hir::ExprKind::AddrOf(_kind, hir::Mutability::Not, _inner_expr) = maybe_addrof_expr.kind;
- then {
- use_info.index_use.push((index_value, map.span(parent_expr.hir_id)));
- return;
- }
+ && let maybe_addrof_id = map.parent_id(parent_id)
+ && let Some(hir::Node::Expr(maybe_addrof_expr)) = cx.tcx.opt_hir_node(maybe_addrof_id)
+ && let hir::ExprKind::AddrOf(_kind, hir::Mutability::Not, _inner_expr) = maybe_addrof_expr.kind
+ {
+ use_info.index_use.push((index_value, map.span(parent_expr.hir_id)));
+ return;
}
// The slice was used for something other than indexing
diff --git a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
index 1ce7d85d3..0ae03d101 100644
--- a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
+++ b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
@@ -7,7 +7,7 @@ use rustc_ast::ast::RangeLimits;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs b/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs
new file mode 100644
index 000000000..955f90d42
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs
@@ -0,0 +1,95 @@
+use crate::methods::method_call;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::peel_blocks;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::declare_lint_pass;
+use rustc_span::{sym, BytePos, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks if both `.write(true)` and `.append(true)` methods are called
+    /// on the same `OpenOptions`.
+ ///
+ /// ### Why is this bad?
+    /// `.append(true)` already enables `write(true)`, making this one
+    /// superfluous.
+ ///
+ /// ### Example
+ /// ```no_run
+ /// # use std::fs::OpenOptions;
+ /// let _ = OpenOptions::new()
+ /// .write(true)
+ /// .append(true)
+ /// .create(true)
+ /// .open("file.json");
+ /// ```
+ /// Use instead:
+ /// ```no_run
+ /// # use std::fs::OpenOptions;
+ /// let _ = OpenOptions::new()
+ /// .append(true)
+ /// .create(true)
+ /// .open("file.json");
+ /// ```
+ #[clippy::version = "1.76.0"]
+ pub INEFFECTIVE_OPEN_OPTIONS,
+ suspicious,
+ "usage of both `write(true)` and `append(true)` on same `OpenOptions`"
+}
+
+declare_lint_pass!(IneffectiveOpenOptions => [INEFFECTIVE_OPEN_OPTIONS]);
+
+fn index_if_arg_is_boolean(args: &[Expr<'_>], call_span: Span) -> Option<Span> {
+ if let [arg] = args
+ && let ExprKind::Lit(lit) = peel_blocks(arg).kind
+ && lit.node == LitKind::Bool(true)
+ {
+ // The `.` is not included in the span so we cheat a little bit to include it as well.
+ Some(call_span.with_lo(call_span.lo() - BytePos(1)))
+ } else {
+ None
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for IneffectiveOpenOptions {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let Some(("open", mut receiver, [_arg], _, _)) = method_call(expr) else {
+ return;
+ };
+ let receiver_ty = cx.typeck_results().expr_ty(receiver);
+ match receiver_ty.peel_refs().kind() {
+ ty::Adt(adt, _) if cx.tcx.is_diagnostic_item(sym::FsOpenOptions, adt.did()) => {},
+ _ => return,
+ }
+
+ let mut append = None;
+ let mut write = None;
+
+ while let Some((name, recv, args, _, span)) = method_call(receiver) {
+ if name == "append" {
+ append = index_if_arg_is_boolean(args, span);
+ } else if name == "write" {
+ write = index_if_arg_is_boolean(args, span);
+ }
+ receiver = recv;
+ }
+
+ if let Some(write_span) = write
+ && append.is_some()
+ {
+ span_lint_and_sugg(
+ cx,
+ INEFFECTIVE_OPEN_OPTIONS,
+ write_span,
+ "unnecessary use of `.write(true)` because there is `.append(true)`",
+ "remove `.write(true)`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/infinite_iter.rs b/src/tools/clippy/clippy_lints/src/infinite_iter.rs
index e9c53671a..9ad027358 100644
--- a/src/tools/clippy/clippy_lints/src/infinite_iter.rs
+++ b/src/tools/clippy/clippy_lints/src/infinite_iter.rs
@@ -3,7 +3,7 @@ use clippy_utils::higher;
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
use rustc_hir::{BorrowKind, Closure, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::{sym, Symbol};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/inherent_impl.rs b/src/tools/clippy/clippy_lints/src/inherent_impl.rs
index a61a64161..e4781752e 100644
--- a/src/tools/clippy/clippy_lints/src/inherent_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/inherent_impl.rs
@@ -6,7 +6,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::{Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
use std::collections::hash_map::Entry;
@@ -63,7 +63,7 @@ impl<'tcx> LateLintPass<'tcx> for MultipleInherentImpl {
&& !is_lint_allowed(
cx,
MULTIPLE_INHERENT_IMPL,
- cx.tcx.hir().local_def_id_to_hir_id(id),
+ cx.tcx.local_def_id_to_hir_id(id),
)
}) {
for impl_id in impl_ids.iter().map(|id| id.expect_local()) {
@@ -117,12 +117,12 @@ impl<'tcx> LateLintPass<'tcx> for MultipleInherentImpl {
/// Gets the span for the given impl block unless it's not being considered by the lint.
fn get_impl_span(cx: &LateContext<'_>, id: LocalDefId) -> Option<Span> {
- let id = cx.tcx.hir().local_def_id_to_hir_id(id);
+ let id = cx.tcx.local_def_id_to_hir_id(id);
if let Node::Item(&Item {
kind: ItemKind::Impl(impl_item),
span,
..
- }) = cx.tcx.hir().get(id)
+ }) = cx.tcx.hir_node(id)
{
(!span.from_expansion()
&& impl_item.generics.params.is_empty()
diff --git a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
index fe5eb5cca..ca2ac6030 100644
--- a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
+++ b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
@@ -3,7 +3,7 @@ use clippy_utils::ty::{implements_trait, is_type_lang_item};
use clippy_utils::{return_ty, trait_ref_of_method};
use rustc_hir::{GenericParamKind, ImplItem, ImplItemKind, LangItem, Unsafety};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
use rustc_target::spec::abi::Abi;
diff --git a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
index 269311a67..e48656380 100644
--- a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
+++ b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
@@ -4,7 +4,7 @@ use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::borrow::Cow;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
diff --git a/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs b/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs
index 899126565..bc236c5c7 100644
--- a/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs
+++ b/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs
@@ -6,7 +6,7 @@ use rustc_ast::ast::Attribute;
use rustc_errors::Applicability;
use rustc_hir::{TraitFn, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Symbol};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
index 32b2cb438..655f4b82a 100644
--- a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
+++ b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
@@ -6,7 +6,7 @@ use clippy_utils::ty;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::sym;
@@ -89,27 +89,17 @@ impl LateLintPass<'_> for InstantSubtraction {
rhs,
) = expr.kind
{
- if_chain! {
- if is_instant_now_call(cx, lhs);
-
- if is_an_instant(cx, rhs);
- if let Some(sugg) = Sugg::hir_opt(cx, rhs);
-
- then {
- print_manual_instant_elapsed_sugg(cx, expr, sugg)
- } else {
- if_chain! {
- if !expr.span.from_expansion();
- if self.msrv.meets(msrvs::TRY_FROM);
-
- if is_an_instant(cx, lhs);
- if is_a_duration(cx, rhs);
-
- then {
- print_unchecked_duration_subtraction_sugg(cx, lhs, rhs, expr)
- }
- }
- }
+ if is_instant_now_call(cx, lhs)
+ && is_an_instant(cx, rhs)
+ && let Some(sugg) = Sugg::hir_opt(cx, rhs)
+ {
+ print_manual_instant_elapsed_sugg(cx, expr, sugg);
+ } else if !expr.span.from_expansion()
+ && self.msrv.meets(msrvs::TRY_FROM)
+ && is_an_instant(cx, lhs)
+ && is_a_duration(cx, rhs)
+ {
+ print_unchecked_duration_subtraction_sugg(cx, lhs, rhs, expr);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/int_plus_one.rs b/src/tools/clippy/clippy_lints/src/int_plus_one.rs
index 9ffcee07d..b8e0eef7c 100644
--- a/src/tools/clippy/clippy_lints/src/int_plus_one.rs
+++ b/src/tools/clippy/clippy_lints/src/int_plus_one.rs
@@ -6,7 +6,7 @@ use rustc_ast::ast::{BinOpKind, Expr, ExprKind, LitKind};
use rustc_ast::token;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs b/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs
index de82935e6..8bcd9b532 100644
--- a/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs
+++ b/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs
@@ -2,7 +2,7 @@ use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, IntTy, UintTy};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
use clippy_utils::comparisons;
diff --git a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs
index 90048d96c..a9f1612ff 100644
--- a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs
+++ b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs
@@ -6,9 +6,9 @@ use clippy_utils::source::is_present_in_source;
use clippy_utils::str_utils::{camel_case_split, count_match_end, count_match_start, to_camel_case, to_snake_case};
use rustc_hir::{EnumDef, FieldDef, Item, ItemKind, OwnerId, Variant, VariantData};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::Span;
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::Symbol;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
@@ -436,7 +436,7 @@ impl LateLintPass<'_> for ItemNameRepetitions {
{
match item.kind {
ItemKind::Enum(def, _) => check_variant(cx, self.enum_threshold, &def, item_name, item.span),
- ItemKind::Struct(VariantData::Struct(fields, _), _) => {
+ ItemKind::Struct(VariantData::Struct { fields, .. }, _) => {
check_fields(cx, self.struct_threshold, item, fields);
},
_ => (),
diff --git a/src/tools/clippy/clippy_lints/src/items_after_statements.rs b/src/tools/clippy/clippy_lints/src/items_after_statements.rs
index 9605d76fb..39223c204 100644
--- a/src/tools/clippy/clippy_lints/src/items_after_statements.rs
+++ b/src/tools/clippy/clippy_lints/src/items_after_statements.rs
@@ -4,7 +4,7 @@ use clippy_utils::diagnostics::span_lint_hir;
use rustc_hir::{Block, ItemKind, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/items_after_test_module.rs b/src/tools/clippy/clippy_lints/src/items_after_test_module.rs
index 35e01862c..3614fb8cc 100644
--- a/src/tools/clippy/clippy_lints/src/items_after_test_module.rs
+++ b/src/tools/clippy/clippy_lints/src/items_after_test_module.rs
@@ -4,7 +4,7 @@ use clippy_utils::{fulfill_or_allowed, is_cfg_test, is_from_proc_macro};
use rustc_errors::{Applicability, SuggestionStyle};
use rustc_hir::{HirId, Item, ItemKind, Mod};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::hygiene::AstPass;
use rustc_span::{sym, ExpnKind};
diff --git a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
index 505aadd1a..b9fad7265 100644
--- a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
+++ b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
@@ -4,7 +4,7 @@ use clippy_utils::ty::implements_trait;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::{FnSig, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -71,7 +71,7 @@ fn check_sig(cx: &LateContext<'_>, name: &str, sig: &FnSig<'_>, fn_id: LocalDefI
if sig.decl.implicit_self.has_implicit_self() {
let ret_ty = cx
.tcx
- .erase_late_bound_regions(cx.tcx.fn_sig(fn_id).instantiate_identity().output());
+ .instantiate_bound_regions_with_erased(cx.tcx.fn_sig(fn_id).instantiate_identity().output());
let ret_ty = cx
.tcx
.try_normalize_erasing_regions(cx.param_env, ret_ty)
diff --git a/src/tools/clippy/clippy_lints/src/iter_over_hash_type.rs b/src/tools/clippy/clippy_lints/src/iter_over_hash_type.rs
new file mode 100644
index 000000000..8110c1970
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/iter_over_hash_type.rs
@@ -0,0 +1,78 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::higher::ForLoop;
+use clippy_utils::match_any_def_paths;
+use clippy_utils::paths::{
+ HASHMAP_DRAIN, HASHMAP_ITER, HASHMAP_ITER_MUT, HASHMAP_KEYS, HASHMAP_VALUES, HASHMAP_VALUES_MUT, HASHSET_DRAIN,
+ HASHSET_ITER_TY,
+};
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::declare_lint_pass;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This is a restriction lint which prevents the use of hash types (i.e., `HashSet` and `HashMap`) in for loops.
+ ///
+ /// ### Why is this bad?
+ /// Because hash types are unordered, when iterated through such as in a for loop, the values are returned in
+ /// an undefined order. As a result, on redundant systems this may cause inconsistencies and anomalies.
+ /// In addition, the unknown order of the elements may reduce readability or introduce other undesired
+ /// side effects.
+ ///
+ /// ### Example
+ /// ```no_run
+ /// let my_map = std::collections::HashMap::<i32, String>::new();
+ /// for (key, value) in my_map { /* ... */ }
+ /// ```
+ /// Use instead:
+ /// ```no_run
+ /// let my_map = std::collections::HashMap::<i32, String>::new();
+ /// let mut keys = my_map.keys().clone().collect::<Vec<_>>();
+ /// keys.sort();
+ /// for key in keys {
+ /// let value = &my_map[key];
+ /// }
+ /// ```
+ #[clippy::version = "1.75.0"]
+ pub ITER_OVER_HASH_TYPE,
+ restriction,
+ "iterating over unordered hash-based types (`HashMap` and `HashSet`)"
+}
+
+declare_lint_pass!(IterOverHashType => [ITER_OVER_HASH_TYPE]);
+
+impl LateLintPass<'_> for IterOverHashType {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ rustc_hir::Expr<'_>) {
+ if let Some(for_loop) = ForLoop::hir(expr)
+ && !for_loop.body.span.from_expansion()
+ && let ty = cx.typeck_results().expr_ty(for_loop.arg).peel_refs()
+ && let Some(adt) = ty.ty_adt_def()
+ && let did = adt.did()
+ && (match_any_def_paths(
+ cx,
+ did,
+ &[
+ &HASHMAP_KEYS,
+ &HASHMAP_VALUES,
+ &HASHMAP_VALUES_MUT,
+ &HASHMAP_ITER,
+ &HASHMAP_ITER_MUT,
+ &HASHMAP_DRAIN,
+ &HASHSET_ITER_TY,
+ &HASHSET_DRAIN,
+ ],
+ )
+ .is_some()
+ || is_type_diagnostic_item(cx, ty, sym::HashMap)
+ || is_type_diagnostic_item(cx, ty, sym::HashSet))
+ {
+ span_lint(
+ cx,
+ ITER_OVER_HASH_TYPE,
+ expr.span,
+ "iteration over unordered hash-based type",
+ );
+ };
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs
index 3c291f255..3a5756482 100644
--- a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs
+++ b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir::{FnRetTy, ImplItemKind, ImplicitSelfKind, ItemKind, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Symbol};
use std::iter;
diff --git a/src/tools/clippy/clippy_lints/src/large_const_arrays.rs b/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
index a4f3d4983..b561054b5 100644
--- a/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
+++ b/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, ConstKind};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{BytePos, Pos, Span};
declare_clippy_lint! {
@@ -47,43 +46,40 @@ impl_lint_pass!(LargeConstArrays => [LARGE_CONST_ARRAYS]);
impl<'tcx> LateLintPass<'tcx> for LargeConstArrays {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
- if_chain! {
- if !item.span.from_expansion();
- if let ItemKind::Const(_, generics, _) = &item.kind;
+ if !item.span.from_expansion()
+ && let ItemKind::Const(_, generics, _) = &item.kind
// Since static items may not have generics, skip generic const items.
// FIXME(generic_const_items): I don't think checking `generics.hwcp` suffices as it
// doesn't account for empty where-clauses that only consist of keyword `where` IINM.
- if generics.params.is_empty() && !generics.has_where_clause_predicates;
- let ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
- if let ty::Array(element_type, cst) = ty.kind();
- if let ConstKind::Value(ty::ValTree::Leaf(element_count)) = cst.kind();
- if let Ok(element_count) = element_count.try_to_target_usize(cx.tcx);
- if let Ok(element_size) = cx.layout_of(*element_type).map(|l| l.size.bytes());
- if self.maximum_allowed_size < u128::from(element_count) * u128::from(element_size);
-
- then {
- let hi_pos = item.ident.span.lo() - BytePos::from_usize(1);
- let sugg_span = Span::new(
- hi_pos - BytePos::from_usize("const".len()),
- hi_pos,
- item.span.ctxt(),
- item.span.parent(),
- );
- span_lint_and_then(
- cx,
- LARGE_CONST_ARRAYS,
- item.span,
- "large array defined as const",
- |diag| {
- diag.span_suggestion(
- sugg_span,
- "make this a static item",
- "static",
- Applicability::MachineApplicable,
- );
- }
- );
- }
+ && generics.params.is_empty() && !generics.has_where_clause_predicates
+ && let ty = cx.tcx.type_of(item.owner_id).instantiate_identity()
+ && let ty::Array(element_type, cst) = ty.kind()
+ && let ConstKind::Value(ty::ValTree::Leaf(element_count)) = cst.kind()
+ && let Ok(element_count) = element_count.try_to_target_usize(cx.tcx)
+ && let Ok(element_size) = cx.layout_of(*element_type).map(|l| l.size.bytes())
+ && self.maximum_allowed_size < u128::from(element_count) * u128::from(element_size)
+ {
+ let hi_pos = item.ident.span.lo() - BytePos::from_usize(1);
+ let sugg_span = Span::new(
+ hi_pos - BytePos::from_usize("const".len()),
+ hi_pos,
+ item.span.ctxt(),
+ item.span.parent(),
+ );
+ span_lint_and_then(
+ cx,
+ LARGE_CONST_ARRAYS,
+ item.span,
+ "large array defined as const",
+ |diag| {
+ diag.span_suggestion(
+ sugg_span,
+ "make this a static item",
+ "static",
+ Applicability::MachineApplicable,
+ );
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
index 0bf9b8718..6feb18855 100644
--- a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
+++ b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
@@ -8,7 +8,7 @@ use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{Adt, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/large_futures.rs b/src/tools/clippy/clippy_lints/src/large_futures.rs
index 26a727852..eb7570e9b 100644
--- a/src/tools/clippy/clippy_lints/src/large_futures.rs
+++ b/src/tools/clippy/clippy_lints/src/large_futures.rs
@@ -4,7 +4,7 @@ use clippy_utils::ty::implements_trait;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_target::abi::Size;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/large_include_file.rs b/src/tools/clippy/clippy_lints/src/large_include_file.rs
index 566901de3..1b5981ecc 100644
--- a/src/tools/clippy/clippy_lints/src/large_include_file.rs
+++ b/src/tools/clippy/clippy_lints/src/large_include_file.rs
@@ -4,7 +4,7 @@ use clippy_utils::macros::root_macro_call_first_node;
use rustc_ast::LitKind;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -50,37 +50,35 @@ impl_lint_pass!(LargeIncludeFile => [LARGE_INCLUDE_FILE]);
impl LateLintPass<'_> for LargeIncludeFile {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) {
- if_chain! {
- if let Some(macro_call) = root_macro_call_first_node(cx, expr);
- if !is_lint_allowed(cx, LARGE_INCLUDE_FILE, expr.hir_id);
- if cx.tcx.is_diagnostic_item(sym::include_bytes_macro, macro_call.def_id)
- || cx.tcx.is_diagnostic_item(sym::include_str_macro, macro_call.def_id);
- if let ExprKind::Lit(lit) = &expr.kind;
- then {
- let len = match &lit.node {
- // include_bytes
- LitKind::ByteStr(bstr, _) => bstr.len(),
- // include_str
- LitKind::Str(sym, _) => sym.as_str().len(),
- _ => return,
- };
+ if let Some(macro_call) = root_macro_call_first_node(cx, expr)
+ && !is_lint_allowed(cx, LARGE_INCLUDE_FILE, expr.hir_id)
+ && (cx.tcx.is_diagnostic_item(sym::include_bytes_macro, macro_call.def_id)
+ || cx.tcx.is_diagnostic_item(sym::include_str_macro, macro_call.def_id))
+ && let ExprKind::Lit(lit) = &expr.kind
+ {
+ let len = match &lit.node {
+ // include_bytes
+ LitKind::ByteStr(bstr, _) => bstr.len(),
+ // include_str
+ LitKind::Str(sym, _) => sym.as_str().len(),
+ _ => return,
+ };
- if len as u64 <= self.max_file_size {
- return;
- }
-
- span_lint_and_note(
- cx,
- LARGE_INCLUDE_FILE,
- expr.span,
- "attempted to include a large file",
- None,
- &format!(
- "the configuration allows a maximum size of {} bytes",
- self.max_file_size
- ),
- );
+ if len as u64 <= self.max_file_size {
+ return;
}
+
+ span_lint_and_note(
+ cx,
+ LARGE_INCLUDE_FILE,
+ expr.span,
+ "attempted to include a large file",
+ None,
+ &format!(
+ "the configuration allows a maximum size of {} bytes",
+ self.max_file_size
+ ),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs b/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs
index 5e312ab72..fd33ba91b 100644
--- a/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs
+++ b/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs
@@ -4,7 +4,7 @@ use rustc_hir::{Expr, ExprKind, Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, ConstKind};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/large_stack_frames.rs b/src/tools/clippy/clippy_lints/src/large_stack_frames.rs
index 33636eb68..b397180a6 100644
--- a/src/tools/clippy/clippy_lints/src/large_stack_frames.rs
+++ b/src/tools/clippy/clippy_lints/src/large_stack_frames.rs
@@ -6,7 +6,7 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, FnDecl};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs
index 0f17d2676..8c032b170 100644
--- a/src/tools/clippy/clippy_lints/src/len_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/len_zero.rs
@@ -2,22 +2,21 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_the
use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::Sugg;
use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_hir::{
AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, ImplItem, ImplItemKind,
- ImplicitSelfKind, Item, ItemKind, LangItem, Mutability, Node, PatKind, PathSegment, PrimTy, QPath, TraitItemRef,
- TyKind, TypeBindingKind,
+ ImplicitSelfKind, Item, ItemKind, Mutability, Node, PatKind, PathSegment, PrimTy, QPath, TraitItemRef,
+ TyKind, TypeBindingKind, OpaqueTyOrigin,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, AssocKind, FnSig, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{Span, Symbol};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::sym;
+use rustc_span::{Span, Symbol};
declare_clippy_lint! {
/// ### What it does
@@ -132,37 +131,33 @@ impl<'tcx> LateLintPass<'tcx> for LenZero {
}
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
- if_chain! {
- if item.ident.name == sym::len;
- if let ImplItemKind::Fn(sig, _) = &item.kind;
- if sig.decl.implicit_self.has_implicit_self();
- if sig.decl.inputs.len() == 1;
- if cx.effective_visibilities.is_exported(item.owner_id.def_id);
- if matches!(sig.decl.output, FnRetTy::Return(_));
- if let Some(imp) = get_parent_as_impl(cx.tcx, item.hir_id());
- if imp.of_trait.is_none();
- if let TyKind::Path(ty_path) = &imp.self_ty.kind;
- if let Some(ty_id) = cx.qpath_res(ty_path, imp.self_ty.hir_id).opt_def_id();
- if let Some(local_id) = ty_id.as_local();
- let ty_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
- if !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id);
- if let Some(output) = parse_len_output(
- cx,
- cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder()
- );
- then {
- let (name, kind) = match cx.tcx.hir().find(ty_hir_id) {
- Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"),
- Some(Node::Item(x)) => match x.kind {
- ItemKind::Struct(..) => (x.ident.name, "struct"),
- ItemKind::Enum(..) => (x.ident.name, "enum"),
- ItemKind::Union(..) => (x.ident.name, "union"),
- _ => (x.ident.name, "type"),
- }
- _ => return,
- };
- check_for_is_empty(cx, sig.span, sig.decl.implicit_self, output, ty_id, name, kind)
- }
+ if item.ident.name == sym::len
+ && let ImplItemKind::Fn(sig, _) = &item.kind
+ && sig.decl.implicit_self.has_implicit_self()
+ && sig.decl.inputs.len() == 1
+ && cx.effective_visibilities.is_exported(item.owner_id.def_id)
+ && matches!(sig.decl.output, FnRetTy::Return(_))
+ && let Some(imp) = get_parent_as_impl(cx.tcx, item.hir_id())
+ && imp.of_trait.is_none()
+ && let TyKind::Path(ty_path) = &imp.self_ty.kind
+ && let Some(ty_id) = cx.qpath_res(ty_path, imp.self_ty.hir_id).opt_def_id()
+ && let Some(local_id) = ty_id.as_local()
+ && let ty_hir_id = cx.tcx.local_def_id_to_hir_id(local_id)
+ && !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id)
+ && let Some(output) =
+ parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder())
+ {
+ let (name, kind) = match cx.tcx.opt_hir_node(ty_hir_id) {
+ Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"),
+ Some(Node::Item(x)) => match x.kind {
+ ItemKind::Struct(..) => (x.ident.name, "struct"),
+ ItemKind::Enum(..) => (x.ident.name, "enum"),
+ ItemKind::Union(..) => (x.ident.name, "union"),
+ _ => (x.ident.name, "type"),
+ },
+ _ => return,
+ };
+ check_for_is_empty(cx, sig.span, sig.decl.implicit_self, output, ty_id, name, kind);
}
}
@@ -294,8 +289,10 @@ fn extract_future_output<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<&
kind: ItemKind::OpaqueTy(opaque),
..
} = item
- && opaque.bounds.len() == 1
- && let GenericBound::LangItemTrait(LangItem::Future, _, _, generic_args) = &opaque.bounds[0]
+ && let OpaqueTyOrigin::AsyncFn(_) = opaque.origin
+ && let [GenericBound::Trait(trait_ref, _)] = &opaque.bounds
+ && let Some(segment) = trait_ref.trait_ref.path.segments.last()
+ && let Some(generic_args) = segment.args
&& generic_args.bindings.len() == 1
&& let TypeBindingKind::Equality {
term:
diff --git a/src/tools/clippy/clippy_lints/src/let_if_seq.rs b/src/tools/clippy/clippy_lints/src/let_if_seq.rs
index 2f6f36c39..270162ae7 100644
--- a/src/tools/clippy/clippy_lints/src/let_if_seq.rs
+++ b/src/tools/clippy/clippy_lints/src/let_if_seq.rs
@@ -2,12 +2,11 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::path_to_local_id;
use clippy_utils::source::snippet;
use clippy_utils::visitors::is_local_used;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::{BindingAnnotation, Mutability};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -61,76 +60,85 @@ impl<'tcx> LateLintPass<'tcx> for LetIfSeq {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
let mut it = block.stmts.iter().peekable();
while let Some(stmt) = it.next() {
- if_chain! {
- if let Some(expr) = it.peek();
- if let hir::StmtKind::Local(local) = stmt.kind;
- if let hir::PatKind::Binding(mode, canonical_id, ident, None) = local.pat.kind;
- if let hir::StmtKind::Expr(if_) = expr.kind;
- if let hir::ExprKind::If(hir::Expr { kind: hir::ExprKind::DropTemps(cond), ..}, then, else_) = if_.kind;
- if !is_local_used(cx, *cond, canonical_id);
- if let hir::ExprKind::Block(then, _) = then.kind;
- if let Some(value) = check_assign(cx, canonical_id, then);
- if !is_local_used(cx, value, canonical_id);
- then {
- let span = stmt.span.to(if_.span);
+ if let Some(expr) = it.peek()
+ && let hir::StmtKind::Local(local) = stmt.kind
+ && let hir::PatKind::Binding(mode, canonical_id, ident, None) = local.pat.kind
+ && let hir::StmtKind::Expr(if_) = expr.kind
+ && let hir::ExprKind::If(
+ hir::Expr {
+ kind: hir::ExprKind::DropTemps(cond),
+ ..
+ },
+ then,
+ else_,
+ ) = if_.kind
+ && !is_local_used(cx, *cond, canonical_id)
+ && let hir::ExprKind::Block(then, _) = then.kind
+ && let Some(value) = check_assign(cx, canonical_id, then)
+ && !is_local_used(cx, value, canonical_id)
+ {
+ let span = stmt.span.to(if_.span);
- let has_interior_mutability = !cx.typeck_results().node_type(canonical_id).is_freeze(
- cx.tcx,
- cx.param_env,
- );
- if has_interior_mutability { return; }
+ let has_interior_mutability = !cx
+ .typeck_results()
+ .node_type(canonical_id)
+ .is_freeze(cx.tcx, cx.param_env);
+ if has_interior_mutability {
+ return;
+ }
- let (default_multi_stmts, default) = if let Some(else_) = else_ {
- if let hir::ExprKind::Block(else_, _) = else_.kind {
- if let Some(default) = check_assign(cx, canonical_id, else_) {
- (else_.stmts.len() > 1, default)
- } else if let Some(default) = local.init {
- (true, default)
- } else {
- continue;
- }
+ let (default_multi_stmts, default) = if let Some(else_) = else_ {
+ if let hir::ExprKind::Block(else_, _) = else_.kind {
+ if let Some(default) = check_assign(cx, canonical_id, else_) {
+ (else_.stmts.len() > 1, default)
+ } else if let Some(default) = local.init {
+ (true, default)
} else {
continue;
}
- } else if let Some(default) = local.init {
- (false, default)
} else {
continue;
- };
+ }
+ } else if let Some(default) = local.init {
+ (false, default)
+ } else {
+ continue;
+ };
- let mutability = match mode {
- BindingAnnotation(_, Mutability::Mut) => "<mut> ",
- _ => "",
- };
+ let mutability = match mode {
+ BindingAnnotation(_, Mutability::Mut) => "<mut> ",
+ _ => "",
+ };
- // FIXME: this should not suggest `mut` if we can detect that the variable is not
- // use mutably after the `if`
+ // FIXME: this should not suggest `mut` if we can detect that the variable is not
+ // used mutably after the `if`
- let sug = format!(
- "let {mutability}{name} = if {cond} {{{then} {value} }} else {{{else} {default} }};",
- name=ident.name,
- cond=snippet(cx, cond.span, "_"),
- then=if then.stmts.len() > 1 { " ..;" } else { "" },
- else=if default_multi_stmts { " ..;" } else { "" },
- value=snippet(cx, value.span, "<value>"),
- default=snippet(cx, default.span, "<default>"),
- );
- span_lint_and_then(cx,
- USELESS_LET_IF_SEQ,
- span,
- "`if _ { .. } else { .. }` is an expression",
- |diag| {
- diag.span_suggestion(
- span,
- "it is more idiomatic to write",
- sug,
- Applicability::HasPlaceholders,
- );
- if !mutability.is_empty() {
- diag.note("you might not need `mut` at all");
- }
- });
- }
+ let sug = format!(
+ "let {mutability}{name} = if {cond} {{{then} {value} }} else {{{else} {default} }};",
+ name=ident.name,
+ cond=snippet(cx, cond.span, "_"),
+ then=if then.stmts.len() > 1 { " ..;" } else { "" },
+ else=if default_multi_stmts { " ..;" } else { "" },
+ value=snippet(cx, value.span, "<value>"),
+ default=snippet(cx, default.span, "<default>"),
+ );
+ span_lint_and_then(
+ cx,
+ USELESS_LET_IF_SEQ,
+ span,
+ "`if _ { .. } else { .. }` is an expression",
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "it is more idiomatic to write",
+ sug,
+ Applicability::HasPlaceholders,
+ );
+ if !mutability.is_empty() {
+ diag.note("you might not need `mut` at all");
+ }
+ },
+ );
}
}
}
@@ -141,20 +149,23 @@ fn check_assign<'tcx>(
decl: hir::HirId,
block: &'tcx hir::Block<'_>,
) -> Option<&'tcx hir::Expr<'tcx>> {
- if_chain! {
- if block.expr.is_none();
- if let Some(expr) = block.stmts.iter().last();
- if let hir::StmtKind::Semi(expr) = expr.kind;
- if let hir::ExprKind::Assign(var, value, _) = expr.kind;
- if path_to_local_id(var, decl);
- then {
- if block.stmts.iter().take(block.stmts.len()-1).any(|stmt| is_local_used(cx, stmt, decl)) {
- None
- } else {
- Some(value)
- }
- } else {
+ if block.expr.is_none()
+ && let Some(expr) = block.stmts.iter().last()
+ && let hir::StmtKind::Semi(expr) = expr.kind
+ && let hir::ExprKind::Assign(var, value, _) = expr.kind
+ && path_to_local_id(var, decl)
+ {
+ if block
+ .stmts
+ .iter()
+ .take(block.stmts.len() - 1)
+ .any(|stmt| is_local_used(cx, stmt, decl))
+ {
None
+ } else {
+ Some(value)
}
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/let_underscore.rs b/src/tools/clippy/clippy_lints/src/let_underscore.rs
index 04f23a213..606c2ed72 100644
--- a/src/tools/clippy/clippy_lints/src/let_underscore.rs
+++ b/src/tools/clippy/clippy_lints/src/let_underscore.rs
@@ -5,7 +5,7 @@ use rustc_hir::{Local, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{GenericArgKind, IsSuggestable};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{BytePos, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs b/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs
index 79d728a02..5f3f9b43f 100644
--- a/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs
+++ b/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs
@@ -3,7 +3,7 @@ use clippy_utils::source::snippet;
use rustc_hir::{Local, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -27,27 +27,25 @@ declare_lint_pass!(UnderscoreTyped => [LET_WITH_TYPE_UNDERSCORE]);
impl LateLintPass<'_> for UnderscoreTyped {
fn check_local(&mut self, cx: &LateContext<'_>, local: &Local<'_>) {
- if_chain! {
- if !in_external_macro(cx.tcx.sess, local.span);
- if let Some(ty) = local.ty; // Ensure that it has a type defined
- if let TyKind::Infer = &ty.kind; // that type is '_'
- if local.span.eq_ctxt(ty.span);
- then {
- // NOTE: Using `is_from_proc_macro` on `init` will require that it's initialized,
- // this doesn't. Alternatively, `WithSearchPat` can be implemented for `Ty`
- if snippet(cx, ty.span, "_").trim() != "_" {
- return;
- }
-
- span_lint_and_help(
- cx,
- LET_WITH_TYPE_UNDERSCORE,
- local.span,
- "variable declared with type underscore",
- Some(ty.span.with_lo(local.pat.span.hi())),
- "remove the explicit type `_` declaration"
- )
+ if !in_external_macro(cx.tcx.sess, local.span)
+ && let Some(ty) = local.ty // Ensure that it has a type defined
+ && let TyKind::Infer = &ty.kind // that type is '_'
+ && local.span.eq_ctxt(ty.span)
+ {
+ // NOTE: Using `is_from_proc_macro` on `init` will require that it's initialized,
+ // this doesn't. Alternatively, `WithSearchPat` can be implemented for `Ty`
+ if snippet(cx, ty.span, "_").trim() != "_" {
+ return;
}
+
+ span_lint_and_help(
+ cx,
+ LET_WITH_TYPE_UNDERSCORE,
+ local.span,
+ "variable declared with type underscore",
+ Some(ty.span.with_lo(local.pat.span.hi())),
+ "remove the explicit type `_` declaration",
+ );
};
}
}
diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs
index ab978a677..7758d6a58 100644
--- a/src/tools/clippy/clippy_lints/src/lib.rs
+++ b/src/tools/clippy/clippy_lints/src/lib.rs
@@ -50,9 +50,10 @@ extern crate clippy_utils;
#[macro_use]
extern crate declare_clippy_lint;
+use std::collections::BTreeMap;
+
use rustc_data_structures::fx::FxHashSet;
use rustc_lint::{Lint, LintId};
-use rustc_session::Session;
#[cfg(feature = "internal")]
pub mod deprecated_lints;
@@ -75,7 +76,7 @@ mod assertions_on_result_states;
mod async_yields_async;
mod attrs;
mod await_holding_invalid;
-mod blocks_in_if_conditions;
+mod blocks_in_conditions;
mod bool_assert_comparison;
mod bool_to_int_with_if;
mod booleans;
@@ -145,6 +146,7 @@ mod if_let_mutex;
mod if_not_else;
mod if_then_some_else_none;
mod ignored_unit_patterns;
+mod impl_hash_with_borrow_str_and_bytes;
mod implicit_hasher;
mod implicit_return;
mod implicit_saturating_add;
@@ -153,6 +155,7 @@ mod implied_bounds_in_impls;
mod inconsistent_struct_constructor;
mod index_refutable_slice;
mod indexing_slicing;
+mod ineffective_open_options;
mod infinite_iter;
mod inherent_impl;
mod inherent_to_string;
@@ -165,6 +168,7 @@ mod item_name_repetitions;
mod items_after_statements;
mod items_after_test_module;
mod iter_not_returning_iterator;
+mod iter_over_hash_type;
mod iter_without_into_iter;
mod large_const_arrays;
mod large_enum_variant;
@@ -288,6 +292,7 @@ mod ref_option_ref;
mod ref_patterns;
mod reference;
mod regex;
+mod repeat_vec_with_capacity;
mod reserve_after_initialization;
mod return_self_not_must_use;
mod returns;
@@ -308,7 +313,6 @@ mod slow_vector_initialization;
mod std_instead_of_core;
mod strings;
mod strlen_on_c_strings;
-mod suspicious_doc_comments;
mod suspicious_operation_groupings;
mod suspicious_trait_impl;
mod suspicious_xor_used_as_pow;
@@ -325,6 +329,7 @@ mod tuple_array_conversions;
mod types;
mod undocumented_unsafe_blocks;
mod unicode;
+mod uninhabited_references;
mod uninit_vec;
mod unit_return_expecting_ord;
mod unit_types;
@@ -492,11 +497,84 @@ fn register_categories(store: &mut rustc_lint::LintStore) {
groups.register(store);
}
-/// Register all lints and lint groups with the rustc plugin registry
+/// Register all lints and lint groups with the rustc lint store
///
/// Used in `./src/driver.rs`.
#[expect(clippy::too_many_lines)]
-pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: &'static Conf) {
+pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) {
+ let Conf {
+ ref absolute_paths_allowed_crates,
+ absolute_paths_max_segments,
+ accept_comment_above_attributes,
+ accept_comment_above_statement,
+ allow_dbg_in_tests,
+ allow_expect_in_tests,
+ allow_mixed_uninlined_format_args,
+ allow_one_hash_in_raw_strings,
+ allow_print_in_tests,
+ allow_private_module_inception,
+ allow_unwrap_in_tests,
+ ref allowed_dotfiles,
+ ref allowed_idents_below_min_chars,
+ ref allowed_scripts,
+ ref arithmetic_side_effects_allowed_binary,
+ ref arithmetic_side_effects_allowed_unary,
+ ref arithmetic_side_effects_allowed,
+ array_size_threshold,
+ avoid_breaking_exported_api,
+ ref await_holding_invalid_types,
+ cargo_ignore_publish,
+ cognitive_complexity_threshold,
+ ref disallowed_macros,
+ ref disallowed_methods,
+ ref disallowed_names,
+ ref disallowed_types,
+ ref doc_valid_idents,
+ enable_raw_pointer_heuristic_for_send,
+ enforce_iter_loop_reborrow,
+ ref enforced_import_renames,
+ enum_variant_name_threshold,
+ enum_variant_size_threshold,
+ excessive_nesting_threshold,
+ future_size_threshold,
+ ref ignore_interior_mutability,
+ large_error_threshold,
+ literal_representation_threshold,
+ matches_for_let_else,
+ max_fn_params_bools,
+ max_include_file_size,
+ max_struct_bools,
+ max_suggested_slice_pattern_length,
+ max_trait_bounds,
+ min_ident_chars_threshold,
+ missing_docs_in_crate_items,
+ ref msrv,
+ pass_by_value_size_limit,
+ semicolon_inside_block_ignore_singleline,
+ semicolon_outside_block_ignore_multiline,
+ single_char_binding_names_threshold,
+ stack_size_threshold,
+ ref standard_macro_braces,
+ struct_field_name_threshold,
+ suppress_restriction_lint_in_const,
+ too_large_for_stack,
+ too_many_arguments_threshold,
+ too_many_lines_threshold,
+ trivial_copy_size_limit,
+ type_complexity_threshold,
+ unnecessary_box_size,
+ unreadable_literal_lint_fractions,
+ upper_case_acronyms_aggressive,
+ vec_box_size_threshold,
+ verbose_bit_mask_threshold,
+ warn_on_all_wildcard_imports,
+ check_private_items,
+
+ blacklisted_names: _,
+ cyclomatic_complexity_threshold: _,
+ } = *conf;
+ let msrv = || msrv.clone();
+
register_removed_non_tool_lints(store);
register_categories(store);
@@ -521,7 +599,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| {
Box::new(utils::internal_lints::compiler_lint_functions::CompilerLintFunctions::new())
});
- store.register_late_pass(|_| Box::new(utils::internal_lints::if_chain_style::IfChainStyle));
store.register_late_pass(|_| Box::new(utils::internal_lints::invalid_paths::InvalidPaths));
store.register_late_pass(|_| {
Box::<utils::internal_lints::interning_defined_symbol::InterningDefinedSymbol>::default()
@@ -537,9 +614,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
});
}
- let arithmetic_side_effects_allowed = conf.arithmetic_side_effects_allowed.clone();
- let arithmetic_side_effects_allowed_binary = conf.arithmetic_side_effects_allowed_binary.clone();
- let arithmetic_side_effects_allowed_unary = conf.arithmetic_side_effects_allowed_unary.clone();
store.register_late_pass(move |_| {
Box::new(operators::arithmetic_side_effects::ArithmeticSideEffects::new(
arithmetic_side_effects_allowed
@@ -557,16 +631,12 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_early_pass(|| Box::<utils::format_args_collector::FormatArgsCollector>::default());
store.register_late_pass(|_| Box::new(utils::dump_hir::DumpHir));
store.register_late_pass(|_| Box::new(utils::author::Author));
- let await_holding_invalid_types = conf.await_holding_invalid_types.clone();
store.register_late_pass(move |_| {
Box::new(await_holding_invalid::AwaitHolding::new(
await_holding_invalid_types.clone(),
))
});
store.register_late_pass(|_| Box::new(serde_api::SerdeApi));
- let vec_box_size_threshold = conf.vec_box_size_threshold;
- let type_complexity_threshold = conf.type_complexity_threshold;
- let avoid_breaking_exported_api = conf.avoid_breaking_exported_api;
store.register_late_pass(move |_| {
Box::new(types::Types::new(
vec_box_size_threshold,
@@ -588,7 +658,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::<significant_drop_tightening::SignificantDropTightening<'_>>::default());
store.register_late_pass(|_| Box::new(len_zero::LenZero));
store.register_late_pass(|_| Box::new(attrs::Attributes));
- store.register_late_pass(|_| Box::new(blocks_in_if_conditions::BlocksInIfConditions));
+ store.register_late_pass(|_| Box::new(blocks_in_conditions::BlocksInConditions));
store.register_late_pass(|_| Box::new(unicode::Unicode));
store.register_late_pass(|_| Box::new(uninit_vec::UninitVec));
store.register_late_pass(|_| Box::new(unit_return_expecting_ord::UnitReturnExpectingOrd));
@@ -599,19 +669,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(inconsistent_struct_constructor::InconsistentStructConstructor));
store.register_late_pass(|_| Box::new(non_octal_unix_permissions::NonOctalUnixPermissions));
store.register_early_pass(|| Box::new(unnecessary_self_imports::UnnecessarySelfImports));
-
- let msrv = || conf.msrv.clone();
- let avoid_breaking_exported_api = conf.avoid_breaking_exported_api;
- let allow_expect_in_tests = conf.allow_expect_in_tests;
- let allow_unwrap_in_tests = conf.allow_unwrap_in_tests;
- let suppress_restriction_lint_in_const = conf.suppress_restriction_lint_in_const;
store.register_late_pass(move |_| Box::new(approx_const::ApproxConstant::new(msrv())));
- let allowed_dotfiles = conf
- .allowed_dotfiles
- .iter()
- .cloned()
- .chain(methods::DEFAULT_ALLOWED_DOTFILES.iter().copied().map(ToOwned::to_owned))
- .collect::<FxHashSet<_>>();
store.register_late_pass(move |_| {
Box::new(methods::Methods::new(
avoid_breaking_exported_api,
@@ -622,7 +680,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
))
});
store.register_late_pass(move |_| Box::new(matches::Matches::new(msrv())));
- let matches_for_let_else = conf.matches_for_let_else;
store.register_early_pass(move || Box::new(manual_non_exhaustive::ManualNonExhaustiveStruct::new(msrv())));
store.register_late_pass(move |_| Box::new(manual_non_exhaustive::ManualNonExhaustiveEnum::new(msrv())));
store.register_late_pass(move |_| Box::new(manual_strip::ManualStrip::new(msrv())));
@@ -639,7 +696,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_early_pass(move || Box::new(unnested_or_patterns::UnnestedOrPatterns::new(msrv())));
store.register_late_pass(|_| Box::new(size_of_in_element_count::SizeOfInElementCount));
store.register_late_pass(|_| Box::new(same_name_method::SameNameMethod));
- let max_suggested_slice_pattern_length = conf.max_suggested_slice_pattern_length;
store.register_late_pass(move |_| {
Box::new(index_refutable_slice::IndexRefutableSlice::new(
max_suggested_slice_pattern_length,
@@ -648,7 +704,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
});
store.register_late_pass(|_| Box::<shadow::Shadow>::default());
store.register_late_pass(|_| Box::new(unit_types::UnitTypes));
- let enforce_iter_loop_reborrow = conf.enforce_iter_loop_reborrow;
store.register_late_pass(move |_| Box::new(loops::Loops::new(msrv(), enforce_iter_loop_reborrow)));
store.register_late_pass(|_| Box::<main_recursion::MainRecursion>::default());
store.register_late_pass(|_| Box::new(lifetimes::Lifetimes));
@@ -662,18 +717,17 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(no_effect::NoEffect));
store.register_late_pass(|_| Box::new(temporary_assignment::TemporaryAssignment));
store.register_late_pass(move |_| Box::new(transmute::Transmute::new(msrv())));
- let cognitive_complexity_threshold = conf.cognitive_complexity_threshold;
store.register_late_pass(move |_| {
Box::new(cognitive_complexity::CognitiveComplexity::new(
cognitive_complexity_threshold,
))
});
- let too_large_for_stack = conf.too_large_for_stack;
store.register_late_pass(move |_| Box::new(escape::BoxedLocal { too_large_for_stack }));
store.register_late_pass(move |_| {
Box::new(vec::UselessVec {
too_large_for_stack,
msrv: msrv(),
+ span_to_lint_map: BTreeMap::new(),
})
});
store.register_late_pass(|_| Box::new(panic_unimplemented::PanicUnimplemented));
@@ -684,18 +738,13 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(empty_enum::EmptyEnum));
store.register_late_pass(|_| Box::new(invalid_upcast_comparisons::InvalidUpcastComparisons));
store.register_late_pass(|_| Box::<regex::Regex>::default());
- let ignore_interior_mutability = conf.ignore_interior_mutability.clone();
store.register_late_pass(move |_| Box::new(copies::CopyAndPaste::new(ignore_interior_mutability.clone())));
store.register_late_pass(|_| Box::new(copy_iterator::CopyIterator));
store.register_late_pass(|_| Box::new(format::UselessFormat));
store.register_late_pass(|_| Box::new(swap::Swap));
store.register_late_pass(|_| Box::new(overflow_check_conditional::OverflowCheckConditional));
store.register_late_pass(|_| Box::<new_without_default::NewWithoutDefault>::default());
- let disallowed_names = conf.disallowed_names.iter().cloned().collect::<FxHashSet<_>>();
- store.register_late_pass(move |_| Box::new(disallowed_names::DisallowedNames::new(disallowed_names.clone())));
- let too_many_arguments_threshold = conf.too_many_arguments_threshold;
- let too_many_lines_threshold = conf.too_many_lines_threshold;
- let large_error_threshold = conf.large_error_threshold;
+ store.register_late_pass(move |_| Box::new(disallowed_names::DisallowedNames::new(disallowed_names)));
store.register_late_pass(move |_| {
Box::new(functions::Functions::new(
too_many_arguments_threshold,
@@ -704,9 +753,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
avoid_breaking_exported_api,
))
});
- let doc_valid_idents = conf.doc_valid_idents.iter().cloned().collect::<FxHashSet<_>>();
- let missing_docs_in_crate_items = conf.missing_docs_in_crate_items;
- store.register_late_pass(move |_| Box::new(doc::DocMarkdown::new(doc_valid_idents.clone())));
+ store.register_late_pass(move |_| Box::new(doc::Documentation::new(doc_valid_idents, check_private_items)));
store.register_late_pass(|_| Box::new(neg_multiply::NegMultiply));
store.register_late_pass(|_| Box::new(let_if_seq::LetIfSeq));
store.register_late_pass(|_| Box::new(mixed_read_write_in_expression::EvalOrderDependence));
@@ -716,17 +763,17 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(match_result_ok::MatchResultOk));
store.register_late_pass(|_| Box::new(partialeq_ne_impl::PartialEqNeImpl));
store.register_late_pass(|_| Box::new(unused_io_amount::UnusedIoAmount));
- let enum_variant_size_threshold = conf.enum_variant_size_threshold;
store.register_late_pass(move |_| Box::new(large_enum_variant::LargeEnumVariant::new(enum_variant_size_threshold)));
store.register_late_pass(|_| Box::new(explicit_write::ExplicitWrite));
store.register_late_pass(|_| Box::new(needless_pass_by_value::NeedlessPassByValue));
- let pass_by_ref_or_value = pass_by_ref_or_value::PassByRefOrValue::new(
- conf.trivial_copy_size_limit,
- conf.pass_by_value_size_limit,
- conf.avoid_breaking_exported_api,
- &sess.target,
- );
- store.register_late_pass(move |_| Box::new(pass_by_ref_or_value));
+ store.register_late_pass(move |tcx| {
+ Box::new(pass_by_ref_or_value::PassByRefOrValue::new(
+ trivial_copy_size_limit,
+ pass_by_value_size_limit,
+ avoid_breaking_exported_api,
+ tcx.sess.target.pointer_width,
+ ))
+ });
store.register_late_pass(|_| Box::new(ref_option_ref::RefOptionRef));
store.register_late_pass(|_| Box::new(infinite_iter::InfiniteIter));
store.register_late_pass(|_| Box::new(inline_fn_without_body::InlineFnWithoutBody));
@@ -746,7 +793,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
suppress_restriction_lint_in_const,
))
});
- let ignore_interior_mutability = conf.ignore_interior_mutability.clone();
store.register_late_pass(move |_| Box::new(non_copy_const::NonCopyConst::new(ignore_interior_mutability.clone())));
store.register_late_pass(|_| Box::new(ptr_offset_with_cast::PtrOffsetWithCast));
store.register_late_pass(|_| Box::new(redundant_clone::RedundantClone));
@@ -755,10 +801,8 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(assertions_on_constants::AssertionsOnConstants));
store.register_late_pass(|_| Box::new(assertions_on_result_states::AssertionsOnResultStates));
store.register_late_pass(|_| Box::new(inherent_to_string::InherentToString));
- let max_trait_bounds = conf.max_trait_bounds;
store.register_late_pass(move |_| Box::new(trait_bounds::TraitBounds::new(max_trait_bounds, msrv())));
store.register_late_pass(|_| Box::new(comparison_chain::ComparisonChain));
- let ignore_interior_mutability = conf.ignore_interior_mutability.clone();
store.register_late_pass(move |_| Box::new(mut_key::MutableKeyType::new(ignore_interior_mutability.clone())));
store.register_early_pass(|| Box::new(reference::DerefAddrOf));
store.register_early_pass(|| Box::new(double_parens::DoubleParens));
@@ -779,21 +823,16 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_early_pass(|| Box::new(redundant_else::RedundantElse));
store.register_late_pass(|_| Box::new(create_dir::CreateDir));
store.register_early_pass(|| Box::new(needless_arbitrary_self_type::NeedlessArbitrarySelfType));
- let literal_representation_lint_fraction_readability = conf.unreadable_literal_lint_fractions;
store.register_early_pass(move || {
Box::new(literal_representation::LiteralDigitGrouping::new(
- literal_representation_lint_fraction_readability,
+ unreadable_literal_lint_fractions,
))
});
- let literal_representation_threshold = conf.literal_representation_threshold;
store.register_early_pass(move || {
Box::new(literal_representation::DecimalLiteralRepresentation::new(
literal_representation_threshold,
))
});
- let enum_variant_name_threshold = conf.enum_variant_name_threshold;
- let struct_field_name_threshold = conf.struct_field_name_threshold;
- let allow_private_module_inception = conf.allow_private_module_inception;
store.register_late_pass(move |_| {
Box::new(item_name_repetitions::ItemNameRepetitions::new(
enum_variant_name_threshold,
@@ -803,7 +842,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
))
});
store.register_early_pass(|| Box::new(tabs_in_doc_comments::TabsInDocComments));
- let upper_case_acronyms_aggressive = conf.upper_case_acronyms_aggressive;
store.register_late_pass(move |_| {
Box::new(upper_case_acronyms::UpperCaseAcronyms::new(
avoid_breaking_exported_api,
@@ -815,15 +853,12 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(mutable_debug_assertion::DebugAssertWithMutCall));
store.register_late_pass(|_| Box::new(exit::Exit));
store.register_late_pass(|_| Box::new(to_digit_is_some::ToDigitIsSome));
- let array_size_threshold = u128::from(conf.array_size_threshold);
- store.register_late_pass(move |_| Box::new(large_stack_arrays::LargeStackArrays::new(array_size_threshold)));
- store.register_late_pass(move |_| Box::new(large_const_arrays::LargeConstArrays::new(array_size_threshold)));
+ store.register_late_pass(move |_| Box::new(large_stack_arrays::LargeStackArrays::new(array_size_threshold.into())));
+ store.register_late_pass(move |_| Box::new(large_const_arrays::LargeConstArrays::new(array_size_threshold.into())));
store.register_late_pass(|_| Box::new(floating_point_arithmetic::FloatingPointArithmetic));
store.register_late_pass(|_| Box::new(as_conversions::AsConversions));
store.register_late_pass(|_| Box::new(let_underscore::LetUnderscore));
store.register_early_pass(|| Box::<single_component_path_imports::SingleComponentPathImports>::default());
- let max_fn_params_bools = conf.max_fn_params_bools;
- let max_struct_bools = conf.max_struct_bools;
store.register_late_pass(move |_| {
Box::new(excessive_bools::ExcessiveBools::new(
max_struct_bools,
@@ -831,36 +866,30 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
))
});
store.register_early_pass(|| Box::new(option_env_unwrap::OptionEnvUnwrap));
- let warn_on_all_wildcard_imports = conf.warn_on_all_wildcard_imports;
store.register_late_pass(move |_| Box::new(wildcard_imports::WildcardImports::new(warn_on_all_wildcard_imports)));
store.register_late_pass(|_| Box::<redundant_pub_crate::RedundantPubCrate>::default());
store.register_late_pass(|_| Box::new(unnamed_address::UnnamedAddress));
store.register_late_pass(|_| Box::<dereference::Dereferencing<'_>>::default());
store.register_late_pass(|_| Box::new(option_if_let_else::OptionIfLetElse));
store.register_late_pass(|_| Box::new(future_not_send::FutureNotSend));
- let future_size_threshold = conf.future_size_threshold;
store.register_late_pass(move |_| Box::new(large_futures::LargeFuture::new(future_size_threshold)));
store.register_late_pass(|_| Box::new(if_let_mutex::IfLetMutex));
store.register_late_pass(|_| Box::new(if_not_else::IfNotElse));
store.register_late_pass(|_| Box::new(equatable_if_let::PatternEquality));
store.register_late_pass(|_| Box::new(manual_async_fn::ManualAsyncFn));
store.register_late_pass(|_| Box::new(panic_in_result_fn::PanicInResultFn));
- let single_char_binding_names_threshold = conf.single_char_binding_names_threshold;
store.register_early_pass(move || {
Box::new(non_expressive_names::NonExpressiveNames {
single_char_binding_names_threshold,
})
});
- let macro_matcher = conf.standard_macro_braces.iter().cloned().collect::<FxHashSet<_>>();
- store.register_early_pass(move || Box::new(nonstandard_macro_braces::MacroBraces::new(&macro_matcher)));
+ store.register_early_pass(move || Box::new(nonstandard_macro_braces::MacroBraces::new(standard_macro_braces)));
store.register_late_pass(|_| Box::<macro_use::MacroUseImports>::default());
store.register_late_pass(|_| Box::new(pattern_type_mismatch::PatternTypeMismatch));
store.register_late_pass(|_| Box::new(unwrap_in_result::UnwrapInResult));
store.register_late_pass(|_| Box::new(semicolon_if_nothing_returned::SemicolonIfNothingReturned));
store.register_late_pass(|_| Box::new(async_yields_async::AsyncYieldsAsync));
- let disallowed_macros = conf.disallowed_macros.clone();
store.register_late_pass(move |_| Box::new(disallowed_macros::DisallowedMacros::new(disallowed_macros.clone())));
- let disallowed_methods = conf.disallowed_methods.clone();
store.register_late_pass(move |_| Box::new(disallowed_methods::DisallowedMethods::new(disallowed_methods.clone())));
store.register_early_pass(|| Box::new(asm_syntax::InlineAsmX86AttSyntax));
store.register_early_pass(|| Box::new(asm_syntax::InlineAsmX86IntelSyntax));
@@ -875,36 +904,30 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(bool_assert_comparison::BoolAssertComparison));
store.register_early_pass(move || Box::new(module_style::ModStyle));
store.register_late_pass(|_| Box::<unused_async::UnusedAsync>::default());
- let disallowed_types = conf.disallowed_types.clone();
store.register_late_pass(move |_| Box::new(disallowed_types::DisallowedTypes::new(disallowed_types.clone())));
- let import_renames = conf.enforced_import_renames.clone();
store.register_late_pass(move |_| {
Box::new(missing_enforced_import_rename::ImportRename::new(
- import_renames.clone(),
+ enforced_import_renames.clone(),
))
});
- let scripts = conf.allowed_scripts.clone();
- store.register_early_pass(move || Box::new(disallowed_script_idents::DisallowedScriptIdents::new(&scripts)));
+ store.register_early_pass(move || Box::new(disallowed_script_idents::DisallowedScriptIdents::new(allowed_scripts)));
store.register_late_pass(|_| Box::new(strlen_on_c_strings::StrlenOnCStrings));
store.register_late_pass(move |_| Box::new(self_named_constructors::SelfNamedConstructors));
store.register_late_pass(move |_| Box::new(iter_not_returning_iterator::IterNotReturningIterator));
store.register_late_pass(move |_| Box::new(manual_assert::ManualAssert));
- let enable_raw_pointer_heuristic_for_send = conf.enable_raw_pointer_heuristic_for_send;
store.register_late_pass(move |_| {
Box::new(non_send_fields_in_send_ty::NonSendFieldInSendTy::new(
enable_raw_pointer_heuristic_for_send,
))
});
- let accept_comment_above_statement = conf.accept_comment_above_statement;
- let accept_comment_above_attributes = conf.accept_comment_above_attributes;
store.register_late_pass(move |_| {
Box::new(undocumented_unsafe_blocks::UndocumentedUnsafeBlocks::new(
accept_comment_above_statement,
accept_comment_above_attributes,
))
});
- let allow_mixed_uninlined = conf.allow_mixed_uninlined_format_args;
- store.register_late_pass(move |_| Box::new(format_args::FormatArgs::new(msrv(), allow_mixed_uninlined)));
+ store
+ .register_late_pass(move |_| Box::new(format_args::FormatArgs::new(msrv(), allow_mixed_uninlined_format_args)));
store.register_late_pass(|_| Box::new(trailing_empty_array::TrailingEmptyArray));
store.register_early_pass(|| Box::new(octal_escapes::OctalEscapes));
store.register_late_pass(|_| Box::new(needless_late_init::NeedlessLateInit));
@@ -914,11 +937,8 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(move |_| Box::new(manual_bits::ManualBits::new(msrv())));
store.register_late_pass(|_| Box::new(default_union_representation::DefaultUnionRepresentation));
store.register_late_pass(|_| Box::<only_used_in_recursion::OnlyUsedInRecursion>::default());
- let allow_dbg_in_tests = conf.allow_dbg_in_tests;
store.register_late_pass(move |_| Box::new(dbg_macro::DbgMacro::new(allow_dbg_in_tests)));
- let allow_print_in_tests = conf.allow_print_in_tests;
store.register_late_pass(move |_| Box::new(write::Write::new(allow_print_in_tests)));
- let cargo_ignore_publish = conf.cargo_ignore_publish;
store.register_late_pass(move |_| {
Box::new(cargo::Cargo {
ignore_publish: cargo_ignore_publish,
@@ -929,7 +949,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(unnecessary_owned_empty_strings::UnnecessaryOwnedEmptyStrings));
store.register_early_pass(|| Box::new(pub_use::PubUse));
store.register_late_pass(|_| Box::new(format_push_string::FormatPushString));
- let max_include_file_size = conf.max_include_file_size;
store.register_late_pass(move |_| Box::new(large_include_file::LargeIncludeFile::new(max_include_file_size)));
store.register_late_pass(|_| Box::new(strings::TrimSplitWhitespace));
store.register_late_pass(|_| Box::new(rc_clone_in_vec_init::RcCloneInVecInit));
@@ -942,7 +961,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(default_instead_of_iter_empty::DefaultIterEmpty));
store.register_late_pass(move |_| Box::new(manual_rem_euclid::ManualRemEuclid::new(msrv())));
store.register_late_pass(move |_| Box::new(manual_retain::ManualRetain::new(msrv())));
- let verbose_bit_mask_threshold = conf.verbose_bit_mask_threshold;
store.register_late_pass(move |_| Box::new(operators::Operators::new(verbose_bit_mask_threshold)));
store.register_late_pass(|_| Box::<std_instead_of_core::StdReexports>::default());
store.register_late_pass(move |_| Box::new(instant_subtraction::InstantSubtraction::new(msrv())));
@@ -959,8 +977,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(from_raw_with_void_ptr::FromRawWithVoidPtr));
store.register_late_pass(|_| Box::new(suspicious_xor_used_as_pow::ConfusingXorAndPow));
store.register_late_pass(move |_| Box::new(manual_is_ascii_check::ManualIsAsciiCheck::new(msrv())));
- let semicolon_inside_block_ignore_singleline = conf.semicolon_inside_block_ignore_singleline;
- let semicolon_outside_block_ignore_multiline = conf.semicolon_outside_block_ignore_multiline;
store.register_late_pass(move |_| {
Box::new(semicolon_block::SemicolonBlock::new(
semicolon_inside_block_ignore_singleline,
@@ -983,7 +999,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(allow_attributes::AllowAttribute));
store.register_late_pass(move |_| Box::new(manual_main_separator_str::ManualMainSeparatorStr::new(msrv())));
store.register_late_pass(|_| Box::new(unnecessary_struct_initialization::UnnecessaryStruct));
- let unnecessary_box_size = conf.unnecessary_box_size;
store.register_late_pass(move |_| {
Box::new(unnecessary_box_returns::UnnecessaryBoxReturns::new(
avoid_breaking_exported_api,
@@ -993,8 +1008,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(lines_filter_map_ok::LinesFilterMapOk));
store.register_late_pass(|_| Box::new(tests_outside_test_module::TestsOutsideTestModule));
store.register_late_pass(|_| Box::new(manual_slice_size_calculation::ManualSliceSizeCalculation));
- store.register_early_pass(|| Box::new(suspicious_doc_comments::SuspiciousDocComments));
- let excessive_nesting_threshold = conf.excessive_nesting_threshold;
store.register_early_pass(move || {
Box::new(excessive_nesting::ExcessiveNesting {
excessive_nesting_threshold,
@@ -1010,15 +1023,12 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(redundant_type_annotations::RedundantTypeAnnotations));
store.register_late_pass(|_| Box::new(arc_with_non_send_sync::ArcWithNonSendSync));
store.register_late_pass(|_| Box::new(needless_if::NeedlessIf));
- let allowed_idents_below_min_chars = conf.allowed_idents_below_min_chars.clone();
- let min_ident_chars_threshold = conf.min_ident_chars_threshold;
store.register_late_pass(move |_| {
Box::new(min_ident_chars::MinIdentChars {
allowed_idents_below_min_chars: allowed_idents_below_min_chars.clone(),
min_ident_chars_threshold,
})
});
- let stack_size_threshold = conf.stack_size_threshold;
store.register_late_pass(move |_| Box::new(large_stack_frames::LargeStackFrames::new(stack_size_threshold)));
store.register_late_pass(|_| Box::new(single_range_in_vec_init::SingleRangeInVecInit));
store.register_late_pass(move |_| {
@@ -1033,10 +1043,9 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
def_id_to_usage: rustc_data_structures::fx::FxHashMap::default(),
})
});
- let needless_raw_string_hashes_allow_one = conf.allow_one_hash_in_raw_strings;
store.register_early_pass(move || {
Box::new(raw_strings::RawStrings {
- needless_raw_string_hashes_allow_one,
+ allow_one_hash_in_raw_strings,
})
});
store.register_late_pass(|_| Box::new(manual_range_patterns::ManualRangePatterns));
@@ -1045,8 +1054,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(manual_float_methods::ManualFloatMethods));
store.register_late_pass(|_| Box::new(four_forward_slashes::FourForwardSlashes));
store.register_late_pass(|_| Box::new(error_impl_error::ErrorImplError));
- let absolute_paths_max_segments = conf.absolute_paths_max_segments;
- let absolute_paths_allowed_crates = conf.absolute_paths_allowed_crates.clone();
store.register_late_pass(move |_| {
Box::new(absolute_paths::AbsolutePaths {
absolute_paths_max_segments,
@@ -1066,6 +1073,11 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
});
store.register_late_pass(move |_| Box::new(manual_hash_one::ManualHashOne::new(msrv())));
store.register_late_pass(|_| Box::new(iter_without_into_iter::IterWithoutIntoIter));
+ store.register_late_pass(|_| Box::new(iter_over_hash_type::IterOverHashType));
+ store.register_late_pass(|_| Box::new(impl_hash_with_borrow_str_and_bytes::ImplHashWithBorrowStrBytes));
+ store.register_late_pass(|_| Box::new(repeat_vec_with_capacity::RepeatVecWithCapacity));
+ store.register_late_pass(|_| Box::new(uninhabited_references::UninhabitedReferences));
+ store.register_late_pass(|_| Box::new(ineffective_open_options::IneffectiveOpenOptions));
// add lints here, do not remove this comment, it's used in `new_lint`
}
diff --git a/src/tools/clippy/clippy_lints/src/lifetimes.rs b/src/tools/clippy/clippy_lints/src/lifetimes.rs
index 7517003be..ffef84d1f 100644
--- a/src/tools/clippy/clippy_lints/src/lifetimes.rs
+++ b/src/tools/clippy/clippy_lints/src/lifetimes.rs
@@ -18,10 +18,10 @@ use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::hir::nested_filter as middle_nested_filter;
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
-use rustc_span::Span;
use rustc_span::symbol::{kw, Ident, Symbol};
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
@@ -195,7 +195,7 @@ fn check_fn_inner<'tcx>(
.iter()
// In principle, the result of the call to `Node::ident` could be `unwrap`ped, as `DefId` should refer to a
// `Node::GenericParam`.
- .filter_map(|&def_id| cx.tcx.hir().get_by_def_id(def_id).ident())
+ .filter_map(|&def_id| cx.tcx.hir_node_by_def_id(def_id).ident())
.map(|ident| ident.to_string())
.collect::<Vec<_>>()
.join(", ");
@@ -310,20 +310,17 @@ fn elision_suggestions(
// elision doesn't work for explicit self types, see rust-lang/rust#69064
fn explicit_self_type<'tcx>(cx: &LateContext<'tcx>, func: &FnDecl<'tcx>, ident: Option<Ident>) -> bool {
- if_chain! {
- if let Some(ident) = ident;
- if ident.name == kw::SelfLower;
- if !func.implicit_self.has_implicit_self();
-
- if let Some(self_ty) = func.inputs.first();
- then {
- let mut visitor = RefVisitor::new(cx);
- visitor.visit_ty(self_ty);
-
- !visitor.all_lts().is_empty()
- } else {
- false
- }
+ if let Some(ident) = ident
+ && ident.name == kw::SelfLower
+ && !func.implicit_self.has_implicit_self()
+ && let Some(self_ty) = func.inputs.first()
+ {
+ let mut visitor = RefVisitor::new(cx);
+ visitor.visit_ty(self_ty);
+
+ !visitor.all_lts().is_empty()
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
index 0a5f5a80c..8a0955147 100644
--- a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
+++ b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
@@ -4,7 +4,7 @@ use clippy_utils::{is_diag_item_method, is_trait_method, match_def_path, path_to
use rustc_errors::Applicability;
use rustc_hir::{Body, Closure, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -53,18 +53,45 @@ declare_clippy_lint! {
#[clippy::version = "1.70.0"]
pub LINES_FILTER_MAP_OK,
suspicious,
- "filtering `std::io::Lines` with `filter_map()` or `flat_map()` might cause an infinite loop"
+ "filtering `std::io::Lines` with `filter_map()`, `flat_map()`, or `flatten()` might cause an infinite loop"
}
declare_lint_pass!(LinesFilterMapOk => [LINES_FILTER_MAP_OK]);
impl LateLintPass<'_> for LinesFilterMapOk {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- if let ExprKind::MethodCall(fm_method, fm_receiver, [fm_arg], fm_span) = expr.kind
+ if let ExprKind::MethodCall(fm_method, fm_receiver, fm_args, fm_span) = expr.kind
&& is_trait_method(cx, expr, sym::Iterator)
- && (fm_method.ident.as_str() == "filter_map" || fm_method.ident.as_str() == "flat_map")
+ && let fm_method_str = fm_method.ident.as_str()
+ && matches!(fm_method_str, "filter_map" | "flat_map" | "flatten")
&& is_type_diagnostic_item(cx, cx.typeck_results().expr_ty_adjusted(fm_receiver), sym::IoLines)
+ && should_lint(cx, fm_args, fm_method_str)
{
- let lint = match &fm_arg.kind {
+ span_lint_and_then(
+ cx,
+ LINES_FILTER_MAP_OK,
+ fm_span,
+ &format!("`{fm_method_str}()` will run forever if the iterator repeatedly produces an `Err`",),
+ |diag| {
+ diag.span_note(
+ fm_receiver.span,
+ "this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error");
+ diag.span_suggestion(
+ fm_span,
+ "replace with",
+ "map_while(Result::ok)",
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+}
+
+fn should_lint(cx: &LateContext<'_>, args: &[Expr<'_>], method_str: &str) -> bool {
+ match args {
+ [] => method_str == "flatten",
+ [fm_arg] => {
+ match &fm_arg.kind {
// Detect `Result::ok`
ExprKind::Path(qpath) => cx
.qpath_res(qpath, fm_arg.hir_id)
@@ -86,29 +113,8 @@ impl LateLintPass<'_> for LinesFilterMapOk {
}
},
_ => false,
- };
- if lint {
- span_lint_and_then(
- cx,
- LINES_FILTER_MAP_OK,
- fm_span,
- &format!(
- "`{}()` will run forever if the iterator repeatedly produces an `Err`",
- fm_method.ident
- ),
- |diag| {
- diag.span_note(
- fm_receiver.span,
- "this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error");
- diag.span_suggestion(
- fm_span,
- "replace with",
- "map_while(Result::ok)",
- Applicability::MaybeIncorrect,
- );
- },
- );
}
- }
+ },
+ _ => false,
}
}
diff --git a/src/tools/clippy/clippy_lints/src/literal_representation.rs b/src/tools/clippy/clippy_lints/src/literal_representation.rs
index 2c14bb72a..f33151cf4 100644
--- a/src/tools/clippy/clippy_lints/src/literal_representation.rs
+++ b/src/tools/clippy/clippy_lints/src/literal_representation.rs
@@ -4,13 +4,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::numeric_literal::{NumericLiteral, Radix};
use clippy_utils::source::snippet_opt;
-use if_chain::if_chain;
use rustc_ast::ast::{Expr, ExprKind, LitKind};
use rustc_ast::token;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
use std::iter;
@@ -255,56 +254,48 @@ impl LiteralDigitGrouping {
}
fn check_lit(self, cx: &EarlyContext<'_>, lit: token::Lit, span: Span) {
- if_chain! {
- if let Some(src) = snippet_opt(cx, span);
- if let Ok(lit_kind) = LitKind::from_token_lit(lit);
- if let Some(mut num_lit) = NumericLiteral::from_lit_kind(&src, &lit_kind);
- then {
- if !Self::check_for_mistyped_suffix(cx, span, &mut num_lit) {
- return;
- }
+ if let Some(src) = snippet_opt(cx, span)
+ && let Ok(lit_kind) = LitKind::from_token_lit(lit)
+ && let Some(mut num_lit) = NumericLiteral::from_lit_kind(&src, &lit_kind)
+ {
+ if !Self::check_for_mistyped_suffix(cx, span, &mut num_lit) {
+ return;
+ }
+
+ if Self::is_literal_uuid_formatted(&num_lit) {
+ return;
+ }
- if Self::is_literal_uuid_formatted(&num_lit) {
- return;
+ let result = (|| {
+ let integral_group_size = Self::get_group_size(num_lit.integer.split('_'), num_lit.radix, true)?;
+ if let Some(fraction) = num_lit.fraction {
+ let fractional_group_size =
+ Self::get_group_size(fraction.rsplit('_'), num_lit.radix, self.lint_fraction_readability)?;
+
+ let consistent = Self::parts_consistent(
+ integral_group_size,
+ fractional_group_size,
+ num_lit.integer.len(),
+ fraction.len(),
+ );
+ if !consistent {
+ return Err(WarningType::InconsistentDigitGrouping);
+ };
}
- let result = (|| {
-
- let integral_group_size = Self::get_group_size(num_lit.integer.split('_'), num_lit.radix, true)?;
- if let Some(fraction) = num_lit.fraction {
- let fractional_group_size = Self::get_group_size(
- fraction.rsplit('_'),
- num_lit.radix,
- self.lint_fraction_readability)?;
-
- let consistent = Self::parts_consistent(integral_group_size,
- fractional_group_size,
- num_lit.integer.len(),
- fraction.len());
- if !consistent {
- return Err(WarningType::InconsistentDigitGrouping);
- };
- }
+ Ok(())
+ })();
- Ok(())
- })();
-
-
- if let Err(warning_type) = result {
- let should_warn = match warning_type {
- | WarningType::UnreadableLiteral
- | WarningType::InconsistentDigitGrouping
- | WarningType::UnusualByteGroupings
- | WarningType::LargeDigitGroups => {
- !span.from_expansion()
- }
- WarningType::DecimalRepresentation | WarningType::MistypedLiteralSuffix => {
- true
- }
- };
- if should_warn {
- warning_type.display(num_lit.format(), cx, span);
- }
+ if let Err(warning_type) = result {
+ let should_warn = match warning_type {
+ WarningType::UnreadableLiteral
+ | WarningType::InconsistentDigitGrouping
+ | WarningType::UnusualByteGroupings
+ | WarningType::LargeDigitGroups => !span.from_expansion(),
+ WarningType::DecimalRepresentation | WarningType::MistypedLiteralSuffix => true,
+ };
+ if should_warn {
+ warning_type.display(num_lit.format(), cx, span);
}
}
}
@@ -478,20 +469,18 @@ impl DecimalLiteralRepresentation {
}
fn check_lit(self, cx: &EarlyContext<'_>, lit: token::Lit, span: Span) {
// Lint integral literals.
- if_chain! {
- if let Ok(lit_kind) = LitKind::from_token_lit(lit);
- if let LitKind::Int(val, _) = lit_kind;
- if let Some(src) = snippet_opt(cx, span);
- if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit_kind);
- if num_lit.radix == Radix::Decimal;
- if val >= u128::from(self.threshold);
- then {
- let hex = format!("{val:#X}");
- let num_lit = NumericLiteral::new(&hex, num_lit.suffix, false);
- let _: Result<(), ()> = Self::do_lint(num_lit.integer).map_err(|warning_type| {
- warning_type.display(num_lit.format(), cx, span);
- });
- }
+ if let Ok(lit_kind) = LitKind::from_token_lit(lit)
+ && let LitKind::Int(val, _) = lit_kind
+ && let Some(src) = snippet_opt(cx, span)
+ && let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit_kind)
+ && num_lit.radix == Radix::Decimal
+ && val >= u128::from(self.threshold)
+ {
+ let hex = format!("{val:#X}");
+ let num_lit = NumericLiteral::new(&hex, num_lit.suffix, false);
+ let _: Result<(), ()> = Self::do_lint(num_lit.integer).map_err(|warning_type| {
+ warning_type.display(num_lit.format(), cx, span);
+ });
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs
index 1953ee8a7..277062a84 100644
--- a/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs
@@ -2,7 +2,6 @@ use super::{make_iterator_snippet, IncrementVisitor, InitializeVisitor, EXPLICIT
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{get_enclosing_block, is_integer_const};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_block, walk_expr};
use rustc_hir::{Expr, Pat};
@@ -30,59 +29,57 @@ pub(super) fn check<'tcx>(
let mut initialize_visitor = InitializeVisitor::new(cx, expr, id);
walk_block(&mut initialize_visitor, block);
- if_chain! {
- if let Some((name, ty, initializer)) = initialize_visitor.get_result();
- if is_integer_const(cx, initializer, 0);
- then {
- let mut applicability = Applicability::MaybeIncorrect;
- let span = expr.span.with_hi(arg.span.hi());
+ if let Some((name, ty, initializer)) = initialize_visitor.get_result()
+ && is_integer_const(cx, initializer, 0)
+ {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let span = expr.span.with_hi(arg.span.hi());
- let int_name = match ty.map(Ty::kind) {
- // usize or inferred
- Some(ty::Uint(UintTy::Usize)) | None => {
- span_lint_and_sugg(
- cx,
- EXPLICIT_COUNTER_LOOP,
- span,
- &format!("the variable `{name}` is used as a loop counter"),
- "consider using",
- format!(
- "for ({name}, {}) in {}.enumerate()",
- snippet_with_applicability(cx, pat.span, "item", &mut applicability),
- make_iterator_snippet(cx, arg, &mut applicability),
- ),
- applicability,
- );
- return;
- }
- Some(ty::Int(int_ty)) => int_ty.name_str(),
- Some(ty::Uint(uint_ty)) => uint_ty.name_str(),
- _ => return,
- };
+ let int_name = match ty.map(Ty::kind) {
+ // usize or inferred
+ Some(ty::Uint(UintTy::Usize)) | None => {
+ span_lint_and_sugg(
+ cx,
+ EXPLICIT_COUNTER_LOOP,
+ span,
+ &format!("the variable `{name}` is used as a loop counter"),
+ "consider using",
+ format!(
+ "for ({name}, {}) in {}.enumerate()",
+ snippet_with_applicability(cx, pat.span, "item", &mut applicability),
+ make_iterator_snippet(cx, arg, &mut applicability),
+ ),
+ applicability,
+ );
+ return;
+ },
+ Some(ty::Int(int_ty)) => int_ty.name_str(),
+ Some(ty::Uint(uint_ty)) => uint_ty.name_str(),
+ _ => return,
+ };
- span_lint_and_then(
- cx,
- EXPLICIT_COUNTER_LOOP,
- span,
- &format!("the variable `{name}` is used as a loop counter"),
- |diag| {
- diag.span_suggestion(
- span,
- "consider using",
- format!(
- "for ({name}, {}) in (0_{int_name}..).zip({})",
- snippet_with_applicability(cx, pat.span, "item", &mut applicability),
- make_iterator_snippet(cx, arg, &mut applicability),
- ),
- applicability,
- );
+ span_lint_and_then(
+ cx,
+ EXPLICIT_COUNTER_LOOP,
+ span,
+ &format!("the variable `{name}` is used as a loop counter"),
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "consider using",
+ format!(
+ "for ({name}, {}) in (0_{int_name}..).zip({})",
+ snippet_with_applicability(cx, pat.span, "item", &mut applicability),
+ make_iterator_snippet(cx, arg, &mut applicability),
+ ),
+ applicability,
+ );
- diag.note(format!(
- "`{name}` is of type `{int_name}`, making it ineligible for `Iterator::enumerate`"
- ));
- },
- );
- }
+ diag.note(format!(
+ "`{name}` is of type `{int_name}`, making it ineligible for `Iterator::enumerate`"
+ ));
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
index 1c2b7a169..c79800608 100644
--- a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
@@ -118,7 +118,7 @@ fn is_ref_iterable<'tcx>(
.liberate_late_bound_regions(fn_id, cx.tcx.fn_sig(fn_id).skip_binder())
&& let &[req_self_ty, req_res_ty] = &**sig.inputs_and_output
&& let param_env = cx.tcx.param_env(fn_id)
- && implements_trait_with_env(cx.tcx, param_env, req_self_ty, trait_id, &[])
+ && implements_trait_with_env(cx.tcx, param_env, req_self_ty, trait_id, fn_id, &[])
&& let Some(into_iter_ty) =
make_normalized_projection_with_regions(cx.tcx, param_env, trait_id, sym!(IntoIter), [req_self_ty])
&& let req_res_ty = normalize_with_regions(cx.tcx, param_env, req_res_ty)
diff --git a/src/tools/clippy/clippy_lints/src/loops/infinite_loop.rs b/src/tools/clippy/clippy_lints/src/loops/infinite_loop.rs
new file mode 100644
index 000000000..9b88dd76e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/infinite_loop.rs
@@ -0,0 +1,125 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{fn_def_id, is_lint_allowed};
+use hir::intravisit::{walk_expr, Visitor};
+use hir::{Expr, ExprKind, FnRetTy, FnSig, Node};
+use rustc_ast::Label;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+use super::INFINITE_LOOP;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &Expr<'_>,
+ loop_block: &'tcx hir::Block<'_>,
+ label: Option<Label>,
+) {
+ if is_lint_allowed(cx, INFINITE_LOOP, expr.hir_id) {
+ return;
+ }
+
+ // Skip check if this loop is not in a function/method/closure. (In some weird case)
+ let Some(parent_fn_ret) = get_parent_fn_ret_ty(cx, expr) else {
+ return;
+ };
+ // Or, its parent function is already returning `Never`
+ if matches!(
+ parent_fn_ret,
+ FnRetTy::Return(hir::Ty {
+ kind: hir::TyKind::Never,
+ ..
+ })
+ ) {
+ return;
+ }
+
+ let mut loop_visitor = LoopVisitor {
+ cx,
+ label,
+ is_finite: false,
+ loop_depth: 0,
+ };
+ loop_visitor.visit_block(loop_block);
+
+ let is_finite_loop = loop_visitor.is_finite;
+
+ if !is_finite_loop {
+ span_lint_and_then(cx, INFINITE_LOOP, expr.span, "infinite loop detected", |diag| {
+ if let FnRetTy::DefaultReturn(ret_span) = parent_fn_ret {
+ diag.span_suggestion(
+ ret_span,
+ "if this is intentional, consider specifing `!` as function return",
+ " -> !",
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ diag.help("if this is not intended, try adding a `break` or `return` condition in the loop");
+ }
+ });
+ }
+}
+
+fn get_parent_fn_ret_ty<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> Option<FnRetTy<'tcx>> {
+ for (_, parent_node) in cx.tcx.hir().parent_iter(expr.hir_id) {
+ match parent_node {
+ Node::Item(hir::Item {
+ kind: hir::ItemKind::Fn(FnSig { decl, .. }, _, _),
+ ..
+ })
+ | Node::TraitItem(hir::TraitItem {
+ kind: hir::TraitItemKind::Fn(FnSig { decl, .. }, _),
+ ..
+ })
+ | Node::ImplItem(hir::ImplItem {
+ kind: hir::ImplItemKind::Fn(FnSig { decl, .. }, _),
+ ..
+ })
+ | Node::Expr(Expr {
+ kind: ExprKind::Closure(hir::Closure { fn_decl: decl, .. }),
+ ..
+ }) => return Some(decl.output),
+ _ => (),
+ }
+ }
+ None
+}
+
+struct LoopVisitor<'hir, 'tcx> {
+ cx: &'hir LateContext<'tcx>,
+ label: Option<Label>,
+ loop_depth: usize,
+ is_finite: bool,
+}
+
+impl<'hir> Visitor<'hir> for LoopVisitor<'hir, '_> {
+ fn visit_expr(&mut self, ex: &'hir Expr<'_>) {
+ match &ex.kind {
+ ExprKind::Break(hir::Destination { label, .. }, ..) => {
+ // Assuming breaks the loop when `loop_depth` is 0,
+ // as it could only means this `break` breaks current loop or any of its upper loop.
+ // Or, the depth is not zero but the label is matched.
+ if self.loop_depth == 0 || (label.is_some() && *label == self.label) {
+ self.is_finite = true;
+ }
+ },
+ ExprKind::Ret(..) => self.is_finite = true,
+ ExprKind::Loop(..) => {
+ self.loop_depth += 1;
+ walk_expr(self, ex);
+ self.loop_depth = self.loop_depth.saturating_sub(1);
+ },
+ _ => {
+ // Calls to a function that never return
+ if let Some(did) = fn_def_id(self.cx, ex) {
+ let fn_ret_ty = self.cx.tcx.fn_sig(did).skip_binder().output().skip_binder();
+ if fn_ret_ty.is_never() {
+ self.is_finite = true;
+ return;
+ }
+ }
+ walk_expr(self, ex);
+ },
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
index a9a9058c9..d484ce40d 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
@@ -4,7 +4,6 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::implements_trait;
use clippy_utils::{higher, is_res_lang_ctor, path_res, peel_blocks_with_stmt};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::lang_items::LangItem;
@@ -23,77 +22,79 @@ pub(super) fn check<'tcx>(
let inner_expr = peel_blocks_with_stmt(body);
// Check for the specific case that the result is returned and optimize suggestion for that (more
// cases can be added later)
- if_chain! {
- if let Some(higher::If { cond, then, r#else: None, }) = higher::If::hir(inner_expr);
- if let Some(binding_id) = get_binding(pat);
- if let ExprKind::Block(block, _) = then.kind;
- if let [stmt] = block.stmts;
- if let StmtKind::Semi(semi) = stmt.kind;
- if let ExprKind::Ret(Some(ret_value)) = semi.kind;
- if let ExprKind::Call(ctor, [inner_ret]) = ret_value.kind;
- if is_res_lang_ctor(cx, path_res(cx, ctor), LangItem::OptionSome);
- if path_res(cx, inner_ret) == Res::Local(binding_id);
- if let Some((last_stmt, last_ret)) = last_stmt_and_ret(cx, expr);
- then {
- let mut applicability = Applicability::MachineApplicable;
- let mut snippet = make_iterator_snippet(cx, arg, &mut applicability);
- // Checks if `pat` is a single reference to a binding (`&x`)
- let is_ref_to_binding =
- matches!(pat.kind, PatKind::Ref(inner, _) if matches!(inner.kind, PatKind::Binding(..)));
- // If `pat` is not a binding or a reference to a binding (`x` or `&x`)
- // we need to map it to the binding returned by the function (i.e. `.map(|(x, _)| x)`)
- if !(matches!(pat.kind, PatKind::Binding(..)) || is_ref_to_binding) {
- snippet.push_str(
- &format!(
- ".map(|{}| {})",
- snippet_with_applicability(cx, pat.span, "..", &mut applicability),
- snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
- )[..],
- );
- }
- let ty = cx.typeck_results().expr_ty(inner_ret);
- if cx.tcx.lang_items().copy_trait().map_or(false, |id| implements_trait(cx, ty, id, &[])) {
- snippet.push_str(
- &format!(
- ".find(|{}{}| {})",
- "&".repeat(1 + usize::from(is_ref_to_binding)),
- snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
- snippet_with_applicability(cx, cond.span, "..", &mut applicability),
- )[..],
- );
- if is_ref_to_binding {
- snippet.push_str(".copied()");
- }
- } else {
- applicability = Applicability::MaybeIncorrect;
- snippet.push_str(
- &format!(
- ".find(|{}| {})",
- snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
- snippet_with_applicability(cx, cond.span, "..", &mut applicability),
- )[..],
- );
+ if let Some(higher::If {
+ cond,
+ then,
+ r#else: None,
+ }) = higher::If::hir(inner_expr)
+ && let Some(binding_id) = get_binding(pat)
+ && let ExprKind::Block(block, _) = then.kind
+ && let [stmt] = block.stmts
+ && let StmtKind::Semi(semi) = stmt.kind
+ && let ExprKind::Ret(Some(ret_value)) = semi.kind
+ && let ExprKind::Call(ctor, [inner_ret]) = ret_value.kind
+ && is_res_lang_ctor(cx, path_res(cx, ctor), LangItem::OptionSome)
+ && path_res(cx, inner_ret) == Res::Local(binding_id)
+ && let Some((last_stmt, last_ret)) = last_stmt_and_ret(cx, expr)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let mut snippet = make_iterator_snippet(cx, arg, &mut applicability);
+ // Checks if `pat` is a single reference to a binding (`&x`)
+ let is_ref_to_binding =
+ matches!(pat.kind, PatKind::Ref(inner, _) if matches!(inner.kind, PatKind::Binding(..)));
+ // If `pat` is not a binding or a reference to a binding (`x` or `&x`)
+ // we need to map it to the binding returned by the function (i.e. `.map(|(x, _)| x)`)
+ if !(matches!(pat.kind, PatKind::Binding(..)) || is_ref_to_binding) {
+ snippet.push_str(
+ &format!(
+ ".map(|{}| {})",
+ snippet_with_applicability(cx, pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
+ )[..],
+ );
+ }
+ let ty = cx.typeck_results().expr_ty(inner_ret);
+ if cx
+ .tcx
+ .lang_items()
+ .copy_trait()
+ .map_or(false, |id| implements_trait(cx, ty, id, &[]))
+ {
+ snippet.push_str(
+ &format!(
+ ".find(|{}{}| {})",
+ "&".repeat(1 + usize::from(is_ref_to_binding)),
+ snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
+ snippet_with_applicability(cx, cond.span, "..", &mut applicability),
+ )[..],
+ );
+ if is_ref_to_binding {
+ snippet.push_str(".copied()");
}
- // Extends to `last_stmt` to include semicolon in case of `return None;`
- let lint_span = span.to(last_stmt.span).to(last_ret.span);
- span_lint_and_then(
- cx,
- MANUAL_FIND,
- lint_span,
- "manual implementation of `Iterator::find`",
- |diag| {
- if applicability == Applicability::MaybeIncorrect {
- diag.note("you may need to dereference some variables");
- }
- diag.span_suggestion(
- lint_span,
- "replace with an iterator",
- snippet,
- applicability,
- );
- },
+ } else {
+ applicability = Applicability::MaybeIncorrect;
+ snippet.push_str(
+ &format!(
+ ".find(|{}| {})",
+ snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
+ snippet_with_applicability(cx, cond.span, "..", &mut applicability),
+ )[..],
);
}
+ // Extends to `last_stmt` to include semicolon in case of `return None;`
+ let lint_span = span.to(last_stmt.span).to(last_ret.span);
+ span_lint_and_then(
+ cx,
+ MANUAL_FIND,
+ lint_span,
+ "manual implementation of `Iterator::find`",
+ |diag| {
+ if applicability == Applicability::MaybeIncorrect {
+ diag.note("you may need to dereference some variables");
+ }
+ diag.span_suggestion(lint_span, "replace with an iterator", snippet, applicability);
+ },
+ );
}
}
@@ -124,34 +125,30 @@ fn last_stmt_and_ret<'tcx>(
if let Some(ret) = block.expr {
return Some((last_stmt, ret));
}
- if_chain! {
- if let [.., snd_last, _] = block.stmts;
- if let StmtKind::Semi(last_expr) = last_stmt.kind;
- if let ExprKind::Ret(Some(ret)) = last_expr.kind;
- then {
- return Some((snd_last, ret));
- }
+ if let [.., snd_last, _] = block.stmts
+ && let StmtKind::Semi(last_expr) = last_stmt.kind
+ && let ExprKind::Ret(Some(ret)) = last_expr.kind
+ {
+ return Some((snd_last, ret));
}
}
None
}
let mut parent_iter = cx.tcx.hir().parent_iter(expr.hir_id);
- if_chain! {
+ if let Some((node_hir, Node::Stmt(..))) = parent_iter.next()
// This should be the loop
- if let Some((node_hir, Node::Stmt(..))) = parent_iter.next();
// This should be the function body
- if let Some((_, Node::Block(block))) = parent_iter.next();
- if let Some((last_stmt, last_ret)) = extract(block);
- if last_stmt.hir_id == node_hir;
- if is_res_lang_ctor(cx, path_res(cx, last_ret), LangItem::OptionNone);
- if let Some((_, Node::Expr(_block))) = parent_iter.next();
+ && let Some((_, Node::Block(block))) = parent_iter.next()
+ && let Some((last_stmt, last_ret)) = extract(block)
+ && last_stmt.hir_id == node_hir
+ && is_res_lang_ctor(cx, path_res(cx, last_ret), LangItem::OptionNone)
+ && let Some((_, Node::Expr(_block))) = parent_iter.next()
// This includes the function header
- if let Some((_, func)) = parent_iter.next();
- if func.fn_kind().is_some();
- then {
- Some((block.stmts.last().unwrap(), last_ret))
- } else {
- None
- }
+ && let Some((_, func)) = parent_iter.next()
+ && func.fn_kind().is_some()
+ {
+ Some((block.stmts.last().unwrap(), last_ret))
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
index 124a35f8f..a726b1169 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
@@ -3,7 +3,6 @@ use super::MANUAL_FLATTEN;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{higher, path_to_local_id, peel_blocks_with_stmt};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Expr, Pat, PatKind};
@@ -21,66 +20,51 @@ pub(super) fn check<'tcx>(
span: Span,
) {
let inner_expr = peel_blocks_with_stmt(body);
- if_chain! {
- if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
- = higher::IfLet::hir(cx, inner_expr);
+ if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
+ = higher::IfLet::hir(cx, inner_expr)
// Ensure match_expr in `if let` statement is the same as the pat from the for-loop
- if let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind;
- if path_to_local_id(let_expr, pat_hir_id);
+ && let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind
+ && path_to_local_id(let_expr, pat_hir_id)
// Ensure the `if let` statement is for the `Some` variant of `Option` or the `Ok` variant of `Result`
- if let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind;
- if let Res::Def(DefKind::Ctor(..), ctor_id) = cx.qpath_res(qpath, let_pat.hir_id);
- if let Some(variant_id) = cx.tcx.opt_parent(ctor_id);
- let some_ctor = cx.tcx.lang_items().option_some_variant() == Some(variant_id);
- let ok_ctor = cx.tcx.lang_items().result_ok_variant() == Some(variant_id);
- if some_ctor || ok_ctor;
+ && let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind
+ && let Res::Def(DefKind::Ctor(..), ctor_id) = cx.qpath_res(qpath, let_pat.hir_id)
+ && let Some(variant_id) = cx.tcx.opt_parent(ctor_id)
+ && let some_ctor = cx.tcx.lang_items().option_some_variant() == Some(variant_id)
+ && let ok_ctor = cx.tcx.lang_items().result_ok_variant() == Some(variant_id)
+ && (some_ctor || ok_ctor)
// Ensure expr in `if let` is not used afterwards
- if !is_local_used(cx, if_then, pat_hir_id);
- then {
- let if_let_type = if some_ctor { "Some" } else { "Ok" };
- // Prepare the error message
- let msg = format!("unnecessary `if let` since only the `{if_let_type}` variant of the iterator element is used");
+ && !is_local_used(cx, if_then, pat_hir_id)
+ {
+ let if_let_type = if some_ctor { "Some" } else { "Ok" };
+ // Prepare the error message
+ let msg =
+ format!("unnecessary `if let` since only the `{if_let_type}` variant of the iterator element is used");
- // Prepare the help message
- let mut applicability = Applicability::MaybeIncorrect;
- let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
- let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
- ty::Ref(_, inner, _) => match inner.kind() {
- ty::Ref(..) => ".copied()",
- _ => ""
- }
- _ => ""
- };
+ // Prepare the help message
+ let mut applicability = Applicability::MaybeIncorrect;
+ let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
+ let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
+ ty::Ref(_, inner, _) => match inner.kind() {
+ ty::Ref(..) => ".copied()",
+ _ => "",
+ },
+ _ => "",
+ };
- let sugg = format!("{arg_snippet}{copied}.flatten()");
+ let sugg = format!("{arg_snippet}{copied}.flatten()");
- // If suggestion is not a one-liner, it won't be shown inline within the error message. In that case,
- // it will be shown in the extra `help` message at the end, which is why the first `help_msg` needs
- // to refer to the correct relative position of the suggestion.
- let help_msg = if sugg.contains('\n') {
- "remove the `if let` statement in the for loop and then..."
- } else {
- "...and remove the `if let` statement in the for loop"
- };
+ // If suggestion is not a one-liner, it won't be shown inline within the error message. In that
+ // case, it will be shown in the extra `help` message at the end, which is why the first
+ // `help_msg` needs to refer to the correct relative position of the suggestion.
+ let help_msg = if sugg.contains('\n') {
+ "remove the `if let` statement in the for loop and then..."
+ } else {
+ "...and remove the `if let` statement in the for loop"
+ };
- span_lint_and_then(
- cx,
- MANUAL_FLATTEN,
- span,
- &msg,
- |diag| {
- diag.span_suggestion(
- arg.span,
- "try",
- sugg,
- applicability,
- );
- diag.span_help(
- inner_expr.span,
- help_msg,
- );
- }
- );
- }
+ span_lint_and_then(cx, MANUAL_FLATTEN, span, &msg, |diag| {
+ diag.span_suggestion(arg.span, "try", sugg, applicability);
+ diag.span_help(inner_expr.span, help_msg);
+ });
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
index d3fd0e863..fda6c9749 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
@@ -4,7 +4,6 @@ use clippy_utils::source::snippet;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::is_copy;
use clippy_utils::{get_enclosing_block, higher, path_to_local, sugg};
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir::intravisit::walk_block;
@@ -13,7 +12,6 @@ use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
use rustc_span::symbol::sym;
use std::fmt::Display;
-use std::iter::Iterator;
/// Checks for `for` loops that sequentially copy items from one slice-like
/// object to another.
@@ -59,22 +57,31 @@ pub(super) fn check<'tcx>(
.map(|o| {
o.and_then(|(lhs, rhs)| {
let rhs = fetch_cloned_expr(rhs);
- if_chain! {
- if let ExprKind::Index(base_left, idx_left, _) = lhs.kind;
- if let ExprKind::Index(base_right, idx_right, _) = rhs.kind;
- if let Some(ty) = get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_left));
- if get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_right)).is_some();
- if let Some((start_left, offset_left)) = get_details_from_idx(cx, idx_left, &starts);
- if let Some((start_right, offset_right)) = get_details_from_idx(cx, idx_right, &starts);
+ if let ExprKind::Index(base_left, idx_left, _) = lhs.kind
+ && let ExprKind::Index(base_right, idx_right, _) = rhs.kind
+ && let Some(ty) = get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_left))
+ && get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_right)).is_some()
+ && let Some((start_left, offset_left)) = get_details_from_idx(cx, idx_left, &starts)
+ && let Some((start_right, offset_right)) = get_details_from_idx(cx, idx_right, &starts)
// Source and destination must be different
- if path_to_local(base_left) != path_to_local(base_right);
- then {
- Some((ty, IndexExpr { base: base_left, idx: start_left, idx_offset: offset_left },
- IndexExpr { base: base_right, idx: start_right, idx_offset: offset_right }))
- } else {
- None
- }
+ && path_to_local(base_left) != path_to_local(base_right)
+ {
+ Some((
+ ty,
+ IndexExpr {
+ base: base_left,
+ idx: start_left,
+ idx_offset: offset_left,
+ },
+ IndexExpr {
+ base: base_right,
+ idx: start_right,
+ idx_offset: offset_right,
+ },
+ ))
+ } else {
+ None
}
})
})
@@ -118,23 +125,19 @@ fn build_manual_memcpy_suggestion<'tcx>(
}
let print_limit = |end: &Expr<'_>, end_str: &str, base: &Expr<'_>, sugg: MinifyingSugg<'static>| {
- if_chain! {
- if let ExprKind::MethodCall(method, recv, [], _) = end.kind;
- if method.ident.name == sym::len;
- if path_to_local(recv) == path_to_local(base);
- then {
- if sugg.to_string() == end_str {
- sugg::EMPTY.into()
- } else {
- sugg
- }
+ if let ExprKind::MethodCall(method, recv, [], _) = end.kind
+ && method.ident.name == sym::len
+ && path_to_local(recv) == path_to_local(base)
+ {
+ if sugg.to_string() == end_str {
+ sugg::EMPTY.into()
} else {
- match limits {
- ast::RangeLimits::Closed => {
- sugg + &sugg::ONE.into()
- },
- ast::RangeLimits::HalfOpen => sugg,
- }
+ sugg
+ }
+ } else {
+ match limits {
+ ast::RangeLimits::Closed => sugg + &sugg::ONE.into(),
+ ast::RangeLimits::HalfOpen => sugg,
}
}
};
@@ -174,7 +177,9 @@ fn build_manual_memcpy_suggestion<'tcx>(
let dst_base_str = snippet(cx, dst.base.span, "???");
let src_base_str = snippet(cx, src.base.span, "???");
- let dst = if dst_offset == sugg::EMPTY && dst_limit == sugg::EMPTY {
+ let dst = if (dst_offset == sugg::EMPTY && dst_limit == sugg::EMPTY)
+ || is_array_length_equal_to_range(cx, start, end, dst.base)
+ {
dst_base_str
} else {
format!("{dst_base_str}[{}..{}]", dst_offset.maybe_par(), dst_limit.maybe_par()).into()
@@ -186,11 +191,13 @@ fn build_manual_memcpy_suggestion<'tcx>(
"clone_from_slice"
};
- format!(
- "{dst}.{method_str}(&{src_base_str}[{}..{}]);",
- src_offset.maybe_par(),
- src_limit.maybe_par()
- )
+ let src = if is_array_length_equal_to_range(cx, start, end, src.base) {
+ src_base_str
+ } else {
+ format!("{src_base_str}[{}..{}]", src_offset.maybe_par(), src_limit.maybe_par()).into()
+ };
+
+ format!("{dst}.{method_str}(&{src});")
}
/// a wrapper of `Sugg`. Besides what `Sugg` do, this removes unnecessary `0`;
@@ -331,10 +338,12 @@ fn get_slice_like_element_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Opti
}
fn fetch_cloned_expr<'tcx>(expr: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
- if_chain! {
- if let ExprKind::MethodCall(method, arg, [], _) = expr.kind;
- if method.ident.name == sym::clone;
- then { arg } else { expr }
+ if let ExprKind::MethodCall(method, arg, [], _) = expr.kind
+ && method.ident.name == sym::clone
+ {
+ arg
+ } else {
+ expr
}
}
@@ -446,3 +455,34 @@ fn get_loop_counters<'a, 'tcx>(
.into()
})
}
+
+fn is_array_length_equal_to_range(cx: &LateContext<'_>, start: &Expr<'_>, end: &Expr<'_>, arr: &Expr<'_>) -> bool {
+ fn extract_lit_value(expr: &Expr<'_>) -> Option<u128> {
+ if let ExprKind::Lit(lit) = expr.kind
+ && let ast::LitKind::Int(value, _) = lit.node
+ {
+ Some(value)
+ } else {
+ None
+ }
+ }
+
+ let arr_ty = cx.typeck_results().expr_ty(arr).peel_refs();
+
+ if let ty::Array(_, s) = arr_ty.kind() {
+ let size: u128 = if let Some(size) = s.try_eval_target_usize(cx.tcx, cx.param_env) {
+ size.into()
+ } else {
+ return false;
+ };
+
+ let range = match (extract_lit_value(start), extract_lit_value(end)) {
+ (Some(start_value), Some(end_value)) => end_value - start_value,
+ _ => return false,
+ };
+
+ size == range
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
index 7b7d19c75..e405829b2 100644
--- a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
@@ -31,26 +31,30 @@ fn unpack_cond<'tcx>(cond: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
}
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, body: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Block(Block { stmts: [], expr: None, ..}, _) = body.kind;
- if let ExprKind::MethodCall(method, callee, ..) = unpack_cond(cond).kind;
- if [sym::load, sym::compare_exchange, sym::compare_exchange_weak].contains(&method.ident.name);
- if let ty::Adt(def, _args) = cx.typeck_results().expr_ty(callee).kind();
- if cx.tcx.is_diagnostic_item(sym::AtomicBool, def.did());
- then {
- span_lint_and_sugg(
- cx,
- MISSING_SPIN_LOOP,
- body.span,
- "busy-waiting loop should at least have a spin loop hint",
- "try",
- (if is_no_std_crate(cx) {
- "{ core::hint::spin_loop() }"
- } else {
- "{ std::hint::spin_loop() }"
- }).into(),
- Applicability::MachineApplicable
- );
- }
+ if let ExprKind::Block(
+ Block {
+ stmts: [], expr: None, ..
+ },
+ _,
+ ) = body.kind
+ && let ExprKind::MethodCall(method, callee, ..) = unpack_cond(cond).kind
+ && [sym::load, sym::compare_exchange, sym::compare_exchange_weak].contains(&method.ident.name)
+ && let ty::Adt(def, _args) = cx.typeck_results().expr_ty(callee).kind()
+ && cx.tcx.is_diagnostic_item(sym::AtomicBool, def.did())
+ {
+ span_lint_and_sugg(
+ cx,
+ MISSING_SPIN_LOOP,
+ body.span,
+ "busy-waiting loop should at least have a spin loop hint",
+ "try",
+ (if is_no_std_crate(cx) {
+ "{ core::hint::spin_loop() }"
+ } else {
+ "{ std::hint::spin_loop() }"
+ })
+ .into(),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/mod.rs b/src/tools/clippy/clippy_lints/src/loops/mod.rs
index 67c80fb83..3c9bde86b 100644
--- a/src/tools/clippy/clippy_lints/src/loops/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/mod.rs
@@ -3,6 +3,7 @@ mod explicit_counter_loop;
mod explicit_into_iter_loop;
mod explicit_iter_loop;
mod for_kv_map;
+mod infinite_loop;
mod iter_next_loop;
mod manual_find;
mod manual_flatten;
@@ -24,7 +25,7 @@ use clippy_config::msrvs::Msrv;
use clippy_utils::higher;
use rustc_hir::{Expr, ExprKind, LoopSource, Pat};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
use utils::{make_iterator_snippet, IncrementVisitor, InitializeVisitor};
@@ -635,6 +636,48 @@ declare_clippy_lint! {
"checking for emptiness of a `Vec` in the loop condition and popping an element in the body"
}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for infinite loops in a function where the return type is not `!`
+ /// and lint accordingly.
+ ///
+ /// ### Why is this bad?
+ /// A loop should be gently exited somewhere, or at least mark its parent function as
+ /// never return (`!`).
+ ///
+ /// ### Example
+ /// ```no_run,ignore
+ /// fn run_forever() {
+ /// loop {
+ /// // do something
+ /// }
+ /// }
+ /// ```
+ /// If infinite loops are as intended:
+ /// ```no_run,ignore
+ /// fn run_forever() -> ! {
+ /// loop {
+ /// // do something
+ /// }
+ /// }
+ /// ```
+ /// Otherwise add a `break` or `return` condition:
+ /// ```no_run,ignore
+ /// fn run_forever() {
+ /// loop {
+ /// // do something
+ /// if condition {
+ /// break;
+ /// }
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.75.0"]
+ pub INFINITE_LOOP,
+ restriction,
+ "possibly unintended infinite loop"
+}
+
pub struct Loops {
msrv: Msrv,
enforce_iter_loop_reborrow: bool,
@@ -669,6 +712,7 @@ impl_lint_pass!(Loops => [
MANUAL_FIND,
MANUAL_WHILE_LET_SOME,
UNUSED_ENUMERATE_INDEX,
+ INFINITE_LOOP,
]);
impl<'tcx> LateLintPass<'tcx> for Loops {
@@ -707,10 +751,11 @@ impl<'tcx> LateLintPass<'tcx> for Loops {
// check for `loop { if let {} else break }` that could be `while let`
// (also matches an explicit "match" instead of "if let")
// (even if the "match" or "if let" is used for declaration)
- if let ExprKind::Loop(block, _, LoopSource::Loop, _) = expr.kind {
+ if let ExprKind::Loop(block, label, LoopSource::Loop, _) = expr.kind {
// also check for empty `loop {}` statements, skipping those in #[panic_handler]
empty_loop::check(cx, expr, block);
while_let_loop::check(cx, expr, block);
+ infinite_loop::check(cx, expr, block, label);
}
while_let_on_iterator::check(cx, expr);
diff --git a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
index 2c12d9582..c4e60e98a 100644
--- a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
@@ -1,7 +1,6 @@
use super::MUT_RANGE_BOUND;
use clippy_utils::diagnostics::span_lint_and_note;
use clippy_utils::{get_enclosing_block, higher, path_to_local};
-use if_chain::if_chain;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, Node, PatKind};
use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
@@ -12,19 +11,17 @@ use rustc_middle::ty;
use rustc_span::Span;
pub(super) fn check(cx: &LateContext<'_>, arg: &Expr<'_>, body: &Expr<'_>) {
- if_chain! {
- if let Some(higher::Range {
- start: Some(start),
- end: Some(end),
- ..
- }) = higher::Range::hir(arg);
- let (mut_id_start, mut_id_end) = (check_for_mutability(cx, start), check_for_mutability(cx, end));
- if mut_id_start.is_some() || mut_id_end.is_some();
- then {
- let (span_low, span_high) = check_for_mutation(cx, body, mut_id_start, mut_id_end);
- mut_warn_with_span(cx, span_low);
- mut_warn_with_span(cx, span_high);
- }
+ if let Some(higher::Range {
+ start: Some(start),
+ end: Some(end),
+ ..
+ }) = higher::Range::hir(arg)
+ && let (mut_id_start, mut_id_end) = (check_for_mutability(cx, start), check_for_mutability(cx, end))
+ && (mut_id_start.is_some() || mut_id_end.is_some())
+ {
+ let (span_low, span_high) = check_for_mutation(cx, body, mut_id_start, mut_id_end);
+ mut_warn_with_span(cx, span_low);
+ mut_warn_with_span(cx, span_high);
}
}
@@ -42,13 +39,11 @@ fn mut_warn_with_span(cx: &LateContext<'_>, span: Option<Span>) {
}
fn check_for_mutability(cx: &LateContext<'_>, bound: &Expr<'_>) -> Option<HirId> {
- if_chain! {
- if let Some(hir_id) = path_to_local(bound);
- if let Node::Pat(pat) = cx.tcx.hir().get(hir_id);
- if let PatKind::Binding(BindingAnnotation::MUT, ..) = pat.kind;
- then {
- return Some(hir_id);
- }
+ if let Some(hir_id) = path_to_local(bound)
+ && let Node::Pat(pat) = cx.tcx.hir_node(hir_id)
+ && let PatKind::Binding(BindingAnnotation::MUT, ..) = pat.kind
+ {
+ return Some(hir_id);
}
None
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
index c4af46b8f..4acf46f73 100644
--- a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
@@ -4,7 +4,6 @@ use clippy_utils::source::snippet;
use clippy_utils::ty::has_iter_method;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{contains_name, higher, is_integer_const, sugg, SpanlessEq};
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::def::{DefKind, Res};
@@ -14,8 +13,7 @@ use rustc_lint::LateContext;
use rustc_middle::middle::region;
use rustc_middle::ty::{self, Ty};
use rustc_span::symbol::{sym, Symbol};
-use std::iter::{self, Iterator};
-use std::mem;
+use std::{iter, mem};
/// Checks for looping over a range and then indexing a sequence with it.
/// The iteratee must be a range literal.
@@ -187,15 +185,13 @@ pub(super) fn check<'tcx>(
}
fn is_len_call(expr: &Expr<'_>, var: Symbol) -> bool {
- if_chain! {
- if let ExprKind::MethodCall(method, recv, [], _) = expr.kind;
- if method.ident.name == sym::len;
- if let ExprKind::Path(QPath::Resolved(_, path)) = recv.kind;
- if path.segments.len() == 1;
- if path.segments[0].ident.name == var;
- then {
- return true;
- }
+ if let ExprKind::MethodCall(method, recv, [], _) = expr.kind
+ && method.ident.name == sym::len
+ && let ExprKind::Path(QPath::Resolved(_, path)) = recv.kind
+ && path.segments.len() == 1
+ && path.segments[0].ident.name == var
+ {
+ return true;
}
false
@@ -207,17 +203,15 @@ fn is_end_eq_array_len<'tcx>(
limits: ast::RangeLimits,
indexed_ty: Ty<'tcx>,
) -> bool {
- if_chain! {
- if let ExprKind::Lit(lit) = end.kind;
- if let ast::LitKind::Int(end_int, _) = lit.node;
- if let ty::Array(_, arr_len_const) = indexed_ty.kind();
- if let Some(arr_len) = arr_len_const.try_eval_target_usize(cx.tcx, cx.param_env);
- then {
- return match limits {
- ast::RangeLimits::Closed => end_int + 1 >= arr_len.into(),
- ast::RangeLimits::HalfOpen => end_int >= arr_len.into(),
- };
- }
+ if let ExprKind::Lit(lit) = end.kind
+ && let ast::LitKind::Int(end_int, _) = lit.node
+ && let ty::Array(_, arr_len_const) = indexed_ty.kind()
+ && let Some(arr_len) = arr_len_const.try_eval_target_usize(cx.tcx, cx.param_env)
+ {
+ return match limits {
+ ast::RangeLimits::Closed => end_int + 1 >= arr_len.into(),
+ ast::RangeLimits::HalfOpen => end_int >= arr_len.into(),
+ };
}
false
@@ -248,51 +242,49 @@ struct VarVisitor<'a, 'tcx> {
impl<'a, 'tcx> VarVisitor<'a, 'tcx> {
fn check(&mut self, idx: &'tcx Expr<'_>, seqexpr: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) -> bool {
- if_chain! {
+ if let ExprKind::Path(ref seqpath) = seqexpr.kind
// the indexed container is referenced by a name
- if let ExprKind::Path(ref seqpath) = seqexpr.kind;
- if let QPath::Resolved(None, seqvar) = *seqpath;
- if seqvar.segments.len() == 1;
- if is_local_used(self.cx, idx, self.var);
- then {
- if self.prefer_mutable {
- self.indexed_mut.insert(seqvar.segments[0].ident.name);
- }
- let index_used_directly = matches!(idx.kind, ExprKind::Path(_));
- let res = self.cx.qpath_res(seqpath, seqexpr.hir_id);
- match res {
- Res::Local(hir_id) => {
- let parent_def_id = self.cx.tcx.hir().get_parent_item(expr.hir_id);
- let extent = self
- .cx
- .tcx
- .region_scope_tree(parent_def_id)
- .var_scope(hir_id.local_id)
- .unwrap();
- if index_used_directly {
- self.indexed_directly.insert(
- seqvar.segments[0].ident.name,
- (Some(extent), self.cx.typeck_results().node_type(seqexpr.hir_id)),
- );
- } else {
- self.indexed_indirectly
- .insert(seqvar.segments[0].ident.name, Some(extent));
- }
- return false; // no need to walk further *on the variable*
- },
- Res::Def(DefKind::Static(_) | DefKind::Const, ..) => {
- if index_used_directly {
- self.indexed_directly.insert(
- seqvar.segments[0].ident.name,
- (None, self.cx.typeck_results().node_type(seqexpr.hir_id)),
- );
- } else {
- self.indexed_indirectly.insert(seqvar.segments[0].ident.name, None);
- }
- return false; // no need to walk further *on the variable*
- },
- _ => (),
- }
+ && let QPath::Resolved(None, seqvar) = *seqpath
+ && seqvar.segments.len() == 1
+ && is_local_used(self.cx, idx, self.var)
+ {
+ if self.prefer_mutable {
+ self.indexed_mut.insert(seqvar.segments[0].ident.name);
+ }
+ let index_used_directly = matches!(idx.kind, ExprKind::Path(_));
+ let res = self.cx.qpath_res(seqpath, seqexpr.hir_id);
+ match res {
+ Res::Local(hir_id) => {
+ let parent_def_id = self.cx.tcx.hir().get_parent_item(expr.hir_id);
+ let extent = self
+ .cx
+ .tcx
+ .region_scope_tree(parent_def_id)
+ .var_scope(hir_id.local_id)
+ .unwrap();
+ if index_used_directly {
+ self.indexed_directly.insert(
+ seqvar.segments[0].ident.name,
+ (Some(extent), self.cx.typeck_results().node_type(seqexpr.hir_id)),
+ );
+ } else {
+ self.indexed_indirectly
+ .insert(seqvar.segments[0].ident.name, Some(extent));
+ }
+ return false; // no need to walk further *on the variable*
+ },
+ Res::Def(DefKind::Static(_) | DefKind::Const, ..) => {
+ if index_used_directly {
+ self.indexed_directly.insert(
+ seqvar.segments[0].ident.name,
+ (None, self.cx.typeck_results().node_type(seqexpr.hir_id)),
+ );
+ } else {
+ self.indexed_indirectly.insert(seqvar.segments[0].ident.name, None);
+ }
+ return false; // no need to walk further *on the variable*
+ },
+ _ => (),
}
}
true
@@ -301,42 +293,36 @@ impl<'a, 'tcx> VarVisitor<'a, 'tcx> {
impl<'a, 'tcx> Visitor<'tcx> for VarVisitor<'a, 'tcx> {
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
- if_chain! {
+ if let ExprKind::MethodCall(meth, args_0, [args_1, ..], _) = &expr.kind
// a range index op
- if let ExprKind::MethodCall(meth, args_0, [args_1, ..], _) = &expr.kind;
- if let Some(trait_id) = self
+ && let Some(trait_id) = self
.cx
.typeck_results()
.type_dependent_def_id(expr.hir_id)
- .and_then(|def_id| self.cx.tcx.trait_of_item(def_id));
- if (meth.ident.name == sym::index && self.cx.tcx.lang_items().index_trait() == Some(trait_id))
- || (meth.ident.name == sym::index_mut && self.cx.tcx.lang_items().index_mut_trait() == Some(trait_id));
- if !self.check(args_1, args_0, expr);
- then {
- return;
- }
+ .and_then(|def_id| self.cx.tcx.trait_of_item(def_id))
+ && ((meth.ident.name == sym::index && self.cx.tcx.lang_items().index_trait() == Some(trait_id))
+ || (meth.ident.name == sym::index_mut && self.cx.tcx.lang_items().index_mut_trait() == Some(trait_id)))
+ && !self.check(args_1, args_0, expr)
+ {
+ return;
}
- if_chain! {
+ if let ExprKind::Index(seqexpr, idx, _) = expr.kind
// an index op
- if let ExprKind::Index(seqexpr, idx, _) = expr.kind;
- if !self.check(idx, seqexpr, expr);
- then {
- return;
- }
+ && !self.check(idx, seqexpr, expr)
+ {
+ return;
}
- if_chain! {
+ if let ExprKind::Path(QPath::Resolved(None, path)) = expr.kind
// directly using a variable
- if let ExprKind::Path(QPath::Resolved(None, path)) = expr.kind;
- if let Res::Local(local_id) = path.res;
- then {
- if local_id == self.var {
- self.nonindex = true;
- } else {
- // not the correct variable, but still a variable
- self.referenced.insert(path.segments[0].ident.name);
- }
+ && let Res::Local(local_id) = path.res
+ {
+ if local_id == self.var {
+ self.nonindex = true;
+ } else {
+ // not the correct variable, but still a variable
+ self.referenced.insert(path.segments[0].ident.name);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
index cc054cb46..62bc66319 100644
--- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
@@ -8,7 +8,7 @@ use rustc_errors::Applicability;
use rustc_hir::{Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Pat, Stmt, StmtKind};
use rustc_lint::LateContext;
use rustc_span::{sym, Span};
-use std::iter::{once, Iterator};
+use std::iter::once;
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
diff --git a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
index 5fffb27cd..c245eaf1a 100644
--- a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::path_to_local;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
@@ -12,7 +11,6 @@ use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, Mutability, Nod
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
use rustc_span::SyntaxContext;
-use std::iter::Iterator;
/// Detects for loop pushing the same item into a Vec
pub(super) fn check<'tcx>(
@@ -44,54 +42,50 @@ pub(super) fn check<'tcx>(
// Determine whether it is safe to lint the body
let mut same_item_push_visitor = SameItemPushVisitor::new(cx);
walk_expr(&mut same_item_push_visitor, body);
- if_chain! {
- if same_item_push_visitor.should_lint();
- if let Some((vec, pushed_item, ctxt)) = same_item_push_visitor.vec_push;
- let vec_ty = cx.typeck_results().expr_ty(vec);
- let ty = vec_ty.walk().nth(1).unwrap().expect_ty();
- if cx
+ if same_item_push_visitor.should_lint()
+ && let Some((vec, pushed_item, ctxt)) = same_item_push_visitor.vec_push
+ && let vec_ty = cx.typeck_results().expr_ty(vec)
+ && let ty = vec_ty.walk().nth(1).unwrap().expect_ty()
+ && cx
.tcx
.lang_items()
.clone_trait()
- .map_or(false, |id| implements_trait(cx, ty, id, &[]));
- then {
- // Make sure that the push does not involve possibly mutating values
- match pushed_item.kind {
- ExprKind::Path(ref qpath) => {
- match cx.qpath_res(qpath, pushed_item.hir_id) {
- // immutable bindings that are initialized with literal or constant
- Res::Local(hir_id) => {
- let node = cx.tcx.hir().get(hir_id);
- if_chain! {
- if let Node::Pat(pat) = node;
- if let PatKind::Binding(bind_ann, ..) = pat.kind;
- if !matches!(bind_ann, BindingAnnotation(_, Mutability::Mut));
- let parent_node = cx.tcx.hir().parent_id(hir_id);
- if let Some(Node::Local(parent_let_expr)) = cx.tcx.hir().find(parent_node);
- if let Some(init) = parent_let_expr.init;
- then {
- match init.kind {
- // immutable bindings that are initialized with literal
- ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item, ctxt),
- // immutable bindings that are initialized with constant
- ExprKind::Path(ref path) => {
- if let Res::Def(DefKind::Const, ..) = cx.qpath_res(path, init.hir_id) {
- emit_lint(cx, vec, pushed_item, ctxt);
- }
- }
- _ => {},
+ .map_or(false, |id| implements_trait(cx, ty, id, &[]))
+ {
+ // Make sure that the push does not involve possibly mutating values
+ match pushed_item.kind {
+ ExprKind::Path(ref qpath) => {
+ match cx.qpath_res(qpath, pushed_item.hir_id) {
+ // immutable bindings that are initialized with literal or constant
+ Res::Local(hir_id) => {
+ let node = cx.tcx.hir_node(hir_id);
+ if let Node::Pat(pat) = node
+ && let PatKind::Binding(bind_ann, ..) = pat.kind
+ && !matches!(bind_ann, BindingAnnotation(_, Mutability::Mut))
+ && let parent_node = cx.tcx.hir().parent_id(hir_id)
+ && let Some(Node::Local(parent_let_expr)) = cx.tcx.opt_hir_node(parent_node)
+ && let Some(init) = parent_let_expr.init
+ {
+ match init.kind {
+ // immutable bindings that are initialized with literal
+ ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item, ctxt),
+ // immutable bindings that are initialized with constant
+ ExprKind::Path(ref path) => {
+ if let Res::Def(DefKind::Const, ..) = cx.qpath_res(path, init.hir_id) {
+ emit_lint(cx, vec, pushed_item, ctxt);
}
- }
+ },
+ _ => {},
}
- },
- // constant
- Res::Def(DefKind::Const, ..) => emit_lint(cx, vec, pushed_item, ctxt),
- _ => {},
- }
- },
- ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item, ctxt),
- _ => {},
- }
+ }
+ },
+ // constant
+ Res::Def(DefKind::Const, ..) => emit_lint(cx, vec, pushed_item, ctxt),
+ _ => {},
+ }
+ },
+ ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item, ctxt),
+ _ => {},
}
}
}
@@ -118,16 +112,14 @@ impl<'a, 'tcx> SameItemPushVisitor<'a, 'tcx> {
}
fn should_lint(&self) -> bool {
- if_chain! {
- if !self.non_deterministic_expr;
- if !self.multiple_pushes;
- if let Some((vec, _, _)) = self.vec_push;
- if let Some(hir_id) = path_to_local(vec);
- then {
- !self.used_locals.contains(&hir_id)
- } else {
- false
- }
+ if !self.non_deterministic_expr
+ && !self.multiple_pushes
+ && let Some((vec, _, _)) = self.vec_push
+ && let Some(hir_id) = path_to_local(vec)
+ {
+ !self.used_locals.contains(&hir_id)
+ } else {
+ false
}
}
}
@@ -180,18 +172,16 @@ fn get_vec_push<'tcx>(
cx: &LateContext<'tcx>,
stmt: &'tcx Stmt<'_>,
) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>, SyntaxContext)> {
- if_chain! {
+ if let StmtKind::Semi(semi_stmt) = &stmt.kind
// Extract method being called
- if let StmtKind::Semi(semi_stmt) = &stmt.kind;
- if let ExprKind::MethodCall(path, self_expr, args, _) = &semi_stmt.kind;
+ && let ExprKind::MethodCall(path, self_expr, args, _) = &semi_stmt.kind
// Figure out the parameters for the method call
- if let Some(pushed_item) = args.first();
+ && let Some(pushed_item) = args.first()
// Check that the method being called is push() on a Vec
- if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr), sym::Vec);
- if path.ident.name.as_str() == "push";
- then {
- return Some((self_expr, pushed_item, semi_stmt.span.ctxt()))
- }
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr), sym::Vec)
+ && path.ident.name.as_str() == "push"
+ {
+ return Some((self_expr, pushed_item, semi_stmt.span.ctxt()));
}
None
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs b/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
index dfb800ccf..4773a1454 100644
--- a/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
@@ -1,8 +1,7 @@
use super::SINGLE_ELEMENT_LOOP;
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::source::{indent_of, snippet_with_applicability};
+use clippy_utils::source::{indent_of, snippet, snippet_with_applicability};
use clippy_utils::visitors::contains_break_or_continue;
-use if_chain::if_chain;
use rustc_ast::util::parser::PREC_PREFIX;
use rustc_ast::Mutability;
use rustc_errors::Applicability;
@@ -66,27 +65,42 @@ pub(super) fn check<'tcx>(
ExprKind::Array([arg]) if cx.tcx.sess.edition() >= Edition::Edition2021 => (arg, ""),
_ => return,
};
- if_chain! {
- if let ExprKind::Block(block, _) = body.kind;
- if !block.stmts.is_empty();
- if !contains_break_or_continue(body);
- then {
- let mut applicability = Applicability::MachineApplicable;
- let pat_snip = snippet_with_applicability(cx, pat.span, "..", &mut applicability);
- let mut arg_snip = snippet_with_applicability(cx, arg_expression.span, "..", &mut applicability);
- let mut block_str = snippet_with_applicability(cx, block.span, "..", &mut applicability).into_owned();
- block_str.remove(0);
- block_str.pop();
- let indent = " ".repeat(indent_of(cx, block.stmts[0].span).unwrap_or(0));
+ if let ExprKind::Block(block, _) = body.kind
+ && !block.stmts.is_empty()
+ && !contains_break_or_continue(body)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let pat_snip = snippet_with_applicability(cx, pat.span, "..", &mut applicability);
+ let mut arg_snip = snippet_with_applicability(cx, arg_expression.span, "..", &mut applicability);
+ let mut block_str = snippet_with_applicability(cx, block.span, "..", &mut applicability).into_owned();
+ block_str.remove(0);
+ block_str.pop();
+ let indent = " ".repeat(indent_of(cx, block.stmts[0].span).unwrap_or(0));
- // Reference iterator from `&(mut) []` or `[].iter(_mut)()`.
- if !prefix.is_empty() && (
+ // Reference iterator from `&(mut) []` or `[].iter(_mut)()`.
+ if !prefix.is_empty()
+ && (
// Precedence of internal expression is less than or equal to precedence of `&expr`.
arg_expression.precedence().order() <= PREC_PREFIX || is_range_literal(arg_expression)
- ) {
- arg_snip = format!("({arg_snip})").into();
- }
+ )
+ {
+ arg_snip = format!("({arg_snip})").into();
+ }
+ if clippy_utils::higher::Range::hir(arg_expression).is_some() {
+ let range_expr = snippet(cx, arg_expression.span, "?").to_string();
+
+ let sugg = snippet(cx, arg_expression.span, "..");
+ span_lint_and_sugg(
+ cx,
+ SINGLE_ELEMENT_LOOP,
+ arg.span,
+ format!("this loops only once with `{pat_snip}` being `{range_expr}`").as_str(),
+ "did you mean to iterate over the range instead?",
+ sugg.to_string(),
+ Applicability::Unspecified,
+ );
+ } else {
span_lint_and_sugg(
cx,
SINGLE_ELEMENT_LOOP,
@@ -95,7 +109,7 @@ pub(super) fn check<'tcx>(
"try",
format!("{{\n{indent}let {pat_snip} = {prefix}{arg_snip};{block_str}}}"),
applicability,
- )
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/utils.rs b/src/tools/clippy/clippy_lints/src/loops/utils.rs
index 0a2bd89eb..e685274ad 100644
--- a/src/tools/clippy/clippy_lints/src/loops/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/utils.rs
@@ -1,6 +1,5 @@
use clippy_utils::ty::{has_iter_method, implements_trait};
use clippy_utils::{get_parent_expr, is_integer_const, path_to_local, path_to_local_id, sugg};
-use if_chain::if_chain;
use rustc_ast::ast::{LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_local, walk_pat, walk_stmt, Visitor};
@@ -10,7 +9,6 @@ use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{self, Ty};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Symbol};
-use std::iter::Iterator;
#[derive(Debug, PartialEq, Eq)]
enum IncrementVisitorVarState {
@@ -145,20 +143,18 @@ impl<'a, 'tcx> Visitor<'tcx> for InitializeVisitor<'a, 'tcx> {
fn visit_local(&mut self, l: &'tcx Local<'_>) {
// Look for declarations of the variable
- if_chain! {
- if l.pat.hir_id == self.var_id;
- if let PatKind::Binding(.., ident, _) = l.pat.kind;
- then {
- let ty = l.ty.map(|_| self.cx.typeck_results().pat_ty(l.pat));
+ if l.pat.hir_id == self.var_id
+ && let PatKind::Binding(.., ident, _) = l.pat.kind
+ {
+ let ty = l.ty.map(|_| self.cx.typeck_results().pat_ty(l.pat));
- self.state = l.init.map_or(InitializeVisitorState::Declared(ident.name, ty), |init| {
- InitializeVisitorState::Initialized {
- initializer: init,
- ty,
- name: ident.name,
- }
- })
- }
+ self.state = l.init.map_or(InitializeVisitorState::Declared(ident.name, ty), |init| {
+ InitializeVisitorState::Initialized {
+ initializer: init,
+ ty,
+ name: ident.name,
+ }
+ });
}
walk_local(self, l);
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
index 7f24f3c5d..9fd9b7a16 100644
--- a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
@@ -2,7 +2,6 @@ use super::WHILE_IMMUTABLE_CONDITION;
use clippy_utils::consts::constant;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::usage::mutated_variables;
-use if_chain::if_chain;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefIdMap;
use rustc_hir::intravisit::{walk_expr, Visitor};
@@ -95,20 +94,18 @@ struct VarCollectorVisitor<'a, 'tcx> {
impl<'a, 'tcx> VarCollectorVisitor<'a, 'tcx> {
fn insert_def_id(&mut self, ex: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Path(ref qpath) = ex.kind;
- if let QPath::Resolved(None, _) = *qpath;
- then {
- match self.cx.qpath_res(qpath, ex.hir_id) {
- Res::Local(hir_id) => {
- self.ids.insert(hir_id);
- },
- Res::Def(DefKind::Static(_), def_id) => {
- let mutable = self.cx.tcx.is_mutable_static(def_id);
- self.def_ids.insert(def_id, mutable);
- },
- _ => {},
- }
+ if let ExprKind::Path(ref qpath) = ex.kind
+ && let QPath::Resolved(None, _) = *qpath
+ {
+ match self.cx.qpath_res(qpath, ex.hir_id) {
+ Res::Local(hir_id) => {
+ self.ids.insert(hir_id);
+ },
+ Res::Def(DefKind::Static(_), def_id) => {
+ let mutable = self.cx.tcx.is_mutable_static(def_id);
+ self.def_ids.insert(def_id, mutable);
+ },
+ _ => {},
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
index 5153070cf..21b9efba5 100644
--- a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::visitors::is_res_used;
use clippy_utils::{get_enclosing_loop_or_multi_call_closure, higher, is_refutable, is_res_lang_ctor, is_trait_method};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_expr, Visitor};
@@ -15,59 +14,53 @@ use rustc_span::symbol::sym;
use rustc_span::Symbol;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let (scrutinee_expr, iter_expr_struct, iter_expr, some_pat, loop_expr) = if_chain! {
- if let Some(higher::WhileLet { if_then, let_pat, let_expr }) = higher::WhileLet::hir(expr);
+ if let Some(higher::WhileLet { if_then, let_pat, let_expr }) = higher::WhileLet::hir(expr)
// check for `Some(..)` pattern
- if let PatKind::TupleStruct(ref pat_path, some_pat, _) = let_pat.kind;
- if is_res_lang_ctor(cx, cx.qpath_res(pat_path, let_pat.hir_id), LangItem::OptionSome);
+ && let PatKind::TupleStruct(ref pat_path, some_pat, _) = let_pat.kind
+ && is_res_lang_ctor(cx, cx.qpath_res(pat_path, let_pat.hir_id), LangItem::OptionSome)
// check for call to `Iterator::next`
- if let ExprKind::MethodCall(method_name, iter_expr, [], _) = let_expr.kind;
- if method_name.ident.name == sym::next;
- if is_trait_method(cx, let_expr, sym::Iterator);
- if let Some(iter_expr_struct) = try_parse_iter_expr(cx, iter_expr);
+ && let ExprKind::MethodCall(method_name, iter_expr, [], _) = let_expr.kind
+ && method_name.ident.name == sym::next
+ && is_trait_method(cx, let_expr, sym::Iterator)
+ && let Some(iter_expr_struct) = try_parse_iter_expr(cx, iter_expr)
// get the loop containing the match expression
- if !uses_iter(cx, &iter_expr_struct, if_then);
- then {
- (let_expr, iter_expr_struct, iter_expr, some_pat, expr)
+ && !uses_iter(cx, &iter_expr_struct, if_then)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let loop_var = if let Some(some_pat) = some_pat.first() {
+ if is_refutable(cx, some_pat) {
+ // Refutable patterns don't work with for loops.
+ return;
+ }
+ snippet_with_applicability(cx, some_pat.span, "..", &mut applicability)
} else {
- return;
- }
- };
-
- let mut applicability = Applicability::MachineApplicable;
- let loop_var = if let Some(some_pat) = some_pat.first() {
- if is_refutable(cx, some_pat) {
- // Refutable patterns don't work with for loops.
- return;
- }
- snippet_with_applicability(cx, some_pat.span, "..", &mut applicability)
- } else {
- "_".into()
- };
+ "_".into()
+ };
- // If the iterator is a field or the iterator is accessed after the loop is complete it needs to be
- // borrowed mutably. TODO: If the struct can be partially moved from and the struct isn't used
- // afterwards a mutable borrow of a field isn't necessary.
- let by_ref = if cx.typeck_results().expr_ty(iter_expr).ref_mutability() == Some(Mutability::Mut)
- || !iter_expr_struct.can_move
- || !iter_expr_struct.fields.is_empty()
- || needs_mutable_borrow(cx, &iter_expr_struct, loop_expr)
- {
- ".by_ref()"
- } else {
- ""
- };
+ // If the iterator is a field or the iterator is accessed after the loop is complete it needs to be
+ // borrowed mutably. TODO: If the struct can be partially moved from and the struct isn't used
+ // afterwards a mutable borrow of a field isn't necessary.
+ let by_ref = if cx.typeck_results().expr_ty(iter_expr).ref_mutability() == Some(Mutability::Mut)
+ || !iter_expr_struct.can_move
+ || !iter_expr_struct.fields.is_empty()
+ || needs_mutable_borrow(cx, &iter_expr_struct, expr)
+ {
+ ".by_ref()"
+ } else {
+ ""
+ };
- let iterator = snippet_with_applicability(cx, iter_expr.span, "_", &mut applicability);
- span_lint_and_sugg(
- cx,
- WHILE_LET_ON_ITERATOR,
- expr.span.with_hi(scrutinee_expr.span.hi()),
- "this loop could be written as a `for` loop",
- "try",
- format!("for {loop_var} in {iterator}{by_ref}"),
- applicability,
- );
+ let iterator = snippet_with_applicability(cx, iter_expr.span, "_", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ WHILE_LET_ON_ITERATOR,
+ expr.span.with_hi(let_expr.span.hi()),
+ "this loop could be written as a `for` loop",
+ "try",
+ format!("for {loop_var} in {iterator}{by_ref}"),
+ applicability,
+ );
+ }
}
#[derive(Debug)]
diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs
index 9b158f18f..8d3e7520a 100644
--- a/src/tools/clippy/clippy_lints/src/macro_use.rs
+++ b/src/tools/clippy/clippy_lints/src/macro_use.rs
@@ -1,15 +1,15 @@
use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::source::snippet;
use hir::def::{DefKind, Res};
-use if_chain::if_chain;
use rustc_ast::ast;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::edition::Edition;
use rustc_span::{sym, Span};
+use std::collections::BTreeMap;
declare_clippy_lint! {
/// ### What it does
@@ -89,30 +89,26 @@ impl MacroUseImports {
impl<'tcx> LateLintPass<'tcx> for MacroUseImports {
fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
- if_chain! {
- if cx.sess().opts.edition >= Edition::Edition2018;
- if let hir::ItemKind::Use(path, _kind) = &item.kind;
- let hir_id = item.hir_id();
- let attrs = cx.tcx.hir().attrs(hir_id);
- if let Some(mac_attr) = attrs.iter().find(|attr| attr.has_name(sym::macro_use));
- if let Some(id) = path.res.iter().find_map(|res| match res {
+ if cx.sess().opts.edition >= Edition::Edition2018
+ && let hir::ItemKind::Use(path, _kind) = &item.kind
+ && let hir_id = item.hir_id()
+ && let attrs = cx.tcx.hir().attrs(hir_id)
+ && let Some(mac_attr) = attrs.iter().find(|attr| attr.has_name(sym::macro_use))
+ && let Some(id) = path.res.iter().find_map(|res| match res {
Res::Def(DefKind::Mod, id) => Some(id),
_ => None,
- });
- if !id.is_local();
- then {
- for kid in cx.tcx.module_children(id) {
- if let Res::Def(DefKind::Macro(_mac_type), mac_id) = kid.res {
- let span = mac_attr.span;
- let def_path = cx.tcx.def_path_str(mac_id);
- self.imports.push((def_path, span, hir_id));
- }
- }
- } else {
- if item.span.from_expansion() {
- self.push_unique_macro_pat_ty(cx, item.span);
+ })
+ && !id.is_local()
+ {
+ for kid in cx.tcx.module_children(id) {
+ if let Res::Def(DefKind::Macro(_mac_type), mac_id) = kid.res {
+ let span = mac_attr.span;
+ let def_path = cx.tcx.def_path_str(mac_id);
+ self.imports.push((def_path, span, hir_id));
}
}
+ } else if item.span.from_expansion() {
+ self.push_unique_macro_pat_ty(cx, item.span);
}
}
fn check_attribute(&mut self, cx: &LateContext<'_>, attr: &ast::Attribute) {
@@ -141,7 +137,7 @@ impl<'tcx> LateLintPass<'tcx> for MacroUseImports {
}
}
fn check_crate_post(&mut self, cx: &LateContext<'_>) {
- let mut used = FxHashMap::default();
+ let mut used = BTreeMap::new();
let mut check_dup = vec![];
for (import, span, hir_id) in &self.imports {
let found_idx = self.mac_refs.iter().position(|mac| import.ends_with(&mac.name));
@@ -190,20 +186,16 @@ impl<'tcx> LateLintPass<'tcx> for MacroUseImports {
}
}
- let mut suggestions = vec![];
- for ((root, span, hir_id), path) in used {
- if path.len() == 1 {
- suggestions.push((span, format!("{root}::{}", path[0]), hir_id));
- } else {
- suggestions.push((span, format!("{root}::{{{}}}", path.join(", ")), hir_id));
- }
- }
-
// If mac_refs is not empty we have encountered an import we could not handle
// such as `std::prelude::v1::foo` or some other macro that expands to an import.
if self.mac_refs.is_empty() {
- for (span, import, hir_id) in suggestions {
- let help = format!("use {import};");
+ for ((root, span, hir_id), path) in used {
+ let import = if let [single] = &path[..] {
+ format!("{root}::{single}")
+ } else {
+ format!("{root}::{{{}}}", path.join(", "))
+ };
+
span_lint_hir_and_then(
cx,
MACRO_USE_IMPORTS,
@@ -214,7 +206,7 @@ impl<'tcx> LateLintPass<'tcx> for MacroUseImports {
diag.span_suggestion(
*span,
"remove the attribute and import the macro directly, try",
- help,
+ format!("use {import};"),
Applicability::MaybeIncorrect,
);
},
diff --git a/src/tools/clippy/clippy_lints/src/main_recursion.rs b/src/tools/clippy/clippy_lints/src/main_recursion.rs
index 20333c150..a381b35cf 100644
--- a/src/tools/clippy/clippy_lints/src/main_recursion.rs
+++ b/src/tools/clippy/clippy_lints/src/main_recursion.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::source::snippet;
use clippy_utils::{is_entrypoint_fn, is_no_std_crate};
-use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -43,21 +42,19 @@ impl LateLintPass<'_> for MainRecursion {
return;
}
- if_chain! {
- if let ExprKind::Call(func, _) = &expr.kind;
- if let ExprKind::Path(QPath::Resolved(_, path)) = &func.kind;
- if let Some(def_id) = path.res.opt_def_id();
- if is_entrypoint_fn(cx, def_id);
- then {
- span_lint_and_help(
- cx,
- MAIN_RECURSION,
- func.span,
- &format!("recursing into entrypoint `{}`", snippet(cx, func.span, "main")),
- None,
- "consider using another function for this recursion"
- )
- }
+ if let ExprKind::Call(func, _) = &expr.kind
+ && let ExprKind::Path(QPath::Resolved(_, path)) = &func.kind
+ && let Some(def_id) = path.res.opt_def_id()
+ && is_entrypoint_fn(cx, def_id)
+ {
+ span_lint_and_help(
+ cx,
+ MAIN_RECURSION,
+ func.span,
+ &format!("recursing into entrypoint `{}`", snippet(cx, func.span, "main")),
+ None,
+ "consider using another function for this recursion",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/manual_assert.rs b/src/tools/clippy/clippy_lints/src/manual_assert.rs
index 9a3da975f..4f6a2cf01 100644
--- a/src/tools/clippy/clippy_lints/src/manual_assert.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_assert.rs
@@ -5,7 +5,7 @@ use clippy_utils::{is_else_clause, peel_blocks_with_stmt, span_extract_comment,
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
index 998de38a9..eaaaea0be 100644
--- a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::{position_before_rarrow, snippet_block, snippet_opt};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{
@@ -8,7 +7,7 @@ use rustc_hir::{
ImplItem, Item, ItemKind, LifetimeName, Node, Term, TraitRef, Ty, TyKind, TypeBindingKind,
};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
@@ -47,61 +46,57 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
span: Span,
def_id: LocalDefId,
) {
- if_chain! {
- if let Some(header) = kind.header();
- if !header.asyncness.is_async();
+ if let Some(header) = kind.header()
+ && !header.asyncness.is_async()
// Check that this function returns `impl Future`
- if let FnRetTy::Return(ret_ty) = decl.output;
- if let Some((trait_ref, output_lifetimes)) = future_trait_ref(cx, ret_ty);
- if let Some(output) = future_output_ty(trait_ref);
- if captures_all_lifetimes(decl.inputs, &output_lifetimes);
+ && let FnRetTy::Return(ret_ty) = decl.output
+ && let Some((trait_ref, output_lifetimes)) = future_trait_ref(cx, ret_ty)
+ && let Some(output) = future_output_ty(trait_ref)
+ && captures_all_lifetimes(decl.inputs, &output_lifetimes)
// Check that the body of the function consists of one async block
- if let ExprKind::Block(block, _) = body.value.kind;
- if block.stmts.is_empty();
- if let Some(closure_body) = desugared_async_block(cx, block);
- if let Node::Item(Item {vis_span, ..}) | Node::ImplItem(ImplItem {vis_span, ..}) =
- cx.tcx.hir().get_by_def_id(def_id);
- then {
- let header_span = span.with_hi(ret_ty.span.hi());
-
- span_lint_and_then(
- cx,
- MANUAL_ASYNC_FN,
- header_span,
- "this function can be simplified using the `async fn` syntax",
- |diag| {
- if_chain! {
- if let Some(vis_snip) = snippet_opt(cx, *vis_span);
- if let Some(header_snip) = snippet_opt(cx, header_span);
- if let Some(ret_pos) = position_before_rarrow(&header_snip);
- if let Some((ret_sugg, ret_snip)) = suggested_ret(cx, output);
- then {
- let header_snip = if vis_snip.is_empty() {
- format!("async {}", &header_snip[..ret_pos])
- } else {
- format!("{} async {}", vis_snip, &header_snip[vis_snip.len() + 1..ret_pos])
- };
-
- let help = format!("make the function `async` and {ret_sugg}");
- diag.span_suggestion(
- header_span,
- help,
- format!("{header_snip}{ret_snip}"),
- Applicability::MachineApplicable
- );
-
- let body_snip = snippet_block(cx, closure_body.value.span, "..", Some(block.span));
- diag.span_suggestion(
- block.span,
- "move the body of the async block to the enclosing function",
- body_snip,
- Applicability::MachineApplicable
- );
- }
- }
- },
- );
- }
+ && let ExprKind::Block(block, _) = body.value.kind
+ && block.stmts.is_empty()
+ && let Some(closure_body) = desugared_async_block(cx, block)
+ && let Node::Item(Item {vis_span, ..}) | Node::ImplItem(ImplItem {vis_span, ..}) =
+ cx.tcx.hir_node_by_def_id(def_id)
+ {
+ let header_span = span.with_hi(ret_ty.span.hi());
+
+ span_lint_and_then(
+ cx,
+ MANUAL_ASYNC_FN,
+ header_span,
+ "this function can be simplified using the `async fn` syntax",
+ |diag| {
+ if let Some(vis_snip) = snippet_opt(cx, *vis_span)
+ && let Some(header_snip) = snippet_opt(cx, header_span)
+ && let Some(ret_pos) = position_before_rarrow(&header_snip)
+ && let Some((ret_sugg, ret_snip)) = suggested_ret(cx, output)
+ {
+ let header_snip = if vis_snip.is_empty() {
+ format!("async {}", &header_snip[..ret_pos])
+ } else {
+ format!("{} async {}", vis_snip, &header_snip[vis_snip.len() + 1..ret_pos])
+ };
+
+ let help = format!("make the function `async` and {ret_sugg}");
+ diag.span_suggestion(
+ header_span,
+ help,
+ format!("{header_snip}{ret_snip}"),
+ Applicability::MachineApplicable,
+ );
+
+ let body_snip = snippet_block(cx, closure_body.value.span, "..", Some(block.span));
+ diag.span_suggestion(
+ block.span,
+ "move the body of the async block to the enclosing function",
+ body_snip,
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ );
}
}
}
@@ -110,48 +105,44 @@ fn future_trait_ref<'tcx>(
cx: &LateContext<'tcx>,
ty: &'tcx Ty<'tcx>,
) -> Option<(&'tcx TraitRef<'tcx>, Vec<LifetimeName>)> {
- if_chain! {
- if let TyKind::OpaqueDef(item_id, bounds, false) = ty.kind;
- let item = cx.tcx.hir().item(item_id);
- if let ItemKind::OpaqueTy(opaque) = &item.kind;
- if let Some(trait_ref) = opaque.bounds.iter().find_map(|bound| {
+ if let TyKind::OpaqueDef(item_id, bounds, false) = ty.kind
+ && let item = cx.tcx.hir().item(item_id)
+ && let ItemKind::OpaqueTy(opaque) = &item.kind
+ && let Some(trait_ref) = opaque.bounds.iter().find_map(|bound| {
if let GenericBound::Trait(poly, _) = bound {
Some(&poly.trait_ref)
} else {
None
}
- });
- if trait_ref.trait_def_id() == cx.tcx.lang_items().future_trait();
- then {
- let output_lifetimes = bounds
- .iter()
- .filter_map(|bound| {
- if let GenericArg::Lifetime(lt) = bound {
- Some(lt.res)
- } else {
- None
- }
- })
- .collect();
-
- return Some((trait_ref, output_lifetimes));
- }
+ })
+ && trait_ref.trait_def_id() == cx.tcx.lang_items().future_trait()
+ {
+ let output_lifetimes = bounds
+ .iter()
+ .filter_map(|bound| {
+ if let GenericArg::Lifetime(lt) = bound {
+ Some(lt.res)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ return Some((trait_ref, output_lifetimes));
}
None
}
fn future_output_ty<'tcx>(trait_ref: &'tcx TraitRef<'tcx>) -> Option<&'tcx Ty<'tcx>> {
- if_chain! {
- if let Some(segment) = trait_ref.path.segments.last();
- if let Some(args) = segment.args;
- if args.bindings.len() == 1;
- let binding = &args.bindings[0];
- if binding.ident.name == sym::Output;
- if let TypeBindingKind::Equality { term: Term::Ty(output) } = binding.kind;
- then {
- return Some(output);
- }
+ if let Some(segment) = trait_ref.path.segments.last()
+ && let Some(args) = segment.args
+ && args.bindings.len() == 1
+ && let binding = &args.bindings[0]
+ && binding.ident.name == sym::Output
+ && let TypeBindingKind::Equality { term: Term::Ty(output) } = binding.kind
+ {
+ return Some(output);
}
None
@@ -181,31 +172,26 @@ fn captures_all_lifetimes(inputs: &[Ty<'_>], output_lifetimes: &[LifetimeName])
}
fn desugared_async_block<'tcx>(cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) -> Option<&'tcx Body<'tcx>> {
- if_chain! {
- if let Some(block_expr) = block.expr;
- if let Expr {
+ if let Some(block_expr) = block.expr
+ && let Expr {
kind: ExprKind::Closure(&Closure { body, .. }),
..
- } = block_expr;
- let closure_body = cx.tcx.hir().body(body);
- if closure_body.coroutine_kind == Some(CoroutineKind::Async(CoroutineSource::Block));
- then {
- return Some(closure_body);
- }
+ } = block_expr
+ && let closure_body = cx.tcx.hir().body(body)
+ && closure_body.coroutine_kind == Some(CoroutineKind::Async(CoroutineSource::Block))
+ {
+ return Some(closure_body);
}
None
}
fn suggested_ret(cx: &LateContext<'_>, output: &Ty<'_>) -> Option<(&'static str, String)> {
- match output.kind {
- TyKind::Tup(tys) if tys.is_empty() => {
- let sugg = "remove the return type";
- Some((sugg, String::new()))
- },
- _ => {
- let sugg = "return the output of the future directly";
- snippet_opt(cx, output.span).map(|snip| (sugg, format!(" -> {snip}")))
- },
+ if let TyKind::Tup([]) = output.kind {
+ let sugg = "remove the return type";
+ Some((sugg, String::new()))
+ } else {
+ let sugg = "return the output of the future directly";
+ snippet_opt(cx, output.span).map(|snip| (sugg, format!(" -> {snip}")))
}
}
diff --git a/src/tools/clippy/clippy_lints/src/manual_bits.rs b/src/tools/clippy/clippy_lints/src/manual_bits.rs
index cd614c895..96c652283 100644
--- a/src/tools/clippy/clippy_lints/src/manual_bits.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_bits.rs
@@ -8,7 +8,7 @@ use rustc_hir::{BinOpKind, Expr, ExprKind, GenericArg, QPath};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -53,32 +53,30 @@ impl<'tcx> LateLintPass<'tcx> for ManualBits {
return;
}
- if_chain! {
- if let ExprKind::Binary(bin_op, left_expr, right_expr) = expr.kind;
- if let BinOpKind::Mul = &bin_op.node;
- if !in_external_macro(cx.sess(), expr.span);
- let ctxt = expr.span.ctxt();
- if left_expr.span.ctxt() == ctxt;
- if right_expr.span.ctxt() == ctxt;
- if let Some((real_ty, resolved_ty, other_expr)) = get_one_size_of_ty(cx, left_expr, right_expr);
- if matches!(resolved_ty.kind(), ty::Int(_) | ty::Uint(_));
- if let ExprKind::Lit(lit) = &other_expr.kind;
- if let LitKind::Int(8, _) = lit.node;
- then {
- let mut app = Applicability::MachineApplicable;
- let ty_snip = snippet_with_context(cx, real_ty.span, ctxt, "..", &mut app).0;
- let sugg = create_sugg(cx, expr, format!("{ty_snip}::BITS"));
-
- span_lint_and_sugg(
- cx,
- MANUAL_BITS,
- expr.span,
- "usage of `mem::size_of::<T>()` to obtain the size of `T` in bits",
- "consider using",
- sugg,
- app,
- );
- }
+ if let ExprKind::Binary(bin_op, left_expr, right_expr) = expr.kind
+ && let BinOpKind::Mul = &bin_op.node
+ && !in_external_macro(cx.sess(), expr.span)
+ && let ctxt = expr.span.ctxt()
+ && left_expr.span.ctxt() == ctxt
+ && right_expr.span.ctxt() == ctxt
+ && let Some((real_ty, resolved_ty, other_expr)) = get_one_size_of_ty(cx, left_expr, right_expr)
+ && matches!(resolved_ty.kind(), ty::Int(_) | ty::Uint(_))
+ && let ExprKind::Lit(lit) = &other_expr.kind
+ && let LitKind::Int(8, _) = lit.node
+ {
+ let mut app = Applicability::MachineApplicable;
+ let ty_snip = snippet_with_context(cx, real_ty.span, ctxt, "..", &mut app).0;
+ let sugg = create_sugg(cx, expr, format!("{ty_snip}::BITS"));
+
+ span_lint_and_sugg(
+ cx,
+ MANUAL_BITS,
+ expr.span,
+ "usage of `mem::size_of::<T>()` to obtain the size of `T` in bits",
+ "consider using",
+ sugg,
+ app,
+ );
}
}
@@ -98,22 +96,22 @@ fn get_one_size_of_ty<'tcx>(
}
fn get_size_of_ty<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<(&'tcx rustc_hir::Ty<'tcx>, Ty<'tcx>)> {
- if_chain! {
- if let ExprKind::Call(count_func, _func_args) = expr.kind;
- if let ExprKind::Path(ref count_func_qpath) = count_func.kind;
-
- if let QPath::Resolved(_, count_func_path) = count_func_qpath;
- if let Some(segment_zero) = count_func_path.segments.first();
- if let Some(args) = segment_zero.args;
- if let Some(GenericArg::Type(real_ty)) = args.args.first();
-
- if let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::mem_size_of, def_id);
- then {
- cx.typeck_results().node_args(count_func.hir_id).types().next().map(|resolved_ty| (*real_ty, resolved_ty))
- } else {
- None
- }
+ if let ExprKind::Call(count_func, _func_args) = expr.kind
+ && let ExprKind::Path(ref count_func_qpath) = count_func.kind
+ && let QPath::Resolved(_, count_func_path) = count_func_qpath
+ && let Some(segment_zero) = count_func_path.segments.first()
+ && let Some(args) = segment_zero.args
+ && let Some(GenericArg::Type(real_ty)) = args.args.first()
+ && let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::mem_size_of, def_id)
+ {
+ cx.typeck_results()
+ .node_args(count_func.hir_id)
+ .types()
+ .next()
+ .map(|resolved_ty| (*real_ty, resolved_ty))
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/manual_clamp.rs b/src/tools/clippy/clippy_lints/src/manual_clamp.rs
index 09c90e38e..385fe387a 100644
--- a/src/tools/clippy/clippy_lints/src/manual_clamp.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_clamp.rs
@@ -14,7 +14,7 @@ use rustc_hir::def::Res;
use rustc_hir::{Arm, BinOpKind, Block, Expr, ExprKind, Guard, HirId, PatKind, PathSegment, PrimTy, QPath, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
use rustc_span::Span;
use std::ops::Deref;
diff --git a/src/tools/clippy/clippy_lints/src/manual_float_methods.rs b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs
index f923e0ac8..72cf1d7a3 100644
--- a/src/tools/clippy/clippy_lints/src/manual_float_methods.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs
@@ -6,7 +6,7 @@ use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Constness, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, Lint, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/manual_hash_one.rs b/src/tools/clippy/clippy_lints/src/manual_hash_one.rs
index 472b4eb90..252b3a83a 100644
--- a/src/tools/clippy/clippy_lints/src/manual_hash_one.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_hash_one.rs
@@ -6,7 +6,7 @@ use clippy_utils::{is_trait_method, path_to_local_id};
use rustc_errors::Applicability;
use rustc_hir::{BindingAnnotation, ExprKind, Local, Node, PatKind, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
index 468f41707..e433c5a3b 100644
--- a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
@@ -8,7 +8,7 @@ use rustc_ast::LitKind::{Byte, Char};
use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, PatKind, RangeEnd};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::DefId;
use rustc_span::{sym, Span};
diff --git a/src/tools/clippy/clippy_lints/src/manual_let_else.rs b/src/tools/clippy/clippy_lints/src/manual_let_else.rs
index 170a040d4..92dc4d57a 100644
--- a/src/tools/clippy/clippy_lints/src/manual_let_else.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_let_else.rs
@@ -5,18 +5,15 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher::IfLetOrMatch;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::visitors::{Descend, Visitable};
-use clippy_utils::{is_lint_allowed, pat_and_expr_can_be_question_mark, peel_blocks};
+use clippy_utils::{is_lint_allowed, is_never_expr, pat_and_expr_can_be_question_mark, peel_blocks};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::Applicability;
-use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::{Expr, ExprKind, HirId, ItemId, Local, MatchSource, Pat, PatKind, QPath, Stmt, StmtKind, Ty};
+use rustc_hir::{Expr, ExprKind, MatchSource, Pat, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::declare_tool_lint;
+
use rustc_span::symbol::{sym, Symbol};
use rustc_span::Span;
-use std::ops::ControlFlow;
use std::slice;
declare_clippy_lint! {
@@ -51,7 +48,7 @@ declare_clippy_lint! {
}
impl<'tcx> QuestionMark {
- pub(crate) fn check_manual_let_else(&mut self, cx: &LateContext<'_>, stmt: &'tcx Stmt<'tcx>) {
+ pub(crate) fn check_manual_let_else(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'tcx>) {
if !self.msrv.meets(msrvs::LET_ELSE) || in_external_macro(cx.sess(), stmt.span) {
return;
}
@@ -67,7 +64,7 @@ impl<'tcx> QuestionMark {
IfLetOrMatch::IfLet(if_let_expr, let_pat, if_then, if_else) => {
if let Some(ident_map) = expr_simple_identity_map(local.pat, let_pat, if_then)
&& let Some(if_else) = if_else
- && expr_diverges(cx, if_else)
+ && is_never_expr(cx, if_else).is_some()
&& let qm_allowed = is_lint_allowed(cx, QUESTION_MARK, stmt.hir_id)
&& (qm_allowed || pat_and_expr_can_be_question_mark(cx, let_pat, if_else).is_none())
{
@@ -91,10 +88,9 @@ impl<'tcx> QuestionMark {
return;
}
let check_types = self.matches_behaviour == MatchLintBehaviour::WellKnownTypes;
- let diverging_arm_opt = arms
- .iter()
- .enumerate()
- .find(|(_, arm)| expr_diverges(cx, arm.body) && pat_allowed_for_else(cx, arm.pat, check_types));
+ let diverging_arm_opt = arms.iter().enumerate().find(|(_, arm)| {
+ is_never_expr(cx, arm.body).is_some() && pat_allowed_for_else(cx, arm.pat, check_types)
+ });
let Some((idx, diverging_arm)) = diverging_arm_opt else {
return;
};
@@ -272,104 +268,6 @@ fn replace_in_pattern(
sn_pat.into_owned()
}
-/// Check whether an expression is divergent. May give false negatives.
-fn expr_diverges(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- struct V<'cx, 'tcx> {
- cx: &'cx LateContext<'tcx>,
- res: ControlFlow<(), Descend>,
- }
- impl<'tcx> Visitor<'tcx> for V<'_, '_> {
- fn visit_expr(&mut self, e: &'tcx Expr<'tcx>) {
- fn is_never(cx: &LateContext<'_>, expr: &'_ Expr<'_>) -> bool {
- if let Some(ty) = cx.typeck_results().expr_ty_opt(expr) {
- return ty.is_never();
- }
- false
- }
-
- if self.res.is_break() {
- return;
- }
-
- // We can't just call is_never on expr and be done, because the type system
- // sometimes coerces the ! type to something different before we can get
- // our hands on it. So instead, we do a manual search. We do fall back to
- // is_never in some places when there is no better alternative.
- self.res = match e.kind {
- ExprKind::Continue(_) | ExprKind::Break(_, _) | ExprKind::Ret(_) => ControlFlow::Break(()),
- ExprKind::Call(call, _) => {
- if is_never(self.cx, e) || is_never(self.cx, call) {
- ControlFlow::Break(())
- } else {
- ControlFlow::Continue(Descend::Yes)
- }
- },
- ExprKind::MethodCall(..) => {
- if is_never(self.cx, e) {
- ControlFlow::Break(())
- } else {
- ControlFlow::Continue(Descend::Yes)
- }
- },
- ExprKind::If(if_expr, if_then, if_else) => {
- let else_diverges = if_else.map_or(false, |ex| expr_diverges(self.cx, ex));
- let diverges =
- expr_diverges(self.cx, if_expr) || (else_diverges && expr_diverges(self.cx, if_then));
- if diverges {
- ControlFlow::Break(())
- } else {
- ControlFlow::Continue(Descend::No)
- }
- },
- ExprKind::Match(match_expr, match_arms, _) => {
- let diverges = expr_diverges(self.cx, match_expr)
- || match_arms.iter().all(|arm| {
- let guard_diverges = arm.guard.as_ref().map_or(false, |g| expr_diverges(self.cx, g.body()));
- guard_diverges || expr_diverges(self.cx, arm.body)
- });
- if diverges {
- ControlFlow::Break(())
- } else {
- ControlFlow::Continue(Descend::No)
- }
- },
-
- // Don't continue into loops or labeled blocks, as they are breakable,
- // and we'd have to start checking labels.
- ExprKind::Block(_, Some(_)) | ExprKind::Loop(..) => ControlFlow::Continue(Descend::No),
-
- // Default: descend
- _ => ControlFlow::Continue(Descend::Yes),
- };
- if let ControlFlow::Continue(Descend::Yes) = self.res {
- walk_expr(self, e);
- }
- }
-
- fn visit_local(&mut self, local: &'tcx Local<'_>) {
- // Don't visit the else block of a let/else statement as it will not make
- // the statement divergent even though the else block is divergent.
- if let Some(init) = local.init {
- self.visit_expr(init);
- }
- }
-
- // Avoid unnecessary `walk_*` calls.
- fn visit_ty(&mut self, _: &'tcx Ty<'tcx>) {}
- fn visit_pat(&mut self, _: &'tcx Pat<'tcx>) {}
- fn visit_qpath(&mut self, _: &'tcx QPath<'tcx>, _: HirId, _: Span) {}
- // Avoid monomorphising all `visit_*` functions.
- fn visit_nested_item(&mut self, _: ItemId) {}
- }
-
- let mut v = V {
- cx,
- res: ControlFlow::Continue(Descend::Yes),
- };
- expr.visit(&mut v);
- v.res.is_break()
-}
-
fn pat_allowed_for_else(cx: &LateContext<'_>, pat: &'_ Pat<'_>, check_types: bool) -> bool {
// Check whether the pattern contains any bindings, as the
// binding might potentially be used in the body.
diff --git a/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs b/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs
index 23f47c86f..5732bdda7 100644
--- a/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs
@@ -6,7 +6,7 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Expr, ExprKind, Mutability, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
index fc8f23630..d2ac0ad83 100644
--- a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
@@ -9,7 +9,7 @@ use rustc_errors::Applicability;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::{self as hir, Expr, ExprKind, QPath};
use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::{sym, Span};
@@ -103,7 +103,7 @@ impl EarlyLintPass for ManualNonExhaustiveStruct {
if let ast::ItemKind::Struct(variant_data, _) = &item.kind {
let (fields, delimiter) = match variant_data {
- ast::VariantData::Struct(fields, _) => (&**fields, '{'),
+ ast::VariantData::Struct { fields, .. } => (&**fields, '{'),
ast::VariantData::Tuple(fields, _) => (&**fields, '('),
ast::VariantData::Unit(_) => return,
};
@@ -118,7 +118,6 @@ impl EarlyLintPass for ManualNonExhaustiveStruct {
if let Some(Ok(field)) = iter.next()
&& iter.next().is_none()
&& field.ty.kind.is_unit()
- && field.ident.map_or(true, |name| name.as_str().starts_with('_'))
{
span_lint_and_then(
cx,
@@ -158,7 +157,6 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum {
{
let mut iter = def.variants.iter().filter_map(|v| {
(matches!(v.data, hir::VariantData::Unit(_, _))
- && v.ident.as_str().starts_with('_')
&& is_doc_hidden(cx.tcx.hir().attrs(v.hir_id))
&& !attr::contains_name(cx.tcx.hir().attrs(item.hir_id()), sym::non_exhaustive))
.then_some((v.def_id, v.span))
@@ -173,9 +171,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if let ExprKind::Path(QPath::Resolved(None, p)) = &e.kind
- && let [.., name] = p.segments
&& let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), id) = p.res
- && name.ident.as_str().starts_with('_')
{
let variant_id = cx.tcx.parent(id);
let enum_id = cx.tcx.parent(variant_id);
@@ -192,7 +188,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum {
.contains(&(enum_id.to_def_id(), variant_id.to_def_id()))
})
{
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(enum_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(enum_id);
span_lint_hir_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
diff --git a/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs b/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs
index d24bfe182..d585290f7 100644
--- a/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs
@@ -6,7 +6,7 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, PatKind, RangeEnd, UnOp};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{Span, DUMMY_SP};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
index bc8372fbd..71a83a68d 100644
--- a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, Node, TyKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -76,7 +76,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualRemEuclid {
// Also ensures the const is nonzero since zero can't be a divisor
&& const1 == const2 && const2 == const3
&& let Some(hir_id) = path_to_local(expr3)
- && let Some(Node::Pat(_)) = cx.tcx.hir().find(hir_id)
+ && let Some(Node::Pat(_)) = cx.tcx.opt_hir_node(hir_id)
{
// Apply only to params or locals with annotated types
match cx.tcx.hir().find_parent(hir_id) {
diff --git a/src/tools/clippy/clippy_lints/src/manual_retain.rs b/src/tools/clippy/clippy_lints/src/manual_retain.rs
index 2f8682d04..1fe247dac 100644
--- a/src/tools/clippy/clippy_lints/src/manual_retain.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_retain.rs
@@ -9,7 +9,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::ExprKind::Assign;
use rustc_lint::{LateContext, LateLintPass};
use rustc_semver::RustcVersion;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
const ACCEPTABLE_METHODS: [&[&str]; 5] = [
diff --git a/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs b/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
index 3b97d1659..1de686dbc 100644
--- a/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
@@ -5,7 +5,7 @@ use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/manual_string_new.rs b/src/tools/clippy/clippy_lints/src/manual_string_new.rs
index f8afae0e1..781db4b97 100644
--- a/src/tools/clippy/clippy_lints/src/manual_string_new.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_string_new.rs
@@ -4,7 +4,7 @@ use rustc_errors::Applicability::MachineApplicable;
use rustc_hir::{Expr, ExprKind, PathSegment, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, symbol, Span};
declare_clippy_lint! {
@@ -108,18 +108,16 @@ fn parse_call(cx: &LateContext<'_>, span: Span, func: &Expr<'_>, args: &[Expr<'_
let arg_kind = &args[0].kind;
if let ExprKind::Path(qpath) = &func.kind {
- if let QPath::TypeRelative(_, _) = qpath {
- // String::from(...) or String::try_from(...)
- if let QPath::TypeRelative(ty, path_seg) = qpath
- && [sym::from, sym::try_from].contains(&path_seg.ident.name)
- && let TyKind::Path(qpath) = &ty.kind
- && let QPath::Resolved(_, path) = qpath
- && let [path_seg] = path.segments
- && path_seg.ident.name == sym::String
- && is_expr_kind_empty_str(arg_kind)
- {
- warn_then_suggest(cx, span);
- }
+ // String::from(...) or String::try_from(...)
+ if let QPath::TypeRelative(ty, path_seg) = qpath
+ && [sym::from, sym::try_from].contains(&path_seg.ident.name)
+ && let TyKind::Path(qpath) = &ty.kind
+ && let QPath::Resolved(_, path) = qpath
+ && let [path_seg] = path.segments
+ && path_seg.ident.name == sym::String
+ && is_expr_kind_empty_str(arg_kind)
+ {
+ warn_then_suggest(cx, span);
} else if let QPath::Resolved(_, path) = qpath {
// From::from(...) or TryFrom::try_from(...)
if let [path_seg1, path_seg2] = path.segments
diff --git a/src/tools/clippy/clippy_lints/src/manual_strip.rs b/src/tools/clippy/clippy_lints/src/manual_strip.rs
index 9a9e6af50..7b04fd28b 100644
--- a/src/tools/clippy/clippy_lints/src/manual_strip.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_strip.rs
@@ -4,14 +4,13 @@ use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::usage::mutated_variables;
use clippy_utils::{eq_expr_value, higher, match_def_path, paths};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::Span;
@@ -71,55 +70,61 @@ impl<'tcx> LateLintPass<'tcx> for ManualStrip {
return;
}
- if_chain! {
- if let Some(higher::If { cond, then, .. }) = higher::If::hir(expr);
- if let ExprKind::MethodCall(_, target_arg, [pattern], _) = cond.kind;
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(cond.hir_id);
- if let ExprKind::Path(target_path) = &target_arg.kind;
- then {
- let strip_kind = if match_def_path(cx, method_def_id, &paths::STR_STARTS_WITH) {
- StripKind::Prefix
- } else if match_def_path(cx, method_def_id, &paths::STR_ENDS_WITH) {
- StripKind::Suffix
- } else {
- return;
- };
- let target_res = cx.qpath_res(target_path, target_arg.hir_id);
- if target_res == Res::Err {
- return;
- };
-
- if_chain! {
- if let Res::Local(hir_id) = target_res;
- if let Some(used_mutably) = mutated_variables(then, cx);
- if used_mutably.contains(&hir_id);
- then {
- return;
- }
- }
-
- let strippings = find_stripping(cx, strip_kind, target_res, pattern, then);
- if !strippings.is_empty() {
+ if let Some(higher::If { cond, then, .. }) = higher::If::hir(expr)
+ && let ExprKind::MethodCall(_, target_arg, [pattern], _) = cond.kind
+ && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(cond.hir_id)
+ && let ExprKind::Path(target_path) = &target_arg.kind
+ {
+ let strip_kind = if match_def_path(cx, method_def_id, &paths::STR_STARTS_WITH) {
+ StripKind::Prefix
+ } else if match_def_path(cx, method_def_id, &paths::STR_ENDS_WITH) {
+ StripKind::Suffix
+ } else {
+ return;
+ };
+ let target_res = cx.qpath_res(target_path, target_arg.hir_id);
+ if target_res == Res::Err {
+ return;
+ };
+
+ if let Res::Local(hir_id) = target_res
+ && let Some(used_mutably) = mutated_variables(then, cx)
+ && used_mutably.contains(&hir_id)
+ {
+ return;
+ }
- let kind_word = match strip_kind {
- StripKind::Prefix => "prefix",
- StripKind::Suffix => "suffix",
- };
+ let strippings = find_stripping(cx, strip_kind, target_res, pattern, then);
+ if !strippings.is_empty() {
+ let kind_word = match strip_kind {
+ StripKind::Prefix => "prefix",
+ StripKind::Suffix => "suffix",
+ };
- let test_span = expr.span.until(then.span);
- span_lint_and_then(cx, MANUAL_STRIP, strippings[0], &format!("stripping a {kind_word} manually"), |diag| {
+ let test_span = expr.span.until(then.span);
+ span_lint_and_then(
+ cx,
+ MANUAL_STRIP,
+ strippings[0],
+ &format!("stripping a {kind_word} manually"),
+ |diag| {
diag.span_note(test_span, format!("the {kind_word} was tested here"));
multispan_sugg(
diag,
&format!("try using the `strip_{kind_word}` method"),
- vec![(test_span,
- format!("if let Some(<stripped>) = {}.strip_{kind_word}({}) ",
- snippet(cx, target_arg.span, ".."),
- snippet(cx, pattern.span, "..")))]
- .into_iter().chain(strippings.into_iter().map(|span| (span, "<stripped>".into()))),
+ vec![(
+ test_span,
+ format!(
+ "if let Some(<stripped>) = {}.strip_{kind_word}({}) ",
+ snippet(cx, target_arg.span, ".."),
+ snippet(cx, pattern.span, "..")
+ ),
+ )]
+ .into_iter()
+ .chain(strippings.into_iter().map(|span| (span, "<stripped>".into()))),
);
- });
- }
+ },
+ );
}
}
}
@@ -129,15 +134,13 @@ impl<'tcx> LateLintPass<'tcx> for ManualStrip {
// Returns `Some(arg)` if `expr` matches `arg.len()` and `None` otherwise.
fn len_arg<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
- if_chain! {
- if let ExprKind::MethodCall(_, arg, [], _) = expr.kind;
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if match_def_path(cx, method_def_id, &paths::STR_LEN);
- then {
- Some(arg)
- } else {
- None
- }
+ if let ExprKind::MethodCall(_, arg, [], _) = expr.kind
+ && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && match_def_path(cx, method_def_id, &paths::STR_LEN)
+ {
+ Some(arg)
+ } else {
+ None
}
}
@@ -201,36 +204,38 @@ fn find_stripping<'tcx>(
impl<'a, 'tcx> Visitor<'tcx> for StrippingFinder<'a, 'tcx> {
fn visit_expr(&mut self, ex: &'tcx Expr<'_>) {
- if_chain! {
- if is_ref_str(self.cx, ex);
- let unref = peel_ref(ex);
- if let ExprKind::Index(indexed, index, _) = &unref.kind;
- if let Some(higher::Range { start, end, .. }) = higher::Range::hir(index);
- if let ExprKind::Path(path) = &indexed.kind;
- if self.cx.qpath_res(path, ex.hir_id) == self.target;
- then {
- match (self.strip_kind, start, end) {
- (StripKind::Prefix, Some(start), None) => {
- if eq_pattern_length(self.cx, self.pattern, start) {
- self.results.push(ex.span);
- return;
- }
- },
- (StripKind::Suffix, None, Some(end)) => {
- if_chain! {
- if let ExprKind::Binary(Spanned { node: BinOpKind::Sub, .. }, left, right) = end.kind;
- if let Some(left_arg) = len_arg(self.cx, left);
- if let ExprKind::Path(left_path) = &left_arg.kind;
- if self.cx.qpath_res(left_path, left_arg.hir_id) == self.target;
- if eq_pattern_length(self.cx, self.pattern, right);
- then {
- self.results.push(ex.span);
- return;
- }
- }
- },
- _ => {}
- }
+ if is_ref_str(self.cx, ex)
+ && let unref = peel_ref(ex)
+ && let ExprKind::Index(indexed, index, _) = &unref.kind
+ && let Some(higher::Range { start, end, .. }) = higher::Range::hir(index)
+ && let ExprKind::Path(path) = &indexed.kind
+ && self.cx.qpath_res(path, ex.hir_id) == self.target
+ {
+ match (self.strip_kind, start, end) {
+ (StripKind::Prefix, Some(start), None) => {
+ if eq_pattern_length(self.cx, self.pattern, start) {
+ self.results.push(ex.span);
+ return;
+ }
+ },
+ (StripKind::Suffix, None, Some(end)) => {
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Sub, ..
+ },
+ left,
+ right,
+ ) = end.kind
+ && let Some(left_arg) = len_arg(self.cx, left)
+ && let ExprKind::Path(left_path) = &left_arg.kind
+ && self.cx.qpath_res(left_path, left_arg.hir_id) == self.target
+ && eq_pattern_length(self.cx, self.pattern, right)
+ {
+ self.results.push(ex.span);
+ return;
+ }
+ },
+ _ => {},
}
}
diff --git a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
index 817d072b9..3b82c50a8 100644
--- a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
@@ -2,12 +2,11 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_context};
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{iter_input_pats, method_chain_args};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
@@ -163,16 +162,14 @@ fn unit_closure<'tcx>(
cx: &LateContext<'tcx>,
expr: &hir::Expr<'_>,
) -> Option<(&'tcx hir::Param<'tcx>, &'tcx hir::Expr<'tcx>)> {
- if_chain! {
- if let hir::ExprKind::Closure(&hir::Closure { fn_decl, body, .. }) = expr.kind;
- let body = cx.tcx.hir().body(body);
- let body_expr = &body.value;
- if fn_decl.inputs.len() == 1;
- if is_unit_expression(cx, body_expr);
- if let Some(binding) = iter_input_pats(fn_decl, body).next();
- then {
- return Some((binding, body_expr));
- }
+ if let hir::ExprKind::Closure(&hir::Closure { fn_decl, body, .. }) = expr.kind
+ && let body = cx.tcx.hir().body(body)
+ && let body_expr = &body.value
+ && fn_decl.inputs.len() == 1
+ && is_unit_expression(cx, body_expr)
+ && let Some(binding) = iter_input_pats(fn_decl, body).next()
+ {
+ return Some((binding, body_expr));
}
None
}
diff --git a/src/tools/clippy/clippy_lints/src/match_result_ok.rs b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
index 841c020f2..62cedc884 100644
--- a/src/tools/clippy/clippy_lints/src/match_result_ok.rs
+++ b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
@@ -2,11 +2,10 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{higher, is_res_lang_ctor};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, PatKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -56,33 +55,31 @@ impl<'tcx> LateLintPass<'tcx> for MatchResultOk {
return;
};
- if_chain! {
- if let ExprKind::MethodCall(ok_path, recv, [], ..) = let_expr.kind; //check is expr.ok() has type Result<T,E>.ok(, _)
- if let PatKind::TupleStruct(ref pat_path, [ok_pat], _) = let_pat.kind; //get operation
- if ok_path.ident.as_str() == "ok";
- if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
- if is_res_lang_ctor(cx, cx.qpath_res(pat_path, let_pat.hir_id), LangItem::OptionSome);
- let ctxt = expr.span.ctxt();
- if let_expr.span.ctxt() == ctxt;
- if let_pat.span.ctxt() == ctxt;
- then {
- let mut applicability = Applicability::MachineApplicable;
- let some_expr_string = snippet_with_context(cx, ok_pat.span, ctxt, "", &mut applicability).0;
- let trimmed_ok = snippet_with_context(cx, recv.span, ctxt, "", &mut applicability).0;
- let sugg = format!(
- "{ifwhile} let Ok({some_expr_string}) = {}",
- trimmed_ok.trim().trim_end_matches('.'),
- );
- span_lint_and_sugg(
- cx,
- MATCH_RESULT_OK,
- expr.span.with_hi(let_expr.span.hi()),
- "matching on `Some` with `ok()` is redundant",
- &format!("consider matching on `Ok({some_expr_string})` and removing the call to `ok` instead"),
- sugg,
- applicability,
- );
- }
+ if let ExprKind::MethodCall(ok_path, recv, [], ..) = let_expr.kind //check is expr.ok() has type Result<T,E>.ok(, _)
+ && let PatKind::TupleStruct(ref pat_path, [ok_pat], _) = let_pat.kind //get operation
+ && ok_path.ident.as_str() == "ok"
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result)
+ && is_res_lang_ctor(cx, cx.qpath_res(pat_path, let_pat.hir_id), LangItem::OptionSome)
+ && let ctxt = expr.span.ctxt()
+ && let_expr.span.ctxt() == ctxt
+ && let_pat.span.ctxt() == ctxt
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let some_expr_string = snippet_with_context(cx, ok_pat.span, ctxt, "", &mut applicability).0;
+ let trimmed_ok = snippet_with_context(cx, recv.span, ctxt, "", &mut applicability).0;
+ let sugg = format!(
+ "{ifwhile} let Ok({some_expr_string}) = {}",
+ trimmed_ok.trim().trim_end_matches('.'),
+ );
+ span_lint_and_sugg(
+ cx,
+ MATCH_RESULT_OK,
+ expr.span.with_hi(let_expr.span.hi()),
+ "matching on `Some` with `ok()` is redundant",
+ &format!("consider matching on `Ok({some_expr_string})` and removing the call to `ok` instead"),
+ sugg,
+ applicability,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs b/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs
index 29b935fb6..48fc5746b 100644
--- a/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs
@@ -5,7 +5,6 @@ use clippy_utils::visitors::is_local_used;
use clippy_utils::{
is_res_lang_ctor, is_unit_expr, path_to_local, peel_blocks_with_stmt, peel_ref_operators, SpanlessEq,
};
-use if_chain::if_chain;
use rustc_errors::MultiSpan;
use rustc_hir::LangItem::OptionNone;
use rustc_hir::{Arm, Expr, Guard, HirId, Let, Pat, PatKind};
@@ -40,76 +39,73 @@ fn check_arm<'tcx>(
outer_else_body: Option<&'tcx Expr<'tcx>>,
) {
let inner_expr = peel_blocks_with_stmt(outer_then_body);
- if_chain! {
- if let Some(inner) = IfLetOrMatch::parse(cx, inner_expr);
- if let Some((inner_scrutinee, inner_then_pat, inner_else_body)) = match inner {
+ if let Some(inner) = IfLetOrMatch::parse(cx, inner_expr)
+ && let Some((inner_scrutinee, inner_then_pat, inner_else_body)) = match inner {
IfLetOrMatch::IfLet(scrutinee, pat, _, els) => Some((scrutinee, pat, els)),
- IfLetOrMatch::Match(scrutinee, arms, ..) => if_chain! {
+ IfLetOrMatch::Match(scrutinee, arms, ..) => if arms.len() == 2 && arms.iter().all(|a| a.guard.is_none())
// if there are more than two arms, collapsing would be non-trivial
- if arms.len() == 2 && arms.iter().all(|a| a.guard.is_none());
// one of the arms must be "wild-like"
- if let Some(wild_idx) = arms.iter().rposition(|a| arm_is_wild_like(cx, a));
- then {
- let (then, els) = (&arms[1 - wild_idx], &arms[wild_idx]);
- Some((scrutinee, then.pat, Some(els.body)))
- } else {
- None
- }
+ && let Some(wild_idx) = arms.iter().rposition(|a| arm_is_wild_like(cx, a))
+ {
+ let (then, els) = (&arms[1 - wild_idx], &arms[wild_idx]);
+ Some((scrutinee, then.pat, Some(els.body)))
+ } else {
+ None
},
- };
- if outer_pat.span.eq_ctxt(inner_scrutinee.span);
+ }
+ && outer_pat.span.eq_ctxt(inner_scrutinee.span)
// match expression must be a local binding
// match <local> { .. }
- if let Some(binding_id) = path_to_local(peel_ref_operators(cx, inner_scrutinee));
- if !pat_contains_or(inner_then_pat);
+ && let Some(binding_id) = path_to_local(peel_ref_operators(cx, inner_scrutinee))
+ && !pat_contains_or(inner_then_pat)
// the binding must come from the pattern of the containing match arm
// ..<local>.. => match <local> { .. }
- if let (Some(binding_span), is_innermost_parent_pat_struct)
- = find_pat_binding_and_is_innermost_parent_pat_struct(outer_pat, binding_id);
+ && let (Some(binding_span), is_innermost_parent_pat_struct)
+ = find_pat_binding_and_is_innermost_parent_pat_struct(outer_pat, binding_id)
// the "else" branches must be equal
- if match (outer_else_body, inner_else_body) {
+ && match (outer_else_body, inner_else_body) {
(None, None) => true,
(None, Some(e)) | (Some(e), None) => is_unit_expr(e),
(Some(a), Some(b)) => SpanlessEq::new(cx).eq_expr(a, b),
- };
+ }
// the binding must not be used in the if guard
- if outer_guard.map_or(
+ && outer_guard.map_or(
true,
|(Guard::If(e) | Guard::IfLet(Let { init: e, .. }))| !is_local_used(cx, *e, binding_id)
- );
+ )
// ...or anywhere in the inner expression
- if match inner {
+ && match inner {
IfLetOrMatch::IfLet(_, _, body, els) => {
!is_local_used(cx, body, binding_id) && els.map_or(true, |e| !is_local_used(cx, e, binding_id))
},
IfLetOrMatch::Match(_, arms, ..) => !arms.iter().any(|arm| is_local_used(cx, arm, binding_id)),
- };
- then {
- let msg = format!(
- "this `{}` can be collapsed into the outer `{}`",
- if matches!(inner, IfLetOrMatch::Match(..)) { "match" } else { "if let" },
- if outer_is_match { "match" } else { "if let" },
- );
- // collapsing patterns need an explicit field name in struct pattern matching
- // ex: Struct {x: Some(1)}
- let replace_msg = if is_innermost_parent_pat_struct {
- format!(", prefixed by {}:", snippet(cx, binding_span, "their field name"))
+ }
+ {
+ let msg = format!(
+ "this `{}` can be collapsed into the outer `{}`",
+ if matches!(inner, IfLetOrMatch::Match(..)) {
+ "match"
} else {
- String::new()
- };
- span_lint_and_then(
- cx,
- COLLAPSIBLE_MATCH,
- inner_expr.span,
- &msg,
- |diag| {
- let mut help_span = MultiSpan::from_spans(vec![binding_span, inner_then_pat.span]);
- help_span.push_span_label(binding_span, "replace this binding");
- help_span.push_span_label(inner_then_pat.span, format!("with this pattern{replace_msg}"));
- diag.span_help(help_span, "the outer pattern can be modified to include the inner pattern");
- },
+ "if let"
+ },
+ if outer_is_match { "match" } else { "if let" },
+ );
+ // collapsing patterns need an explicit field name in struct pattern matching
+ // ex: Struct {x: Some(1)}
+ let replace_msg = if is_innermost_parent_pat_struct {
+ format!(", prefixed by {}:", snippet(cx, binding_span, "their field name"))
+ } else {
+ String::new()
+ };
+ span_lint_and_then(cx, COLLAPSIBLE_MATCH, inner_expr.span, &msg, |diag| {
+ let mut help_span = MultiSpan::from_spans(vec![binding_span, inner_then_pat.span]);
+ help_span.push_span_label(binding_span, "replace this binding");
+ help_span.push_span_label(inner_then_pat.span, format!("with this pattern{replace_msg}"));
+ diag.span_help(
+ help_span,
+ "the outer pattern can be modified to include the inner pattern",
);
- }
+ });
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
index 3329f93b7..c8a48246e 100644
--- a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
@@ -8,38 +8,35 @@ use rustc_lint::LateContext;
use super::INFALLIBLE_DESTRUCTURING_MATCH;
pub(crate) fn check(cx: &LateContext<'_>, local: &Local<'_>) -> bool {
- if_chain! {
- if !local.span.from_expansion();
- if let Some(expr) = local.init;
- if let ExprKind::Match(target, arms, MatchSource::Normal) = expr.kind;
- if arms.len() == 1 && arms[0].guard.is_none();
- if let PatKind::TupleStruct(
- QPath::Resolved(None, variant_name), args, _) = arms[0].pat.kind;
- if args.len() == 1;
- if let PatKind::Binding(binding, arg, ..) = strip_pat_refs(&args[0]).kind;
- let body = peel_blocks(arms[0].body);
- if path_to_local_id(body, arg);
-
- then {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- INFALLIBLE_DESTRUCTURING_MATCH,
- local.span,
- "you seem to be trying to use `match` to destructure a single infallible pattern. \
- Consider using `let`",
- "try",
- format!(
- "let {}({}{}) = {};",
- snippet_with_applicability(cx, variant_name.span, "..", &mut applicability),
- if binding.0 == ByRef::Yes { "ref " } else { "" },
- snippet_with_applicability(cx, local.pat.span, "..", &mut applicability),
- snippet_with_applicability(cx, target.span, "..", &mut applicability),
- ),
- applicability,
- );
- return true;
- }
+ if !local.span.from_expansion()
+ && let Some(expr) = local.init
+ && let ExprKind::Match(target, arms, MatchSource::Normal) = expr.kind
+ && arms.len() == 1
+ && arms[0].guard.is_none()
+ && let PatKind::TupleStruct(QPath::Resolved(None, variant_name), args, _) = arms[0].pat.kind
+ && args.len() == 1
+ && let PatKind::Binding(binding, arg, ..) = strip_pat_refs(&args[0]).kind
+ && let body = peel_blocks(arms[0].body)
+ && path_to_local_id(body, arg)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ INFALLIBLE_DESTRUCTURING_MATCH,
+ local.span,
+ "you seem to be trying to use `match` to destructure a single infallible pattern. \
+ Consider using `let`",
+ "try",
+ format!(
+ "let {}({}{}) = {};",
+ snippet_with_applicability(cx, variant_name.span, "..", &mut applicability),
+ if binding.0 == ByRef::Yes { "ref " } else { "" },
+ snippet_with_applicability(cx, local.pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, target.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+ return true;
}
false
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs
index cdb51c33a..619ec8312 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs
@@ -21,19 +21,19 @@ fn get_cond_expr<'tcx>(
expr: &'tcx Expr<'_>,
ctxt: SyntaxContext,
) -> Option<SomeExpr<'tcx>> {
- if_chain! {
- if let Some(block_expr) = peels_blocks_incl_unsafe_opt(expr);
- if let ExprKind::If(cond, then_expr, Some(else_expr)) = block_expr.kind;
- if let PatKind::Binding(_,target, ..) = pat.kind;
- if is_some_expr(cx, target, ctxt, then_expr) && is_none_expr(cx, else_expr)
- || is_none_expr(cx, then_expr) && is_some_expr(cx, target, ctxt, else_expr); // check that one expr resolves to `Some(x)`, the other to `None`
- then {
- return Some(SomeExpr {
- expr: peels_blocks_incl_unsafe(cond.peel_drop_temps()),
- needs_unsafe_block: contains_unsafe_block(cx, expr),
- needs_negated: is_none_expr(cx, then_expr) // if the `then_expr` resolves to `None`, need to negate the cond
- })
- }
+ if let Some(block_expr) = peels_blocks_incl_unsafe_opt(expr)
+ && let ExprKind::If(cond, then_expr, Some(else_expr)) = block_expr.kind
+ && let PatKind::Binding(_, target, ..) = pat.kind
+ && (is_some_expr(cx, target, ctxt, then_expr) && is_none_expr(cx, else_expr)
+ || is_none_expr(cx, then_expr) && is_some_expr(cx, target, ctxt, else_expr))
+ // check that one expr resolves to `Some(x)`, the other to `None`
+ {
+ return Some(SomeExpr {
+ expr: peels_blocks_incl_unsafe(cond.peel_drop_temps()),
+ needs_unsafe_block: contains_unsafe_block(cx, expr),
+ needs_negated: is_none_expr(cx, then_expr), /* if the `then_expr` resolves to `None`, need to negate the
+ * cond */
+ });
};
None
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
index b94501bf0..3e79cabd7 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
@@ -4,7 +4,6 @@ use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::usage::contains_return_break_continue_macro;
use clippy_utils::{is_res_lang_ctor, path_to_local_id, sugg};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::LangItem::{OptionNone, ResultErr};
@@ -16,65 +15,57 @@ use super::MANUAL_UNWRAP_OR;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, scrutinee: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) {
let ty = cx.typeck_results().expr_ty(scrutinee);
- if_chain! {
- if let Some(ty_name) = if is_type_diagnostic_item(cx, ty, sym::Option) {
- Some("Option")
- } else if is_type_diagnostic_item(cx, ty, sym::Result) {
- Some("Result")
- } else {
- None
- };
- if let Some(or_arm) = applicable_or_arm(cx, arms);
- if let Some(or_body_snippet) = snippet_opt(cx, or_arm.body.span);
- if let Some(indent) = indent_of(cx, expr.span);
- if constant_simple(cx, cx.typeck_results(), or_arm.body).is_some();
- then {
- let reindented_or_body =
- reindent_multiline(or_body_snippet.into(), true, Some(indent));
+ if let Some(ty_name) = if is_type_diagnostic_item(cx, ty, sym::Option) {
+ Some("Option")
+ } else if is_type_diagnostic_item(cx, ty, sym::Result) {
+ Some("Result")
+ } else {
+ None
+ } && let Some(or_arm) = applicable_or_arm(cx, arms)
+ && let Some(or_body_snippet) = snippet_opt(cx, or_arm.body.span)
+ && let Some(indent) = indent_of(cx, expr.span)
+ && constant_simple(cx, cx.typeck_results(), or_arm.body).is_some()
+ {
+ let reindented_or_body = reindent_multiline(or_body_snippet.into(), true, Some(indent));
- let mut app = Applicability::MachineApplicable;
- let suggestion = sugg::Sugg::hir_with_context(cx, scrutinee, expr.span.ctxt(), "..", &mut app).maybe_par();
- span_lint_and_sugg(
- cx,
- MANUAL_UNWRAP_OR, expr.span,
- &format!("this pattern reimplements `{ty_name}::unwrap_or`"),
- "replace with",
- format!(
- "{suggestion}.unwrap_or({reindented_or_body})",
- ),
- app,
- );
- }
+ let mut app = Applicability::MachineApplicable;
+ let suggestion = sugg::Sugg::hir_with_context(cx, scrutinee, expr.span.ctxt(), "..", &mut app).maybe_par();
+ span_lint_and_sugg(
+ cx,
+ MANUAL_UNWRAP_OR,
+ expr.span,
+ &format!("this pattern reimplements `{ty_name}::unwrap_or`"),
+ "replace with",
+ format!("{suggestion}.unwrap_or({reindented_or_body})",),
+ app,
+ );
}
}
fn applicable_or_arm<'a>(cx: &LateContext<'_>, arms: &'a [Arm<'a>]) -> Option<&'a Arm<'a>> {
- if_chain! {
- if arms.len() == 2;
- if arms.iter().all(|arm| arm.guard.is_none());
- if let Some((idx, or_arm)) = arms.iter().enumerate().find(|(_, arm)| {
- match arm.pat.kind {
- PatKind::Path(ref qpath) => is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), OptionNone),
- PatKind::TupleStruct(ref qpath, [pat], _) =>
- matches!(pat.kind, PatKind::Wild)
- && is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), ResultErr),
- _ => false,
- }
- });
- let unwrap_arm = &arms[1 - idx];
- if let PatKind::TupleStruct(ref qpath, [unwrap_pat], _) = unwrap_arm.pat.kind;
- if let Res::Def(DefKind::Ctor(..), ctor_id) = cx.qpath_res(qpath, unwrap_arm.pat.hir_id);
- if let Some(variant_id) = cx.tcx.opt_parent(ctor_id);
- if cx.tcx.lang_items().option_some_variant() == Some(variant_id)
- || cx.tcx.lang_items().result_ok_variant() == Some(variant_id);
- if let PatKind::Binding(_, binding_hir_id, ..) = unwrap_pat.kind;
- if path_to_local_id(unwrap_arm.body, binding_hir_id);
- if cx.typeck_results().expr_adjustments(unwrap_arm.body).is_empty();
- if !contains_return_break_continue_macro(or_arm.body);
- then {
- Some(or_arm)
- } else {
- None
- }
+ if arms.len() == 2
+ && arms.iter().all(|arm| arm.guard.is_none())
+ && let Some((idx, or_arm)) = arms.iter().enumerate().find(|(_, arm)| match arm.pat.kind {
+ PatKind::Path(ref qpath) => is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), OptionNone),
+ PatKind::TupleStruct(ref qpath, [pat], _) => {
+ matches!(pat.kind, PatKind::Wild)
+ && is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), ResultErr)
+ },
+ _ => false,
+ })
+ && let unwrap_arm = &arms[1 - idx]
+ && let PatKind::TupleStruct(ref qpath, [unwrap_pat], _) = unwrap_arm.pat.kind
+ && let Res::Def(DefKind::Ctor(..), ctor_id) = cx.qpath_res(qpath, unwrap_arm.pat.hir_id)
+ && let Some(variant_id) = cx.tcx.opt_parent(ctor_id)
+ && (cx.tcx.lang_items().option_some_variant() == Some(variant_id)
+ || cx.tcx.lang_items().result_ok_variant() == Some(variant_id))
+ && let PatKind::Binding(_, binding_hir_id, ..) = unwrap_pat.kind
+ && path_to_local_id(unwrap_arm.body, binding_hir_id)
+ && cx.typeck_results().expr_adjustments(unwrap_arm.body).is_empty()
+ && !contains_return_break_continue_macro(or_arm.body)
+ {
+ Some(or_arm)
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs b/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs
index 781ee138c..0627e458d 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs
@@ -127,32 +127,30 @@ where
let closure_expr_snip = some_expr.to_snippet_with_context(cx, expr_ctxt, &mut app);
let body_str = if let PatKind::Binding(annotation, id, some_binding, None) = some_pat.kind {
- if_chain! {
- if !some_expr.needs_unsafe_block;
- if let Some(func) = can_pass_as_func(cx, id, some_expr.expr);
- if func.span.eq_ctxt(some_expr.expr.span);
- then {
- snippet_with_applicability(cx, func.span, "..", &mut app).into_owned()
- } else {
- if path_to_local_id(some_expr.expr, id)
- && !is_lint_allowed(cx, MATCH_AS_REF, expr.hir_id)
- && binding_ref.is_some()
- {
- return None;
- }
+ if !some_expr.needs_unsafe_block
+ && let Some(func) = can_pass_as_func(cx, id, some_expr.expr)
+ && func.span.eq_ctxt(some_expr.expr.span)
+ {
+ snippet_with_applicability(cx, func.span, "..", &mut app).into_owned()
+ } else {
+ if path_to_local_id(some_expr.expr, id)
+ && !is_lint_allowed(cx, MATCH_AS_REF, expr.hir_id)
+ && binding_ref.is_some()
+ {
+ return None;
+ }
- // `ref` and `ref mut` annotations were handled earlier.
- let annotation = if matches!(annotation, BindingAnnotation::MUT) {
- "mut "
- } else {
- ""
- };
+ // `ref` and `ref mut` annotations were handled earlier.
+ let annotation = if matches!(annotation, BindingAnnotation::MUT) {
+ "mut "
+ } else {
+ ""
+ };
- if some_expr.needs_unsafe_block {
- format!("|{annotation}{some_binding}| unsafe {{ {closure_expr_snip} }}")
- } else {
- format!("|{annotation}{some_binding}| {closure_expr_snip}")
- }
+ if some_expr.needs_unsafe_block {
+ format!("|{annotation}{some_binding}| unsafe {{ {closure_expr_snip} }}")
+ } else {
+ format!("|{annotation}{some_binding}| {closure_expr_snip}")
}
}
} else if !is_wild_none && explicit_ref.is_none() {
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
index d51cca040..3f737da92 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
@@ -26,18 +26,16 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr:
let output_ty = cx.typeck_results().expr_ty(expr);
let input_ty = cx.typeck_results().expr_ty(ex);
- let cast = if_chain! {
- if let ty::Adt(_, args) = input_ty.kind();
- let input_ty = args.type_at(0);
- if let ty::Adt(_, args) = output_ty.kind();
- let output_ty = args.type_at(0);
- if let ty::Ref(_, output_ty, _) = *output_ty.kind();
- if input_ty != output_ty;
- then {
- ".map(|x| x as _)"
- } else {
- ""
- }
+ let cast = if let ty::Adt(_, args) = input_ty.kind()
+ && let input_ty = args.type_at(0)
+ && let ty::Adt(_, args) = output_ty.kind()
+ && let output_ty = args.type_at(0)
+ && let ty::Ref(_, output_ty, _) = *output_ty.kind()
+ && input_ty != output_ty
+ {
+ ".map(|x| x as _)"
+ } else {
+ ""
};
let mut applicability = Applicability::MachineApplicable;
@@ -67,17 +65,16 @@ fn is_none_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
// Checks if arm has the form `Some(ref v) => Some(v)` (checks for `ref` and `ref mut`)
fn is_ref_some_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> Option<Mutability> {
- if_chain! {
- if let PatKind::TupleStruct(ref qpath, [first_pat, ..], _) = arm.pat.kind;
- if is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), LangItem::OptionSome);
- if let PatKind::Binding(BindingAnnotation(ByRef::Yes, mutabl), .., ident, _) = first_pat.kind;
- if let ExprKind::Call(e, [arg]) = peel_blocks(arm.body).kind;
- if is_res_lang_ctor(cx, path_res(cx, e), LangItem::OptionSome);
- if let ExprKind::Path(QPath::Resolved(_, path2)) = arg.kind;
- if path2.segments.len() == 1 && ident.name == path2.segments[0].ident.name;
- then {
- return Some(mutabl)
- }
+ if let PatKind::TupleStruct(ref qpath, [first_pat, ..], _) = arm.pat.kind
+ && is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), LangItem::OptionSome)
+ && let PatKind::Binding(BindingAnnotation(ByRef::Yes, mutabl), .., ident, _) = first_pat.kind
+ && let ExprKind::Call(e, [arg]) = peel_blocks(arm.body).kind
+ && is_res_lang_ctor(cx, path_res(cx, e), LangItem::OptionSome)
+ && let ExprKind::Path(QPath::Resolved(_, path2)) = arg.kind
+ && path2.segments.len() == 1
+ && ident.name == path2.segments[0].ident.name
+ {
+ return Some(mutabl);
}
None
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
index e2ddf11ab..56123326f 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
@@ -76,79 +76,80 @@ where
),
>,
{
- if_chain! {
- if !span_contains_comment(cx.sess().source_map(), expr.span);
- if iter.len() >= 2;
- if cx.typeck_results().expr_ty(expr).is_bool();
- if let Some((_, last_pat_opt, last_expr, _)) = iter.next_back();
- let iter_without_last = iter.clone();
- if let Some((first_attrs, _, first_expr, first_guard)) = iter.next();
- if let Some(b0) = find_bool_lit(&first_expr.kind);
- if let Some(b1) = find_bool_lit(&last_expr.kind);
- if b0 != b1;
- if first_guard.is_none() || iter.len() == 0;
- if first_attrs.is_empty();
- if iter
- .all(|arm| {
- find_bool_lit(&arm.2.kind).map_or(false, |b| b == b0) && arm.3.is_none() && arm.0.is_empty()
- });
- then {
- if let Some(last_pat) = last_pat_opt {
- if !is_wild(last_pat) {
- return false;
- }
+ if !span_contains_comment(cx.sess().source_map(), expr.span)
+ && iter.len() >= 2
+ && cx.typeck_results().expr_ty(expr).is_bool()
+ && let Some((_, last_pat_opt, last_expr, _)) = iter.next_back()
+ && let iter_without_last = iter.clone()
+ && let Some((first_attrs, _, first_expr, first_guard)) = iter.next()
+ && let Some(b0) = find_bool_lit(&first_expr.kind)
+ && let Some(b1) = find_bool_lit(&last_expr.kind)
+ && b0 != b1
+ && (first_guard.is_none() || iter.len() == 0)
+ && first_attrs.is_empty()
+ && iter.all(|arm| find_bool_lit(&arm.2.kind).map_or(false, |b| b == b0) && arm.3.is_none() && arm.0.is_empty())
+ {
+ if let Some(last_pat) = last_pat_opt {
+ if !is_wild(last_pat) {
+ return false;
}
+ }
- for arm in iter_without_last.clone() {
- if let Some(pat) = arm.1 {
- if !is_lint_allowed(cx, REDUNDANT_PATTERN_MATCHING, pat.hir_id) && is_some(pat.kind) {
- return false;
- }
+ for arm in iter_without_last.clone() {
+ if let Some(pat) = arm.1 {
+ if !is_lint_allowed(cx, REDUNDANT_PATTERN_MATCHING, pat.hir_id) && is_some(pat.kind) {
+ return false;
}
}
+ }
- // The suggestion may be incorrect, because some arms can have `cfg` attributes
- // evaluated into `false` and so such arms will be stripped before.
- let mut applicability = Applicability::MaybeIncorrect;
- let pat = {
- use itertools::Itertools as _;
- iter_without_last
- .filter_map(|arm| {
- let pat_span = arm.1?.span;
- Some(snippet_with_applicability(cx, pat_span, "..", &mut applicability))
- })
- .join(" | ")
- };
- let pat_and_guard = if let Some(Guard::If(g)) = first_guard {
- format!("{pat} if {}", snippet_with_applicability(cx, g.span, "..", &mut applicability))
- } else {
- pat
- };
-
- // strip potential borrows (#6503), but only if the type is a reference
- let mut ex_new = ex;
- if let ExprKind::AddrOf(BorrowKind::Ref, .., ex_inner) = ex.kind {
- if let ty::Ref(..) = cx.typeck_results().expr_ty(ex_inner).kind() {
- ex_new = ex_inner;
- }
- };
- span_lint_and_sugg(
- cx,
- MATCH_LIKE_MATCHES_MACRO,
- expr.span,
- &format!("{} expression looks like `matches!` macro", if is_if_let { "if let .. else" } else { "match" }),
- "try",
- format!(
- "{}matches!({}, {pat_and_guard})",
- if b0 { "" } else { "!" },
- snippet_with_applicability(cx, ex_new.span, "..", &mut applicability),
- ),
- applicability,
- );
- true
+ // The suggestion may be incorrect, because some arms can have `cfg` attributes
+ // evaluated into `false` and so such arms will be stripped before.
+ let mut applicability = Applicability::MaybeIncorrect;
+ let pat = {
+ use itertools::Itertools as _;
+ iter_without_last
+ .filter_map(|arm| {
+ let pat_span = arm.1?.span;
+ Some(snippet_with_applicability(cx, pat_span, "..", &mut applicability))
+ })
+ .join(" | ")
+ };
+ let pat_and_guard = if let Some(Guard::If(g)) = first_guard {
+ format!(
+ "{pat} if {}",
+ snippet_with_applicability(cx, g.span, "..", &mut applicability)
+ )
} else {
- false
- }
+ pat
+ };
+
+ // strip potential borrows (#6503), but only if the type is a reference
+ let mut ex_new = ex;
+ if let ExprKind::AddrOf(BorrowKind::Ref, .., ex_inner) = ex.kind {
+ if let ty::Ref(..) = cx.typeck_results().expr_ty(ex_inner).kind() {
+ ex_new = ex_inner;
+ }
+ };
+ span_lint_and_sugg(
+ cx,
+ MATCH_LIKE_MATCHES_MACRO,
+ expr.span,
+ &format!(
+ "{} expression looks like `matches!` macro",
+ if is_if_let { "if let .. else" } else { "match" }
+ ),
+ "try",
+ format!(
+ "{}matches!({}, {pat_and_guard})",
+ if b0 { "" } else { "!" },
+ snippet_with_applicability(cx, ex_new.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+ true
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
index bd53ebd48..dd71560e1 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem};
use rustc_lint::LateContext;
@@ -10,39 +9,29 @@ use rustc_span::sym;
use super::MATCH_ON_VEC_ITEMS;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) {
- if_chain! {
- if let Some(idx_expr) = is_vec_indexing(cx, scrutinee);
- if let ExprKind::Index(vec, idx, _) = idx_expr.kind;
-
- then {
- // FIXME: could be improved to suggest surrounding every pattern with Some(_),
- // but only when `or_patterns` are stabilized.
- span_lint_and_sugg(
- cx,
- MATCH_ON_VEC_ITEMS,
- scrutinee.span,
- "indexing into a vector may panic",
- "try",
- format!(
- "{}.get({})",
- snippet(cx, vec.span, ".."),
- snippet(cx, idx.span, "..")
- ),
- Applicability::MaybeIncorrect
- );
- }
+ if let Some(idx_expr) = is_vec_indexing(cx, scrutinee)
+ && let ExprKind::Index(vec, idx, _) = idx_expr.kind
+ {
+ // FIXME: could be improved to suggest surrounding every pattern with Some(_),
+ // but only when `or_patterns` are stabilized.
+ span_lint_and_sugg(
+ cx,
+ MATCH_ON_VEC_ITEMS,
+ scrutinee.span,
+ "indexing into a vector may panic",
+ "try",
+ format!("{}.get({})", snippet(cx, vec.span, ".."), snippet(cx, idx.span, "..")),
+ Applicability::MaybeIncorrect,
+ );
}
}
fn is_vec_indexing<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
- if_chain! {
- if let ExprKind::Index(array, index, _) = expr.kind;
- if is_vector(cx, array);
- if !is_full_range(cx, index);
-
- then {
- return Some(expr);
- }
+ if let ExprKind::Index(array, index, _) = expr.kind
+ && is_vector(cx, array)
+ && !is_full_range(cx, index)
+ {
+ return Some(expr);
}
None
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
index 6fc79fadd..c823d07e2 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
@@ -1,4 +1,4 @@
-use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::source::snippet;
use clippy_utils::{is_lint_allowed, path_to_local, search_same, SpanlessEq, SpanlessHash};
use core::cmp::Ordering;
@@ -66,25 +66,23 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
let mut local_map: HirIdMap<HirId> = HirIdMap::default();
let eq_fallback = |a: &Expr<'_>, b: &Expr<'_>| {
- if_chain! {
- if let Some(a_id) = path_to_local(a);
- if let Some(b_id) = path_to_local(b);
- let entry = match local_map.entry(a_id) {
+ if let Some(a_id) = path_to_local(a)
+ && let Some(b_id) = path_to_local(b)
+ && let entry = match local_map.entry(a_id) {
HirIdMapEntry::Vacant(entry) => entry,
// check if using the same bindings as before
HirIdMapEntry::Occupied(entry) => return *entry.get() == b_id,
- };
- // the names technically don't have to match; this makes the lint more conservative
- if cx.tcx.hir().name(a_id) == cx.tcx.hir().name(b_id);
- if cx.typeck_results().expr_ty(a) == cx.typeck_results().expr_ty(b);
- if pat_contains_local(lhs.pat, a_id);
- if pat_contains_local(rhs.pat, b_id);
- then {
- entry.insert(b_id);
- true
- } else {
- false
}
+ // the names technically don't have to match; this makes the lint more conservative
+ && cx.tcx.hir().name(a_id) == cx.tcx.hir().name(b_id)
+ && cx.typeck_results().expr_ty(a) == cx.typeck_results().expr_ty(b)
+ && pat_contains_local(lhs.pat, a_id)
+ && pat_contains_local(rhs.pat, b_id)
+ {
+ entry.insert(b_id);
+ true
+ } else {
+ false
}
};
// Arms with a guard are ignored, those can’t always be merged together
@@ -106,9 +104,10 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
if !cx.tcx.features().non_exhaustive_omitted_patterns_lint
|| is_lint_allowed(cx, NON_EXHAUSTIVE_OMITTED_PATTERNS, arm2.hir_id)
{
- span_lint_and_then(
+ span_lint_hir_and_then(
cx,
MATCH_SAME_ARMS,
+ arm1.hir_id,
arm1.span,
"this match arm has an identical body to the `_` wildcard arm",
|diag| {
@@ -126,9 +125,10 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
(arm2, arm1)
};
- span_lint_and_then(
+ span_lint_hir_and_then(
cx,
MATCH_SAME_ARMS,
+ keep_arm.hir_id,
keep_arm.span,
"this match arm has an identical body to another arm",
|diag| {
@@ -152,6 +152,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
#[derive(Clone, Copy)]
enum NormalizedPat<'a> {
Wild,
+ Never,
Struct(Option<DefId>, &'a [(Symbol, Self)]),
Tuple(Option<DefId>, &'a [Self]),
Or(&'a [Self]),
@@ -223,7 +224,7 @@ fn iter_matching_struct_fields<'a>(
Iter(left.iter(), right.iter())
}
-#[expect(clippy::similar_names)]
+#[expect(clippy::similar_names, clippy::too_many_lines)]
impl<'a> NormalizedPat<'a> {
fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self {
match pat.kind {
@@ -231,6 +232,7 @@ impl<'a> NormalizedPat<'a> {
PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) | PatKind::Ref(pat, _) => {
Self::from_pat(cx, arena, pat)
},
+ PatKind::Never => Self::Never,
PatKind::Struct(ref path, fields, _) => {
let fields =
arena.alloc_from_iter(fields.iter().map(|f| (f.ident.name, Self::from_pat(cx, arena, f.pat))));
@@ -334,7 +336,7 @@ impl<'a> NormalizedPat<'a> {
/// type.
fn has_overlapping_values(&self, other: &Self) -> bool {
match (*self, *other) {
- (Self::Wild, _) | (_, Self::Wild) => true,
+ (Self::Wild, _) | (_, Self::Wild) | (Self::Never, Self::Never) => true,
(Self::Or(pats), ref other) | (ref other, Self::Or(pats)) => {
pats.iter().any(|pat| pat.has_overlapping_values(other))
},
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs b/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs
index 675a85ae5..bd38648bc 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs
@@ -20,21 +20,16 @@ enum CaseMethod {
}
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) {
- if_chain! {
- if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty(scrutinee).kind();
- if let ty::Str = ty.kind();
- then {
- let mut visitor = MatchExprVisitor {
- cx,
- case_method: None,
- };
-
- visitor.visit_expr(scrutinee);
-
- if let Some(case_method) = visitor.case_method {
- if let Some((bad_case_span, bad_case_sym)) = verify_case(&case_method, arms) {
- lint(cx, &case_method, bad_case_span, bad_case_sym.as_str());
- }
+ if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty(scrutinee).kind()
+ && let ty::Str = ty.kind()
+ {
+ let mut visitor = MatchExprVisitor { cx, case_method: None };
+
+ visitor.visit_expr(scrutinee);
+
+ if let Some(case_method) = visitor.case_method {
+ if let Some((bad_case_span, bad_case_sym)) = verify_case(&case_method, arms) {
+ lint(cx, &case_method, bad_case_span, bad_case_sym.as_str());
}
}
}
@@ -88,17 +83,15 @@ fn verify_case<'a>(case_method: &'a CaseMethod, arms: &'a [Arm<'_>]) -> Option<(
};
for arm in arms {
- if_chain! {
- if let PatKind::Lit(Expr {
- kind: ExprKind::Lit(lit),
- ..
- }) = arm.pat.kind;
- if let LitKind::Str(symbol, _) = lit.node;
- let input = symbol.as_str();
- if !case_check(input);
- then {
- return Some((lit.span, symbol));
- }
+ if let PatKind::Lit(Expr {
+ kind: ExprKind::Lit(lit),
+ ..
+ }) = arm.pat.kind
+ && let LitKind::Str(symbol, _) = lit.node
+ && let input = symbol.as_str()
+ && !case_check(input)
+ {
+ return Some((lit.span, symbol));
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs
index a2903e52a..8a4c0ab90 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs
@@ -34,20 +34,19 @@ pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &Expr<'tcx>, arms: &[Arm<'
}
}
}
- if_chain! {
- if matching_wild;
- if let Some(macro_call) = root_macro_call(peel_blocks_with_stmt(arm.body).span);
- if is_panic(cx, macro_call.def_id);
- then {
- // `Err(_)` or `Err(_e)` arm with `panic!` found
- span_lint_and_note(cx,
- MATCH_WILD_ERR_ARM,
- arm.pat.span,
- &format!("`Err({ident_bind_name})` matches all errors"),
- None,
- "match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable",
- );
- }
+ if matching_wild
+ && let Some(macro_call) = root_macro_call(peel_blocks_with_stmt(arm.body).span)
+ && is_panic(cx, macro_call.def_id)
+ {
+ // `Err(_)` or `Err(_e)` arm with `panic!` found
+ span_lint_and_note(
+ cx,
+ MATCH_WILD_ERR_ARM,
+ arm.pat.span,
+ &format!("`Err({ident_bind_name})` matches all errors"),
+ None,
+ "match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs
index dea46d4d3..4c7568f39 100644
--- a/src/tools/clippy/clippy_lints/src/matches/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs
@@ -31,7 +31,7 @@ use rustc_hir::{Arm, Expr, ExprKind, Local, MatchSource, Pat};
use rustc_lexer::TokenKind;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{Span, SpanData, SyntaxContext};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs b/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs
index 8f0083f81..8199366d1 100644
--- a/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs
@@ -150,7 +150,7 @@ where
#[test]
fn test_overlapping() {
- use rustc_span::source_map::DUMMY_SP;
+ use rustc_span::DUMMY_SP;
let sp = |s, e| SpannedRange {
span: DUMMY_SP,
diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs
index 4a44d596a..f57b22374 100644
--- a/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs
@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::path_to_local;
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet;
use clippy_utils::visitors::{for_each_expr, is_local_used};
use rustc_ast::{BorrowKind, LitKind};
use rustc_errors::Applicability;
@@ -8,7 +8,8 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Arm, BinOpKind, Expr, ExprKind, Guard, MatchSource, Node, Pat, PatKind};
use rustc_lint::LateContext;
use rustc_span::symbol::Ident;
-use rustc_span::Span;
+use rustc_span::{Span, Symbol};
+use std::borrow::Cow;
use std::ops::ControlFlow;
use super::REDUNDANT_GUARDS;
@@ -41,7 +42,14 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'tcx>]) {
(PatKind::Ref(..), None) | (_, Some(_)) => continue,
_ => arm.pat.span,
};
- emit_redundant_guards(cx, outer_arm, if_expr.span, pat_span, &binding, arm.guard);
+ emit_redundant_guards(
+ cx,
+ outer_arm,
+ if_expr.span,
+ snippet(cx, pat_span, "<binding>"),
+ &binding,
+ arm.guard,
+ );
}
// `Some(x) if let Some(2) = x`
else if let Guard::IfLet(let_expr) = guard
@@ -52,7 +60,14 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'tcx>]) {
(PatKind::Ref(..), None) | (_, Some(_)) => continue,
_ => let_expr.pat.span,
};
- emit_redundant_guards(cx, outer_arm, let_expr.span, pat_span, &binding, None);
+ emit_redundant_guards(
+ cx,
+ outer_arm,
+ let_expr.span,
+ snippet(cx, pat_span, "<binding>"),
+ &binding,
+ None,
+ );
}
// `Some(x) if x == Some(2)`
// `Some(x) if Some(2) == x`
@@ -78,11 +93,76 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'tcx>]) {
(ExprKind::AddrOf(..), None) | (_, Some(_)) => continue,
_ => pat.span,
};
- emit_redundant_guards(cx, outer_arm, if_expr.span, pat_span, &binding, None);
+ emit_redundant_guards(
+ cx,
+ outer_arm,
+ if_expr.span,
+ snippet(cx, pat_span, "<binding>"),
+ &binding,
+ None,
+ );
+ } else if let Guard::If(if_expr) = guard
+ && let ExprKind::MethodCall(path, recv, args, ..) = if_expr.kind
+ && let Some(binding) = get_pat_binding(cx, recv, outer_arm)
+ {
+ check_method_calls(cx, outer_arm, path.ident.name, recv, args, if_expr, &binding);
}
}
}
+fn check_method_calls<'tcx>(
+ cx: &LateContext<'tcx>,
+ arm: &Arm<'tcx>,
+ method: Symbol,
+ recv: &Expr<'_>,
+ args: &[Expr<'_>],
+ if_expr: &Expr<'_>,
+ binding: &PatBindingInfo,
+) {
+ let ty = cx.typeck_results().expr_ty(recv).peel_refs();
+ let slice_like = ty.is_slice() || ty.is_array();
+
+ let sugg = if method == sym!(is_empty) {
+ // `s if s.is_empty()` becomes ""
+ // `arr if arr.is_empty()` becomes []
+
+ if ty.is_str() {
+ r#""""#.into()
+ } else if slice_like {
+ "[]".into()
+ } else {
+ return;
+ }
+ } else if slice_like
+ && let Some(needle) = args.first()
+ && let ExprKind::AddrOf(.., needle) = needle.kind
+ && let ExprKind::Array(needles) = needle.kind
+ && needles.iter().all(|needle| expr_can_be_pat(cx, needle))
+ {
+ // `arr if arr.starts_with(&[123])` becomes [123, ..]
+ // `arr if arr.ends_with(&[123])` becomes [.., 123]
+ // `arr if arr.starts_with(&[])` becomes [..] (why would anyone write this?)
+
+ let mut sugg = snippet(cx, needle.span, "<needle>").into_owned();
+
+ if needles.is_empty() {
+ sugg.insert_str(1, "..");
+ } else if method == sym!(starts_with) {
+ sugg.insert_str(sugg.len() - 1, ", ..");
+ } else if method == sym!(ends_with) {
+ sugg.insert_str(1, ".., ");
+ } else {
+ return;
+ }
+
+ sugg.into()
+ } else {
+ return;
+ };
+
+ emit_redundant_guards(cx, arm, if_expr.span, sugg, binding, None);
+}
+
struct PatBindingInfo {
span: Span,
byref_ident: Option<Ident>,
@@ -134,19 +214,16 @@ fn emit_redundant_guards<'tcx>(
cx: &LateContext<'tcx>,
outer_arm: &Arm<'tcx>,
guard_span: Span,
- pat_span: Span,
+ binding_replacement: Cow<'static, str>,
pat_binding: &PatBindingInfo,
inner_guard: Option<Guard<'_>>,
) {
- let mut app = Applicability::MaybeIncorrect;
-
span_lint_and_then(
cx,
REDUNDANT_GUARDS,
guard_span.source_callsite(),
"redundant guard",
|diag| {
- let binding_replacement = snippet_with_applicability(cx, pat_span, "<binding_repl>", &mut app);
let suggestion_span = match *pat_binding {
PatBindingInfo {
span,
@@ -170,14 +247,11 @@ fn emit_redundant_guards<'tcx>(
Guard::IfLet(l) => ("if let", l.span),
};
- format!(
- " {prefix} {}",
- snippet_with_applicability(cx, span, "<guard>", &mut app),
- )
+ format!(" {prefix} {}", snippet(cx, span, "<guard>"))
}),
),
],
- app,
+ Applicability::MaybeIncorrect,
);
},
);
diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
index 9a7c00823..2582f7edc 100644
--- a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
@@ -5,7 +5,6 @@ use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, needs_ordered_drop};
use clippy_utils::visitors::{any_temporaries_need_ordered_drop, for_each_expr};
use clippy_utils::{higher, is_expn_of, is_trait_method};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
@@ -35,15 +34,13 @@ pub(super) fn check_if_let<'tcx>(
// Extract the generic arguments out of a type
fn try_get_generic_ty(ty: Ty<'_>, index: usize) -> Option<Ty<'_>> {
- if_chain! {
- if let ty::Adt(_, subs) = ty.kind();
- if let Some(sub) = subs.get(index);
- if let GenericArgKind::Type(sub_ty) = sub.unpack();
- then {
- Some(sub_ty)
- } else {
- None
- }
+ if let ty::Adt(_, subs) = ty.kind()
+ && let Some(sub) = subs.get(index)
+ && let GenericArgKind::Type(sub_ty) = sub.unpack()
+ {
+ Some(sub_ty)
+ } else {
+ None
}
}
@@ -142,14 +139,12 @@ fn find_sugg_for_if_let<'tcx>(
let needs_drop = needs_ordered_drop(cx, check_ty) || any_temporaries_need_ordered_drop(cx, let_expr);
// check that `while_let_on_iterator` lint does not trigger
- if_chain! {
- if keyword == "while";
- if let ExprKind::MethodCall(method_path, ..) = let_expr.kind;
- if method_path.ident.name == sym::next;
- if is_trait_method(cx, let_expr, sym::Iterator);
- then {
- return;
- }
+ if keyword == "while"
+ && let ExprKind::MethodCall(method_path, ..) = let_expr.kind
+ && method_path.ident.name == sym::next
+ && is_trait_method(cx, let_expr, sym::Iterator)
+ {
+ return;
}
let result_expr = match &let_expr.kind {
diff --git a/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs b/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
index 4efe93d4b..316b2f63e 100644
--- a/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
@@ -6,25 +6,22 @@ use rustc_middle::ty;
use super::REST_PAT_IN_FULLY_BOUND_STRUCTS;
pub(crate) fn check(cx: &LateContext<'_>, pat: &Pat<'_>) {
- if_chain! {
- if !pat.span.from_expansion();
- if let PatKind::Struct(QPath::Resolved(_, path), fields, true) = pat.kind;
- if let Some(def_id) = path.res.opt_def_id();
- let ty = cx.tcx.type_of(def_id).instantiate_identity();
- if let ty::Adt(def, _) = ty.kind();
- if def.is_struct() || def.is_union();
- if fields.len() == def.non_enum_variant().fields.len();
- if !def.non_enum_variant().is_field_list_non_exhaustive();
-
- then {
- span_lint_and_help(
- cx,
- REST_PAT_IN_FULLY_BOUND_STRUCTS,
- pat.span,
- "unnecessary use of `..` pattern in struct binding. All fields were already bound",
- None,
- "consider removing `..` from this binding",
- );
- }
+ if !pat.span.from_expansion()
+ && let PatKind::Struct(QPath::Resolved(_, path), fields, true) = pat.kind
+ && let Some(def_id) = path.res.opt_def_id()
+ && let ty = cx.tcx.type_of(def_id).instantiate_identity()
+ && let ty::Adt(def, _) = ty.kind()
+ && (def.is_struct() || def.is_union())
+ && fields.len() == def.non_enum_variant().fields.len()
+ && !def.non_enum_variant().is_field_list_non_exhaustive()
+ {
+ span_lint_and_help(
+ cx,
+ REST_PAT_IN_FULLY_BOUND_STRUCTS,
+ pat.span,
+ "unnecessary use of `..` pattern in struct binding. All fields were already bound",
+ None,
+ "consider removing `..` from this binding",
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
index 48efd0230..86c414daf 100644
--- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
@@ -89,50 +89,52 @@ fn report_single_pattern(
});
let (pat, pat_ref_count) = peel_hir_pat_refs(arms[0].pat);
- let (msg, sugg) = if_chain! {
- if let PatKind::Path(_) | PatKind::Lit(_) = pat.kind;
- let (ty, ty_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(ex));
- if let Some(spe_trait_id) = cx.tcx.lang_items().structural_peq_trait();
- if let Some(pe_trait_id) = cx.tcx.lang_items().eq_trait();
- if ty.is_integral() || ty.is_char() || ty.is_str()
- || (implements_trait(cx, ty, spe_trait_id, &[])
- && implements_trait(cx, ty, pe_trait_id, &[ty.into()]));
- then {
- // scrutinee derives PartialEq and the pattern is a constant.
- let pat_ref_count = match pat.kind {
- // string literals are already a reference.
- PatKind::Lit(Expr { kind: ExprKind::Lit(lit), .. }) if lit.node.is_str() => pat_ref_count + 1,
- _ => pat_ref_count,
- };
- // References are only implicitly added to the pattern, so no overflow here.
- // e.g. will work: match &Some(_) { Some(_) => () }
- // will not: match Some(_) { &Some(_) => () }
- let ref_count_diff = ty_ref_count - pat_ref_count;
-
- // Try to remove address of expressions first.
- let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff);
- let ref_count_diff = ref_count_diff - removed;
-
- let msg = "you seem to be trying to use `match` for an equality check. Consider using `if`";
- let sugg = format!(
- "if {} == {}{} {}{els_str}",
- snippet(cx, ex.span, ".."),
- // PartialEq for different reference counts may not exist.
- "&".repeat(ref_count_diff),
- snippet(cx, arms[0].pat.span, ".."),
- expr_block(cx, arms[0].body, ctxt, "..", Some(expr.span), &mut app),
- );
- (msg, sugg)
- } else {
- let msg = "you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`";
- let sugg = format!(
- "if let {} = {} {}{els_str}",
- snippet(cx, arms[0].pat.span, ".."),
- snippet(cx, ex.span, ".."),
- expr_block(cx, arms[0].body, ctxt, "..", Some(expr.span), &mut app),
- );
- (msg, sugg)
- }
+ let (msg, sugg) = if let PatKind::Path(_) | PatKind::Lit(_) = pat.kind
+ && let (ty, ty_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(ex))
+ && let Some(spe_trait_id) = cx.tcx.lang_items().structural_peq_trait()
+ && let Some(pe_trait_id) = cx.tcx.lang_items().eq_trait()
+ && (ty.is_integral()
+ || ty.is_char()
+ || ty.is_str()
+ || (implements_trait(cx, ty, spe_trait_id, &[]) && implements_trait(cx, ty, pe_trait_id, &[ty.into()])))
+ {
+ // scrutinee derives PartialEq and the pattern is a constant.
+ let pat_ref_count = match pat.kind {
+ // string literals are already a reference.
+ PatKind::Lit(Expr {
+ kind: ExprKind::Lit(lit),
+ ..
+ }) if lit.node.is_str() => pat_ref_count + 1,
+ _ => pat_ref_count,
+ };
+ // References are only implicitly added to the pattern, so no overflow here.
+ // e.g. will work: match &Some(_) { Some(_) => () }
+ // will not: match Some(_) { &Some(_) => () }
+ let ref_count_diff = ty_ref_count - pat_ref_count;
+
+ // Try to remove address of expressions first.
+ let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff);
+ let ref_count_diff = ref_count_diff - removed;
+
+ let msg = "you seem to be trying to use `match` for an equality check. Consider using `if`";
+ let sugg = format!(
+ "if {} == {}{} {}{els_str}",
+ snippet(cx, ex.span, ".."),
+ // PartialEq for different reference counts may not exist.
+ "&".repeat(ref_count_diff),
+ snippet(cx, arms[0].pat.span, ".."),
+ expr_block(cx, arms[0].body, ctxt, "..", Some(expr.span), &mut app),
+ );
+ (msg, sugg)
+ } else {
+ let msg = "you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`";
+ let sugg = format!(
+ "if let {} = {} {}{els_str}",
+ snippet(cx, arms[0].pat.span, ".."),
+ snippet(cx, ex.span, ".."),
+ expr_block(cx, arms[0].body, ctxt, "..", Some(expr.span), &mut app),
+ );
+ (msg, sugg)
};
span_lint_and_sugg(cx, lint, expr.span, msg, "try", sugg, app);
diff --git a/src/tools/clippy/clippy_lints/src/matches/try_err.rs b/src/tools/clippy/clippy_lints/src/matches/try_err.rs
index 0fd6f533d..dd489fc25 100644
--- a/src/tools/clippy/clippy_lints/src/matches/try_err.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/try_err.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{get_parent_expr, is_res_lang_ctor, path_res};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::ResultErr;
use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath};
@@ -22,59 +21,57 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, scrutine
// #[allow(unreachable_code)]
// val,
// };
- if_chain! {
- if let ExprKind::Call(match_fun, [try_arg, ..]) = scrutinee.kind;
- if let ExprKind::Path(ref match_fun_path) = match_fun.kind;
- if matches!(match_fun_path, QPath::LangItem(LangItem::TryTraitBranch, ..));
- if let ExprKind::Call(err_fun, [err_arg, ..]) = try_arg.kind;
- if is_res_lang_ctor(cx, path_res(cx, err_fun), ResultErr);
- if let Some(return_ty) = find_return_type(cx, &expr.kind);
- then {
- let prefix;
- let suffix;
- let err_ty;
+ if let ExprKind::Call(match_fun, [try_arg, ..]) = scrutinee.kind
+ && let ExprKind::Path(ref match_fun_path) = match_fun.kind
+ && matches!(match_fun_path, QPath::LangItem(LangItem::TryTraitBranch, ..))
+ && let ExprKind::Call(err_fun, [err_arg, ..]) = try_arg.kind
+ && is_res_lang_ctor(cx, path_res(cx, err_fun), ResultErr)
+ && let Some(return_ty) = find_return_type(cx, &expr.kind)
+ {
+ let prefix;
+ let suffix;
+ let err_ty;
- if let Some(ty) = result_error_type(cx, return_ty) {
- prefix = "Err(";
- suffix = ")";
- err_ty = ty;
- } else if let Some(ty) = poll_result_error_type(cx, return_ty) {
- prefix = "Poll::Ready(Err(";
- suffix = "))";
- err_ty = ty;
- } else if let Some(ty) = poll_option_result_error_type(cx, return_ty) {
- prefix = "Poll::Ready(Some(Err(";
- suffix = ")))";
- err_ty = ty;
- } else {
- return;
- };
+ if let Some(ty) = result_error_type(cx, return_ty) {
+ prefix = "Err(";
+ suffix = ")";
+ err_ty = ty;
+ } else if let Some(ty) = poll_result_error_type(cx, return_ty) {
+ prefix = "Poll::Ready(Err(";
+ suffix = "))";
+ err_ty = ty;
+ } else if let Some(ty) = poll_option_result_error_type(cx, return_ty) {
+ prefix = "Poll::Ready(Some(Err(";
+ suffix = ")))";
+ err_ty = ty;
+ } else {
+ return;
+ };
- let expr_err_ty = cx.typeck_results().expr_ty(err_arg);
- let span = hygiene::walk_chain(err_arg.span, try_arg.span.ctxt());
- let mut applicability = Applicability::MachineApplicable;
- let origin_snippet = snippet_with_applicability(cx, span, "_", &mut applicability);
- let ret_prefix = if get_parent_expr(cx, expr).map_or(false, |e| matches!(e.kind, ExprKind::Ret(_))) {
- "" // already returns
- } else {
- "return "
- };
- let suggestion = if err_ty == expr_err_ty {
- format!("{ret_prefix}{prefix}{origin_snippet}{suffix}")
- } else {
- format!("{ret_prefix}{prefix}{origin_snippet}.into(){suffix}")
- };
+ let expr_err_ty = cx.typeck_results().expr_ty(err_arg);
+ let span = hygiene::walk_chain(err_arg.span, try_arg.span.ctxt());
+ let mut applicability = Applicability::MachineApplicable;
+ let origin_snippet = snippet_with_applicability(cx, span, "_", &mut applicability);
+ let ret_prefix = if get_parent_expr(cx, expr).map_or(false, |e| matches!(e.kind, ExprKind::Ret(_))) {
+ "" // already returns
+ } else {
+ "return "
+ };
+ let suggestion = if err_ty == expr_err_ty {
+ format!("{ret_prefix}{prefix}{origin_snippet}{suffix}")
+ } else {
+ format!("{ret_prefix}{prefix}{origin_snippet}.into(){suffix}")
+ };
- span_lint_and_sugg(
- cx,
- TRY_ERR,
- expr.span,
- "returning an `Err(_)` with the `?` operator",
- "try",
- suggestion,
- applicability,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ TRY_ERR,
+ expr.span,
+ "returning an `Err(_)` with the `?` operator",
+ "try",
+ suggestion,
+ applicability,
+ );
}
}
@@ -92,51 +89,42 @@ fn find_return_type<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx ExprKind<'_>) -> O
/// Extracts the error type from Result<T, E>.
fn result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
- if_chain! {
- if let ty::Adt(_, subst) = ty.kind();
- if is_type_diagnostic_item(cx, ty, sym::Result);
- then {
- Some(subst.type_at(1))
- } else {
- None
- }
+ if let ty::Adt(_, subst) = ty.kind()
+ && is_type_diagnostic_item(cx, ty, sym::Result)
+ {
+ Some(subst.type_at(1))
+ } else {
+ None
}
}
/// Extracts the error type from Poll<Result<T, E>>.
fn poll_result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
- if_chain! {
- if let ty::Adt(def, subst) = ty.kind();
- if cx.tcx.lang_items().get(LangItem::Poll) == Some(def.did());
- let ready_ty = subst.type_at(0);
-
- if let ty::Adt(ready_def, ready_subst) = ready_ty.kind();
- if cx.tcx.is_diagnostic_item(sym::Result, ready_def.did());
- then {
- Some(ready_subst.type_at(1))
- } else {
- None
- }
+ if let ty::Adt(def, subst) = ty.kind()
+ && cx.tcx.lang_items().get(LangItem::Poll) == Some(def.did())
+ && let ready_ty = subst.type_at(0)
+ && let ty::Adt(ready_def, ready_subst) = ready_ty.kind()
+ && cx.tcx.is_diagnostic_item(sym::Result, ready_def.did())
+ {
+ Some(ready_subst.type_at(1))
+ } else {
+ None
}
}
/// Extracts the error type from Poll<Option<Result<T, E>>>.
fn poll_option_result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
- if_chain! {
- if let ty::Adt(def, subst) = ty.kind();
- if cx.tcx.lang_items().get(LangItem::Poll) == Some(def.did());
- let ready_ty = subst.type_at(0);
-
- if let ty::Adt(ready_def, ready_subst) = ready_ty.kind();
- if cx.tcx.is_diagnostic_item(sym::Option, ready_def.did());
- let some_ty = ready_subst.type_at(0);
-
- if let ty::Adt(some_def, some_subst) = some_ty.kind();
- if cx.tcx.is_diagnostic_item(sym::Result, some_def.did());
- then {
- Some(some_subst.type_at(1))
- } else {
- None
- }
+ if let ty::Adt(def, subst) = ty.kind()
+ && cx.tcx.lang_items().get(LangItem::Poll) == Some(def.did())
+ && let ready_ty = subst.type_at(0)
+ && let ty::Adt(ready_def, ready_subst) = ready_ty.kind()
+ && cx.tcx.is_diagnostic_item(sym::Option, ready_def.did())
+ && let some_ty = ready_subst.type_at(0)
+ && let ty::Adt(some_def, some_subst) = some_ty.kind()
+ && cx.tcx.is_diagnostic_item(sym::Result, some_def.did())
+ {
+ Some(some_subst.type_at(1))
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/mem_replace.rs b/src/tools/clippy/clippy_lints/src/mem_replace.rs
index 760c8f59c..c22f76484 100644
--- a/src/tools/clippy/clippy_lints/src/mem_replace.rs
+++ b/src/tools/clippy/clippy_lints/src/mem_replace.rs
@@ -4,15 +4,14 @@ use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::is_non_aggregate_primitive_type;
use clippy_utils::{is_default_equivalent, is_res_lang_ctor, path_res, peel_ref_operators};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::OptionNone;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::Span;
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
@@ -125,17 +124,37 @@ fn check_replace_option_with_none(cx: &LateContext<'_>, dest: &Expr<'_>, expr_sp
}
fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
- if_chain! {
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(src.hir_id)
// check if replacement is mem::MaybeUninit::uninit().assume_init()
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(src.hir_id);
- if cx.tcx.is_diagnostic_item(sym::assume_init, method_def_id);
- then {
+ && cx.tcx.is_diagnostic_item(sym::assume_init, method_def_id)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ MEM_REPLACE_WITH_UNINIT,
+ expr_span,
+ "replacing with `mem::MaybeUninit::uninit().assume_init()`",
+ "consider using",
+ format!(
+ "std::ptr::read({})",
+ snippet_with_applicability(cx, dest.span, "", &mut applicability)
+ ),
+ applicability,
+ );
+ return;
+ }
+
+ if let ExprKind::Call(repl_func, []) = src.kind
+ && let ExprKind::Path(ref repl_func_qpath) = repl_func.kind
+ && let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id()
+ {
+ if cx.tcx.is_diagnostic_item(sym::mem_uninitialized, repl_def_id) {
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
MEM_REPLACE_WITH_UNINIT,
expr_span,
- "replacing with `mem::MaybeUninit::uninit().assume_init()`",
+ "replacing with `mem::uninitialized()`",
"consider using",
format!(
"std::ptr::read({})",
@@ -143,40 +162,17 @@ fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'
),
applicability,
);
- return;
- }
- }
-
- if_chain! {
- if let ExprKind::Call(repl_func, []) = src.kind;
- if let ExprKind::Path(ref repl_func_qpath) = repl_func.kind;
- if let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id();
- then {
- if cx.tcx.is_diagnostic_item(sym::mem_uninitialized, repl_def_id) {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- MEM_REPLACE_WITH_UNINIT,
- expr_span,
- "replacing with `mem::uninitialized()`",
- "consider using",
- format!(
- "std::ptr::read({})",
- snippet_with_applicability(cx, dest.span, "", &mut applicability)
- ),
- applicability,
- );
- } else if cx.tcx.is_diagnostic_item(sym::mem_zeroed, repl_def_id) &&
- !cx.typeck_results().expr_ty(src).is_primitive() {
- span_lint_and_help(
- cx,
- MEM_REPLACE_WITH_UNINIT,
- expr_span,
- "replacing with `mem::zeroed()`",
- None,
- "consider using a default value or the `take_mut` crate instead",
- );
- }
+ } else if cx.tcx.is_diagnostic_item(sym::mem_zeroed, repl_def_id)
+ && !cx.typeck_results().expr_ty(src).is_primitive()
+ {
+ span_lint_and_help(
+ cx,
+ MEM_REPLACE_WITH_UNINIT,
+ expr_span,
+ "replacing with `mem::zeroed()`",
+ None,
+ "consider using a default value or the `take_mut` crate instead",
+ );
}
}
}
@@ -222,21 +218,19 @@ impl MemReplace {
impl<'tcx> LateLintPass<'tcx> for MemReplace {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
+ if let ExprKind::Call(func, [dest, src]) = expr.kind
// Check that `expr` is a call to `mem::replace()`
- if let ExprKind::Call(func, [dest, src]) = expr.kind;
- if let ExprKind::Path(ref func_qpath) = func.kind;
- if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::mem_replace, def_id);
- then {
- // Check that second argument is `Option::None`
- if is_res_lang_ctor(cx, path_res(cx, src), OptionNone) {
- check_replace_option_with_none(cx, dest, expr.span);
- } else if self.msrv.meets(msrvs::MEM_TAKE) {
- check_replace_with_default(cx, src, dest, expr.span);
- }
- check_replace_with_uninit(cx, src, dest, expr.span);
+ && let ExprKind::Path(ref func_qpath) = func.kind
+ && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::mem_replace, def_id)
+ {
+ // Check that second argument is `Option::None`
+ if is_res_lang_ctor(cx, path_res(cx, src), OptionNone) {
+ check_replace_option_with_none(cx, dest, expr.span);
+ } else if self.msrv.meets(msrvs::MEM_TAKE) {
+ check_replace_with_default(cx, src, dest, expr.span);
}
+ check_replace_with_uninit(cx, src, dest, expr.span);
}
}
extract_msrv_attr!(LateContext);
diff --git a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
index 3a8cc4174..08bfa2e00 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and
use clippy_utils::peel_blocks;
use clippy_utils::source::{snippet, snippet_with_context};
use clippy_utils::visitors::find_all_ret_expressions;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
@@ -70,57 +69,50 @@ pub(crate) trait BindInsteadOfMap {
closure_expr: &hir::Expr<'_>,
closure_args_span: Span,
) -> bool {
- if_chain! {
- if let hir::ExprKind::Call(some_expr, [inner_expr]) = closure_expr.kind;
- if let hir::ExprKind::Path(QPath::Resolved(_, path)) = some_expr.kind;
- if Self::is_variant(cx, path.res);
- if !contains_return(inner_expr);
- if let Some(msg) = Self::lint_msg(cx);
- then {
- let mut app = Applicability::MachineApplicable;
- let some_inner_snip = snippet_with_context(cx, inner_expr.span, closure_expr.span.ctxt(), "_", &mut app).0;
-
- let closure_args_snip = snippet(cx, closure_args_span, "..");
- let option_snip = snippet(cx, recv.span, "..");
- let note = format!("{option_snip}.{}({closure_args_snip} {some_inner_snip})", Self::GOOD_METHOD_NAME);
- span_lint_and_sugg(
- cx,
- BIND_INSTEAD_OF_MAP,
- expr.span,
- &msg,
- "try",
- note,
- app,
- );
- true
- } else {
- false
- }
+ if let hir::ExprKind::Call(some_expr, [inner_expr]) = closure_expr.kind
+ && let hir::ExprKind::Path(QPath::Resolved(_, path)) = some_expr.kind
+ && Self::is_variant(cx, path.res)
+ && !contains_return(inner_expr)
+ && let Some(msg) = Self::lint_msg(cx)
+ {
+ let mut app = Applicability::MachineApplicable;
+ let some_inner_snip = snippet_with_context(cx, inner_expr.span, closure_expr.span.ctxt(), "_", &mut app).0;
+
+ let closure_args_snip = snippet(cx, closure_args_span, "..");
+ let option_snip = snippet(cx, recv.span, "..");
+ let note = format!(
+ "{option_snip}.{}({closure_args_snip} {some_inner_snip})",
+ Self::GOOD_METHOD_NAME
+ );
+ span_lint_and_sugg(cx, BIND_INSTEAD_OF_MAP, expr.span, &msg, "try", note, app);
+ true
+ } else {
+ false
}
}
fn lint_closure(cx: &LateContext<'_>, expr: &hir::Expr<'_>, closure_expr: &hir::Expr<'_>) -> bool {
let mut suggs = Vec::new();
let can_sugg: bool = find_all_ret_expressions(cx, closure_expr, |ret_expr| {
- if_chain! {
- if !ret_expr.span.from_expansion();
- if let hir::ExprKind::Call(func_path, [arg]) = ret_expr.kind;
- if let hir::ExprKind::Path(QPath::Resolved(_, path)) = func_path.kind;
- if Self::is_variant(cx, path.res);
- if !contains_return(arg);
- then {
- suggs.push((ret_expr.span, arg.span.source_callsite()));
- true
- } else {
- false
- }
+ if !ret_expr.span.from_expansion()
+ && let hir::ExprKind::Call(func_path, [arg]) = ret_expr.kind
+ && let hir::ExprKind::Path(QPath::Resolved(_, path)) = func_path.kind
+ && Self::is_variant(cx, path.res)
+ && !contains_return(arg)
+ {
+ suggs.push((ret_expr.span, arg.span.source_callsite()));
+ true
+ } else {
+ false
}
});
- let (span, msg) = if_chain! {
- if can_sugg;
- if let hir::ExprKind::MethodCall(segment, ..) = expr.kind;
- if let Some(msg) = Self::lint_msg(cx);
- then { (segment.ident.span, msg) } else { return false; }
+ let (span, msg) = if can_sugg
+ && let hir::ExprKind::MethodCall(segment, ..) = expr.kind
+ && let Some(msg) = Self::lint_msg(cx)
+ {
+ (segment.ident.span, msg)
+ } else {
+ return false;
};
span_lint_and_then(cx, BIND_INSTEAD_OF_MAP, expr.span, &msg, |diag| {
multispan_sugg_with_applicability(
@@ -139,11 +131,12 @@ pub(crate) trait BindInsteadOfMap {
/// Lint use of `_.and_then(|x| Some(y))` for `Option`s
fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, arg: &hir::Expr<'_>) -> bool {
- if_chain! {
- if let Some(adt) = cx.typeck_results().expr_ty(recv).ty_adt_def();
- if let Some(vid) = cx.tcx.lang_items().get(Self::VARIANT_LANG_ITEM);
- if adt.did() == cx.tcx.parent(vid);
- then {} else { return false; }
+ if let Some(adt) = cx.typeck_results().expr_ty(recv).ty_adt_def()
+ && let Some(vid) = cx.tcx.lang_items().get(Self::VARIANT_LANG_ITEM)
+ && adt.did() == cx.tcx.parent(vid)
+ {
+ } else {
+ return false;
}
match arg.kind {
diff --git a/src/tools/clippy/clippy_lints/src/methods/bytecount.rs b/src/tools/clippy/clippy_lints/src/methods/bytecount.rs
index 35370355f..4a2124c74 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bytecount.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bytecount.rs
@@ -3,7 +3,6 @@ use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{path_to_local_id, peel_blocks, peel_ref_operators, strip_pat_refs};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Closure, Expr, ExprKind, PatKind};
use rustc_lint::LateContext;
@@ -18,53 +17,50 @@ pub(super) fn check<'tcx>(
filter_recv: &'tcx Expr<'_>,
filter_arg: &'tcx Expr<'_>,
) {
- if_chain! {
- if let ExprKind::Closure(&Closure { body, .. }) = filter_arg.kind;
- let body = cx.tcx.hir().body(body);
- if let [param] = body.params;
- if let PatKind::Binding(_, arg_id, _, _) = strip_pat_refs(param.pat).kind;
- if let ExprKind::Binary(ref op, l, r) = body.value.kind;
- if op.node == BinOpKind::Eq;
- if is_type_diagnostic_item(cx,
- cx.typeck_results().expr_ty(filter_recv).peel_refs(),
- sym::SliceIter);
- let operand_is_arg = |expr| {
+ if let ExprKind::Closure(&Closure { body, .. }) = filter_arg.kind
+ && let body = cx.tcx.hir().body(body)
+ && let [param] = body.params
+ && let PatKind::Binding(_, arg_id, _, _) = strip_pat_refs(param.pat).kind
+ && let ExprKind::Binary(ref op, l, r) = body.value.kind
+ && op.node == BinOpKind::Eq
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(filter_recv).peel_refs(), sym::SliceIter)
+ && let operand_is_arg = (|expr| {
let expr = peel_ref_operators(cx, peel_blocks(expr));
path_to_local_id(expr, arg_id)
- };
- let needle = if operand_is_arg(l) {
+ })
+ && let needle = if operand_is_arg(l) {
r
} else if operand_is_arg(r) {
l
} else {
return;
- };
- if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(needle).peel_refs().kind();
- if !is_local_used(cx, needle, arg_id);
- then {
- let haystack = if let ExprKind::MethodCall(path, receiver, [], _) =
- filter_recv.kind {
- let p = path.ident.name;
- if p == sym::iter || p == sym::iter_mut {
- receiver
- } else {
- filter_recv
- }
+ }
+ && ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(needle).peel_refs().kind()
+ && !is_local_used(cx, needle, arg_id)
+ {
+ let haystack = if let ExprKind::MethodCall(path, receiver, [], _) = filter_recv.kind {
+ let p = path.ident.name;
+ if p == sym::iter || p == sym::iter_mut {
+ receiver
} else {
filter_recv
- };
- let mut applicability = Applicability::MaybeIncorrect;
- span_lint_and_sugg(
- cx,
- NAIVE_BYTECOUNT,
- expr.span,
- "you appear to be counting bytes the naive way",
- "consider using the bytecount crate",
- format!("bytecount::count({}, {})",
- snippet_with_applicability(cx, haystack.span, "..", &mut applicability),
- snippet_with_applicability(cx, needle.span, "..", &mut applicability)),
- applicability,
- );
- }
+ }
+ } else {
+ filter_recv
+ };
+ let mut applicability = Applicability::MaybeIncorrect;
+ span_lint_and_sugg(
+ cx,
+ NAIVE_BYTECOUNT,
+ expr.span,
+ "you appear to be counting bytes the naive way",
+ "consider using the bytecount crate",
+ format!(
+ "bytecount::count({}, {})",
+ snippet_with_applicability(cx, haystack.span, "..", &mut applicability),
+ snippet_with_applicability(cx, needle.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
};
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs
index 649fc46e4..34159f2d1 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_lang_item;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -14,23 +13,24 @@ pub(super) fn check<'tcx>(
count_recv: &'tcx hir::Expr<'_>,
bytes_recv: &'tcx hir::Expr<'_>,
) {
- if_chain! {
- if let Some(bytes_id) = cx.typeck_results().type_dependent_def_id(count_recv.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(bytes_id);
- if cx.tcx.type_of(impl_id).instantiate_identity().is_str();
- let ty = cx.typeck_results().expr_ty(bytes_recv).peel_refs();
- if ty.is_str() || is_type_lang_item(cx, ty, hir::LangItem::String);
- then {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- BYTES_COUNT_TO_LEN,
- expr.span,
- "using long and hard to read `.bytes().count()`",
- "consider calling `.len()` instead",
- format!("{}.len()", snippet_with_applicability(cx, bytes_recv.span, "..", &mut applicability)),
- applicability
- );
- }
+ if let Some(bytes_id) = cx.typeck_results().type_dependent_def_id(count_recv.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(bytes_id)
+ && cx.tcx.type_of(impl_id).instantiate_identity().is_str()
+ && let ty = cx.typeck_results().expr_ty(bytes_recv).peel_refs()
+ && (ty.is_str() || is_type_lang_item(cx, ty, hir::LangItem::String))
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BYTES_COUNT_TO_LEN,
+ expr.span,
+ "using long and hard to read `.bytes().count()`",
+ "consider calling `.len()` instead",
+ format!(
+ "{}.len()",
+ snippet_with_applicability(cx, bytes_recv.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
};
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs
index d5897822e..a37087d0a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
use clippy_utils::ty::is_type_lang_item;
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem};
@@ -27,51 +26,54 @@ pub(super) fn check<'tcx>(
}
}
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if cx.tcx.type_of(impl_id).instantiate_identity().is_str();
- if let ExprKind::Lit(Spanned { node: LitKind::Str(ext_literal, ..), ..}) = arg.kind;
- if (2..=6).contains(&ext_literal.as_str().len());
- let ext_str = ext_literal.as_str();
- if ext_str.starts_with('.');
- if ext_str.chars().skip(1).all(|c| c.is_uppercase() || c.is_ascii_digit())
- || ext_str.chars().skip(1).all(|c| c.is_lowercase() || c.is_ascii_digit());
- let recv_ty = cx.typeck_results().expr_ty(recv).peel_refs();
- if recv_ty.is_str() || is_type_lang_item(cx, recv_ty, LangItem::String);
- then {
- span_lint_and_then(
- cx,
- CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
- recv.span.to(call_span),
- "case-sensitive file extension comparison",
- |diag| {
- diag.help("consider using a case-insensitive comparison instead");
- if let Some(mut recv_source) = snippet_opt(cx, recv.span) {
-
- if !cx.typeck_results().expr_ty(recv).is_ref() {
- recv_source = format!("&{recv_source}");
- }
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && cx.tcx.type_of(impl_id).instantiate_identity().is_str()
+ && let ExprKind::Lit(Spanned {
+ node: LitKind::Str(ext_literal, ..),
+ ..
+ }) = arg.kind
+ && (2..=6).contains(&ext_literal.as_str().len())
+ && let ext_str = ext_literal.as_str()
+ && ext_str.starts_with('.')
+ && (ext_str.chars().skip(1).all(|c| c.is_uppercase() || c.is_ascii_digit())
+ || ext_str.chars().skip(1).all(|c| c.is_lowercase() || c.is_ascii_digit()))
+ && let recv_ty = cx.typeck_results().expr_ty(recv).peel_refs()
+ && (recv_ty.is_str() || is_type_lang_item(cx, recv_ty, LangItem::String))
+ {
+ span_lint_and_then(
+ cx,
+ CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
+ recv.span.to(call_span),
+ "case-sensitive file extension comparison",
+ |diag| {
+ diag.help("consider using a case-insensitive comparison instead");
+ if let Some(mut recv_source) = snippet_opt(cx, recv.span) {
+ if !cx.typeck_results().expr_ty(recv).is_ref() {
+ recv_source = format!("&{recv_source}");
+ }
- let suggestion_source = reindent_multiline(
- format!(
- "std::path::Path::new({})
- .extension()
- .map_or(false, |ext| ext.eq_ignore_ascii_case(\"{}\"))",
- recv_source, ext_str.strip_prefix('.').unwrap()).into(),
- true,
- Some(indent_of(cx, call_span).unwrap_or(0) + 4)
- );
+ let suggestion_source = reindent_multiline(
+ format!(
+ "std::path::Path::new({})
+ .extension()
+ .map_or(false, |ext| ext.eq_ignore_ascii_case(\"{}\"))",
+ recv_source,
+ ext_str.strip_prefix('.').unwrap()
+ )
+ .into(),
+ true,
+ Some(indent_of(cx, call_span).unwrap_or(0) + 4),
+ );
- diag.span_suggestion(
- recv.span.to(call_span),
- "use std::path::Path",
- suggestion_source,
- Applicability::MaybeIncorrect,
- );
- }
+ diag.span_suggestion(
+ recv.span.to(call_span),
+ "use std::path::Path",
+ suggestion_source,
+ Applicability::MaybeIncorrect,
+ );
}
- );
- }
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
index 0e41f3c21..c99cec067 100644
--- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{method_chain_args, path_def_id};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, Lint};
@@ -15,34 +14,34 @@ pub(super) fn check(
lint: &'static Lint,
suggest: &str,
) -> bool {
- if_chain! {
- if let Some(args) = method_chain_args(info.chain, chain_methods);
- if let hir::ExprKind::Call(fun, [arg_char]) = info.other.kind;
- if let Some(id) = path_def_id(cx, fun).map(|ctor_id| cx.tcx.parent(ctor_id));
- if Some(id) == cx.tcx.lang_items().option_some_variant();
- then {
- let mut applicability = Applicability::MachineApplicable;
- let self_ty = cx.typeck_results().expr_ty_adjusted(args[0].0).peel_refs();
+ if let Some(args) = method_chain_args(info.chain, chain_methods)
+ && let hir::ExprKind::Call(fun, [arg_char]) = info.other.kind
+ && let Some(id) = path_def_id(cx, fun).map(|ctor_id| cx.tcx.parent(ctor_id))
+ && Some(id) == cx.tcx.lang_items().option_some_variant()
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let self_ty = cx.typeck_results().expr_ty_adjusted(args[0].0).peel_refs();
- if *self_ty.kind() != ty::Str {
- return false;
- }
+ if *self_ty.kind() != ty::Str {
+ return false;
+ }
- span_lint_and_sugg(
- cx,
- lint,
- info.expr.span,
- &format!("you should use the `{suggest}` method"),
- "like this",
- format!("{}{}.{suggest}({})",
- if info.eq { "" } else { "!" },
- snippet_with_applicability(cx, args[0].0.span, "..", &mut applicability),
- snippet_with_applicability(cx, arg_char.span, "..", &mut applicability)),
- applicability,
- );
+ span_lint_and_sugg(
+ cx,
+ lint,
+ info.expr.span,
+ &format!("you should use the `{suggest}` method"),
+ "like this",
+ format!(
+ "{}{}.{suggest}({})",
+ if info.eq { "" } else { "!" },
+ snippet_with_applicability(cx, args[0].0.span, "..", &mut applicability),
+ snippet_with_applicability(cx, arg_char.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
- return true;
- }
+ return true;
}
false
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
index c9d50a5b0..d07e45434 100644
--- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::method_chain_args;
use clippy_utils::source::snippet_with_applicability;
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -15,28 +14,28 @@ pub(super) fn check(
lint: &'static Lint,
suggest: &str,
) -> bool {
- if_chain! {
- if let Some(args) = method_chain_args(info.chain, chain_methods);
- if let hir::ExprKind::Lit(lit) = info.other.kind;
- if let ast::LitKind::Char(c) = lit.node;
- then {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- lint,
- info.expr.span,
- &format!("you should use the `{suggest}` method"),
- "like this",
- format!("{}{}.{suggest}('{}')",
- if info.eq { "" } else { "!" },
- snippet_with_applicability(cx, args[0].0.span, "..", &mut applicability),
- c.escape_default()),
- applicability,
- );
+ if let Some(args) = method_chain_args(info.chain, chain_methods)
+ && let hir::ExprKind::Lit(lit) = info.other.kind
+ && let ast::LitKind::Char(c) = lit.node
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ lint,
+ info.expr.span,
+ &format!("you should use the `{suggest}` method"),
+ "like this",
+ format!(
+ "{}{}.{suggest}('{}')",
+ if info.eq { "" } else { "!" },
+ snippet_with_applicability(cx, args[0].0.span, "..", &mut applicability),
+ c.escape_default()
+ ),
+ applicability,
+ );
- true
- } else {
- false
- }
+ true
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
index eb4f003d3..532bbbeaf 100644
--- a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
@@ -61,7 +61,7 @@ pub(super) fn check(
// ? is a Call, makes sure not to rec *x?, but rather (*x)?
ExprKind::Call(hir_callee, _) => matches!(
hir_callee.kind,
- ExprKind::Path(QPath::LangItem(rustc_hir::LangItem::TryTraitBranch, _, _))
+ ExprKind::Path(QPath::LangItem(rustc_hir::LangItem::TryTraitBranch, ..))
),
ExprKind::MethodCall(_, self_arg, ..) if expr.hir_id == self_arg.hir_id => true,
ExprKind::Match(_, _, MatchSource::TryDesugar(_) | MatchSource::AwaitDesugar)
diff --git a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
index a8d4dd5e4..dc978c8a5 100644
--- a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
@@ -16,30 +16,27 @@ pub(super) fn check(
err_span: Span,
msrv: &Msrv,
) {
- if_chain! {
- if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result)
// Test the version to make sure the lint can be showed (expect_err has been
// introduced in rust 1.17.0 : https://github.com/rust-lang/rust/pull/38982)
- if msrv.meets(msrvs::EXPECT_ERR);
+ && msrv.meets(msrvs::EXPECT_ERR)
// Grabs the `Result<T, E>` type
- let result_type = cx.typeck_results().expr_ty(recv);
+ && let result_type = cx.typeck_results().expr_ty(recv)
// Tests if the T type in a `Result<T, E>` is not None
- if let Some(data_type) = get_data_type(cx, result_type);
+ && let Some(data_type) = get_data_type(cx, result_type)
// Tests if the T type in a `Result<T, E>` implements debug
- if has_debug_impl(cx, data_type);
-
- then {
- span_lint_and_sugg(
- cx,
- ERR_EXPECT,
- err_span.to(expect_span),
- "called `.err().expect()` on a `Result` value",
- "try",
- "expect_err".to_string(),
- Applicability::MachineApplicable
+ && has_debug_impl(cx, data_type)
+ {
+ span_lint_and_sugg(
+ cx,
+ ERR_EXPECT,
+ err_span.to(expect_span),
+ "called `.err().expect()` on a `Result` value",
+ "try",
+ "expect_err".to_string(),
+ Applicability::MachineApplicable,
);
- }
};
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
index a49970b53..f0fc92579 100644
--- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
@@ -6,8 +6,8 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty;
-use rustc_span::Span;
use rustc_span::symbol::sym;
+use rustc_span::Span;
use std::borrow::Cow;
use super::EXPECT_FUN_CALL;
diff --git a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
index 495b26652..460ec7b36 100644
--- a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem};
use rustc_lint::LateContext;
@@ -11,35 +10,33 @@ use super::EXTEND_WITH_DRAIN;
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) {
let ty = cx.typeck_results().expr_ty(recv).peel_refs();
- if_chain! {
- if is_type_diagnostic_item(cx, ty, sym::Vec);
+ if is_type_diagnostic_item(cx, ty, sym::Vec)
//check source object
- if let ExprKind::MethodCall(src_method, drain_vec, [drain_arg], _) = &arg.kind;
- if src_method.ident.as_str() == "drain";
- let src_ty = cx.typeck_results().expr_ty(drain_vec);
+ && let ExprKind::MethodCall(src_method, drain_vec, [drain_arg], _) = &arg.kind
+ && src_method.ident.as_str() == "drain"
+ && let src_ty = cx.typeck_results().expr_ty(drain_vec)
//check if actual src type is mutable for code suggestion
- let immutable = src_ty.is_mutable_ptr();
- let src_ty = src_ty.peel_refs();
- if is_type_diagnostic_item(cx, src_ty, sym::Vec);
+ && let immutable = src_ty.is_mutable_ptr()
+ && let src_ty = src_ty.peel_refs()
+ && is_type_diagnostic_item(cx, src_ty, sym::Vec)
//check drain range
- if let src_ty_range = cx.typeck_results().expr_ty(drain_arg).peel_refs();
- if is_type_lang_item(cx, src_ty_range, LangItem::RangeFull);
- then {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- EXTEND_WITH_DRAIN,
- expr.span,
- "use of `extend` instead of `append` for adding the full range of a second vector",
- "try",
- format!(
- "{}.append({}{})",
- snippet_with_applicability(cx, recv.span, "..", &mut applicability),
- if immutable { "" } else { "&mut " },
- snippet_with_applicability(cx, drain_vec.span, "..", &mut applicability)
- ),
- applicability,
- );
- }
+ && let src_ty_range = cx.typeck_results().expr_ty(drain_arg).peel_refs()
+ && is_type_lang_item(cx, src_ty_range, LangItem::RangeFull)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ EXTEND_WITH_DRAIN,
+ expr.span,
+ "use of `extend` instead of `append` for adding the full range of a second vector",
+ "try",
+ format!(
+ "{}.append({}{})",
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability),
+ if immutable { "" } else { "&mut " },
+ snippet_with_applicability(cx, drain_vec.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs b/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs
index 7818be811..b05361ab2 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::get_parent_expr;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::{sym, Span};
@@ -19,21 +18,19 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr
let verb: &str;
let lint_unary: &str;
let help_unary: &str;
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, expr);
- if let hir::ExprKind::Unary(op, _) = parent.kind;
- if op == hir::UnOp::Not;
- then {
- lint_unary = "!";
- verb = "denies";
- help_unary = "";
- span = parent.span;
- } else {
- lint_unary = "";
- verb = "covers";
- help_unary = "!";
- span = expr.span;
- }
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let hir::ExprKind::Unary(op, _) = parent.kind
+ && op == hir::UnOp::Not
+ {
+ lint_unary = "!";
+ verb = "denies";
+ help_unary = "";
+ span = parent.span;
+ } else {
+ lint_unary = "";
+ verb = "covers";
+ help_unary = "!";
+ span = expr.span;
}
let lint_msg = format!("`{lint_unary}FileType::is_file()` only {verb} regular files");
let help_msg = format!("use `{help_unary}FileType::is_dir()` instead");
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
index 5bb8c7a6b..844ab40ca 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
@@ -4,15 +4,14 @@ use clippy_utils::source::{indent_of, reindent_multiline, snippet};
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{higher, is_trait_method, path_to_local_id, peel_blocks, SpanlessEq};
use hir::{Body, HirId, MatchSource, Pat};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::{Closure, Expr, ExprKind, PatKind, PathSegment, QPath, UnOp};
use rustc_lint::LateContext;
use rustc_middle::ty::adjustment::Adjust;
-use rustc_span::Span;
use rustc_span::symbol::{sym, Ident, Symbol};
+use rustc_span::Span;
use std::borrow::Cow;
use super::{MANUAL_FILTER_MAP, MANUAL_FIND_MAP, OPTION_FILTER_MAP};
@@ -29,13 +28,11 @@ fn is_method(cx: &LateContext<'_>, expr: &hir::Expr<'_>, method_name: Symbol) ->
let arg_id = body.params[0].pat.hir_id;
match closure_expr.kind {
hir::ExprKind::MethodCall(hir::PathSegment { ident, .. }, receiver, ..) => {
- if_chain! {
- if ident.name == method_name;
- if let hir::ExprKind::Path(path) = &receiver.kind;
- if let Res::Local(ref local) = cx.qpath_res(path, receiver.hir_id);
- then {
- return arg_id == *local
- }
+ if ident.name == method_name
+ && let hir::ExprKind::Path(path) = &receiver.kind
+ && let Res::Local(ref local) = cx.qpath_res(path, receiver.hir_id)
+ {
+ return arg_id == *local;
}
false
},
@@ -139,11 +136,9 @@ impl<'tcx> OffendingFilterExpr<'tcx> {
&& path_to_local_id(map_arg_peeled, map_param_id))
&& let eq_fallback = (|a: &Expr<'_>, b: &Expr<'_>| {
// in `filter(|x| ..)`, replace `*x` with `x`
- let a_path = if_chain! {
- if !is_filter_param_ref;
- if let ExprKind::Unary(UnOp::Deref, expr_path) = a.kind;
- then { expr_path } else { a }
- };
+ let a_path = if !is_filter_param_ref
+ && let ExprKind::Unary(UnOp::Deref, expr_path) = a.kind
+ { expr_path } else { a };
// let the filter closure arg and the map closure arg be equal
path_to_local_id(a_path, filter_param_id)
&& path_to_local_id(b, map_param_id)
@@ -305,87 +300,98 @@ pub(super) fn check(
return;
}
- if_chain! {
- if is_trait_method(cx, map_recv, sym::Iterator);
+ if is_trait_method(cx, map_recv, sym::Iterator)
// filter(|x| ...is_some())...
- if let ExprKind::Closure(&Closure { body: filter_body_id, .. }) = filter_arg.kind;
- let filter_body = cx.tcx.hir().body(filter_body_id);
- if let [filter_param] = filter_body.params;
+ && let ExprKind::Closure(&Closure { body: filter_body_id, .. }) = filter_arg.kind
+ && let filter_body = cx.tcx.hir().body(filter_body_id)
+ && let [filter_param] = filter_body.params
// optional ref pattern: `filter(|&x| ..)`
- let (filter_pat, is_filter_param_ref) = if let PatKind::Ref(ref_pat, _) = filter_param.pat.kind {
+ && let (filter_pat, is_filter_param_ref) = if let PatKind::Ref(ref_pat, _) = filter_param.pat.kind {
(ref_pat, true)
} else {
(filter_param.pat, false)
- };
-
- if let PatKind::Binding(_, filter_param_id, _, None) = filter_pat.kind;
- if let Some(mut offending_expr) = OffendingFilterExpr::hir(cx, filter_body.value, filter_param_id);
+ }
- if let ExprKind::Closure(&Closure { body: map_body_id, .. }) = map_arg.kind;
- let map_body = cx.tcx.hir().body(map_body_id);
- if let [map_param] = map_body.params;
- if let PatKind::Binding(_, map_param_id, map_param_ident, None) = map_param.pat.kind;
+ && let PatKind::Binding(_, filter_param_id, _, None) = filter_pat.kind
+ && let Some(mut offending_expr) = OffendingFilterExpr::hir(cx, filter_body.value, filter_param_id)
- if let Some(check_result) =
- offending_expr.check_map_call(cx, map_body, map_param_id, filter_param_id, is_filter_param_ref);
+ && let ExprKind::Closure(&Closure { body: map_body_id, .. }) = map_arg.kind
+ && let map_body = cx.tcx.hir().body(map_body_id)
+ && let [map_param] = map_body.params
+ && let PatKind::Binding(_, map_param_id, map_param_ident, None) = map_param.pat.kind
- then {
- let span = filter_span.with_hi(expr.span.hi());
- let (filter_name, lint) = if is_find {
- ("find", MANUAL_FIND_MAP)
- } else {
- ("filter", MANUAL_FILTER_MAP)
- };
- let msg = format!("`{filter_name}(..).map(..)` can be simplified as `{filter_name}_map(..)`");
+ && let Some(check_result) =
+ offending_expr.check_map_call(cx, map_body, map_param_id, filter_param_id, is_filter_param_ref)
+ {
+ let span = filter_span.with_hi(expr.span.hi());
+ let (filter_name, lint) = if is_find {
+ ("find", MANUAL_FIND_MAP)
+ } else {
+ ("filter", MANUAL_FILTER_MAP)
+ };
+ let msg = format!("`{filter_name}(..).map(..)` can be simplified as `{filter_name}_map(..)`");
- let (sugg, note_and_span, applicability) = match check_result {
- CheckResult::Method { map_arg, method, side_effect_expr_span } => {
- let (to_opt, deref) = match method {
- CalledMethod::ResultIsOk => (".ok()", String::new()),
- CalledMethod::OptionIsSome => {
- let derefs = cx.typeck_results()
- .expr_adjustments(map_arg)
- .iter()
- .filter(|adj| matches!(adj.kind, Adjust::Deref(_)))
- .count();
+ let (sugg, note_and_span, applicability) = match check_result {
+ CheckResult::Method {
+ map_arg,
+ method,
+ side_effect_expr_span,
+ } => {
+ let (to_opt, deref) = match method {
+ CalledMethod::ResultIsOk => (".ok()", String::new()),
+ CalledMethod::OptionIsSome => {
+ let derefs = cx
+ .typeck_results()
+ .expr_adjustments(map_arg)
+ .iter()
+ .filter(|adj| matches!(adj.kind, Adjust::Deref(_)))
+ .count();
- ("", "*".repeat(derefs))
- }
- };
+ ("", "*".repeat(derefs))
+ },
+ };
- let sugg = format!(
- "{filter_name}_map(|{map_param_ident}| {deref}{}{to_opt})",
- snippet(cx, map_arg.span, ".."),
- );
- let (note_and_span, applicability) = if let Some(span) = side_effect_expr_span {
- let note = "the suggestion might change the behavior of the program when merging `filter` and `map`, \
- because this expression potentially contains side effects and will only execute once";
+ let sugg = format!(
+ "{filter_name}_map(|{map_param_ident}| {deref}{}{to_opt})",
+ snippet(cx, map_arg.span, ".."),
+ );
+ let (note_and_span, applicability) = if let Some(span) = side_effect_expr_span {
+ let note = "the suggestion might change the behavior of the program when merging `filter` and `map`, \
+ because this expression potentially contains side effects and will only execute once";
- (Some((note, span)), Applicability::MaybeIncorrect)
- } else {
- (None, Applicability::MachineApplicable)
- };
+ (Some((note, span)), Applicability::MaybeIncorrect)
+ } else {
+ (None, Applicability::MachineApplicable)
+ };
- (sugg, note_and_span, applicability)
- }
- CheckResult::PatternMatching { variant_span, variant_ident } => {
- let pat = snippet(cx, variant_span, "<pattern>");
+ (sugg, note_and_span, applicability)
+ },
+ CheckResult::PatternMatching {
+ variant_span,
+ variant_ident,
+ } => {
+ let pat = snippet(cx, variant_span, "<pattern>");
- (format!("{filter_name}_map(|{map_param_ident}| match {map_param_ident} {{ \
- {pat} => Some({variant_ident}), \
- _ => None \
- }})"), None, Applicability::MachineApplicable)
- }
- };
- span_lint_and_then(cx, lint, span, &msg, |diag| {
- diag.span_suggestion(span, "try", sugg, applicability);
+ (
+ format!(
+ "{filter_name}_map(|{map_param_ident}| match {map_param_ident} {{ \
+ {pat} => Some({variant_ident}), \
+ _ => None \
+ }})"
+ ),
+ None,
+ Applicability::MachineApplicable,
+ )
+ },
+ };
+ span_lint_and_then(cx, lint, span, &msg, |diag| {
+ diag.span_suggestion(span, "try", sugg, applicability);
- if let Some((note, span)) = note_and_span {
- diag.span_note(span, note);
- }
- });
- }
+ if let Some((note, span)) = note_and_span {
+ diag.span_note(span, note);
+ }
+ });
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs
index 9950c4428..2e43d19a6 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs
@@ -27,7 +27,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: &
closure.def_id.to_def_id(),
Binder::bind_with_vars(
cx.typeck_results().node_type(param_ty.hir_id),
- cx.tcx.late_bound_vars(cx.tcx.hir().local_def_id_to_hir_id(closure.def_id)),
+ cx.tcx.late_bound_vars(cx.tcx.local_def_id_to_hir_id(closure.def_id)),
),
)
&& is_copy(cx, param_ty)
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
index ac7bc9bcc..9251130a3 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
@@ -44,7 +44,7 @@ pub(super) fn check<'tcx>(
// add note if not multi-line
span_lint_and_then(cx, FILTER_NEXT, expr.span, msg, |diag| {
let (applicability, pat) = if let Some(id) = path_to_local(recv)
- && let Some(hir::Node::Pat(pat)) = cx.tcx.hir().find(id)
+ && let Some(hir::Node::Pat(pat)) = cx.tcx.opt_hir_node(id)
&& let hir::PatKind::Binding(BindingAnnotation(_, Mutability::Not), _, ident, _) = pat.kind
{
(Applicability::Unspecified, Some((pat.span, ident)))
diff --git a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs
index 4040d3a5f..917a8e33e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::implements_trait;
use clippy_utils::{is_path_diagnostic_item, sugg};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -12,28 +11,25 @@ use rustc_span::sym;
use super::FROM_ITER_INSTEAD_OF_COLLECT;
pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>], func: &hir::Expr<'_>) {
- if_chain! {
- if is_path_diagnostic_item(cx, func, sym::from_iter_fn);
- let ty = cx.typeck_results().expr_ty(expr);
- let arg_ty = cx.typeck_results().expr_ty(&args[0]);
- if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
-
- if implements_trait(cx, arg_ty, iter_id, &[]);
- then {
- // `expr` implements `FromIterator` trait
- let iter_expr = sugg::Sugg::hir(cx, &args[0], "..").maybe_par();
- let turbofish = extract_turbofish(cx, expr, ty);
- let sugg = format!("{iter_expr}.collect::<{turbofish}>()");
- span_lint_and_sugg(
- cx,
- FROM_ITER_INSTEAD_OF_COLLECT,
- expr.span,
- "usage of `FromIterator::from_iter`",
- "use `.collect()` instead of `::from_iter()`",
- sugg,
- Applicability::MaybeIncorrect,
- );
- }
+ if is_path_diagnostic_item(cx, func, sym::from_iter_fn)
+ && let ty = cx.typeck_results().expr_ty(expr)
+ && let arg_ty = cx.typeck_results().expr_ty(&args[0])
+ && let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
+ && implements_trait(cx, arg_ty, iter_id, &[])
+ {
+ // `expr` implements `FromIterator` trait
+ let iter_expr = sugg::Sugg::hir(cx, &args[0], "..").maybe_par();
+ let turbofish = extract_turbofish(cx, expr, ty);
+ let sugg = format!("{iter_expr}.collect::<{turbofish}>()");
+ span_lint_and_sugg(
+ cx,
+ FROM_ITER_INSTEAD_OF_COLLECT,
+ expr.span,
+ "usage of `FromIterator::from_iter`",
+ "use `.collect()` instead of `::from_iter()`",
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
}
}
@@ -43,41 +39,43 @@ fn extract_turbofish(cx: &LateContext<'_>, expr: &hir::Expr<'_>, ty: Ty<'_>) ->
}
let call_site = expr.span.source_callsite();
- if_chain! {
- if let Some(snippet) = snippet_opt(cx, call_site);
- let snippet_split = snippet.split("::").collect::<Vec<_>>();
- if let Some((_, elements)) = snippet_split.split_last();
-
- then {
- if_chain! {
- if let [type_specifier, _] = snippet_split.as_slice();
- if let Some(type_specifier) = strip_angle_brackets(type_specifier);
- if let Some((type_specifier, ..)) = type_specifier.split_once(" as ");
- then {
- type_specifier.to_string()
- } else {
- // is there a type specifier? (i.e.: like `<u32>` in `collections::BTreeSet::<u32>::`)
- if let Some(type_specifier) = snippet_split.iter().find(|e| strip_angle_brackets(e).is_some()) {
- // remove the type specifier from the path elements
- let without_ts = elements.iter().filter_map(|e| {
- if e == type_specifier { None } else { Some((*e).to_string()) }
- }).collect::<Vec<_>>();
- // join and add the type specifier at the end (i.e.: `collections::BTreeSet<u32>`)
- format!("{}{type_specifier}", without_ts.join("::"))
- } else {
- // type is not explicitly specified so wildcards are needed
- // i.e.: 2 wildcards in `std::collections::BTreeMap<&i32, &char>`
- let ty_str = ty.to_string();
- let start = ty_str.find('<').unwrap_or(0);
- let end = ty_str.find('>').unwrap_or(ty_str.len());
- let nb_wildcard = ty_str[start..end].split(',').count();
- let wildcards = format!("_{}", ", _".repeat(nb_wildcard - 1));
- format!("{}<{wildcards}>", elements.join("::"))
- }
- }
- }
+ if let Some(snippet) = snippet_opt(cx, call_site)
+ && let snippet_split = snippet.split("::").collect::<Vec<_>>()
+ && let Some((_, elements)) = snippet_split.split_last()
+ {
+ if let [type_specifier, _] = snippet_split.as_slice()
+ && let Some(type_specifier) = strip_angle_brackets(type_specifier)
+ && let Some((type_specifier, ..)) = type_specifier.split_once(" as ")
+ {
+ type_specifier.to_string()
} else {
- ty.to_string()
+ // is there a type specifier? (i.e.: like `<u32>` in `collections::BTreeSet::<u32>::`)
+ if let Some(type_specifier) = snippet_split.iter().find(|e| strip_angle_brackets(e).is_some()) {
+ // remove the type specifier from the path elements
+ let without_ts = elements
+ .iter()
+ .filter_map(|e| {
+ if e == type_specifier {
+ None
+ } else {
+ Some((*e).to_string())
+ }
+ })
+ .collect::<Vec<_>>();
+ // join and add the type specifier at the end (i.e.: `collections::BTreeSet<u32>`)
+ format!("{}{type_specifier}", without_ts.join("::"))
+ } else {
+ // type is not explicitly specified so wildcards are needed
+ // i.e.: 2 wildcards in `std::collections::BTreeMap<&i32, &char>`
+ let ty_str = ty.to_string();
+ let start = ty_str.find('<').unwrap_or(0);
+ let end = ty_str.find('>').unwrap_or(ty_str.len());
+ let nb_wildcard = ty_str[start..end].split(',').count();
+ let wildcards = format!("_{}", ", _".repeat(nb_wildcard - 1));
+ format!("{}<{wildcards}>", elements.join("::"))
+ }
}
+ } else {
+ ty.to_string()
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/get_first.rs b/src/tools/clippy/clippy_lints/src/methods/get_first.rs
index 2e1dd3ec6..e1f1e4893 100644
--- a/src/tools/clippy/clippy_lints/src/methods/get_first.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/get_first.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -17,37 +16,38 @@ pub(super) fn check<'tcx>(
recv: &'tcx hir::Expr<'_>,
arg: &'tcx hir::Expr<'_>,
) {
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- let identity = cx.tcx.type_of(impl_id).instantiate_identity();
- if let hir::ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = arg.kind;
- then {
- if identity.is_slice() {
- let mut app = Applicability::MachineApplicable;
- let slice_name = snippet_with_applicability(cx, recv.span, "..", &mut app);
- span_lint_and_sugg(
- cx,
- GET_FIRST,
- expr.span,
- &format!("accessing first element with `{slice_name}.get(0)`"),
- "try",
- format!("{slice_name}.first()"),
- app,
- );
- } else if is_type_diagnostic_item(cx, identity, sym::VecDeque){
- let mut app = Applicability::MachineApplicable;
- let slice_name = snippet_with_applicability(cx, recv.span, "..", &mut app);
- span_lint_and_sugg(
- cx,
- GET_FIRST,
- expr.span,
- &format!("accessing first element with `{slice_name}.get(0)`"),
- "try",
- format!("{slice_name}.front()"),
- app,
- );
- }
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && let identity = cx.tcx.type_of(impl_id).instantiate_identity()
+ && let hir::ExprKind::Lit(Spanned {
+ node: LitKind::Int(0, _),
+ ..
+ }) = arg.kind
+ {
+ if identity.is_slice() {
+ let mut app = Applicability::MachineApplicable;
+ let slice_name = snippet_with_applicability(cx, recv.span, "..", &mut app);
+ span_lint_and_sugg(
+ cx,
+ GET_FIRST,
+ expr.span,
+ &format!("accessing first element with `{slice_name}.get(0)`"),
+ "try",
+ format!("{slice_name}.first()"),
+ app,
+ );
+ } else if is_type_diagnostic_item(cx, identity, sym::VecDeque) {
+ let mut app = Applicability::MachineApplicable;
+ let slice_name = snippet_with_applicability(cx, recv.span, "..", &mut app);
+ span_lint_and_sugg(
+ cx,
+ GET_FIRST,
+ expr.span,
+ &format!("accessing first element with `{slice_name}.get(0)`"),
+ "try",
+ format!("{slice_name}.front()"),
+ app,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
index e91ce64d8..78a553eb8 100644
--- a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{implements_trait, peel_mid_ty_refs};
use clippy_utils::{is_diag_item_method, is_diag_trait_item};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -11,34 +10,32 @@ use rustc_span::sym;
use super::IMPLICIT_CLONE;
pub fn check(cx: &LateContext<'_>, method_name: &str, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
- if_chain! {
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if is_clone_like(cx, method_name, method_def_id);
- let return_type = cx.typeck_results().expr_ty(expr);
- let input_type = cx.typeck_results().expr_ty(recv);
- let (input_type, ref_count) = peel_mid_ty_refs(input_type);
- if !(ref_count > 0 && is_diag_trait_item(cx, method_def_id, sym::ToOwned));
- if let Some(ty_name) = input_type.ty_adt_def().map(|adt_def| cx.tcx.item_name(adt_def.did()));
- if return_type == input_type;
- if let Some(clone_trait) = cx.tcx.lang_items().clone_trait();
- if implements_trait(cx, return_type, clone_trait, &[]);
- then {
- let mut app = Applicability::MachineApplicable;
- let recv_snip = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut app).0;
- span_lint_and_sugg(
- cx,
- IMPLICIT_CLONE,
- expr.span,
- &format!("implicitly cloning a `{ty_name}` by calling `{method_name}` on its dereferenced type"),
- "consider using",
- if ref_count > 1 {
- format!("({}{recv_snip}).clone()", "*".repeat(ref_count - 1))
- } else {
- format!("{recv_snip}.clone()")
- },
- app,
- );
- }
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && is_clone_like(cx, method_name, method_def_id)
+ && let return_type = cx.typeck_results().expr_ty(expr)
+ && let input_type = cx.typeck_results().expr_ty(recv)
+ && let (input_type, ref_count) = peel_mid_ty_refs(input_type)
+ && !(ref_count > 0 && is_diag_trait_item(cx, method_def_id, sym::ToOwned))
+ && let Some(ty_name) = input_type.ty_adt_def().map(|adt_def| cx.tcx.item_name(adt_def.did()))
+ && return_type == input_type
+ && let Some(clone_trait) = cx.tcx.lang_items().clone_trait()
+ && implements_trait(cx, return_type, clone_trait, &[])
+ {
+ let mut app = Applicability::MachineApplicable;
+ let recv_snip = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut app).0;
+ span_lint_and_sugg(
+ cx,
+ IMPLICIT_CLONE,
+ expr.span,
+ &format!("implicitly cloning a `{ty_name}` by calling `{method_name}` on its dereferenced type"),
+ "consider using",
+ if ref_count > 1 {
+ format!("({}{recv_snip}).clone()", "*".repeat(ref_count - 1))
+ } else {
+ format!("{recv_snip}.clone()")
+ },
+ app,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
index 6686d42c9..efc3ddd20 100644
--- a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::{is_type_lang_item, walk_ptrs_ty_depth};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -18,37 +17,36 @@ pub fn check(
receiver: &hir::Expr<'_>,
args: &[hir::Expr<'_>],
) {
- if_chain! {
- if args.is_empty() && method_name == sym::to_string;
- if let Some(to_string_meth_did) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if cx.tcx.is_diagnostic_item(sym::to_string_method, to_string_meth_did);
- if let Some(args) = cx.typeck_results().node_args_opt(expr.hir_id);
- let arg_ty = cx.typeck_results().expr_ty_adjusted(receiver);
- let self_ty = args.type_at(0);
- let (deref_self_ty, deref_count) = walk_ptrs_ty_depth(self_ty);
- if deref_count >= 1;
- if specializes_tostring(cx, deref_self_ty);
- then {
- span_lint_and_then(
- cx,
- INEFFICIENT_TO_STRING,
- expr.span,
- &format!("calling `to_string` on `{arg_ty}`"),
- |diag| {
- diag.help(format!(
- "`{self_ty}` implements `ToString` through a slower blanket impl, but `{deref_self_ty}` has a fast specialization of `ToString`"
- ));
- let mut applicability = Applicability::MachineApplicable;
- let arg_snippet = snippet_with_applicability(cx, receiver.span, "..", &mut applicability);
- diag.span_suggestion(
- expr.span,
- "try dereferencing the receiver",
- format!("({}{arg_snippet}).to_string()", "*".repeat(deref_count)),
- applicability,
- );
- },
- );
- }
+ if args.is_empty()
+ && method_name == sym::to_string
+ && let Some(to_string_meth_did) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && cx.tcx.is_diagnostic_item(sym::to_string_method, to_string_meth_did)
+ && let Some(args) = cx.typeck_results().node_args_opt(expr.hir_id)
+ && let arg_ty = cx.typeck_results().expr_ty_adjusted(receiver)
+ && let self_ty = args.type_at(0)
+ && let (deref_self_ty, deref_count) = walk_ptrs_ty_depth(self_ty)
+ && deref_count >= 1
+ && specializes_tostring(cx, deref_self_ty)
+ {
+ span_lint_and_then(
+ cx,
+ INEFFICIENT_TO_STRING,
+ expr.span,
+ &format!("calling `to_string` on `{arg_ty}`"),
+ |diag| {
+ diag.help(format!(
+ "`{self_ty}` implements `ToString` through a slower blanket impl, but `{deref_self_ty}` has a fast specialization of `ToString`"
+ ));
+ let mut applicability = Applicability::MachineApplicable;
+ let arg_snippet = snippet_with_applicability(cx, receiver.span, "..", &mut applicability);
+ diag.span_suggestion(
+ expr.span,
+ "try dereferencing the receiver",
+ format!("({}{arg_snippet}).to_string()", "*".repeat(deref_count)),
+ applicability,
+ );
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs b/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs
index bbd964c10..80160d17c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs
@@ -1,13 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_trait_method;
use clippy_utils::ty::has_iter_method;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
-use rustc_span::Span;
use rustc_span::symbol::{sym, Symbol};
+use rustc_span::Span;
use super::INTO_ITER_ON_REF;
@@ -19,24 +18,20 @@ pub(super) fn check(
receiver: &hir::Expr<'_>,
) {
let self_ty = cx.typeck_results().expr_ty_adjusted(receiver);
- if_chain! {
- if let ty::Ref(..) = self_ty.kind();
- if method_name == sym::into_iter;
- if is_trait_method(cx, expr, sym::IntoIterator);
- if let Some((kind, method_name)) = ty_has_iter_method(cx, self_ty);
- then {
- span_lint_and_sugg(
- cx,
- INTO_ITER_ON_REF,
- method_span,
- &format!(
- "this `.into_iter()` call is equivalent to `.{method_name}()` and will not consume the `{kind}`",
- ),
- "call directly",
- method_name.to_string(),
- Applicability::MachineApplicable,
- );
- }
+ if let ty::Ref(..) = self_ty.kind()
+ && method_name == sym::into_iter
+ && is_trait_method(cx, expr, sym::IntoIterator)
+ && let Some((kind, method_name)) = ty_has_iter_method(cx, self_ty)
+ {
+ span_lint_and_sugg(
+ cx,
+ INTO_ITER_ON_REF,
+ method_span,
+ &format!("this `.into_iter()` call is equivalent to `.{method_name}()` and will not consume the `{kind}`",),
+ "call directly",
+ method_name.to_string(),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs b/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs
index bde6f92b0..dd741cd43 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs
@@ -1,7 +1,6 @@
use crate::methods::utils::derefs_to_slice;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -10,22 +9,21 @@ use rustc_span::sym;
use super::ITER_CLONED_COLLECT;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, method_name: &str, expr: &hir::Expr<'_>, recv: &'tcx hir::Expr<'_>) {
- if_chain! {
- if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec);
- if let Some(slice) = derefs_to_slice(cx, recv, cx.typeck_results().expr_ty(recv));
- if let Some(to_replace) = expr.span.trim_start(slice.span.source_callsite());
-
- then {
- span_lint_and_sugg(
- cx,
- ITER_CLONED_COLLECT,
- to_replace,
- &format!("called `iter().{method_name}().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and \
- more readable"),
- "try",
- ".to_vec()".to_string(),
- Applicability::MachineApplicable,
- );
- }
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec)
+ && let Some(slice) = derefs_to_slice(cx, recv, cx.typeck_results().expr_ty(recv))
+ && let Some(to_replace) = expr.span.trim_start(slice.span.source_callsite())
+ {
+ span_lint_and_sugg(
+ cx,
+ ITER_CLONED_COLLECT,
+ to_replace,
+ &format!(
+ "called `iter().{method_name}().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and \
+ more readable"
+ ),
+ "try",
+ ".to_vec()".to_string(),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs
index 625325d4c..e1b934d36 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs
@@ -1,6 +1,7 @@
#![allow(unused_imports)]
use super::ITER_KV_MAP;
+use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::ty::is_type_diagnostic_item;
@@ -21,65 +22,59 @@ pub(super) fn check<'tcx>(
expr: &'tcx Expr<'tcx>, // .iter().map(|(_, v_| v))
recv: &'tcx Expr<'tcx>, // hashmap
m_arg: &'tcx Expr<'tcx>, // |(_, v)| v
+ msrv: &Msrv,
) {
- if_chain! {
- if !expr.span.from_expansion();
- if let ExprKind::Closure(c) = m_arg.kind;
- if let Body {params: [p], value: body_expr, coroutine_kind: _ } = cx.tcx.hir().body(c.body);
- if let PatKind::Tuple([key_pat, val_pat], _) = p.pat.kind;
-
- let (replacement_kind, annotation, bound_ident) = match (&key_pat.kind, &val_pat.kind) {
+ if map_type == "into_iter" && !msrv.meets(msrvs::INTO_KEYS) {
+ return;
+ }
+ if !expr.span.from_expansion()
+ && let ExprKind::Closure(c) = m_arg.kind
+ && let Body {
+ params: [p],
+ value: body_expr,
+ coroutine_kind: _,
+ } = cx.tcx.hir().body(c.body)
+ && let PatKind::Tuple([key_pat, val_pat], _) = p.pat.kind
+ && let (replacement_kind, annotation, bound_ident) = match (&key_pat.kind, &val_pat.kind) {
(key, PatKind::Binding(ann, _, value, _)) if pat_is_wild(cx, key, m_arg) => ("value", ann, value),
(PatKind::Binding(ann, _, key, _), value) if pat_is_wild(cx, value, m_arg) => ("key", ann, key),
_ => return,
- };
-
- let ty = cx.typeck_results().expr_ty(recv);
- if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap);
-
- then {
- let mut applicability = rustc_errors::Applicability::MachineApplicable;
- let recv_snippet = snippet_with_applicability(cx, recv.span, "map", &mut applicability);
- let into_prefix = if map_type == "into_iter" {"into_"} else {""};
-
- if_chain! {
- if let ExprKind::Path(rustc_hir::QPath::Resolved(_, path)) = body_expr.kind;
- if let [local_ident] = path.segments;
- if local_ident.ident.as_str() == bound_ident.as_str();
+ }
+ && let ty = cx.typeck_results().expr_ty(recv)
+ && (is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap))
+ {
+ let mut applicability = rustc_errors::Applicability::MachineApplicable;
+ let recv_snippet = snippet_with_applicability(cx, recv.span, "map", &mut applicability);
+ let into_prefix = if map_type == "into_iter" { "into_" } else { "" };
- then {
- span_lint_and_sugg(
- cx,
- ITER_KV_MAP,
- expr.span,
- &format!("iterating on a map's {replacement_kind}s"),
- "try",
- format!("{recv_snippet}.{into_prefix}{replacement_kind}s()"),
- applicability,
- );
- } else {
- let ref_annotation = if annotation.0 == ByRef::Yes {
- "ref "
- } else {
- ""
- };
- let mut_annotation = if annotation.1 == Mutability::Mut {
- "mut "
- } else {
- ""
- };
- span_lint_and_sugg(
- cx,
- ITER_KV_MAP,
- expr.span,
- &format!("iterating on a map's {replacement_kind}s"),
- "try",
- format!("{recv_snippet}.{into_prefix}{replacement_kind}s().map(|{ref_annotation}{mut_annotation}{bound_ident}| {})",
- snippet_with_applicability(cx, body_expr.span, "/* body */", &mut applicability)),
- applicability,
- );
- }
- }
+ if let ExprKind::Path(rustc_hir::QPath::Resolved(_, path)) = body_expr.kind
+ && let [local_ident] = path.segments
+ && local_ident.ident.as_str() == bound_ident.as_str()
+ {
+ span_lint_and_sugg(
+ cx,
+ ITER_KV_MAP,
+ expr.span,
+ &format!("iterating on a map's {replacement_kind}s"),
+ "try",
+ format!("{recv_snippet}.{into_prefix}{replacement_kind}s()"),
+ applicability,
+ );
+ } else {
+ let ref_annotation = if annotation.0 == ByRef::Yes { "ref " } else { "" };
+ let mut_annotation = if annotation.1 == Mutability::Mut { "mut " } else { "" };
+ span_lint_and_sugg(
+ cx,
+ ITER_KV_MAP,
+ expr.span,
+ &format!("iterating on a map's {replacement_kind}s"),
+ "try",
+ format!(
+ "{recv_snippet}.{into_prefix}{replacement_kind}s().map(|{ref_annotation}{mut_annotation}{bound_ident}| {})",
+ snippet_with_applicability(cx, body_expr.span, "/* body */", &mut applicability)
+ ),
+ applicability,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs b/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
index 8f885e9f7..fd4650e1e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{get_parent_expr, higher};
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -26,29 +25,36 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, cal
if derefs_to_slice(cx, caller_expr, cx.typeck_results().expr_ty(caller_expr)).is_some() {
// caller is a Slice
- if_chain! {
- if let hir::ExprKind::Index(caller_var, index_expr, _) = &caller_expr.kind;
- if let Some(higher::Range { start: Some(start_expr), end: None, limits: ast::RangeLimits::HalfOpen })
- = higher::Range::hir(index_expr);
- if let hir::ExprKind::Lit(start_lit) = &start_expr.kind;
- if let ast::LitKind::Int(start_idx, _) = start_lit.node;
- then {
- let mut applicability = Applicability::MachineApplicable;
- let suggest = if start_idx == 0 {
- format!("{}.first()", snippet_with_applicability(cx, caller_var.span, "..", &mut applicability))
- } else {
- format!("{}.get({start_idx})", snippet_with_applicability(cx, caller_var.span, "..", &mut applicability))
- };
- span_lint_and_sugg(
- cx,
- ITER_NEXT_SLICE,
- expr.span,
- "using `.iter().next()` on a Slice without end index",
- "try calling",
- suggest,
- applicability,
- );
- }
+ if let hir::ExprKind::Index(caller_var, index_expr, _) = &caller_expr.kind
+ && let Some(higher::Range {
+ start: Some(start_expr),
+ end: None,
+ limits: ast::RangeLimits::HalfOpen,
+ }) = higher::Range::hir(index_expr)
+ && let hir::ExprKind::Lit(start_lit) = &start_expr.kind
+ && let ast::LitKind::Int(start_idx, _) = start_lit.node
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let suggest = if start_idx == 0 {
+ format!(
+ "{}.first()",
+ snippet_with_applicability(cx, caller_var.span, "..", &mut applicability)
+ )
+ } else {
+ format!(
+ "{}.get({start_idx})",
+ snippet_with_applicability(cx, caller_var.span, "..", &mut applicability)
+ )
+ };
+ span_lint_and_sugg(
+ cx,
+ ITER_NEXT_SLICE,
+ expr.span,
+ "using `.iter().next()` on a Slice without end index",
+ "try calling",
+ suggest,
+ applicability,
+ );
}
} else if is_vec_or_array(cx, caller_expr) {
// caller is a Vec or an Array
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
index 39af52141..d1215290d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
@@ -19,18 +19,16 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr
expr.span.trim_start(recv.span).unwrap(),
"called `skip(..).next()` on an iterator",
|diag| {
- if_chain! {
- if let Some(id) = path_to_local(recv);
- if let Node::Pat(pat) = cx.tcx.hir().get(id);
- if let PatKind::Binding(ann, _, _, _) = pat.kind;
- if ann != BindingAnnotation::MUT;
- then {
- application = Applicability::Unspecified;
- diag.span_help(
- pat.span,
- format!("for this change `{}` has to be mutable", snippet(cx, pat.span, "..")),
- );
- }
+ if let Some(id) = path_to_local(recv)
+ && let Node::Pat(pat) = cx.tcx.hir_node(id)
+ && let PatKind::Binding(ann, _, _, _) = pat.kind
+ && ann != BindingAnnotation::MUT
+ {
+ application = Applicability::Unspecified;
+ diag.span_help(
+ pat.span,
+ format!("for this change `{}` has to be mutable", snippet(cx, pat.span, "..")),
+ );
}
diag.span_suggestion(
diff --git a/src/tools/clippy/clippy_lints/src/methods/join_absolute_paths.rs b/src/tools/clippy/clippy_lints/src/methods/join_absolute_paths.rs
new file mode 100644
index 000000000..02f28779c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/join_absolute_paths.rs
@@ -0,0 +1,52 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::expr_or_init;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+use rustc_span::Span;
+
+use super::JOIN_ABSOLUTE_PATHS;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, recv: &'tcx Expr<'tcx>, join_arg: &'tcx Expr<'tcx>, expr_span: Span) {
+ let ty = cx.typeck_results().expr_ty(recv).peel_refs();
+ if (is_type_diagnostic_item(cx, ty, sym::Path) || is_type_diagnostic_item(cx, ty, sym::PathBuf))
+ && let ExprKind::Lit(spanned) = expr_or_init(cx, join_arg).kind
+ && let LitKind::Str(symbol, _) = spanned.node
+ && let sym_str = symbol.as_str()
+ && sym_str.starts_with(['/', '\\'])
+ {
+ span_lint_and_then(
+ cx,
+ JOIN_ABSOLUTE_PATHS,
+ join_arg.span,
+ "argument to `Path::join` starts with a path separator",
+ |diag| {
+ let arg_str = snippet_opt(cx, spanned.span).unwrap_or_else(|| "..".to_string());
+
+ let no_separator = if sym_str.starts_with('/') {
+ arg_str.replacen('/', "", 1)
+ } else {
+ arg_str.replacen('\\', "", 1)
+ };
+
+ diag.note("joining a path starting with separator will replace the path instead")
+ .span_suggestion(
+ spanned.span,
+ "if this is unintentional, try removing the starting separator",
+ no_separator,
+ Applicability::Unspecified,
+ )
+ .span_suggestion(
+ expr_span,
+ "if this is intentional, try using `Path::new` instead",
+ format!("PathBuf::from({arg_str})"),
+ Applicability::Unspecified,
+ );
+ },
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs
index 3031193e5..b1af0083e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{is_res_lang_ctor, path_res, path_to_local_id};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{ResultErr, ResultOk};
use rustc_hir::{Expr, ExprKind, PatKind};
@@ -18,30 +17,26 @@ pub(super) fn check<'tcx>(
or_expr: &'tcx Expr<'_>,
map_expr: &'tcx Expr<'_>,
) {
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Option);
- if let ExprKind::Call(err_path, [err_arg]) = or_expr.kind;
- if is_res_lang_ctor(cx, path_res(cx, err_path), ResultErr);
- if is_ok_wrapping(cx, map_expr);
- if let Some(recv_snippet) = snippet_opt(cx, recv.span);
- if let Some(err_arg_snippet) = snippet_opt(cx, err_arg.span);
- if let Some(indent) = indent_of(cx, expr.span);
- then {
- let reindented_err_arg_snippet = reindent_multiline(err_arg_snippet.into(), true, Some(indent + 4));
- span_lint_and_sugg(
- cx,
- MANUAL_OK_OR,
- expr.span,
- "this pattern reimplements `Option::ok_or`",
- "replace with",
- format!(
- "{recv_snippet}.ok_or({reindented_err_arg_snippet})"
- ),
- Applicability::MachineApplicable,
- );
- }
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Option)
+ && let ExprKind::Call(err_path, [err_arg]) = or_expr.kind
+ && is_res_lang_ctor(cx, path_res(cx, err_path), ResultErr)
+ && is_ok_wrapping(cx, map_expr)
+ && let Some(recv_snippet) = snippet_opt(cx, recv.span)
+ && let Some(err_arg_snippet) = snippet_opt(cx, err_arg.span)
+ && let Some(indent) = indent_of(cx, expr.span)
+ {
+ let reindented_err_arg_snippet = reindent_multiline(err_arg_snippet.into(), true, Some(indent + 4));
+ span_lint_and_sugg(
+ cx,
+ MANUAL_OK_OR,
+ expr.span,
+ "this pattern reimplements `Option::ok_or`",
+ "replace with",
+ format!("{recv_snippet}.ok_or({reindented_err_arg_snippet})"),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
index 540425eef..04bdbc1ea 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{match_def_path, path_def_id};
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -69,16 +68,14 @@ enum MinMax {
fn is_min_or_max(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<MinMax> {
// `T::max_value()` `T::min_value()` inherent methods
- if_chain! {
- if let hir::ExprKind::Call(func, args) = &expr.kind;
- if args.is_empty();
- if let hir::ExprKind::Path(hir::QPath::TypeRelative(_, segment)) = &func.kind;
- then {
- match segment.ident.as_str() {
- "max_value" => return Some(MinMax::Max),
- "min_value" => return Some(MinMax::Min),
- _ => {}
- }
+ if let hir::ExprKind::Call(func, args) = &expr.kind
+ && args.is_empty()
+ && let hir::ExprKind::Path(hir::QPath::TypeRelative(_, segment)) = &func.kind
+ {
+ match segment.ident.as_str() {
+ "max_value" => return Some(MinMax::Max),
+ "min_value" => return Some(MinMax::Min),
+ _ => {},
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
index ab13d30d8..61e74369c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
@@ -3,7 +3,6 @@ use clippy_utils::is_path_diagnostic_item;
use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
-use if_chain::if_chain;
use rustc_ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem};
@@ -55,43 +54,42 @@ pub(super) fn check(
take_self_arg: &Expr<'_>,
take_arg: &Expr<'_>,
) {
- if_chain! {
- if let ExprKind::Call(repeat_fn, [repeat_arg]) = take_self_arg.kind;
- if is_path_diagnostic_item(cx, repeat_fn, sym::iter_repeat);
- if is_type_lang_item(cx, cx.typeck_results().expr_ty(collect_expr), LangItem::String);
- if let Some(take_id) = cx.typeck_results().type_dependent_def_id(take_expr.hir_id);
- if let Some(iter_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
- if cx.tcx.trait_of_item(take_id) == Some(iter_trait_id);
- if let Some(repeat_kind) = parse_repeat_arg(cx, repeat_arg);
- let ctxt = collect_expr.span.ctxt();
- if ctxt == take_expr.span.ctxt();
- if ctxt == take_self_arg.span.ctxt();
- then {
- let mut app = Applicability::MachineApplicable;
- let count_snip = snippet_with_context(cx, take_arg.span, ctxt, "..", &mut app).0;
+ if let ExprKind::Call(repeat_fn, [repeat_arg]) = take_self_arg.kind
+ && is_path_diagnostic_item(cx, repeat_fn, sym::iter_repeat)
+ && is_type_lang_item(cx, cx.typeck_results().expr_ty(collect_expr), LangItem::String)
+ && let Some(take_id) = cx.typeck_results().type_dependent_def_id(take_expr.hir_id)
+ && let Some(iter_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
+ && cx.tcx.trait_of_item(take_id) == Some(iter_trait_id)
+ && let Some(repeat_kind) = parse_repeat_arg(cx, repeat_arg)
+ && let ctxt = collect_expr.span.ctxt()
+ && ctxt == take_expr.span.ctxt()
+ && ctxt == take_self_arg.span.ctxt()
+ {
+ let mut app = Applicability::MachineApplicable;
+ let count_snip = snippet_with_context(cx, take_arg.span, ctxt, "..", &mut app).0;
- let val_str = match repeat_kind {
- RepeatKind::Char(_) if repeat_arg.span.ctxt() != ctxt => return,
- RepeatKind::Char('\'') => r#""'""#.into(),
- RepeatKind::Char('"') => r#""\"""#.into(),
- RepeatKind::Char(_) =>
- match snippet_with_applicability(cx, repeat_arg.span, "..", &mut app) {
- Cow::Owned(s) => Cow::Owned(format!("\"{}\"", &s[1..s.len() - 1])),
- s @ Cow::Borrowed(_) => s,
- },
- RepeatKind::String =>
- Sugg::hir_with_context(cx, repeat_arg, ctxt, "..", &mut app).maybe_par().to_string().into(),
- };
+ let val_str = match repeat_kind {
+ RepeatKind::Char(_) if repeat_arg.span.ctxt() != ctxt => return,
+ RepeatKind::Char('\'') => r#""'""#.into(),
+ RepeatKind::Char('"') => r#""\"""#.into(),
+ RepeatKind::Char(_) => match snippet_with_applicability(cx, repeat_arg.span, "..", &mut app) {
+ Cow::Owned(s) => Cow::Owned(format!("\"{}\"", &s[1..s.len() - 1])),
+ s @ Cow::Borrowed(_) => s,
+ },
+ RepeatKind::String => Sugg::hir_with_context(cx, repeat_arg, ctxt, "..", &mut app)
+ .maybe_par()
+ .to_string()
+ .into(),
+ };
- span_lint_and_sugg(
- cx,
- MANUAL_STR_REPEAT,
- collect_expr.span,
- "manual implementation of `str::repeat` using iterators",
- "try",
- format!("{val_str}.repeat({count_snip})"),
- app
- )
- }
+ span_lint_and_sugg(
+ cx,
+ MANUAL_STR_REPEAT,
+ collect_expr.span,
+ "manual implementation of `str::repeat` using iterators",
+ "try",
+ format!("{val_str}.repeat({count_snip})"),
+ app,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs b/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs
index 51145afda..f93edded7 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs
@@ -1,14 +1,14 @@
use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::is_from_proc_macro;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::implements_trait;
+use clippy_utils::{is_from_proc_macro, is_trait_method};
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_span::Span;
+use rustc_span::{sym, Span};
use super::MANUAL_TRY_FOLD;
@@ -22,6 +22,7 @@ pub(super) fn check<'tcx>(
) {
if !in_external_macro(cx.sess(), fold_span)
&& msrv.meets(msrvs::ITERATOR_TRY_FOLD)
+ && is_trait_method(cx, expr, sym::Iterator)
&& let init_ty = cx.typeck_results().expr_ty(init)
&& let Some(try_trait) = cx.tcx.lang_items().try_trait()
&& implements_trait(cx, init_ty, try_trait, &[])
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs
index e0f8cb1b9..cc6eeaa86 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::{is_copy, is_type_diagnostic_item};
use clippy_utils::{is_diag_trait_item, peel_blocks};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -16,57 +15,55 @@ use rustc_span::{sym, Span};
use super::MAP_CLONE;
pub(super) fn check(cx: &LateContext<'_>, e: &hir::Expr<'_>, recv: &hir::Expr<'_>, arg: &hir::Expr<'_>, msrv: &Msrv) {
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id);
- if cx.tcx.impl_of_method(method_id)
- .map_or(false, |id| is_type_diagnostic_item(cx, cx.tcx.type_of(id).instantiate_identity(), sym::Option))
- || is_diag_trait_item(cx, method_id, sym::Iterator);
- if let hir::ExprKind::Closure(&hir::Closure{ body, .. }) = arg.kind;
- then {
- let closure_body = cx.tcx.hir().body(body);
- let closure_expr = peel_blocks(closure_body.value);
- match closure_body.params[0].pat.kind {
- hir::PatKind::Ref(inner, hir::Mutability::Not) => if let hir::PatKind::Binding(
- hir::BindingAnnotation::NONE, .., name, None
- ) = inner.kind {
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
+ && (cx.tcx.impl_of_method(method_id).map_or(false, |id| {
+ is_type_diagnostic_item(cx, cx.tcx.type_of(id).instantiate_identity(), sym::Option)
+ }) || is_diag_trait_item(cx, method_id, sym::Iterator))
+ && let hir::ExprKind::Closure(&hir::Closure { body, .. }) = arg.kind
+ {
+ let closure_body = cx.tcx.hir().body(body);
+ let closure_expr = peel_blocks(closure_body.value);
+ match closure_body.params[0].pat.kind {
+ hir::PatKind::Ref(inner, hir::Mutability::Not) => {
+ if let hir::PatKind::Binding(hir::BindingAnnotation::NONE, .., name, None) = inner.kind {
if ident_eq(name, closure_expr) {
lint_explicit_closure(cx, e.span, recv.span, true, msrv);
}
- },
- hir::PatKind::Binding(hir::BindingAnnotation::NONE, .., name, None) => {
- match closure_expr.kind {
- hir::ExprKind::Unary(hir::UnOp::Deref, inner) => {
- if ident_eq(name, inner) {
- if let ty::Ref(.., Mutability::Not) = cx.typeck_results().expr_ty(inner).kind() {
- lint_explicit_closure(cx, e.span, recv.span, true, msrv);
- }
+ }
+ },
+ hir::PatKind::Binding(hir::BindingAnnotation::NONE, .., name, None) => {
+ match closure_expr.kind {
+ hir::ExprKind::Unary(hir::UnOp::Deref, inner) => {
+ if ident_eq(name, inner) {
+ if let ty::Ref(.., Mutability::Not) = cx.typeck_results().expr_ty(inner).kind() {
+ lint_explicit_closure(cx, e.span, recv.span, true, msrv);
}
- },
- hir::ExprKind::MethodCall(method, obj, [], _) => if_chain! {
- if ident_eq(name, obj) && method.ident.name == sym::clone;
- if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id);
- if let Some(trait_id) = cx.tcx.trait_of_item(fn_id);
- if cx.tcx.lang_items().clone_trait().map_or(false, |id| id == trait_id);
- // no autoderefs
- if !cx.typeck_results().expr_adjustments(obj).iter()
- .any(|a| matches!(a.kind, Adjust::Deref(Some(..))));
- then {
- let obj_ty = cx.typeck_results().expr_ty(obj);
- if let ty::Ref(_, ty, mutability) = obj_ty.kind() {
- if matches!(mutability, Mutability::Not) {
- let copy = is_copy(cx, *ty);
- lint_explicit_closure(cx, e.span, recv.span, copy, msrv);
- }
- } else {
- lint_needless_cloning(cx, e.span, recv.span);
+ }
+ },
+ hir::ExprKind::MethodCall(method, obj, [], _) => {
+ if ident_eq(name, obj) && method.ident.name == sym::clone
+ && let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id)
+ && let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
+ && cx.tcx.lang_items().clone_trait().map_or(false, |id| id == trait_id)
+ // no autoderefs
+ && !cx.typeck_results().expr_adjustments(obj).iter()
+ .any(|a| matches!(a.kind, Adjust::Deref(Some(..))))
+ {
+ let obj_ty = cx.typeck_results().expr_ty(obj);
+ if let ty::Ref(_, ty, mutability) = obj_ty.kind() {
+ if matches!(mutability, Mutability::Not) {
+ let copy = is_copy(cx, *ty);
+ lint_explicit_closure(cx, e.span, recv.span, copy, msrv);
}
+ } else {
+ lint_needless_cloning(cx, e.span, recv.span);
}
- },
- _ => {},
- }
- },
- _ => {},
- }
+ }
+ },
+ _ => {},
+ }
+ },
+ _ => {},
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
index 01cdd02e6..e944eac91 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -13,26 +12,24 @@ use super::MAP_COLLECT_RESULT_UNIT;
pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, iter: &hir::Expr<'_>, map_fn: &hir::Expr<'_>) {
// return of collect `Result<(),_>`
let collect_ret_ty = cx.typeck_results().expr_ty(expr);
- if_chain! {
- if is_type_diagnostic_item(cx, collect_ret_ty, sym::Result);
- if let ty::Adt(_, args) = collect_ret_ty.kind();
- if let Some(result_t) = args.types().next();
- if result_t.is_unit();
- // get parts for snippet
- then {
- span_lint_and_sugg(
- cx,
- MAP_COLLECT_RESULT_UNIT,
- expr.span,
- "`.map().collect()` can be replaced with `.try_for_each()`",
- "try",
- format!(
- "{}.try_for_each({})",
- snippet(cx, iter.span, ".."),
- snippet(cx, map_fn.span, "..")
- ),
- Applicability::MachineApplicable,
- );
- }
+ if is_type_diagnostic_item(cx, collect_ret_ty, sym::Result)
+ && let ty::Adt(_, args) = collect_ret_ty.kind()
+ && let Some(result_t) = args.types().next()
+ && result_t.is_unit()
+ // get parts for snippet
+ {
+ span_lint_and_sugg(
+ cx,
+ MAP_COLLECT_RESULT_UNIT,
+ expr.span,
+ "`.map().collect()` can be replaced with `.try_for_each()`",
+ "try",
+ format!(
+ "{}.try_for_each({})",
+ snippet(cx, iter.span, ".."),
+ snippet(cx, map_fn.span, "..")
+ ),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
index e74a76455..26ef0d10f 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
@@ -63,7 +63,7 @@ fn is_map_to_option(cx: &LateContext<'_>, map_arg: &Expr<'_>) -> bool {
ty::Closure(_, args) => args.as_closure().sig(),
_ => map_closure_ty.fn_sig(cx.tcx),
};
- let map_closure_return_ty = cx.tcx.erase_late_bound_regions(map_closure_sig.output());
+ let map_closure_return_ty = cx.tcx.instantiate_bound_regions_with_erased(map_closure_sig.output());
is_type_diagnostic_item(cx, map_closure_return_ty, sym::Option)
},
_ => false,
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
index bcfd0de8e..6da9a87f5 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
@@ -18,22 +18,20 @@ pub(super) fn check(
) {
let caller_ty = cx.typeck_results().expr_ty(caller);
- if_chain! {
- if is_trait_method(cx, expr, sym::Iterator)
- || is_type_diagnostic_item(cx, caller_ty, sym::Result)
- || is_type_diagnostic_item(cx, caller_ty, sym::Option);
- if is_expr_untyped_identity_function(cx, map_arg);
- if let Some(sugg_span) = expr.span.trim_start(caller.span);
- then {
- span_lint_and_sugg(
- cx,
- MAP_IDENTITY,
- sugg_span,
- "unnecessary map of the identity function",
- &format!("remove the call to `{name}`"),
- String::new(),
- Applicability::MachineApplicable,
- )
- }
+ if (is_trait_method(cx, expr, sym::Iterator)
+ || is_type_diagnostic_item(cx, caller_ty, sym::Result)
+ || is_type_diagnostic_item(cx, caller_ty, sym::Option))
+ && is_expr_untyped_identity_function(cx, map_arg)
+ && let Some(sugg_span) = expr.span.trim_start(caller.span)
+ {
+ span_lint_and_sugg(
+ cx,
+ MAP_IDENTITY,
+ sugg_span,
+ "unnecessary map of the identity function",
+ &format!("remove the call to `{name}`"),
+ String::new(),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
index cb81b3919..52ea584a2 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
@@ -44,11 +44,9 @@ pub(super) fn check<'tcx>(
// lint message
let msg = if is_option {
- "called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling \
- `map_or_else(<g>, <f>)` instead"
+ "called `map(<f>).unwrap_or_else(<g>)` on an `Option` value"
} else {
- "called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling \
- `.map_or_else(<g>, <f>)` instead"
+ "called `map(<f>).unwrap_or_else(<g>)` on a `Result` value"
};
// get snippets for args to map() and unwrap_or_else()
let map_snippet = snippet(cx, map_arg.span, "..");
diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs
index a71a136eb..b4f60ffad 100644
--- a/src/tools/clippy/clippy_lints/src/methods/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs
@@ -49,6 +49,7 @@ mod iter_skip_next;
mod iter_skip_zero;
mod iter_with_drain;
mod iterator_step_by_zero;
+mod join_absolute_paths;
mod manual_next_back;
mod manual_ok_or;
mod manual_saturating_arithmetic;
@@ -69,6 +70,7 @@ mod obfuscated_if_else;
mod ok_expect;
mod open_options;
mod option_as_ref_deref;
+mod option_map_or_err_ok;
mod option_map_or_none;
mod option_map_unwrap_or;
mod or_fun_call;
@@ -80,6 +82,7 @@ mod read_line_without_trim;
mod readonly_write_lock;
mod redundant_as_str;
mod repeat_once;
+mod result_map_or_else_none;
mod search_is_some;
mod seek_from_current;
mod seek_to_start_instead_of_rewind;
@@ -123,7 +126,6 @@ use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::ty::{contains_ty_adt_constructor_opaque, implements_trait, is_copy, is_type_diagnostic_item};
use clippy_utils::{contains_return, is_bool, is_trait_method, iter_input_pats, peel_blocks, return_ty};
-use if_chain::if_chain;
pub use path_ends_with_ext::DEFAULT_ALLOWED_DOTFILES;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
@@ -131,7 +133,7 @@ use rustc_hir::{Expr, ExprKind, Node, Stmt, StmtKind, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, TraitRef, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
@@ -3610,7 +3612,7 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
- /// Checks for usage of `as_str()` on a `String`` chained with a method available on the `String` itself.
+ /// Checks for usage of `as_str()` on a `String` chained with a method available on the `String` itself.
///
/// ### Why is this bad?
/// The `as_str()` conversion is pointless and can be removed for simplicity and cleanliness.
@@ -3619,14 +3621,16 @@ declare_clippy_lint! {
/// ```no_run
/// # #![allow(unused)]
/// let owned_string = "This is a string".to_owned();
- /// owned_string.as_str().as_bytes();
+ /// owned_string.as_str().as_bytes()
+ /// # ;
/// ```
///
/// Use instead:
/// ```no_run
/// # #![allow(unused)]
/// let owned_string = "This is a string".to_owned();
- /// owned_string.as_bytes();
+ /// owned_string.as_bytes()
+ /// # ;
/// ```
#[clippy::version = "1.74.0"]
pub REDUNDANT_AS_STR,
@@ -3683,6 +3687,71 @@ declare_clippy_lint! {
"calling the `try_from` and `try_into` trait methods when `From`/`Into` is implemented"
}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `Path::join` that start with a path separator (`\\` or `/`).
+ ///
+ /// ### Why is this bad?
+ /// If the argument to `Path::join` starts with a separator, it will overwrite
+ /// the original path. If this is intentional, prefer using `Path::new` instead.
+ ///
+ /// Note that the behavior is platform-dependent. A leading `\\` will be accepted
+ /// on Unix systems as part of the file name.
+ ///
+ /// See [`Path::join`](https://doc.rust-lang.org/std/path/struct.Path.html#method.join)
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::path::{Path, PathBuf};
+ /// let path = Path::new("/bin");
+ /// let joined_path = path.join("/sh");
+ /// assert_eq!(joined_path, PathBuf::from("/sh"));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::path::{Path, PathBuf};
+ /// let path = Path::new("/bin");
+ ///
+ /// // If this was unintentional, remove the leading separator
+ /// let joined_path = path.join("sh");
+ /// assert_eq!(joined_path, PathBuf::from("/bin/sh"));
+ ///
+ /// // If this was intentional, create a new path instead
+ /// let new = Path::new("/sh");
+ /// assert_eq!(new, PathBuf::from("/sh"));
+ /// ```
+ #[clippy::version = "1.76.0"]
+ pub JOIN_ABSOLUTE_PATHS,
+ suspicious,
+ "calls to `Path::join` which will overwrite the original path"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.map_or(Err(_), Ok)`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.ok_or(_)`.
+ ///
+ /// ### Example
+ /// ```no_run
+ /// # let opt = Some(1);
+ /// opt.map_or(Err("error"), Ok);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```no_run
+ /// # let opt = Some(1);
+ /// opt.ok_or("error");
+ /// ```
+ #[clippy::version = "1.76.0"]
+ pub OPTION_MAP_OR_ERR_OK,
+ style,
+ "using `Option.map_or(Err(_), Ok)`, which is more succinctly expressed as `Option.ok_or(_)`"
+}
+
pub struct Methods {
avoid_breaking_exported_api: bool,
msrv: Msrv,
@@ -3698,8 +3767,10 @@ impl Methods {
msrv: Msrv,
allow_expect_in_tests: bool,
allow_unwrap_in_tests: bool,
- allowed_dotfiles: FxHashSet<String>,
+ mut allowed_dotfiles: FxHashSet<String>,
) -> Self {
+ allowed_dotfiles.extend(DEFAULT_ALLOWED_DOTFILES.iter().map(ToString::to_string));
+
Self {
avoid_breaking_exported_api,
msrv,
@@ -3830,10 +3901,12 @@ impl_lint_pass!(Methods => [
REDUNDANT_AS_STR,
WAKER_CLONE_WAKE,
UNNECESSARY_FALLIBLE_CONVERSIONS,
+ JOIN_ABSOLUTE_PATHS,
+ OPTION_MAP_OR_ERR_OK,
]);
/// Extracts a method call name, args, and `Span` of the method name.
-fn method_call<'tcx>(
+pub fn method_call<'tcx>(
recv: &'tcx hir::Expr<'tcx>,
) -> Option<(&'tcx str, &'tcx hir::Expr<'tcx>, &'tcx [hir::Expr<'tcx>], Span, Span)> {
if let ExprKind::MethodCall(path, receiver, args, call_span) = recv.kind {
@@ -3896,7 +3969,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods {
let implements_trait = matches!(item.kind, hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }));
if let hir::ImplItemKind::Fn(ref sig, id) = impl_item.kind {
let method_sig = cx.tcx.fn_sig(impl_item.owner_id).instantiate_identity();
- let method_sig = cx.tcx.erase_late_bound_regions(method_sig);
+ let method_sig = cx.tcx.instantiate_bound_regions_with_erased(method_sig);
let first_arg_ty_opt = method_sig.inputs().iter().next().copied();
// if this impl block implements a trait, lint in trait definition instead
if !implements_trait && cx.effective_visibilities.is_exported(impl_item.owner_id.def_id) {
@@ -3977,44 +4050,41 @@ impl<'tcx> LateLintPass<'tcx> for Methods {
return;
}
- if_chain! {
- if let TraitItemKind::Fn(ref sig, _) = item.kind;
- if sig.decl.implicit_self.has_implicit_self();
- if let Some(first_arg_hir_ty) = sig.decl.inputs.first();
- if let Some(&first_arg_ty) = cx.tcx.fn_sig(item.owner_id)
+ if let TraitItemKind::Fn(ref sig, _) = item.kind
+ && sig.decl.implicit_self.has_implicit_self()
+ && let Some(first_arg_hir_ty) = sig.decl.inputs.first()
+ && let Some(&first_arg_ty) = cx
+ .tcx
+ .fn_sig(item.owner_id)
.instantiate_identity()
.inputs()
.skip_binder()
- .first();
- then {
- let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id()).self_ty();
- wrong_self_convention::check(
- cx,
- item.ident.name.as_str(),
- self_ty,
- first_arg_ty,
- first_arg_hir_ty.span,
- false,
- true,
- );
- }
- }
-
- if_chain! {
- if item.ident.name == sym::new;
- if let TraitItemKind::Fn(_, _) = item.kind;
- let ret_ty = return_ty(cx, item.owner_id);
+ .first()
+ {
let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id()).self_ty();
- if !ret_ty.contains(self_ty);
+ wrong_self_convention::check(
+ cx,
+ item.ident.name.as_str(),
+ self_ty,
+ first_arg_ty,
+ first_arg_hir_ty.span,
+ false,
+ true,
+ );
+ }
- then {
- span_lint(
- cx,
- NEW_RET_NO_SELF,
- item.span,
- "methods called `new` usually return `Self`",
- );
- }
+ if item.ident.name == sym::new
+ && let TraitItemKind::Fn(_, _) = item.kind
+ && let ret_ty = return_ty(cx, item.owner_id)
+ && let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id()).self_ty()
+ && !ret_ty.contains(self_ty)
+ {
+ span_lint(
+ cx,
+ NEW_RET_NO_SELF,
+ item.span,
+ "methods called `new` usually return `Self`",
+ );
}
}
@@ -4235,6 +4305,8 @@ impl Methods {
("join", [join_arg]) => {
if let Some(("collect", _, _, span, _)) = method_call(recv) {
unnecessary_join::check(cx, expr, recv, join_arg, span);
+ } else {
+ join_absolute_paths::check(cx, recv, join_arg, expr.span);
}
},
("last", []) => {
@@ -4257,7 +4329,7 @@ impl Methods {
map_clone::check(cx, expr, recv, m_arg, &self.msrv);
match method_call(recv) {
Some((map_name @ ("iter" | "into_iter"), recv2, _, _, _)) => {
- iter_kv_map::check(cx, map_name, expr, recv2, m_arg);
+ iter_kv_map::check(cx, map_name, expr, recv2, m_arg, &self.msrv);
},
Some(("cloned", recv2, [], _, _)) => iter_overeager_cloned::check(
cx,
@@ -4290,6 +4362,10 @@ impl Methods {
("map_or", [def, map]) => {
option_map_or_none::check(cx, expr, recv, def, map);
manual_ok_or::check(cx, expr, recv, def, map);
+ option_map_or_err_ok::check(cx, expr, recv, def, map);
+ },
+ ("map_or_else", [def, map]) => {
+ result_map_or_else_none::check(cx, expr, recv, def, map);
},
("next", []) => {
if let Some((name2, recv2, args2, _, _)) = method_call(recv) {
diff --git a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs
index 2855e23bf..1a0fce287 100644
--- a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::expr_custom_deref_adjustment;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, Mutability};
use rustc_lint::LateContext;
@@ -11,22 +10,20 @@ use rustc_span::{sym, Span};
use super::MUT_MUTEX_LOCK;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'tcx>, recv: &'tcx Expr<'tcx>, name_span: Span) {
- if_chain! {
- if matches!(expr_custom_deref_adjustment(cx, recv), None | Some(Mutability::Mut));
- if let ty::Ref(_, _, Mutability::Mut) = cx.typeck_results().expr_ty(recv).kind();
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(ex.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Mutex);
- then {
- span_lint_and_sugg(
- cx,
- MUT_MUTEX_LOCK,
- name_span,
- "calling `&mut Mutex::lock` unnecessarily locks an exclusive (mutable) reference",
- "change this to",
- "get_mut".to_owned(),
- Applicability::MaybeIncorrect,
- );
- }
+ if matches!(expr_custom_deref_adjustment(cx, recv), None | Some(Mutability::Mut))
+ && let ty::Ref(_, _, Mutability::Mut) = cx.typeck_results().expr_ty(recv).kind()
+ && let Some(method_id) = cx.typeck_results().type_dependent_def_id(ex.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Mutex)
+ {
+ span_lint_and_sugg(
+ cx,
+ MUT_MUTEX_LOCK,
+ name_span,
+ "calling `&mut Mutex::lock` unnecessarily locks an exclusive (mutable) reference",
+ "change this to",
+ "get_mut".to_owned(),
+ Applicability::MaybeIncorrect,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs
index 2ef71be32..293b4981c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs
@@ -225,7 +225,10 @@ fn is_contains_sig(cx: &LateContext<'_>, call_id: HirId, iter_expr: &Expr<'_>) -
&& let sig = cx.tcx.fn_sig(id).instantiate_identity()
&& sig.skip_binder().output().is_bool()
&& let [_, search_ty] = *sig.skip_binder().inputs()
- && let ty::Ref(_, search_ty, Mutability::Not) = *cx.tcx.erase_late_bound_regions(sig.rebind(search_ty)).kind()
+ && let ty::Ref(_, search_ty, Mutability::Not) = *cx
+ .tcx
+ .instantiate_bound_regions_with_erased(sig.rebind(search_ty))
+ .kind()
&& let Some(iter_trait) = cx.tcx.get_diagnostic_item(sym::Iterator)
&& let Some(iter_item) = cx.tcx.associated_items(iter_trait).find_by_name_and_kind(
cx.tcx,
diff --git a/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs b/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs
index 01655e860..81df32bde 100644
--- a/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::is_type_lang_item;
use clippy_utils::SpanlessEq;
-use if_chain::if_chain;
use rustc_ast::LitKind;
use rustc_hir::{ExprKind, LangItem};
use rustc_lint::LateContext;
@@ -19,17 +18,13 @@ pub(super) fn check<'tcx>(
return;
}
- if_chain! {
- if let ExprKind::Lit(spanned) = &arg1.kind;
- if let Some(param1) = lit_string_value(&spanned.node);
-
- if let ExprKind::Lit(spanned) = &arg2.kind;
- if let LitKind::Str(param2, _) = &spanned.node;
- if param1 == param2.as_str();
-
- then {
- span_lint(cx, NO_EFFECT_REPLACE, expr.span, "replacing text with itself");
- }
+ if let ExprKind::Lit(spanned) = &arg1.kind
+ && let Some(param1) = lit_string_value(&spanned.node)
+ && let ExprKind::Lit(spanned) = &arg2.kind
+ && let LitKind::Str(param2, _) = &spanned.node
+ && param1 == param2.as_str()
+ {
+ span_lint(cx, NO_EFFECT_REPLACE, expr.span, "replacing text with itself");
}
if SpanlessEq::new(cx).eq_expr(arg1, arg2) {
diff --git a/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs b/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
index f2ef42933..e10bc0216 100644
--- a/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::{has_debug_impl, is_type_diagnostic_item};
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
@@ -10,23 +9,20 @@ use super::OK_EXPECT;
/// lint use of `ok().expect()` for `Result`s
pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
- if_chain! {
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result)
// lint if the caller of `ok()` is a `Result`
- if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
- let result_type = cx.typeck_results().expr_ty(recv);
- if let Some(error_type) = get_error_type(cx, result_type);
- if has_debug_impl(cx, error_type);
-
- then {
- span_lint_and_help(
- cx,
- OK_EXPECT,
- expr.span,
- "called `ok().expect()` on a `Result` value",
- None,
- "you can call `expect()` directly on the `Result`",
- );
- }
+ && let result_type = cx.typeck_results().expr_ty(recv)
+ && let Some(error_type) = get_error_type(cx, result_type)
+ && has_debug_impl(cx, error_type)
+ {
+ span_lint_and_help(
+ cx,
+ OK_EXPECT,
+ expr.span,
+ "called `ok().expect()` on a `Result` value",
+ None,
+ "you can call `expect()` directly on the `Result`",
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs b/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs
index 7b81d4571..756dbe62d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{match_def_path, path_to_local_id, paths, peel_blocks};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -58,34 +57,30 @@ pub(super) fn check(
match &closure_expr.kind {
hir::ExprKind::MethodCall(_, receiver, [], _) => {
- if_chain! {
- if path_to_local_id(receiver, closure_body.params[0].pat.hir_id);
- let adj = cx
+ if path_to_local_id(receiver, closure_body.params[0].pat.hir_id)
+ && let adj = cx
.typeck_results()
.expr_adjustments(receiver)
.iter()
.map(|x| &x.kind)
- .collect::<Box<[_]>>();
- if let [ty::adjustment::Adjust::Deref(None), ty::adjustment::Adjust::Borrow(_)] = *adj;
- then {
- let method_did = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id).unwrap();
- cx.tcx.is_diagnostic_item(sym::deref_method, method_did)
- || cx.tcx.is_diagnostic_item(sym::deref_mut_method, method_did)
- || deref_aliases.iter().any(|path| match_def_path(cx, method_did, path))
- } else {
- false
- }
+ .collect::<Box<[_]>>()
+ && let [ty::adjustment::Adjust::Deref(None), ty::adjustment::Adjust::Borrow(_)] = *adj
+ {
+ let method_did = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id).unwrap();
+ cx.tcx.is_diagnostic_item(sym::deref_method, method_did)
+ || cx.tcx.is_diagnostic_item(sym::deref_mut_method, method_did)
+ || deref_aliases.iter().any(|path| match_def_path(cx, method_did, path))
+ } else {
+ false
}
},
hir::ExprKind::AddrOf(hir::BorrowKind::Ref, m, inner) if same_mutability(m) => {
- if_chain! {
- if let hir::ExprKind::Unary(hir::UnOp::Deref, inner1) = inner.kind;
- if let hir::ExprKind::Unary(hir::UnOp::Deref, inner2) = inner1.kind;
- then {
- path_to_local_id(inner2, closure_body.params[0].pat.hir_id)
- } else {
- false
- }
+ if let hir::ExprKind::Unary(hir::UnOp::Deref, inner1) = inner.kind
+ && let hir::ExprKind::Unary(hir::UnOp::Deref, inner2) = inner1.kind
+ {
+ path_to_local_id(inner2, closure_body.params[0].pat.hir_id)
+ } else {
+ false
}
},
_ => false,
@@ -104,10 +99,7 @@ pub(super) fn check(
let hint = format!("{}.{method_hint}()", snippet(cx, as_ref_recv.span, ".."));
let suggestion = format!("try using {method_hint} instead");
- let msg = format!(
- "called `{current_method}` on an Option value. This can be done more directly \
- by calling `{hint}` instead"
- );
+ let msg = format!("called `{current_method}` on an `Option` value");
span_lint_and_sugg(
cx,
OPTION_AS_REF_DEREF,
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_or_err_ok.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_or_err_ok.rs
new file mode 100644
index 000000000..91e39d5a1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_or_err_ok.rs
@@ -0,0 +1,41 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_res_lang_ctor, path_res};
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{ResultErr, ResultOk};
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::OPTION_MAP_OR_ERR_OK;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ recv: &'tcx Expr<'_>,
+ or_expr: &'tcx Expr<'_>,
+ map_expr: &'tcx Expr<'_>,
+) {
+ // We check that it's called on an `Option` type.
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Option)
+ // We check that first we pass an `Err`.
+ && let ExprKind::Call(call, &[arg]) = or_expr.kind
+ && is_res_lang_ctor(cx, path_res(cx, call), ResultErr)
+ // And finally we check that it is mapped as `Ok`.
+ && is_res_lang_ctor(cx, path_res(cx, map_expr), ResultOk)
+ {
+ let msg = "called `map_or(Err(_), Ok)` on an `Option` value";
+ let self_snippet = snippet(cx, recv.span, "..");
+ let err_snippet = snippet(cx, arg.span, "..");
+ span_lint_and_sugg(
+ cx,
+ OPTION_MAP_OR_ERR_OK,
+ expr.span,
+ msg,
+ "try using `ok_or` instead",
+ format!("{self_snippet}.ok_or({err_snippet})"),
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
index cb6a23068..ff4d8cc9e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
@@ -58,32 +58,28 @@ pub(super) fn check<'tcx>(
if is_option {
let self_snippet = snippet(cx, recv.span, "..");
- if_chain! {
- if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) = map_arg.kind;
- let arg_snippet = snippet(cx, fn_decl_span, "..");
- let body = cx.tcx.hir().body(body);
- if let Some((func, [arg_char])) = reduce_unit_expression(body.value);
- if let Some(id) = path_def_id(cx, func).map(|ctor_id| cx.tcx.parent(ctor_id));
- if Some(id) == cx.tcx.lang_items().option_some_variant();
- then {
- let func_snippet = snippet(cx, arg_char.span, "..");
- let msg = "called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling \
- `map(..)` instead";
- return span_lint_and_sugg(
- cx,
- OPTION_MAP_OR_NONE,
- expr.span,
- msg,
- "try using `map` instead",
- format!("{self_snippet}.map({arg_snippet} {func_snippet})"),
- Applicability::MachineApplicable,
- );
- }
+ if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) = map_arg.kind
+ && let arg_snippet = snippet(cx, fn_decl_span, "..")
+ && let body = cx.tcx.hir().body(body)
+ && let Some((func, [arg_char])) = reduce_unit_expression(body.value)
+ && let Some(id) = path_def_id(cx, func).map(|ctor_id| cx.tcx.parent(ctor_id))
+ && Some(id) == cx.tcx.lang_items().option_some_variant()
+ {
+ let func_snippet = snippet(cx, arg_char.span, "..");
+ let msg = "called `map_or(None, ..)` on an `Option` value";
+ return span_lint_and_sugg(
+ cx,
+ OPTION_MAP_OR_NONE,
+ expr.span,
+ msg,
+ "try using `map` instead",
+ format!("{self_snippet}.map({arg_snippet} {func_snippet})"),
+ Applicability::MachineApplicable,
+ );
}
let func_snippet = snippet(cx, map_arg.span, "..");
- let msg = "called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling \
- `and_then(..)` instead";
+ let msg = "called `map_or(None, ..)` on an `Option` value";
span_lint_and_sugg(
cx,
OPTION_MAP_OR_NONE,
@@ -94,8 +90,7 @@ pub(super) fn check<'tcx>(
Applicability::MachineApplicable,
);
} else if f_arg_is_some {
- let msg = "called `map_or(None, Some)` on a `Result` value. This can be done more directly by calling \
- `ok()` instead";
+ let msg = "called `map_or(None, Some)` on a `Result` value";
let self_snippet = snippet(cx, recv.span, "..");
span_lint_and_sugg(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
index c78f8b71c..63e64a5b3 100644
--- a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
@@ -6,7 +6,7 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_path, Visitor};
-use rustc_hir::{self, ExprKind, HirId, Node, PatKind, Path, QPath};
+use rustc_hir::{ExprKind, HirId, Node, PatKind, Path, QPath};
use rustc_lint::LateContext;
use rustc_middle::hir::nested_filter;
use rustc_span::{sym, Span};
@@ -97,10 +97,7 @@ pub(super) fn check<'tcx>(
} else {
"map_or(<a>, <f>)"
};
- let msg = &format!(
- "called `map(<f>).unwrap_or({arg})` on an `Option` value. \
- This can be done more directly by calling `{suggest}` instead"
- );
+ let msg = &format!("called `map(<f>).unwrap_or({arg})` on an `Option` value");
span_lint_and_then(cx, MAP_UNWRAP_OR, expr.span, msg, |diag| {
let map_arg_span = map_arg.span;
@@ -138,7 +135,7 @@ impl<'a, 'tcx> Visitor<'tcx> for UnwrapVisitor<'a, 'tcx> {
fn visit_path(&mut self, path: &Path<'tcx>, _: HirId) {
if let Res::Local(local_id) = path.res
- && let Some(Node::Pat(pat)) = self.cx.tcx.hir().find(local_id)
+ && let Some(Node::Pat(pat)) = self.cx.tcx.opt_hir_node(local_id)
&& let PatKind::Binding(_, local_id, ..) = pat.kind
{
self.identifiers.insert(local_id);
@@ -169,7 +166,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ReferenceVisitor<'a, 'tcx> {
&& let ExprKind::Path(ref path) = expr.kind
&& let QPath::Resolved(_, path) = path
&& let Res::Local(local_id) = path.res
- && let Some(Node::Pat(pat)) = self.cx.tcx.hir().find(local_id)
+ && let Some(Node::Pat(pat)) = self.cx.tcx.opt_hir_node(local_id)
&& let PatKind::Binding(_, local_id, ..) = pat.kind
&& self.identifiers.contains(&local_id)
{
diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
index b89c15146..e38c66f67 100644
--- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
@@ -3,12 +3,11 @@ use clippy_utils::eager_or_lazy::switch_to_lazy_eval;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{expr_type_is_certain, implements_trait, is_type_diagnostic_item};
use clippy_utils::{contains_return, is_default_equivalent, is_default_equivalent_call, last_path_segment};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_lint::LateContext;
use rustc_middle::ty;
-use rustc_span::Span;
use rustc_span::symbol::{self, sym, Symbol};
+use rustc_span::Span;
use {rustc_ast as ast, rustc_hir as hir};
use super::{OR_FUN_CALL, UNWRAP_OR_DEFAULT};
@@ -131,54 +130,47 @@ pub(super) fn check<'tcx>(
(sym::Result, true, &["or", "unwrap_or"], "else"),
];
- if_chain! {
- if KNOW_TYPES.iter().any(|k| k.2.contains(&name));
-
- if switch_to_lazy_eval(cx, arg);
- if !contains_return(arg);
-
- let self_ty = cx.typeck_results().expr_ty(self_expr);
-
- if let Some(&(_, fn_has_arguments, poss, suffix)) =
- KNOW_TYPES.iter().find(|&&i| is_type_diagnostic_item(cx, self_ty, i.0));
-
- if poss.contains(&name);
-
- then {
- let ctxt = span.ctxt();
- let mut app = Applicability::HasPlaceholders;
- let sugg = {
- let (snippet_span, use_lambda) = match (fn_has_arguments, fun_span) {
- (false, Some(fun_span)) => (fun_span, false),
- _ => (arg.span, true),
- };
-
- let snip = snippet_with_context(cx, snippet_span, ctxt, "..", &mut app).0;
- let snip = if use_lambda {
- let l_arg = if fn_has_arguments { "_" } else { "" };
- format!("|{l_arg}| {snip}")
- } else {
- snip.into_owned()
- };
-
- if let Some(f) = second_arg {
- let f = snippet_with_context(cx, f.span, ctxt, "..", &mut app).0;
- format!("{snip}, {f}")
- } else {
- snip
- }
+ if KNOW_TYPES.iter().any(|k| k.2.contains(&name))
+ && switch_to_lazy_eval(cx, arg)
+ && !contains_return(arg)
+ && let self_ty = cx.typeck_results().expr_ty(self_expr)
+ && let Some(&(_, fn_has_arguments, poss, suffix)) =
+ KNOW_TYPES.iter().find(|&&i| is_type_diagnostic_item(cx, self_ty, i.0))
+ && poss.contains(&name)
+ {
+ let ctxt = span.ctxt();
+ let mut app = Applicability::HasPlaceholders;
+ let sugg = {
+ let (snippet_span, use_lambda) = match (fn_has_arguments, fun_span) {
+ (false, Some(fun_span)) => (fun_span, false),
+ _ => (arg.span, true),
+ };
+
+ let snip = snippet_with_context(cx, snippet_span, ctxt, "..", &mut app).0;
+ let snip = if use_lambda {
+ let l_arg = if fn_has_arguments { "_" } else { "" };
+ format!("|{l_arg}| {snip}")
+ } else {
+ snip.into_owned()
};
- let span_replace_word = method_span.with_hi(span.hi());
- span_lint_and_sugg(
- cx,
- OR_FUN_CALL,
- span_replace_word,
- &format!("use of `{name}` followed by a function call"),
- "try",
- format!("{name}_{suffix}({sugg})"),
- app,
- );
- }
+
+ if let Some(f) = second_arg {
+ let f = snippet_with_context(cx, f.span, ctxt, "..", &mut app).0;
+ format!("{snip}, {f}")
+ } else {
+ snip
+ }
+ };
+ let span_replace_word = method_span.with_hi(span.hi());
+ span_lint_and_sugg(
+ cx,
+ OR_FUN_CALL,
+ span_replace_word,
+ &format!("use of `{name}` followed by a function call"),
+ "try",
+ format!("{name}_{suffix}({sugg})"),
+ app,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs
index 1c07d2a3a..04a27cc98 100644
--- a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
@@ -11,27 +10,25 @@ use std::path::{Component, Path};
use super::PATH_BUF_PUSH_OVERWRITE;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, arg: &'tcx Expr<'_>) {
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::PathBuf);
- if let ExprKind::Lit(lit) = arg.kind;
- if let LitKind::Str(ref path_lit, _) = lit.node;
- if let pushed_path = Path::new(path_lit.as_str());
- if let Some(pushed_path_lit) = pushed_path.to_str();
- if pushed_path.has_root();
- if let Some(root) = pushed_path.components().next();
- if root == Component::RootDir;
- then {
- span_lint_and_sugg(
- cx,
- PATH_BUF_PUSH_OVERWRITE,
- lit.span,
- "calling `push` with '/' or '\\' (file system root) will overwrite the previous path definition",
- "try",
- format!("\"{}\"", pushed_path_lit.trim_start_matches(|c| c == '/' || c == '\\')),
- Applicability::MachineApplicable,
- );
- }
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::PathBuf)
+ && let ExprKind::Lit(lit) = arg.kind
+ && let LitKind::Str(ref path_lit, _) = lit.node
+ && let pushed_path = Path::new(path_lit.as_str())
+ && let Some(pushed_path_lit) = pushed_path.to_str()
+ && pushed_path.has_root()
+ && let Some(root) = pushed_path.components().next()
+ && root == Component::RootDir
+ {
+ span_lint_and_sugg(
+ cx,
+ PATH_BUF_PUSH_OVERWRITE,
+ lit.span,
+ "calling `push` with '/' or '\\' (file system root) will overwrite the previous path definition",
+ "try",
+ format!("\"{}\"", pushed_path_lit.trim_start_matches(|c| c == '/' || c == '\\')),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs b/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs
index f253d8de9..1148628b0 100644
--- a/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::source::snippet;
use clippy_utils::{higher, is_integer_const, is_trait_method, SpanlessEq};
-use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::LateContext;
use rustc_span::sym;
@@ -9,25 +8,26 @@ use rustc_span::sym;
use super::RANGE_ZIP_WITH_LEN;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, recv: &'tcx Expr<'_>, zip_arg: &'tcx Expr<'_>) {
- if_chain! {
- if is_trait_method(cx, expr, sym::Iterator);
+ if is_trait_method(cx, expr, sym::Iterator)
// range expression in `.zip()` call: `0..x.len()`
- if let Some(higher::Range { start: Some(start), end: Some(end), .. }) = higher::Range::hir(zip_arg);
- if is_integer_const(cx, start, 0);
+ && let Some(higher::Range { start: Some(start), end: Some(end), .. }) = higher::Range::hir(zip_arg)
+ && is_integer_const(cx, start, 0)
// `.len()` call
- if let ExprKind::MethodCall(len_path, len_recv, [], _) = end.kind;
- if len_path.ident.name == sym::len;
+ && let ExprKind::MethodCall(len_path, len_recv, [], _) = end.kind
+ && len_path.ident.name == sym::len
// `.iter()` and `.len()` called on same `Path`
- if let ExprKind::Path(QPath::Resolved(_, iter_path)) = recv.kind;
- if let ExprKind::Path(QPath::Resolved(_, len_path)) = len_recv.kind;
- if SpanlessEq::new(cx).eq_path_segments(iter_path.segments, len_path.segments);
- then {
- span_lint(cx,
- RANGE_ZIP_WITH_LEN,
- expr.span,
- &format!("it is more idiomatic to use `{}.iter().enumerate()`",
- snippet(cx, recv.span, "_"))
- );
- }
+ && let ExprKind::Path(QPath::Resolved(_, iter_path)) = recv.kind
+ && let ExprKind::Path(QPath::Resolved(_, len_path)) = len_recv.kind
+ && SpanlessEq::new(cx).eq_path_segments(iter_path.segments, len_path.segments)
+ {
+ span_lint(
+ cx,
+ RANGE_ZIP_WITH_LEN,
+ expr.span,
+ &format!(
+ "it is more idiomatic to use `{}.iter().enumerate()`",
+ snippet(cx, recv.span, "_")
+ ),
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/result_map_or_else_none.rs b/src/tools/clippy/clippy_lints/src/methods/result_map_or_else_none.rs
new file mode 100644
index 000000000..bc16a1128
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/result_map_or_else_none.rs
@@ -0,0 +1,42 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_res_lang_ctor, path_res, peel_blocks};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::LangItem::{OptionNone, OptionSome};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::RESULT_MAP_OR_INTO_OPTION;
+
+/// lint use of `_.map_or_else(|_| None, Some)` for `Result`s
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ def_arg: &'tcx hir::Expr<'_>,
+ map_arg: &'tcx hir::Expr<'_>,
+) {
+ // lint if the caller of `map_or_else()` is a `Result`
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result)
+ // We check that it is mapped as `Some`.
+ && is_res_lang_ctor(cx, path_res(cx, map_arg), OptionSome)
+ && let hir::ExprKind::Closure(&hir::Closure { body, .. }) = def_arg.kind
+ && let body = cx.tcx.hir().body(body)
+ // And finally we check that we return a `None` in the "else case".
+ && is_res_lang_ctor(cx, path_res(cx, peel_blocks(body.value)), OptionNone)
+ {
+ let msg = "called `map_or_else(|_| None, Some)` on a `Result` value";
+ let self_snippet = snippet(cx, recv.span, "..");
+ span_lint_and_sugg(
+ cx,
+ RESULT_MAP_OR_INTO_OPTION,
+ expr.span,
+ msg,
+ "try using `ok` instead",
+ format!("{self_snippet}.ok()"),
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs b/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs
index 05a9a06c8..6339011c9 100644
--- a/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs
@@ -3,13 +3,12 @@ use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::sugg::deref_closure_args;
use clippy_utils::ty::is_type_lang_item;
use clippy_utils::{is_trait_method, strip_pat_refs};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::PatKind;
use rustc_lint::LateContext;
-use rustc_span::Span;
use rustc_span::symbol::sym;
+use rustc_span::Span;
use super::SEARCH_IS_SOME;
@@ -35,29 +34,27 @@ pub(super) fn check<'tcx>(
// suggest `any(|x| ..)` instead of `any(|&x| ..)` for `find(|&x| ..).is_some()`
// suggest `any(|..| *..)` instead of `any(|..| **..)` for `find(|..| **..).is_some()`
let mut applicability = Applicability::MachineApplicable;
- let any_search_snippet = if_chain! {
- if search_method == "find";
- if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = search_arg.kind;
- let closure_body = cx.tcx.hir().body(body);
- if let Some(closure_arg) = closure_body.params.first();
- then {
- if let hir::PatKind::Ref(..) = closure_arg.pat.kind {
- Some(search_snippet.replacen('&', "", 1))
- } else if let PatKind::Binding(..) = strip_pat_refs(closure_arg.pat).kind {
- // `find()` provides a reference to the item, but `any` does not,
- // so we should fix item usages for suggestion
- if let Some(closure_sugg) = deref_closure_args(cx, search_arg) {
- applicability = closure_sugg.applicability;
- Some(closure_sugg.suggestion)
- } else {
- Some(search_snippet.to_string())
- }
+ let any_search_snippet = if search_method == "find"
+ && let hir::ExprKind::Closure(&hir::Closure { body, .. }) = search_arg.kind
+ && let closure_body = cx.tcx.hir().body(body)
+ && let Some(closure_arg) = closure_body.params.first()
+ {
+ if let hir::PatKind::Ref(..) = closure_arg.pat.kind {
+ Some(search_snippet.replacen('&', "", 1))
+ } else if let PatKind::Binding(..) = strip_pat_refs(closure_arg.pat).kind {
+ // `find()` provides a reference to the item, but `any` does not,
+ // so we should fix item usages for suggestion
+ if let Some(closure_sugg) = deref_closure_args(cx, search_arg) {
+ applicability = closure_sugg.applicability;
+ Some(closure_sugg.suggestion)
} else {
- None
+ Some(search_snippet.to_string())
}
} else {
None
}
+ } else {
+ None
};
// add note if not multi-line
if is_some {
@@ -110,41 +107,37 @@ pub(super) fn check<'tcx>(
self_ty.is_str()
}
};
- if_chain! {
- if is_string_or_str_slice(search_recv);
- if is_string_or_str_slice(search_arg);
- then {
- let msg = format!("called `{option_check_method}()` after calling `find()` on a string");
- match option_check_method {
- "is_some" => {
- let mut applicability = Applicability::MachineApplicable;
- let find_arg = snippet_with_applicability(cx, search_arg.span, "..", &mut applicability);
- span_lint_and_sugg(
- cx,
- SEARCH_IS_SOME,
- method_span.with_hi(expr.span.hi()),
- &msg,
- "use `contains()` instead",
- format!("contains({find_arg})"),
- applicability,
- );
- },
- "is_none" => {
- let string = snippet(cx, search_recv.span, "..");
- let mut applicability = Applicability::MachineApplicable;
- let find_arg = snippet_with_applicability(cx, search_arg.span, "..", &mut applicability);
- span_lint_and_sugg(
- cx,
- SEARCH_IS_SOME,
- expr.span,
- &msg,
- "use `!_.contains()` instead",
- format!("!{string}.contains({find_arg})"),
- applicability,
- );
- },
- _ => (),
- }
+ if is_string_or_str_slice(search_recv) && is_string_or_str_slice(search_arg) {
+ let msg = format!("called `{option_check_method}()` after calling `find()` on a string");
+ match option_check_method {
+ "is_some" => {
+ let mut applicability = Applicability::MachineApplicable;
+ let find_arg = snippet_with_applicability(cx, search_arg.span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ SEARCH_IS_SOME,
+ method_span.with_hi(expr.span.hi()),
+ &msg,
+ "use `contains()` instead",
+ format!("contains({find_arg})"),
+ applicability,
+ );
+ },
+ "is_none" => {
+ let string = snippet(cx, search_recv.span, "..");
+ let mut applicability = Applicability::MachineApplicable;
+ let find_arg = snippet_with_applicability(cx, search_arg.span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ SEARCH_IS_SOME,
+ expr.span,
+ &msg,
+ "use `!_.contains()` instead",
+ format!("!{string}.contains({find_arg})"),
+ applicability,
+ );
+ },
+ _ => (),
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs b/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs
index 4d704ec39..3983f0c0c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs
@@ -1,6 +1,5 @@
use super::utils::get_hint_if_single_char_arg;
use clippy_utils::diagnostics::span_lint_and_sugg;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -45,24 +44,23 @@ pub(super) fn check(
args: &[hir::Expr<'_>],
) {
for &(method, pos) in &PATTERN_METHODS {
- if_chain! {
- if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty_adjusted(receiver).kind();
- if ty.is_str();
- if method_name.as_str() == method && args.len() > pos;
- let arg = &args[pos];
- let mut applicability = Applicability::MachineApplicable;
- if let Some(hint) = get_hint_if_single_char_arg(cx, arg, &mut applicability);
- then {
- span_lint_and_sugg(
- cx,
- SINGLE_CHAR_PATTERN,
- arg.span,
- "single-character string constant used as pattern",
- "try using a `char` instead",
- hint,
- applicability,
- );
- }
+ if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty_adjusted(receiver).kind()
+ && ty.is_str()
+ && method_name.as_str() == method
+ && args.len() > pos
+ && let arg = &args[pos]
+ && let mut applicability = Applicability::MachineApplicable
+ && let Some(hint) = get_hint_if_single_char_arg(cx, arg, &mut applicability)
+ {
+ span_lint_and_sugg(
+ cx,
+ SINGLE_CHAR_PATTERN,
+ arg.span,
+ "single-character string constant used as pattern",
+ "try using a `char` instead",
+ hint,
+ applicability,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
index 9da61bca5..0e7ad8fc9 100644
--- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
@@ -6,7 +6,6 @@ use clippy_utils::usage::local_used_after_expr;
use clippy_utils::visitors::{for_each_expr_with_closures, Descend};
use clippy_utils::{is_diag_item_method, match_def_path, path_to_local_id, paths};
use core::ops::ControlFlow;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{
BindingAnnotation, Expr, ExprKind, HirId, LangItem, Local, MatchSource, Node, Pat, PatKind, QPath, Stmt, StmtKind,
@@ -286,41 +285,35 @@ fn parse_iter_usage<'tcx>(
match (name.ident.as_str(), args) {
("next", []) if cx.tcx.trait_of_item(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span),
("next_tuple", []) => {
- return if_chain! {
- if match_def_path(cx, did, &paths::ITERTOOLS_NEXT_TUPLE);
- if let ty::Adt(adt_def, subs) = cx.typeck_results().expr_ty(e).kind();
- if cx.tcx.is_diagnostic_item(sym::Option, adt_def.did());
- if let ty::Tuple(subs) = subs.type_at(0).kind();
- if subs.len() == 2;
- then {
- Some(IterUsage {
- kind: IterUsageKind::NextTuple,
- span: e.span,
- unwrap_kind: None
- })
- } else {
- None
- }
+ return if match_def_path(cx, did, &paths::ITERTOOLS_NEXT_TUPLE)
+ && let ty::Adt(adt_def, subs) = cx.typeck_results().expr_ty(e).kind()
+ && cx.tcx.is_diagnostic_item(sym::Option, adt_def.did())
+ && let ty::Tuple(subs) = subs.type_at(0).kind()
+ && subs.len() == 2
+ {
+ Some(IterUsage {
+ kind: IterUsageKind::NextTuple,
+ span: e.span,
+ unwrap_kind: None,
+ })
+ } else {
+ None
};
},
("nth" | "skip", [idx_expr]) if cx.tcx.trait_of_item(did) == Some(iter_id) => {
if let Some(Constant::Int(idx)) = constant(cx, cx.typeck_results(), idx_expr) {
let span = if name.ident.as_str() == "nth" {
e.span
+ } else if let Some((_, Node::Expr(next_expr))) = iter.next()
+ && let ExprKind::MethodCall(next_name, _, [], _) = next_expr.kind
+ && next_name.ident.name == sym::next
+ && next_expr.span.ctxt() == ctxt
+ && let Some(next_id) = cx.typeck_results().type_dependent_def_id(next_expr.hir_id)
+ && cx.tcx.trait_of_item(next_id) == Some(iter_id)
+ {
+ next_expr.span
} else {
- if_chain! {
- if let Some((_, Node::Expr(next_expr))) = iter.next();
- if let ExprKind::MethodCall(next_name, _, [], _) = next_expr.kind;
- if next_name.ident.name == sym::next;
- if next_expr.span.ctxt() == ctxt;
- if let Some(next_id) = cx.typeck_results().type_dependent_def_id(next_expr.hir_id);
- if cx.tcx.trait_of_item(next_id) == Some(iter_id);
- then {
- next_expr.span
- } else {
- return None;
- }
- }
+ return None;
};
(IterUsageKind::Nth(idx), span)
} else {
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs
index 0dc7fe2a2..ed49233ac 100644
--- a/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::usage::mutated_variables;
use clippy_utils::{expr_or_init, is_trait_method};
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::sym;
@@ -9,26 +8,24 @@ use rustc_span::sym;
use super::SUSPICIOUS_MAP;
pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, count_recv: &hir::Expr<'_>, map_arg: &hir::Expr<'_>) {
- if_chain! {
- if is_trait_method(cx, count_recv, sym::Iterator);
- if let hir::ExprKind::Closure(closure) = expr_or_init(cx, map_arg).kind;
- let closure_body = cx.tcx.hir().body(closure.body);
- if !cx.typeck_results().expr_ty(closure_body.value).is_unit();
- then {
- if let Some(map_mutated_vars) = mutated_variables(closure_body.value, cx) {
- // A variable is used mutably inside of the closure. Suppress the lint.
- if !map_mutated_vars.is_empty() {
- return;
- }
+ if is_trait_method(cx, count_recv, sym::Iterator)
+ && let hir::ExprKind::Closure(closure) = expr_or_init(cx, map_arg).kind
+ && let closure_body = cx.tcx.hir().body(closure.body)
+ && !cx.typeck_results().expr_ty(closure_body.value).is_unit()
+ {
+ if let Some(map_mutated_vars) = mutated_variables(closure_body.value, cx) {
+ // A variable is used mutably inside of the closure. Suppress the lint.
+ if !map_mutated_vars.is_empty() {
+ return;
}
- span_lint_and_help(
- cx,
- SUSPICIOUS_MAP,
- expr.span,
- "this call to `map()` won't have an effect on the call to `count()`",
- None,
- "make sure you did not confuse `map` with `filter`, `for_each` or `inspect`",
- );
}
+ span_lint_and_help(
+ cx,
+ SUSPICIOUS_MAP,
+ expr.span,
+ "this call to `map()` won't have an effect on the call to `count()`",
+ None,
+ "make sure you did not confuse `map` with `filter`, `for_each` or `inspect`",
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
index 3cb2719e4..c45212581 100644
--- a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
@@ -1,5 +1,4 @@
use clippy_utils::diagnostics::span_lint_and_note;
-use if_chain::if_chain;
use rustc_ast::LitKind;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
@@ -8,41 +7,36 @@ use rustc_span::source_map::Spanned;
use super::SUSPICIOUS_SPLITN;
pub(super) fn check(cx: &LateContext<'_>, method_name: &str, expr: &Expr<'_>, self_arg: &Expr<'_>, count: u128) {
- if_chain! {
- if count <= 1;
- if let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(call_id);
- if cx.tcx.impl_trait_ref(impl_id).is_none();
- let self_ty = cx.tcx.type_of(impl_id).instantiate_identity();
- if self_ty.is_slice() || self_ty.is_str();
- then {
- // Ignore empty slice and string literals when used with a literal count.
- if matches!(self_arg.kind, ExprKind::Array([]))
- || matches!(self_arg.kind, ExprKind::Lit(Spanned { node: LitKind::Str(s, _), .. }) if s.is_empty())
- {
- return;
- }
+ if count <= 1
+ && let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(call_id)
+ && cx.tcx.impl_trait_ref(impl_id).is_none()
+ && let self_ty = cx.tcx.type_of(impl_id).instantiate_identity()
+ && (self_ty.is_slice() || self_ty.is_str())
+ {
+ // Ignore empty slice and string literals when used with a literal count.
+ if matches!(self_arg.kind, ExprKind::Array([]))
+ || matches!(self_arg.kind, ExprKind::Lit(Spanned { node: LitKind::Str(s, _), .. }) if s.is_empty())
+ {
+ return;
+ }
- let (msg, note_msg) = if count == 0 {
- (format!("`{method_name}` called with `0` splits"),
- "the resulting iterator will always return `None`")
- } else {
- (format!("`{method_name}` called with `1` split"),
+ let (msg, note_msg) = if count == 0 {
+ (
+ format!("`{method_name}` called with `0` splits"),
+ "the resulting iterator will always return `None`",
+ )
+ } else {
+ (
+ format!("`{method_name}` called with `1` split"),
if self_ty.is_slice() {
"the resulting iterator will always return the entire slice followed by `None`"
} else {
"the resulting iterator will always return the entire string followed by `None`"
- })
- };
+ },
+ )
+ };
- span_lint_and_note(
- cx,
- SUSPICIOUS_SPLITN,
- expr.span,
- &msg,
- None,
- note_msg,
- );
- }
+ span_lint_and_note(cx, SUSPICIOUS_SPLITN, expr.span, &msg, None, note_msg);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs
index 9eb8d6e6e..60864902a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::is_diag_trait_item;
use clippy_utils::source::snippet_with_context;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -12,40 +11,37 @@ use rustc_span::sym;
use super::SUSPICIOUS_TO_OWNED;
pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) -> bool {
- if_chain! {
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if is_diag_trait_item(cx, method_def_id, sym::ToOwned);
- let input_type = cx.typeck_results().expr_ty(expr);
- if let ty::Adt(adt, _) = cx.typeck_results().expr_ty(expr).kind();
- if cx.tcx.is_diagnostic_item(sym::Cow, adt.did());
-
- then {
- let mut app = Applicability::MaybeIncorrect;
- let recv_snip = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut app).0;
- span_lint_and_then(
- cx,
- SUSPICIOUS_TO_OWNED,
- expr.span,
- &with_forced_trimmed_paths!(format!(
- "this `to_owned` call clones the {input_type} itself and does not cause the {input_type} contents to become owned"
- )),
- |diag| {
- diag.span_suggestion(
- expr.span,
- "depending on intent, either make the Cow an Owned variant",
- format!("{recv_snip}.into_owned()"),
- app
- );
- diag.span_suggestion(
- expr.span,
- "or clone the Cow itself",
- format!("{recv_snip}.clone()"),
- app
- );
- }
- );
- return true;
- }
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && is_diag_trait_item(cx, method_def_id, sym::ToOwned)
+ && let input_type = cx.typeck_results().expr_ty(expr)
+ && let ty::Adt(adt, _) = cx.typeck_results().expr_ty(expr).kind()
+ && cx.tcx.is_diagnostic_item(sym::Cow, adt.did())
+ {
+ let mut app = Applicability::MaybeIncorrect;
+ let recv_snip = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut app).0;
+ span_lint_and_then(
+ cx,
+ SUSPICIOUS_TO_OWNED,
+ expr.span,
+ &with_forced_trimmed_paths!(format!(
+ "this `to_owned` call clones the {input_type} itself and does not cause the {input_type} contents to become owned"
+ )),
+ |diag| {
+ diag.span_suggestion(
+ expr.span,
+ "depending on intent, either make the Cow an Owned variant",
+ format!("{recv_snip}.into_owned()"),
+ app,
+ );
+ diag.span_suggestion(
+ expr.span,
+ "or clone the Cow itself",
+ format!("{recv_snip}.clone()"),
+ app,
+ );
+ },
+ );
+ return true;
}
false
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
index bc9c518db..1ee655d61 100644
--- a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_path_diagnostic_item;
use clippy_utils::ty::is_uninit_value_valid_for_ty;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::sym;
@@ -10,18 +9,16 @@ use super::UNINIT_ASSUMED_INIT;
/// lint for `MaybeUninit::uninit().assume_init()` (we already have the latter)
pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
- if_chain! {
- if let hir::ExprKind::Call(callee, args) = recv.kind;
- if args.is_empty();
- if is_path_diagnostic_item(cx, callee, sym::maybe_uninit_uninit);
- if !is_uninit_value_valid_for_ty(cx, cx.typeck_results().expr_ty_adjusted(expr));
- then {
- span_lint(
- cx,
- UNINIT_ASSUMED_INIT,
- expr.span,
- "this call for this type may be undefined behavior"
- );
- }
+ if let hir::ExprKind::Call(callee, args) = recv.kind
+ && args.is_empty()
+ && is_path_diagnostic_item(cx, callee, sym::maybe_uninit_uninit)
+ && !is_uninit_value_valid_for_ty(cx, cx.typeck_results().expr_ty_adjusted(expr))
+ {
+ span_lint(
+ cx,
+ UNINIT_ASSUMED_INIT,
+ expr.span,
+ "this call for this type may be undefined behavior",
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs
index bb32b1bb7..89cf20c14 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs
@@ -1,10 +1,11 @@
-use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::get_parent_expr;
use clippy_utils::ty::implements_trait;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty;
+use rustc_middle::ty::print::with_forced_trimmed_paths;
use rustc_span::{sym, Span};
use super::UNNECESSARY_FALLIBLE_CONVERSIONS;
@@ -42,6 +43,7 @@ fn check<'tcx>(
// (else there would be conflicting impls, even with #![feature(spec)]), so we don't even need to check
// what `<T as TryFrom<U>>::Error` is: it's always `Infallible`
&& implements_trait(cx, self_ty, from_into_trait, &[other_ty])
+ && let Some(other_ty) = other_ty.as_type()
{
let parent_unwrap_call = get_parent_expr(cx, expr).and_then(|parent| {
if let ExprKind::MethodCall(path, .., span) = parent.kind
@@ -52,8 +54,7 @@ fn check<'tcx>(
None
}
});
-
- let (sugg, span, applicability) = match kind {
+ let (source_ty, target_ty, sugg, span, applicability) = match kind {
FunctionKind::TryIntoMethod if let Some(unwrap_span) = parent_unwrap_call => {
// Extend the span to include the unwrap/expect call:
// `foo.try_into().expect("..")`
@@ -63,24 +64,41 @@ fn check<'tcx>(
// so that can be machine-applicable
(
+ self_ty,
+ other_ty,
"into()",
primary_span.with_hi(unwrap_span.hi()),
Applicability::MachineApplicable,
)
},
- FunctionKind::TryFromFunction => ("From::from", primary_span, Applicability::Unspecified),
- FunctionKind::TryIntoFunction => ("Into::into", primary_span, Applicability::Unspecified),
- FunctionKind::TryIntoMethod => ("into", primary_span, Applicability::Unspecified),
+ FunctionKind::TryFromFunction => (
+ other_ty,
+ self_ty,
+ "From::from",
+ primary_span,
+ Applicability::Unspecified,
+ ),
+ FunctionKind::TryIntoFunction => (
+ self_ty,
+ other_ty,
+ "Into::into",
+ primary_span,
+ Applicability::Unspecified,
+ ),
+ FunctionKind::TryIntoMethod => (self_ty, other_ty, "into", primary_span, Applicability::Unspecified),
};
- span_lint_and_sugg(
+ span_lint_and_then(
cx,
UNNECESSARY_FALLIBLE_CONVERSIONS,
span,
"use of a fallible conversion when an infallible one could be used",
- "use",
- sugg.into(),
- applicability,
+ |diag| {
+ with_forced_trimmed_paths!({
+ diag.note(format!("converting `{source_ty}` to `{target_ty}` cannot fail"));
+ });
+ diag.span_suggestion(span, "use", sugg, applicability);
+ },
);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
index 6d51c4ab0..ebbdde48b 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{is_trait_method, path_to_local_id, peel_blocks, strip_pat_refs};
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -60,57 +59,51 @@ fn check_fold_with_op(
op: hir::BinOpKind,
replacement: Replacement,
) {
- if_chain! {
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = acc.kind
// Extract the body of the closure passed to fold
- if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = acc.kind;
- let closure_body = cx.tcx.hir().body(body);
- let closure_expr = peel_blocks(closure_body.value);
+ && let closure_body = cx.tcx.hir().body(body)
+ && let closure_expr = peel_blocks(closure_body.value)
// Check if the closure body is of the form `acc <op> some_expr(x)`
- if let hir::ExprKind::Binary(ref bin_op, left_expr, right_expr) = closure_expr.kind;
- if bin_op.node == op;
+ && let hir::ExprKind::Binary(ref bin_op, left_expr, right_expr) = closure_expr.kind
+ && bin_op.node == op
// Extract the names of the two arguments to the closure
- if let [param_a, param_b] = closure_body.params;
- if let PatKind::Binding(_, first_arg_id, ..) = strip_pat_refs(param_a.pat).kind;
- if let PatKind::Binding(_, second_arg_id, second_arg_ident, _) = strip_pat_refs(param_b.pat).kind;
+ && let [param_a, param_b] = closure_body.params
+ && let PatKind::Binding(_, first_arg_id, ..) = strip_pat_refs(param_a.pat).kind
+ && let PatKind::Binding(_, second_arg_id, second_arg_ident, _) = strip_pat_refs(param_b.pat).kind
- if path_to_local_id(left_expr, first_arg_id);
- if replacement.has_args || path_to_local_id(right_expr, second_arg_id);
-
- then {
- let mut applicability = Applicability::MachineApplicable;
-
- let turbofish = if replacement.has_generic_return {
- format!("::<{}>", cx.typeck_results().expr_ty_adjusted(right_expr).peel_refs())
- } else {
- String::new()
- };
-
- let sugg = if replacement.has_args {
- format!(
- "{method}{turbofish}(|{second_arg_ident}| {r})",
- method = replacement.method_name,
- r = snippet_with_applicability(cx, right_expr.span, "EXPR", &mut applicability),
- )
- } else {
- format!(
- "{method}{turbofish}()",
- method = replacement.method_name,
- )
- };
-
- span_lint_and_sugg(
- cx,
- UNNECESSARY_FOLD,
- fold_span.with_hi(expr.span.hi()),
- // TODO #2371 don't suggest e.g., .any(|x| f(x)) if we can suggest .any(f)
- "this `.fold` can be written more succinctly using another method",
- "try",
- sugg,
- applicability,
- );
- }
+ && path_to_local_id(left_expr, first_arg_id)
+ && (replacement.has_args || path_to_local_id(right_expr, second_arg_id))
+ {
+ let mut applicability = Applicability::MachineApplicable;
+
+ let turbofish = if replacement.has_generic_return {
+ format!("::<{}>", cx.typeck_results().expr_ty_adjusted(right_expr).peel_refs())
+ } else {
+ String::new()
+ };
+
+ let sugg = if replacement.has_args {
+ format!(
+ "{method}{turbofish}(|{second_arg_ident}| {r})",
+ method = replacement.method_name,
+ r = snippet_with_applicability(cx, right_expr.span, "EXPR", &mut applicability),
+ )
+ } else {
+ format!("{method}{turbofish}()", method = replacement.method_name,)
+ };
+
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_FOLD,
+ fold_span.with_hi(expr.span.hi()),
+ // TODO #2371 don't suggest e.g., .any(|x| f(x)) if we can suggest .any(f)
+ "this `.fold` can be written more succinctly using another method",
+ "try",
+ sugg,
+ applicability,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
index 0c72c13a3..36497d59a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
@@ -13,15 +13,13 @@ use rustc_span::{sym, Symbol};
use super::UNNECESSARY_TO_OWNED;
pub fn check(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: Symbol, receiver: &Expr<'_>) -> bool {
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, expr);
- if let Some(callee_def_id) = fn_def_id(cx, parent);
- if is_into_iter(cx, callee_def_id);
- then {
- check_for_loop_iter(cx, parent, method_name, receiver, false)
- } else {
- false
- }
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let Some(callee_def_id) = fn_def_id(cx, parent)
+ && is_into_iter(cx, callee_def_id)
+ {
+ check_for_loop_iter(cx, parent, method_name, receiver, false)
+ } else {
+ false
}
}
@@ -36,65 +34,58 @@ pub fn check_for_loop_iter(
receiver: &Expr<'_>,
cloned_before_iter: bool,
) -> bool {
- if_chain! {
- if let Some(grandparent) = get_parent_expr(cx, expr).and_then(|parent| get_parent_expr(cx, parent));
- if let Some(ForLoop { pat, body, .. }) = ForLoop::hir(grandparent);
- let (clone_or_copy_needed, addr_of_exprs) = clone_or_copy_needed(cx, pat, body);
- if !clone_or_copy_needed;
- if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
- then {
- let snippet = if_chain! {
- if let ExprKind::MethodCall(maybe_iter_method_name, collection, [], _) = receiver.kind;
- if maybe_iter_method_name.ident.name == sym::iter;
-
- if let Some(iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
- let receiver_ty = cx.typeck_results().expr_ty(receiver);
- if implements_trait(cx, receiver_ty, iterator_trait_id, &[]);
- if let Some(iter_item_ty) = get_iterator_item_ty(cx, receiver_ty);
-
- if let Some(into_iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator);
- let collection_ty = cx.typeck_results().expr_ty(collection);
- if implements_trait(cx, collection_ty, into_iterator_trait_id, &[]);
- if let Some(into_iter_item_ty) = cx.get_associated_type(collection_ty, into_iterator_trait_id, "Item");
-
- if iter_item_ty == into_iter_item_ty;
- if let Some(collection_snippet) = snippet_opt(cx, collection.span);
- then {
- collection_snippet
+ if let Some(grandparent) = get_parent_expr(cx, expr).and_then(|parent| get_parent_expr(cx, parent))
+ && let Some(ForLoop { pat, body, .. }) = ForLoop::hir(grandparent)
+ && let (clone_or_copy_needed, addr_of_exprs) = clone_or_copy_needed(cx, pat, body)
+ && !clone_or_copy_needed
+ && let Some(receiver_snippet) = snippet_opt(cx, receiver.span)
+ {
+ let snippet = if let ExprKind::MethodCall(maybe_iter_method_name, collection, [], _) = receiver.kind
+ && maybe_iter_method_name.ident.name == sym::iter
+ && let Some(iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
+ && let receiver_ty = cx.typeck_results().expr_ty(receiver)
+ && implements_trait(cx, receiver_ty, iterator_trait_id, &[])
+ && let Some(iter_item_ty) = get_iterator_item_ty(cx, receiver_ty)
+ && let Some(into_iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator)
+ && let collection_ty = cx.typeck_results().expr_ty(collection)
+ && implements_trait(cx, collection_ty, into_iterator_trait_id, &[])
+ && let Some(into_iter_item_ty) = cx.get_associated_type(collection_ty, into_iterator_trait_id, "Item")
+ && iter_item_ty == into_iter_item_ty
+ && let Some(collection_snippet) = snippet_opt(cx, collection.span)
+ {
+ collection_snippet
+ } else {
+ receiver_snippet
+ };
+ span_lint_and_then(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ expr.span,
+ &format!("unnecessary use of `{method_name}`"),
+ |diag| {
+ // If `check_into_iter_call_arg` called `check_for_loop_iter` because a call to
+ // a `to_owned`-like function was removed, then the next suggestion may be
+ // incorrect. This is because the iterator that results from the call's removal
+ // could hold a reference to a resource that is used mutably. See
+ // https://github.com/rust-lang/rust-clippy/issues/8148.
+ let applicability = if cloned_before_iter {
+ Applicability::MaybeIncorrect
} else {
- receiver_snippet
- }
- };
- span_lint_and_then(
- cx,
- UNNECESSARY_TO_OWNED,
- expr.span,
- &format!("unnecessary use of `{method_name}`"),
- |diag| {
- // If `check_into_iter_call_arg` called `check_for_loop_iter` because a call to
- // a `to_owned`-like function was removed, then the next suggestion may be
- // incorrect. This is because the iterator that results from the call's removal
- // could hold a reference to a resource that is used mutably. See
- // https://github.com/rust-lang/rust-clippy/issues/8148.
- let applicability = if cloned_before_iter {
- Applicability::MaybeIncorrect
- } else {
- Applicability::MachineApplicable
- };
- diag.span_suggestion(expr.span, "use", snippet, applicability);
- for addr_of_expr in addr_of_exprs {
- match addr_of_expr.kind {
- ExprKind::AddrOf(_, _, referent) => {
- let span = addr_of_expr.span.with_hi(referent.span.lo());
- diag.span_suggestion(span, "remove this `&`", "", applicability);
- }
- _ => unreachable!(),
- }
+ Applicability::MachineApplicable
+ };
+ diag.span_suggestion(expr.span, "use", snippet, applicability);
+ for addr_of_expr in addr_of_exprs {
+ match addr_of_expr.kind {
+ ExprKind::AddrOf(_, _, referent) => {
+ let span = addr_of_expr.span.with_hi(referent.span.lo());
+ diag.span_suggestion(span, "remove this `&`", "", applicability);
+ },
+ _ => unreachable!(),
}
}
- );
- return true;
- }
+ },
+ );
+ return true;
}
false
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
index d0c62fb56..e2b389e96 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
@@ -18,25 +18,23 @@ pub(super) fn check<'tcx>(
) {
let applicability = Applicability::MachineApplicable;
let collect_output_adjusted_type = cx.typeck_results().expr_ty_adjusted(join_self_arg);
- if_chain! {
+ if let Ref(_, ref_type, _) = collect_output_adjusted_type.kind()
// the turbofish for collect is ::<Vec<String>>
- if let Ref(_, ref_type, _) = collect_output_adjusted_type.kind();
- if let Slice(slice) = ref_type.kind();
- if is_type_lang_item(cx, *slice, LangItem::String);
+ && let Slice(slice) = ref_type.kind()
+ && is_type_lang_item(cx, *slice, LangItem::String)
// the argument for join is ""
- if let ExprKind::Lit(spanned) = &join_arg.kind;
- if let LitKind::Str(symbol, _) = spanned.node;
- if symbol.is_empty();
- then {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_JOIN,
- span.with_hi(expr.span.hi()),
- r#"called `.collect::<Vec<String>>().join("")` on an iterator"#,
- "try using",
- "collect::<String>()".to_owned(),
- applicability,
- );
- }
+ && let ExprKind::Lit(spanned) = &join_arg.kind
+ && let LitKind::Str(symbol, _) = spanned.node
+ && symbol.is_empty()
+ {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_JOIN,
+ span.with_hi(expr.span.hi()),
+ r#"called `.collect::<Vec<String>>().join("")` on an iterator"#,
+ "try using",
+ "collect::<String>()".to_owned(),
+ applicability,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
index e62a65a27..696e5e74d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_trait_method;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::implements_trait;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Closure, Expr, ExprKind, Mutability, Param, Pat, PatKind, Path, PathSegment, QPath};
use rustc_lint::LateContext;
@@ -115,55 +114,72 @@ fn mirrored_exprs(a_expr: &Expr<'_>, a_ident: &Ident, b_expr: &Expr<'_>, b_ident
}
fn detect_lint(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) -> Option<LintTrigger> {
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if cx.tcx.type_of(impl_id).instantiate_identity().is_slice();
- if let ExprKind::Closure(&Closure { body, .. }) = arg.kind;
- if let closure_body = cx.tcx.hir().body(body);
- if let &[
- Param { pat: Pat { kind: PatKind::Binding(_, _, left_ident, _), .. }, ..},
- Param { pat: Pat { kind: PatKind::Binding(_, _, right_ident, _), .. }, .. }
- ] = &closure_body.params;
- if let ExprKind::MethodCall(method_path, left_expr, [right_expr], _) = closure_body.value.kind;
- if method_path.ident.name == sym::cmp;
- if is_trait_method(cx, closure_body.value, sym::Ord);
- then {
- let (closure_body, closure_arg, reverse) = if mirrored_exprs(
- left_expr,
- left_ident,
- right_expr,
- right_ident
- ) {
- (Sugg::hir(cx, left_expr, "..").to_string(), left_ident.name.to_string(), false)
- } else if mirrored_exprs(left_expr, right_ident, right_expr, left_ident) {
- (Sugg::hir(cx, left_expr, "..").to_string(), right_ident.name.to_string(), true)
- } else {
- return None;
- };
- let vec_name = Sugg::hir(cx, recv, "..").to_string();
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && cx.tcx.type_of(impl_id).instantiate_identity().is_slice()
+ && let ExprKind::Closure(&Closure { body, .. }) = arg.kind
+ && let closure_body = cx.tcx.hir().body(body)
+ && let &[
+ Param {
+ pat:
+ Pat {
+ kind: PatKind::Binding(_, _, left_ident, _),
+ ..
+ },
+ ..
+ },
+ Param {
+ pat:
+ Pat {
+ kind: PatKind::Binding(_, _, right_ident, _),
+ ..
+ },
+ ..
+ },
+ ] = &closure_body.params
+ && let ExprKind::MethodCall(method_path, left_expr, [right_expr], _) = closure_body.value.kind
+ && method_path.ident.name == sym::cmp
+ && is_trait_method(cx, closure_body.value, sym::Ord)
+ {
+ let (closure_body, closure_arg, reverse) = if mirrored_exprs(left_expr, left_ident, right_expr, right_ident) {
+ (
+ Sugg::hir(cx, left_expr, "..").to_string(),
+ left_ident.name.to_string(),
+ false,
+ )
+ } else if mirrored_exprs(left_expr, right_ident, right_expr, left_ident) {
+ (
+ Sugg::hir(cx, left_expr, "..").to_string(),
+ right_ident.name.to_string(),
+ true,
+ )
+ } else {
+ return None;
+ };
+ let vec_name = Sugg::hir(cx, recv, "..").to_string();
- if_chain! {
- if let ExprKind::Path(QPath::Resolved(_, Path {
- segments: [PathSegment { ident: left_name, .. }], ..
- })) = &left_expr.kind;
- if left_name == left_ident;
- if cx.tcx.get_diagnostic_item(sym::Ord).map_or(false, |id| {
- implements_trait(cx, cx.typeck_results().expr_ty(left_expr), id, &[])
- });
- then {
- return Some(LintTrigger::Sort(SortDetection { vec_name }));
- }
- }
+ if let ExprKind::Path(QPath::Resolved(
+ _,
+ Path {
+ segments: [PathSegment { ident: left_name, .. }],
+ ..
+ },
+ )) = &left_expr.kind
+ && left_name == left_ident
+ && cx.tcx.get_diagnostic_item(sym::Ord).map_or(false, |id| {
+ implements_trait(cx, cx.typeck_results().expr_ty(left_expr), id, &[])
+ })
+ {
+ return Some(LintTrigger::Sort(SortDetection { vec_name }));
+ }
- if !expr_borrows(cx, left_expr) {
- return Some(LintTrigger::SortByKey(SortByKeyDetection {
- vec_name,
- closure_arg,
- closure_body,
- reverse,
- }));
- }
+ if !expr_borrows(cx, left_expr) {
+ return Some(LintTrigger::SortByKey(SortByKeyDetection {
+ vec_name,
+ closure_arg,
+ closure_body,
+ reverse,
+ }));
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
index 7a50feff6..c4775b6bd 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
@@ -31,25 +31,23 @@ pub fn check<'tcx>(
args: &'tcx [Expr<'_>],
msrv: &Msrv,
) {
- if_chain! {
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if args.is_empty();
- then {
- if is_cloned_or_copied(cx, method_name, method_def_id) {
- unnecessary_iter_cloned::check(cx, expr, method_name, receiver);
- } else if is_to_owned_like(cx, expr, method_name, method_def_id) {
- // At this point, we know the call is of a `to_owned`-like function. The functions
- // `check_addr_of_expr` and `check_call_arg` determine whether the call is unnecessary
- // based on its context, that is, whether it is a referent in an `AddrOf` expression, an
- // argument in a `into_iter` call, or an argument in the call of some other function.
- if check_addr_of_expr(cx, expr, method_name, method_def_id, receiver) {
- return;
- }
- if check_into_iter_call_arg(cx, expr, method_name, receiver, msrv) {
- return;
- }
- check_other_call_arg(cx, expr, method_name, receiver);
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && args.is_empty()
+ {
+ if is_cloned_or_copied(cx, method_name, method_def_id) {
+ unnecessary_iter_cloned::check(cx, expr, method_name, receiver);
+ } else if is_to_owned_like(cx, expr, method_name, method_def_id) {
+ // At this point, we know the call is of a `to_owned`-like function. The functions
+ // `check_addr_of_expr` and `check_call_arg` determine whether the call is unnecessary
+ // based on its context, that is, whether it is a referent in an `AddrOf` expression, an
+ // argument in a `into_iter` call, or an argument in the call of some other function.
+ if check_addr_of_expr(cx, expr, method_name, method_def_id, receiver) {
+ return;
}
+ if check_into_iter_call_arg(cx, expr, method_name, receiver, msrv) {
+ return;
+ }
+ check_other_call_arg(cx, expr, method_name, receiver);
}
}
}
@@ -64,11 +62,10 @@ fn check_addr_of_expr(
method_def_id: DefId,
receiver: &Expr<'_>,
) -> bool {
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, expr);
- if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, _) = parent.kind;
- let adjustments = cx.typeck_results().expr_adjustments(parent).iter().collect::<Vec<_>>();
- if let
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, _) = parent.kind
+ && let adjustments = cx.typeck_results().expr_adjustments(parent).iter().collect::<Vec<_>>()
+ && let
// For matching uses of `Cow::from`
[
Adjustment {
@@ -109,10 +106,10 @@ fn check_addr_of_expr(
kind: Adjust::Borrow(_),
target: target_ty,
},
- ] = adjustments[..];
- let receiver_ty = cx.typeck_results().expr_ty(receiver);
- let (target_ty, n_target_refs) = peel_mid_ty_refs(*target_ty);
- let (receiver_ty, n_receiver_refs) = peel_mid_ty_refs(receiver_ty);
+ ] = adjustments[..]
+ && let receiver_ty = cx.typeck_results().expr_ty(receiver)
+ && let (target_ty, n_target_refs) = peel_mid_ty_refs(*target_ty)
+ && let (receiver_ty, n_receiver_refs) = peel_mid_ty_refs(receiver_ty)
// Only flag cases satisfying at least one of the following three conditions:
// * the referent and receiver types are distinct
// * the referent/receiver type is a copyable array
@@ -122,77 +119,72 @@ fn check_addr_of_expr(
// https://github.com/rust-lang/rust-clippy/issues/8759
// Arrays are a bit of a corner case. Non-copyable arrays are handled by
// `redundant_clone`, but copyable arrays are not.
- if *referent_ty != receiver_ty
+ && (*referent_ty != receiver_ty
|| (matches!(referent_ty.kind(), ty::Array(..)) && is_copy(cx, *referent_ty))
- || is_cow_into_owned(cx, method_name, method_def_id);
- if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
- then {
- if receiver_ty == target_ty && n_target_refs >= n_receiver_refs {
+ || is_cow_into_owned(cx, method_name, method_def_id))
+ && let Some(receiver_snippet) = snippet_opt(cx, receiver.span)
+ {
+ if receiver_ty == target_ty && n_target_refs >= n_receiver_refs {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{method_name}`"),
+ "use",
+ format!(
+ "{:&>width$}{receiver_snippet}",
+ "",
+ width = n_target_refs - n_receiver_refs
+ ),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ if let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref)
+ && implements_trait(cx, receiver_ty, deref_trait_id, &[])
+ && cx.get_associated_type(receiver_ty, deref_trait_id, "Target") == Some(target_ty)
+ // Make sure that it's actually calling the right `.to_string()`, (#10033)
+ // *or* this is a `Cow::into_owned()` call (which would be the wrong into_owned receiver (str != Cow)
+ // but that's ok for Cow::into_owned specifically)
+ && (cx.typeck_results().expr_ty_adjusted(receiver).peel_refs() == target_ty
+ || is_cow_into_owned(cx, method_name, method_def_id))
+ {
+ if n_receiver_refs > 0 {
span_lint_and_sugg(
cx,
UNNECESSARY_TO_OWNED,
parent.span,
&format!("unnecessary use of `{method_name}`"),
"use",
- format!(
- "{:&>width$}{receiver_snippet}",
- "",
- width = n_target_refs - n_receiver_refs
- ),
+ receiver_snippet,
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ expr.span.with_lo(receiver.span.hi()),
+ &format!("unnecessary use of `{method_name}`"),
+ "remove this",
+ String::new(),
Applicability::MachineApplicable,
);
- return true;
- }
- if_chain! {
- if let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref);
- if implements_trait(cx, receiver_ty, deref_trait_id, &[]);
- if cx.get_associated_type(receiver_ty, deref_trait_id, "Target") == Some(target_ty);
- // Make sure that it's actually calling the right `.to_string()`, (#10033)
- // *or* this is a `Cow::into_owned()` call (which would be the wrong into_owned receiver (str != Cow)
- // but that's ok for Cow::into_owned specifically)
- if cx.typeck_results().expr_ty_adjusted(receiver).peel_refs() == target_ty
- || is_cow_into_owned(cx, method_name, method_def_id);
- then {
- if n_receiver_refs > 0 {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_TO_OWNED,
- parent.span,
- &format!("unnecessary use of `{method_name}`"),
- "use",
- receiver_snippet,
- Applicability::MachineApplicable,
- );
- } else {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_TO_OWNED,
- expr.span.with_lo(receiver.span.hi()),
- &format!("unnecessary use of `{method_name}`"),
- "remove this",
- String::new(),
- Applicability::MachineApplicable,
- );
- }
- return true;
- }
- }
- if_chain! {
- if let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef);
- if implements_trait(cx, receiver_ty, as_ref_trait_id, &[GenericArg::from(target_ty)]);
- then {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_TO_OWNED,
- parent.span,
- &format!("unnecessary use of `{method_name}`"),
- "use",
- format!("{receiver_snippet}.as_ref()"),
- Applicability::MachineApplicable,
- );
- return true;
- }
}
+ return true;
+ }
+ if let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef)
+ && implements_trait(cx, receiver_ty, as_ref_trait_id, &[GenericArg::from(target_ty)])
+ {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{method_name}`"),
+ "use",
+ format!("{receiver_snippet}.as_ref()"),
+ Applicability::MachineApplicable,
+ );
+ return true;
}
}
false
@@ -207,38 +199,36 @@ fn check_into_iter_call_arg(
receiver: &Expr<'_>,
msrv: &Msrv,
) -> bool {
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, expr);
- if let Some(callee_def_id) = fn_def_id(cx, parent);
- if is_into_iter(cx, callee_def_id);
- if let Some(iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
- let parent_ty = cx.typeck_results().expr_ty(parent);
- if implements_trait(cx, parent_ty, iterator_trait_id, &[]);
- if let Some(item_ty) = get_iterator_item_ty(cx, parent_ty);
- if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
- then {
- if unnecessary_iter_cloned::check_for_loop_iter(cx, parent, method_name, receiver, true) {
- return true;
- }
- let cloned_or_copied = if is_copy(cx, item_ty) && msrv.meets(msrvs::ITERATOR_COPIED) {
- "copied"
- } else {
- "cloned"
- };
- // The next suggestion may be incorrect because the removal of the `to_owned`-like
- // function could cause the iterator to hold a reference to a resource that is used
- // mutably. See https://github.com/rust-lang/rust-clippy/issues/8148.
- span_lint_and_sugg(
- cx,
- UNNECESSARY_TO_OWNED,
- parent.span,
- &format!("unnecessary use of `{method_name}`"),
- "use",
- format!("{receiver_snippet}.iter().{cloned_or_copied}()"),
- Applicability::MaybeIncorrect,
- );
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let Some(callee_def_id) = fn_def_id(cx, parent)
+ && is_into_iter(cx, callee_def_id)
+ && let Some(iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
+ && let parent_ty = cx.typeck_results().expr_ty(parent)
+ && implements_trait(cx, parent_ty, iterator_trait_id, &[])
+ && let Some(item_ty) = get_iterator_item_ty(cx, parent_ty)
+ && let Some(receiver_snippet) = snippet_opt(cx, receiver.span)
+ {
+ if unnecessary_iter_cloned::check_for_loop_iter(cx, parent, method_name, receiver, true) {
return true;
}
+ let cloned_or_copied = if is_copy(cx, item_ty) && msrv.meets(msrvs::ITERATOR_COPIED) {
+ "copied"
+ } else {
+ "cloned"
+ };
+ // The next suggestion may be incorrect because the removal of the `to_owned`-like
+ // function could cause the iterator to hold a reference to a resource that is used
+ // mutably. See https://github.com/rust-lang/rust-clippy/issues/8148.
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{method_name}`"),
+ "use",
+ format!("{receiver_snippet}.iter().{cloned_or_copied}()"),
+ Applicability::MaybeIncorrect,
+ );
+ return true;
}
false
}
@@ -251,26 +241,25 @@ fn check_other_call_arg<'tcx>(
method_name: Symbol,
receiver: &'tcx Expr<'tcx>,
) -> bool {
- if_chain! {
- if let Some((maybe_call, maybe_arg)) = skip_addr_of_ancestors(cx, expr);
- if let Some((callee_def_id, _, recv, call_args)) = get_callee_generic_args_and_args(cx, maybe_call);
- let fn_sig = cx.tcx.fn_sig(callee_def_id).instantiate_identity().skip_binder();
- if let Some(i) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == maybe_arg.hir_id);
- if let Some(input) = fn_sig.inputs().get(i);
- let (input, n_refs) = peel_mid_ty_refs(*input);
- if let (trait_predicates, _) = get_input_traits_and_projections(cx, callee_def_id, input);
- if let Some(sized_def_id) = cx.tcx.lang_items().sized_trait();
- if let [trait_predicate] = trait_predicates
+ if let Some((maybe_call, maybe_arg)) = skip_addr_of_ancestors(cx, expr)
+ && let Some((callee_def_id, _, recv, call_args)) = get_callee_generic_args_and_args(cx, maybe_call)
+ && let fn_sig = cx.tcx.fn_sig(callee_def_id).instantiate_identity().skip_binder()
+ && let Some(i) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == maybe_arg.hir_id)
+ && let Some(input) = fn_sig.inputs().get(i)
+ && let (input, n_refs) = peel_mid_ty_refs(*input)
+ && let (trait_predicates, _) = get_input_traits_and_projections(cx, callee_def_id, input)
+ && let Some(sized_def_id) = cx.tcx.lang_items().sized_trait()
+ && let [trait_predicate] = trait_predicates
.iter()
.filter(|trait_predicate| trait_predicate.def_id() != sized_def_id)
- .collect::<Vec<_>>()[..];
- if let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref);
- if let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef);
- if trait_predicate.def_id() == deref_trait_id || trait_predicate.def_id() == as_ref_trait_id;
- let receiver_ty = cx.typeck_results().expr_ty(receiver);
+ .collect::<Vec<_>>()[..]
+ && let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref)
+ && let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef)
+ && (trait_predicate.def_id() == deref_trait_id || trait_predicate.def_id() == as_ref_trait_id)
+ && let receiver_ty = cx.typeck_results().expr_ty(receiver)
// We can't add an `&` when the trait is `Deref` because `Target = &T` won't match
// `Target = T`.
- if let Some((n_refs, receiver_ty)) = if n_refs > 0 || is_copy(cx, receiver_ty) {
+ && let Some((n_refs, receiver_ty)) = if n_refs > 0 || is_copy(cx, receiver_ty) {
Some((n_refs, receiver_ty))
} else if trait_predicate.def_id() != deref_trait_id {
Some((1, Ty::new_ref(cx.tcx,
@@ -282,21 +271,20 @@ fn check_other_call_arg<'tcx>(
)))
} else {
None
- };
- if can_change_type(cx, maybe_arg, receiver_ty);
- if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
- then {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_TO_OWNED,
- maybe_arg.span,
- &format!("unnecessary use of `{method_name}`"),
- "use",
- format!("{:&>n_refs$}{receiver_snippet}", ""),
- Applicability::MachineApplicable,
- );
- return true;
}
+ && can_change_type(cx, maybe_arg, receiver_ty)
+ && let Some(receiver_snippet) = snippet_opt(cx, receiver.span)
+ {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ maybe_arg.span,
+ &format!("unnecessary use of `{method_name}`"),
+ "use",
+ format!("{:&>n_refs$}{receiver_snippet}", ""),
+ Applicability::MachineApplicable,
+ );
+ return true;
}
false
}
@@ -328,22 +316,18 @@ fn get_callee_generic_args_and_args<'tcx>(
Option<&'tcx Expr<'tcx>>,
&'tcx [Expr<'tcx>],
)> {
- if_chain! {
- if let ExprKind::Call(callee, args) = expr.kind;
- let callee_ty = cx.typeck_results().expr_ty(callee);
- if let ty::FnDef(callee_def_id, _) = callee_ty.kind();
- then {
- let generic_args = cx.typeck_results().node_args(callee.hir_id);
- return Some((*callee_def_id, generic_args, None, args));
- }
+ if let ExprKind::Call(callee, args) = expr.kind
+ && let callee_ty = cx.typeck_results().expr_ty(callee)
+ && let ty::FnDef(callee_def_id, _) = callee_ty.kind()
+ {
+ let generic_args = cx.typeck_results().node_args(callee.hir_id);
+ return Some((*callee_def_id, generic_args, None, args));
}
- if_chain! {
- if let ExprKind::MethodCall(_, recv, args, _) = expr.kind;
- if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- then {
- let generic_args = cx.typeck_results().node_args(expr.hir_id);
- return Some((method_def_id, generic_args, Some(recv), args));
- }
+ if let ExprKind::MethodCall(_, recv, args, _) = expr.kind
+ && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ {
+ let generic_args = cx.typeck_results().node_args(expr.hir_id);
+ return Some((method_def_id, generic_args, Some(recv), args));
}
None
}
@@ -458,7 +442,7 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty<
return false;
}
- let output_ty = cx.tcx.erase_late_bound_regions(fn_sig.output());
+ let output_ty = cx.tcx.instantiate_bound_regions_with_erased(fn_sig.output());
if output_ty.contains(param_ty) {
if let Ok(new_ty) = cx.tcx.try_instantiate_and_normalize_erasing_regions(
new_subst,
diff --git a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
index b5f810edd..84ee64e88 100644
--- a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::walk_ptrs_ty_depth;
use clippy_utils::{get_parent_expr, is_trait_method};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -22,13 +21,11 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: &str,
let (base_rcv_ty, rcv_depth) = walk_ptrs_ty_depth(rcv_ty);
if base_rcv_ty == base_res_ty && rcv_depth >= res_depth {
// allow the `as_ref` or `as_mut` if it is followed by another method call
- if_chain! {
- if let Some(parent) = get_parent_expr(cx, expr);
- if let hir::ExprKind::MethodCall(segment, ..) = parent.kind;
- if segment.ident.span != expr.span;
- then {
- return;
- }
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let hir::ExprKind::MethodCall(segment, ..) = parent.kind
+ && segment.ident.span != expr.span
+ {
+ return;
}
let mut applicability = Applicability::MachineApplicable;
diff --git a/src/tools/clippy/clippy_lints/src/methods/utils.rs b/src/tools/clippy/clippy_lints/src/methods/utils.rs
index 9f1f73e60..9ad4250a1 100644
--- a/src/tools/clippy/clippy_lints/src/methods/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/utils.rs
@@ -1,7 +1,6 @@
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{get_parent_expr, path_to_local_id, usage};
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -55,32 +54,33 @@ pub(super) fn get_hint_if_single_char_arg(
arg: &hir::Expr<'_>,
applicability: &mut Applicability,
) -> Option<String> {
- if_chain! {
- if let hir::ExprKind::Lit(lit) = &arg.kind;
- if let ast::LitKind::Str(r, style) = lit.node;
- let string = r.as_str();
- if string.chars().count() == 1;
- then {
- let snip = snippet_with_applicability(cx, arg.span, string, applicability);
- let ch = if let ast::StrStyle::Raw(nhash) = style {
- let nhash = nhash as usize;
- // for raw string: r##"a"##
- &snip[(nhash + 2)..(snip.len() - 1 - nhash)]
- } else {
- // for regular string: "a"
- &snip[1..(snip.len() - 1)]
- };
+ if let hir::ExprKind::Lit(lit) = &arg.kind
+ && let ast::LitKind::Str(r, style) = lit.node
+ && let string = r.as_str()
+ && string.chars().count() == 1
+ {
+ let snip = snippet_with_applicability(cx, arg.span, string, applicability);
+ let ch = if let ast::StrStyle::Raw(nhash) = style {
+ let nhash = nhash as usize;
+ // for raw string: r##"a"##
+ &snip[(nhash + 2)..(snip.len() - 1 - nhash)]
+ } else {
+ // for regular string: "a"
+ &snip[1..(snip.len() - 1)]
+ };
- let hint = format!("'{}'", match ch {
- "'" => "\\'" ,
+ let hint = format!(
+ "'{}'",
+ match ch {
+ "'" => "\\'",
r"\" => "\\\\",
_ => ch,
- });
+ }
+ );
- Some(hint)
- } else {
- None
- }
+ Some(hint)
+ } else {
+ None
}
}
@@ -140,15 +140,13 @@ impl<'cx, 'tcx> Visitor<'tcx> for CloneOrCopyVisitor<'cx, 'tcx> {
return;
},
ExprKind::MethodCall(.., args, _) => {
- if_chain! {
- if args.iter().all(|arg| !self.is_binding(arg));
- if let Some(method_def_id) = self.cx.typeck_results().type_dependent_def_id(parent.hir_id);
- let method_ty = self.cx.tcx.type_of(method_def_id).instantiate_identity();
- let self_ty = method_ty.fn_sig(self.cx.tcx).input(0).skip_binder();
- if matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Not));
- then {
- return;
- }
+ if args.iter().all(|arg| !self.is_binding(arg))
+ && let Some(method_def_id) = self.cx.typeck_results().type_dependent_def_id(parent.hir_id)
+ && let method_ty = self.cx.tcx.type_of(method_def_id).instantiate_identity()
+ && let self_ty = method_ty.fn_sig(self.cx.tcx).input(0).skip_binder()
+ && matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Not))
+ {
+ return;
}
},
_ => {},
diff --git a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs
index 730727186..9e87fb45a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::ty::is_type_diagnostic_item;
-use if_chain::if_chain;
use rustc_ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
@@ -17,29 +16,32 @@ pub(super) fn check<'tcx>(
default_arg: &'tcx Expr<'_>,
name_span: Span,
) {
- if_chain! {
- if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
- if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Vec);
- if let ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = count_arg.kind;
- if let ExprKind::Lit(Spanned { node: LitKind::Int(..), .. }) = default_arg.kind;
- then {
- let method_call_span = expr.span.with_lo(name_span.lo());
- span_lint_and_then(
- cx,
- VEC_RESIZE_TO_ZERO,
- expr.span,
- "emptying a vector with `resize`",
- |db| {
- db.help("the arguments may be inverted...");
- db.span_suggestion(
- method_call_span,
- "...or you can empty the vector with",
- "clear()".to_string(),
- Applicability::MaybeIncorrect,
- );
- },
- );
- }
+ if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(impl_id) = cx.tcx.impl_of_method(method_id)
+ && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Vec)
+ && let ExprKind::Lit(Spanned {
+ node: LitKind::Int(0, _),
+ ..
+ }) = count_arg.kind
+ && let ExprKind::Lit(Spanned {
+ node: LitKind::Int(..), ..
+ }) = default_arg.kind
+ {
+ let method_call_span = expr.span.with_lo(name_span.lo());
+ span_lint_and_then(
+ cx,
+ VEC_RESIZE_TO_ZERO,
+ expr.span,
+ "emptying a vector with `resize`",
+ |db| {
+ db.help("the arguments may be inverted...");
+ db.span_suggestion(
+ method_call_span,
+ "...or you can empty the vector with",
+ "clear()".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs b/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs
index e9f268da6..0b829d99a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs
@@ -1,5 +1,4 @@
use clippy_utils::diagnostics::span_lint;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty;
@@ -7,12 +6,10 @@ use rustc_middle::ty;
use super::ZST_OFFSET;
pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
- if_chain! {
- if let ty::RawPtr(ty::TypeAndMut { ty, .. }) = cx.typeck_results().expr_ty(recv).kind();
- if let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(*ty));
- if layout.is_zst();
- then {
- span_lint(cx, ZST_OFFSET, expr.span, "offset calculation on zero-sized value");
- }
+ if let ty::RawPtr(ty::TypeAndMut { ty, .. }) = cx.typeck_results().expr_ty(recv).kind()
+ && let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(*ty))
+ && layout.is_zst()
+ {
+ span_lint(cx, ZST_OFFSET, expr.span, "offset calculation on zero-sized value");
}
}
diff --git a/src/tools/clippy/clippy_lints/src/min_ident_chars.rs b/src/tools/clippy/clippy_lints/src/min_ident_chars.rs
index 4ad12e899..34b8e0dbe 100644
--- a/src/tools/clippy/clippy_lints/src/min_ident_chars.rs
+++ b/src/tools/clippy/clippy_lints/src/min_ident_chars.rs
@@ -6,7 +6,7 @@ use rustc_hir::intravisit::{walk_item, Visitor};
use rustc_hir::{GenericParamKind, HirId, Item, ItemKind, ItemLocalId, Node, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
use std::borrow::Cow;
@@ -91,7 +91,7 @@ impl Visitor<'_> for IdentVisitor<'_, '_> {
let node = if hir_id.local_id == ItemLocalId::from_u32(0) {
// In this case, we can just use `find`, `Owner`'s `node` field is private anyway so we can't
// reimplement it even if we wanted to
- cx.tcx.hir().find(hir_id)
+ cx.tcx.opt_hir_node(hir_id)
} else {
let Some(owner) = cx.tcx.hir_owner_nodes(hir_id.owner).as_owner() else {
return;
diff --git a/src/tools/clippy/clippy_lints/src/minmax.rs b/src/tools/clippy/clippy_lints/src/minmax.rs
index e0904f17b..fca626fa5 100644
--- a/src/tools/clippy/clippy_lints/src/minmax.rs
+++ b/src/tools/clippy/clippy_lints/src/minmax.rs
@@ -3,7 +3,7 @@ use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_trait_method;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
use std::cmp::Ordering;
diff --git a/src/tools/clippy/clippy_lints/src/misc.rs b/src/tools/clippy/clippy_lints/src/misc.rs
index f4af5f37b..b9784a585 100644
--- a/src/tools/clippy/clippy_lints/src/misc.rs
+++ b/src/tools/clippy/clippy_lints/src/misc.rs
@@ -5,7 +5,6 @@ use clippy_utils::{
any_parent_is_automatically_derived, fulfill_or_allowed, get_parent_expr, is_lint_allowed, iter_input_pats,
last_path_segment, SpanlessEq,
};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::intravisit::FnKind;
@@ -14,7 +13,7 @@ use rustc_hir::{
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
@@ -143,73 +142,64 @@ impl<'tcx> LateLintPass<'tcx> for LintPass {
}
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
- if_chain! {
- if !in_external_macro(cx.tcx.sess, stmt.span);
- if let StmtKind::Local(local) = stmt.kind;
- if let PatKind::Binding(BindingAnnotation(ByRef::Yes, mutabl), .., name, None) = local.pat.kind;
- if let Some(init) = local.init;
+ if !in_external_macro(cx.tcx.sess, stmt.span)
+ && let StmtKind::Local(local) = stmt.kind
+ && let PatKind::Binding(BindingAnnotation(ByRef::Yes, mutabl), .., name, None) = local.pat.kind
+ && let Some(init) = local.init
// Do not emit if clippy::ref_patterns is not allowed to avoid having two lints for the same issue.
- if is_lint_allowed(cx, REF_PATTERNS, local.pat.hir_id);
- then {
- let ctxt = local.span.ctxt();
- let mut app = Applicability::MachineApplicable;
- let sugg_init = Sugg::hir_with_context(cx, init, ctxt, "..", &mut app);
- let (mutopt, initref) = if mutabl == Mutability::Mut {
- ("mut ", sugg_init.mut_addr())
- } else {
- ("", sugg_init.addr())
- };
- let tyopt = if let Some(ty) = local.ty {
- let ty_snip = snippet_with_context(cx, ty.span, ctxt, "_", &mut app).0;
- format!(": &{mutopt}{ty_snip}")
- } else {
- String::new()
- };
- span_lint_hir_and_then(
- cx,
- TOPLEVEL_REF_ARG,
- init.hir_id,
- local.pat.span,
- "`ref` on an entire `let` pattern is discouraged, take a reference with `&` instead",
- |diag| {
- diag.span_suggestion(
- stmt.span,
- "try",
- format!(
- "let {name}{tyopt} = {initref};",
- name=snippet(cx, name.span, ".."),
- ),
- app,
- );
- }
- );
- }
+ && is_lint_allowed(cx, REF_PATTERNS, local.pat.hir_id)
+ {
+ let ctxt = local.span.ctxt();
+ let mut app = Applicability::MachineApplicable;
+ let sugg_init = Sugg::hir_with_context(cx, init, ctxt, "..", &mut app);
+ let (mutopt, initref) = if mutabl == Mutability::Mut {
+ ("mut ", sugg_init.mut_addr())
+ } else {
+ ("", sugg_init.addr())
+ };
+ let tyopt = if let Some(ty) = local.ty {
+ let ty_snip = snippet_with_context(cx, ty.span, ctxt, "_", &mut app).0;
+ format!(": &{mutopt}{ty_snip}")
+ } else {
+ String::new()
+ };
+ span_lint_hir_and_then(
+ cx,
+ TOPLEVEL_REF_ARG,
+ init.hir_id,
+ local.pat.span,
+ "`ref` on an entire `let` pattern is discouraged, take a reference with `&` instead",
+ |diag| {
+ diag.span_suggestion(
+ stmt.span,
+ "try",
+ format!("let {name}{tyopt} = {initref};", name = snippet(cx, name.span, ".."),),
+ app,
+ );
+ },
+ );
};
- if_chain! {
- if let StmtKind::Semi(expr) = stmt.kind;
- if let ExprKind::Binary(ref binop, a, b) = expr.kind;
- if binop.node == BinOpKind::And || binop.node == BinOpKind::Or;
- if let Some(sugg) = Sugg::hir_opt(cx, a);
- then {
- span_lint_hir_and_then(
- cx,
- SHORT_CIRCUIT_STATEMENT,
- expr.hir_id,
- stmt.span,
- "boolean short circuit operator in statement may be clearer using an explicit test",
- |diag| {
- let sugg = if binop.node == BinOpKind::Or { !sugg } else { sugg };
- diag.span_suggestion(
- stmt.span,
- "replace it with",
- format!(
- "if {sugg} {{ {}; }}",
- &snippet(cx, b.span, ".."),
- ),
- Applicability::MachineApplicable, // snippet
- );
- });
- }
+ if let StmtKind::Semi(expr) = stmt.kind
+ && let ExprKind::Binary(ref binop, a, b) = expr.kind
+ && (binop.node == BinOpKind::And || binop.node == BinOpKind::Or)
+ && let Some(sugg) = Sugg::hir_opt(cx, a)
+ {
+ span_lint_hir_and_then(
+ cx,
+ SHORT_CIRCUIT_STATEMENT,
+ expr.hir_id,
+ stmt.span,
+ "boolean short circuit operator in statement may be clearer using an explicit test",
+ |diag| {
+ let sugg = if binop.node == BinOpKind::Or { !sugg } else { sugg };
+ diag.span_suggestion(
+ stmt.span,
+ "replace it with",
+ format!("if {sugg} {{ {}; }}", &snippet(cx, b.span, ".."),),
+ Applicability::MachineApplicable, // snippet
+ );
+ },
+ );
};
}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/mod.rs b/src/tools/clippy/clippy_lints/src/misc_early/mod.rs
index df0dd9e4e..abe5b00e8 100644
--- a/src/tools/clippy/clippy_lints/src/misc_early/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/misc_early/mod.rs
@@ -16,7 +16,7 @@ use rustc_ast::visit::FnKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs b/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs
index 0d79ece08..0739b49fe 100644
--- a/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs
+++ b/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs
@@ -4,7 +4,7 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{GenericArg, Item, ItemKind, QPath, Ty, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::GenericParamDefKind;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -49,59 +49,59 @@ declare_lint_pass!(TypeParamMismatch => [MISMATCHING_TYPE_PARAM_ORDER]);
impl<'tcx> LateLintPass<'tcx> for TypeParamMismatch {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
- if_chain! {
- if !item.span.from_expansion();
- if let ItemKind::Impl(imp) = &item.kind;
- if let TyKind::Path(QPath::Resolved(_, path)) = &imp.self_ty.kind;
- if let Some(segment) = path.segments.iter().next();
- if let Some(generic_args) = segment.args;
- if !generic_args.args.is_empty();
- then {
- // get the name and span of the generic parameters in the Impl
- let mut impl_params = Vec::new();
- for p in generic_args.args {
- match p {
- GenericArg::Type(Ty {kind: TyKind::Path(QPath::Resolved(_, path)), ..}) =>
- impl_params.push((path.segments[0].ident.to_string(), path.span)),
- GenericArg::Type(_) => return,
- _ => (),
- };
- }
-
- // find the type that the Impl is for
- // only lint on struct/enum/union for now
- let Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, defid) = path.res else {
- return
+ if !item.span.from_expansion()
+ && let ItemKind::Impl(imp) = &item.kind
+ && let TyKind::Path(QPath::Resolved(_, path)) = &imp.self_ty.kind
+ && let Some(segment) = path.segments.iter().next()
+ && let Some(generic_args) = segment.args
+ && !generic_args.args.is_empty()
+ {
+ // get the name and span of the generic parameters in the Impl
+ let mut impl_params = Vec::new();
+ for p in generic_args.args {
+ match p {
+ GenericArg::Type(Ty {
+ kind: TyKind::Path(QPath::Resolved(_, path)),
+ ..
+ }) => impl_params.push((path.segments[0].ident.to_string(), path.span)),
+ GenericArg::Type(_) => return,
+ _ => (),
};
+ }
+
+ // find the type that the Impl is for
+ // only lint on struct/enum/union for now
+ let Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, defid) = path.res else {
+ return;
+ };
- // get the names of the generic parameters in the type
- let type_params = &cx.tcx.generics_of(defid).params;
- let type_param_names: Vec<_> = type_params.iter()
- .filter_map(|p|
- match p.kind {
- GenericParamDefKind::Type {..} => Some(p.name.to_string()),
- _ => None,
- }
- ).collect();
- // hashmap of name -> index for mismatch_param_name
- let type_param_names_hashmap: FxHashMap<&String, usize> =
- type_param_names.iter().enumerate().map(|(i, param)| (param, i)).collect();
+ // get the names of the generic parameters in the type
+ let type_params = &cx.tcx.generics_of(defid).params;
+ let type_param_names: Vec<_> = type_params
+ .iter()
+ .filter_map(|p| match p.kind {
+ GenericParamDefKind::Type { .. } => Some(p.name.to_string()),
+ _ => None,
+ })
+ .collect();
+ // hashmap of name -> index for mismatch_param_name
+ let type_param_names_hashmap: FxHashMap<&String, usize> = type_param_names
+ .iter()
+ .enumerate()
+ .map(|(i, param)| (param, i))
+ .collect();
- let type_name = segment.ident;
- for (i, (impl_param_name, impl_param_span)) in impl_params.iter().enumerate() {
- if mismatch_param_name(i, impl_param_name, &type_param_names_hashmap) {
- let msg = format!("`{type_name}` has a similarly named generic type parameter `{impl_param_name}` in its declaration, but in a different order");
- let help = format!("try `{}`, or a name that does not conflict with `{type_name}`'s generic params",
- type_param_names[i]);
- span_lint_and_help(
- cx,
- MISMATCHING_TYPE_PARAM_ORDER,
- *impl_param_span,
- &msg,
- None,
- &help
- );
- }
+ let type_name = segment.ident;
+ for (i, (impl_param_name, impl_param_span)) in impl_params.iter().enumerate() {
+ if mismatch_param_name(i, impl_param_name, &type_param_names_hashmap) {
+ let msg = format!(
+ "`{type_name}` has a similarly named generic type parameter `{impl_param_name}` in its declaration, but in a different order"
+ );
+ let help = format!(
+ "try `{}`, or a name that does not conflict with `{type_name}`'s generic params",
+ type_param_names[i]
+ );
+ span_lint_and_help(cx, MISMATCHING_TYPE_PARAM_ORDER, *impl_param_span, &msg, None, &help);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
index 4e00215c5..04df7b7a7 100644
--- a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
@@ -3,7 +3,7 @@ use clippy_utils::macros::{find_assert_args, find_assert_eq_args, root_macro_cal
use clippy_utils::{is_in_cfg_test, is_in_test_function};
use rustc_hir::Expr;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs b/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs
index dccf72d3c..0f18e9434 100644
--- a/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs
@@ -9,9 +9,9 @@ use clippy_utils::{eq_expr_value, hash_expr, higher};
use rustc_ast::{LitKind, RangeLimits};
use rustc_data_structures::unhash::UnhashMap;
use rustc_errors::{Applicability, Diagnostic};
-use rustc_hir::{BinOp, Block, Expr, ExprKind, UnOp};
+use rustc_hir::{BinOp, Block, Body, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::{sym, Span};
@@ -52,12 +52,12 @@ declare_clippy_lint! {
/// Use instead:
/// ```no_run
/// fn sum(v: &[u8]) -> u8 {
- /// assert!(v.len() > 4);
+ /// assert!(v.len() > 3);
/// // no bounds checks
/// v[0] + v[1] + v[2] + v[3]
/// }
/// ```
- #[clippy::version = "1.70.0"]
+ #[clippy::version = "1.74.0"]
pub MISSING_ASSERTS_FOR_INDEXING,
restriction,
"indexing into a slice multiple times without an `assert`"
@@ -87,11 +87,14 @@ enum LengthComparison {
LengthLessThanOrEqualInt,
/// `5 <= v.len()`
IntLessThanOrEqualLength,
+ /// `5 == v.len()`
+ /// `v.len() == 5`
+ LengthEqualInt,
}
/// Extracts parts out of a length comparison expression.
///
-/// E.g. for `v.len() > 5` this returns `Some((LengthComparison::IntLessThanLength, 5, `v.len()`))`
+/// E.g. for `v.len() > 5` this returns `Some((LengthComparison::IntLessThanLength, 5, v.len()))`
fn len_comparison<'hir>(
bin_op: BinOp,
left: &'hir Expr<'hir>,
@@ -114,6 +117,8 @@ fn len_comparison<'hir>(
(Rel::Lt, _, int_lit_pat!(right)) => Some((LengthComparison::LengthLessThanInt, *right as usize, left)),
(Rel::Le, int_lit_pat!(left), _) => Some((LengthComparison::IntLessThanOrEqualLength, *left as usize, right)),
(Rel::Le, _, int_lit_pat!(right)) => Some((LengthComparison::LengthLessThanOrEqualInt, *right as usize, left)),
+ (Rel::Eq, int_lit_pat!(left), _) => Some((LengthComparison::LengthEqualInt, *left as usize, right)),
+ (Rel::Eq, _, int_lit_pat!(right)) => Some((LengthComparison::LengthEqualInt, *right as usize, left)),
_ => None,
}
}
@@ -316,11 +321,11 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnhashMap<u64, Vec<IndexEntry<'_>>
continue;
};
- match entry {
+ match *entry {
IndexEntry::AssertWithIndex {
highest_index,
asserted_len,
- indexes,
+ ref indexes,
comparison,
assert_span,
slice,
@@ -343,6 +348,12 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnhashMap<u64, Vec<IndexEntry<'_>>
"assert!({}.len() > {highest_index})",
snippet(cx, slice.span, "..")
)),
+ // `highest_index` here is rather a length, so we need to add 1 to it
+ LengthComparison::LengthEqualInt if asserted_len < highest_index + 1 => Some(format!(
+ "assert!({}.len() == {})",
+ snippet(cx, slice.span, ".."),
+ highest_index + 1
+ )),
_ => None,
};
@@ -354,7 +365,7 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnhashMap<u64, Vec<IndexEntry<'_>>
indexes,
|diag| {
diag.span_suggestion(
- *assert_span,
+ assert_span,
"provide the highest index that is indexed with",
sugg,
Applicability::MachineApplicable,
@@ -364,7 +375,7 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnhashMap<u64, Vec<IndexEntry<'_>>
}
},
IndexEntry::IndexWithoutAssert {
- indexes,
+ ref indexes,
highest_index,
slice,
} if indexes.len() > 1 => {
@@ -390,10 +401,10 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnhashMap<u64, Vec<IndexEntry<'_>>
}
impl LateLintPass<'_> for MissingAssertsForIndexing {
- fn check_block(&mut self, cx: &LateContext<'_>, block: &Block<'_>) {
+ fn check_body(&mut self, cx: &LateContext<'_>, body: &Body<'_>) {
let mut map = UnhashMap::default();
- for_each_expr(block, |expr| {
+ for_each_expr(body.value, |expr| {
check_index(cx, expr, &mut map);
check_assert(cx, expr, &mut map);
ControlFlow::<!, ()>::Continue(())
diff --git a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
index 97522cbe6..acaa6be30 100644
--- a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
@@ -9,7 +9,7 @@ use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, Constness, FnDecl, GenericParamKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
@@ -131,13 +131,13 @@ impl<'tcx> LateLintPass<'tcx> for MissingConstForFn {
FnKind::Closure => return,
}
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
// Const fns are not allowed as methods in a trait.
{
let parent = cx.tcx.hir().get_parent_item(hir_id).def_id;
if parent != CRATE_DEF_ID {
- if let hir::Node::Item(item) = cx.tcx.hir().get_by_def_id(parent) {
+ if let hir::Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) {
if let hir::ItemKind::Trait(..) = &item.kind {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/missing_doc.rs b/src/tools/clippy/clippy_lints/src/missing_doc.rs
index 973caa72b..bf4af7946 100644
--- a/src/tools/clippy/clippy_lints/src/missing_doc.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_doc.rs
@@ -8,13 +8,12 @@
use clippy_utils::attrs::is_doc_hidden;
use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_from_proc_macro;
-use if_chain::if_chain;
use rustc_ast::ast::{self, MetaItem, MetaItemKind};
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::Visibility;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::CRATE_DEF_ID;
use rustc_span::{sym, Span};
@@ -63,16 +62,14 @@ impl MissingDoc {
}
fn has_include(meta: Option<MetaItem>) -> bool {
- if_chain! {
- if let Some(meta) = meta;
- if let MetaItemKind::List(list) = meta.kind;
- if let Some(meta) = list.first();
- if let Some(name) = meta.ident();
- then {
- name.name == sym::include
- } else {
- false
- }
+ if let Some(meta) = meta
+ && let MetaItemKind::List(list) = meta.kind
+ && let Some(meta) = list.first()
+ && let Some(name) = meta.ident()
+ {
+ name.name == sym::include
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
index 16ff98a59..c1f6c71a6 100644
--- a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
@@ -7,7 +7,7 @@ use rustc_hir::def::Res;
use rustc_hir::def_id::DefId;
use rustc_hir::{Item, ItemKind, UseKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Symbol;
declare_clippy_lint! {
@@ -72,13 +72,12 @@ impl LateLintPass<'_> for ImportRename {
fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
if let ItemKind::Use(path, UseKind::Single) = &item.kind {
for &res in &path.res {
- if_chain! {
- if let Res::Def(_, id) = res;
- if let Some(name) = self.renames.get(&id);
+ if let Res::Def(_, id) = res
+ && let Some(name) = self.renames.get(&id)
// Remove semicolon since it is not present for nested imports
- let span_without_semi = cx.sess().source_map().span_until_char(item.span, ';');
- if let Some(snip) = snippet_opt(cx, span_without_semi);
- if let Some(import) = match snip.split_once(" as ") {
+ && let span_without_semi = cx.sess().source_map().span_until_char(item.span, ';')
+ && let Some(snip) = snippet_opt(cx, span_without_semi)
+ && let Some(import) = match snip.split_once(" as ") {
None => Some(snip.as_str()),
Some((import, rename)) => {
if rename.trim() == name.as_str() {
@@ -87,20 +86,17 @@ impl LateLintPass<'_> for ImportRename {
Some(import.trim())
}
},
- };
- then {
- span_lint_and_sugg(
- cx,
- MISSING_ENFORCED_IMPORT_RENAMES,
- span_without_semi,
- "this import should be renamed",
- "try",
- format!(
- "{import} as {name}",
- ),
- Applicability::MachineApplicable,
- );
}
+ {
+ span_lint_and_sugg(
+ cx,
+ MISSING_ENFORCED_IMPORT_RENAMES,
+ span_without_semi,
+ "this import should be renamed",
+ "try",
+ format!("{import} as {name}",),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs
index 95f9df4e4..88b331dde 100644
--- a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs
@@ -12,7 +12,7 @@ use rustc_hir::{
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{Ty, TypeckResults};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span, Symbol};
declare_clippy_lint! {
@@ -220,7 +220,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingFieldsInDebug {
&& let self_ty = cx.tcx.type_of(self_path_did).skip_binder().peel_refs()
&& let Some(self_adt) = self_ty.ty_adt_def()
&& let Some(self_def_id) = self_adt.did().as_local()
- && let Some(Node::Item(self_item)) = cx.tcx.hir().find_by_def_id(self_def_id)
+ && let Some(Node::Item(self_item)) = cx.tcx.opt_hir_node_by_def_id(self_def_id)
// NB: can't call cx.typeck_results() as we are not in a body
&& let typeck_results = cx.tcx.typeck_body(*body_id)
&& should_lint(cx, typeck_results, block)
diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs
index b815da79b..7393b39c8 100644
--- a/src/tools/clippy/clippy_lints/src/missing_inline.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs
@@ -1,8 +1,8 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast;
use rustc_hir as hir;
-use rustc_lint::{self, LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs
index ad5f45a32..6bbf18d52 100644
--- a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs
@@ -5,7 +5,7 @@ use rustc_hir::def_id::DefIdMap;
use rustc_hir::{Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::AssocItem;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
index 215161b04..3ff40081c 100644
--- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
+++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_note};
use clippy_utils::{get_parent_expr, path_to_local, path_to_local_id};
-use if_chain::if_chain;
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, Guard, HirId, Local, Node, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -80,11 +79,13 @@ declare_lint_pass!(EvalOrderDependence => [MIXED_READ_WRITE_IN_EXPRESSION, DIVER
impl<'tcx> LateLintPass<'tcx> for EvalOrderDependence {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// Find a write to a local variable.
- let var = if_chain! {
- if let ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) = expr.kind;
- if let Some(var) = path_to_local(lhs);
- if expr.span.desugaring_kind().is_none();
- then { var } else { return; }
+ let var = if let ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) = expr.kind
+ && let Some(var) = path_to_local(lhs)
+ && expr.span.desugaring_kind().is_none()
+ {
+ var
+ } else {
+ return;
};
let mut visitor = ReadVisitor {
cx,
@@ -164,7 +165,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DivergenceVisitor<'a, 'tcx> {
match typ.kind() {
ty::FnDef(..) | ty::FnPtr(_) => {
let sig = typ.fn_sig(self.cx.tcx);
- if self.cx.tcx.erase_late_bound_regions(sig).output().kind() == &ty::Never {
+ if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never {
self.report_diverging_sub_expr(e);
}
},
@@ -212,7 +213,9 @@ fn check_for_unsequenced_reads(vis: &mut ReadVisitor<'_, '_>) {
if parent_id == cur_id {
break;
}
- let Some(parent_node) = map.find(parent_id) else { break };
+ let Some(parent_node) = vis.cx.tcx.opt_hir_node(parent_id) else {
+ break;
+ };
let stop_early = match parent_node {
Node::Expr(expr) => check_expr(vis, expr),
diff --git a/src/tools/clippy/clippy_lints/src/module_style.rs b/src/tools/clippy/clippy_lints/src/module_style.rs
index efdc7560e..0226b31dd 100644
--- a/src/tools/clippy/clippy_lints/src/module_style.rs
+++ b/src/tools/clippy/clippy_lints/src/module_style.rs
@@ -1,7 +1,8 @@
+use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LOCAL_CRATE;
use rustc_span::{FileName, SourceFile, Span, SyntaxContext};
use std::ffi::OsStr;
@@ -124,11 +125,13 @@ impl EarlyLintPass for ModStyle {
correct.pop();
correct.push(folder);
correct.push("mod.rs");
- cx.struct_span_lint(
+ span_lint_and_help(
+ cx,
SELF_NAMED_MODULE_FILES,
Span::new(file.start_pos, file.start_pos, SyntaxContext::root(), None),
- format!("`mod.rs` files are required, found `{}`", path.display()),
- |lint| lint.help(format!("move `{}` to `{}`", path.display(), correct.display(),)),
+ &format!("`mod.rs` files are required, found `{}`", path.display()),
+ None,
+ &format!("move `{}` to `{}`", path.display(), correct.display(),),
);
}
}
@@ -153,17 +156,22 @@ fn process_paths_for_mod_files<'a>(
}
/// Checks every path for the presence of `mod.rs` files and emits the lint if found.
+/// We should not emit a lint for test modules in the presence of `mod.rs`.
+/// Using `mod.rs` in integration tests is a [common pattern](https://doc.rust-lang.org/book/ch11-03-test-organization.html#submodules-in-integration-test)
+/// for code-sharing between tests.
fn check_self_named_mod_exists(cx: &EarlyContext<'_>, path: &Path, file: &SourceFile) {
- if path.ends_with("mod.rs") {
+ if path.ends_with("mod.rs") && !path.starts_with("tests") {
let mut mod_file = path.to_path_buf();
mod_file.pop();
mod_file.set_extension("rs");
- cx.struct_span_lint(
+ span_lint_and_help(
+ cx,
MOD_MODULE_FILES,
Span::new(file.start_pos, file.start_pos, SyntaxContext::root(), None),
- format!("`mod.rs` files are not allowed, found `{}`", path.display()),
- |lint| lint.help(format!("move `{}` to `{}`", path.display(), mod_file.display())),
+ &format!("`mod.rs` files are not allowed, found `{}`", path.display()),
+ None,
+ &format!("move `{}` to `{}`", path.display(), mod_file.display()),
);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/multi_assignments.rs b/src/tools/clippy/clippy_lints/src/multi_assignments.rs
index b42dce7a1..9a6b1dfc5 100644
--- a/src/tools/clippy/clippy_lints/src/multi_assignments.rs
+++ b/src/tools/clippy/clippy_lints/src/multi_assignments.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::{Expr, ExprKind, Stmt, StmtKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
index d4f8008ae..049f44f32 100644
--- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
+++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
@@ -8,7 +8,7 @@ use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{DesugaringKind, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/mut_key.rs b/src/tools/clippy/clippy_lints/src/mut_key.rs
index ebfd53f1e..04d2ced6a 100644
--- a/src/tools/clippy/clippy_lints/src/mut_key.rs
+++ b/src/tools/clippy/clippy_lints/src/mut_key.rs
@@ -6,10 +6,10 @@ use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::query::Key;
use rustc_middle::ty::{Adt, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
-use rustc_span::Span;
use rustc_span::symbol::sym;
+use rustc_span::Span;
use std::iter;
declare_clippy_lint! {
@@ -143,7 +143,11 @@ impl MutableKeyType {
for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) {
self.check_ty_(cx, hir_ty.span, *ty);
}
- self.check_ty_(cx, decl.output.span(), cx.tcx.erase_late_bound_regions(fn_sig.output()));
+ self.check_ty_(
+ cx,
+ decl.output.span(),
+ cx.tcx.instantiate_bound_regions_with_erased(fn_sig.output()),
+ );
}
// We want to lint 1. sets or maps with 2. not immutable key types and 3. no unerased
diff --git a/src/tools/clippy/clippy_lints/src/mut_mut.rs b/src/tools/clippy/clippy_lints/src/mut_mut.rs
index 6989504a4..72a2cca1e 100644
--- a/src/tools/clippy/clippy_lints/src/mut_mut.rs
+++ b/src/tools/clippy/clippy_lints/src/mut_mut.rs
@@ -5,7 +5,7 @@ use rustc_hir::intravisit;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs
index 4f8e24422..f905a4e5b 100644
--- a/src/tools/clippy/clippy_lints/src/mut_reference.rs
+++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint;
use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::iter;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
index dea432fdb..96cd81ecd 100644
--- a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
+++ b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
@@ -5,7 +5,7 @@ use rustc_hir::{BorrowKind, Expr, ExprKind, MatchSource, Mutability};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/mutex_atomic.rs b/src/tools/clippy/clippy_lints/src/mutex_atomic.rs
index 9d8c06cd0..a23e12f7a 100644
--- a/src/tools/clippy/clippy_lints/src/mutex_atomic.rs
+++ b/src/tools/clippy/clippy_lints/src/mutex_atomic.rs
@@ -7,7 +7,7 @@ use clippy_utils::ty::is_type_diagnostic_item;
use rustc_hir::Expr;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs b/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs
index 97e8f1c03..2ab83f733 100644
--- a/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use if_chain::if_chain;
use rustc_ast::ast::{BindingAnnotation, ByRef, Lifetime, Mutability, Param, PatKind, Path, TyKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::kw;
use rustc_span::Span;
@@ -66,48 +65,46 @@ enum Mode {
}
fn check_param_inner(cx: &EarlyContext<'_>, path: &Path, span: Span, binding_mode: &Mode, mutbl: Mutability) {
- if_chain! {
- if let [segment] = &path.segments[..];
- if segment.ident.name == kw::SelfUpper;
- then {
- // In case we have a named lifetime, we check if the name comes from expansion.
- // If it does, at this point we know the rest of the parameter was written by the user,
- // so let them decide what the name of the lifetime should be.
- // See #6089 for more details.
- let mut applicability = Applicability::MachineApplicable;
- let self_param = match (binding_mode, mutbl) {
- (Mode::Ref(None), Mutability::Mut) => "&mut self".to_string(),
- (Mode::Ref(Some(lifetime)), Mutability::Mut) => {
- if lifetime.ident.span.from_expansion() {
- applicability = Applicability::HasPlaceholders;
- "&'_ mut self".to_string()
- } else {
- format!("&{} mut self", &lifetime.ident.name)
- }
- },
- (Mode::Ref(None), Mutability::Not) => "&self".to_string(),
- (Mode::Ref(Some(lifetime)), Mutability::Not) => {
- if lifetime.ident.span.from_expansion() {
- applicability = Applicability::HasPlaceholders;
- "&'_ self".to_string()
- } else {
- format!("&{} self", &lifetime.ident.name)
- }
- },
- (Mode::Value, Mutability::Mut) => "mut self".to_string(),
- (Mode::Value, Mutability::Not) => "self".to_string(),
- };
+ if let [segment] = &path.segments[..]
+ && segment.ident.name == kw::SelfUpper
+ {
+ // In case we have a named lifetime, we check if the name comes from expansion.
+ // If it does, at this point we know the rest of the parameter was written by the user,
+ // so let them decide what the name of the lifetime should be.
+ // See #6089 for more details.
+ let mut applicability = Applicability::MachineApplicable;
+ let self_param = match (binding_mode, mutbl) {
+ (Mode::Ref(None), Mutability::Mut) => "&mut self".to_string(),
+ (Mode::Ref(Some(lifetime)), Mutability::Mut) => {
+ if lifetime.ident.span.from_expansion() {
+ applicability = Applicability::HasPlaceholders;
+ "&'_ mut self".to_string()
+ } else {
+ format!("&{} mut self", &lifetime.ident.name)
+ }
+ },
+ (Mode::Ref(None), Mutability::Not) => "&self".to_string(),
+ (Mode::Ref(Some(lifetime)), Mutability::Not) => {
+ if lifetime.ident.span.from_expansion() {
+ applicability = Applicability::HasPlaceholders;
+ "&'_ self".to_string()
+ } else {
+ format!("&{} self", &lifetime.ident.name)
+ }
+ },
+ (Mode::Value, Mutability::Mut) => "mut self".to_string(),
+ (Mode::Value, Mutability::Not) => "self".to_string(),
+ };
- span_lint_and_sugg(
- cx,
- NEEDLESS_ARBITRARY_SELF_TYPE,
- span,
- "the type of the `self` parameter does not need to be arbitrary",
- "consider to change this parameter to",
- self_param,
- applicability,
- )
- }
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_ARBITRARY_SELF_TYPE,
+ span,
+ "the type of the `self` parameter does not need to be arbitrary",
+ "consider to change this parameter to",
+ self_param,
+ applicability,
+ );
}
}
@@ -125,12 +122,10 @@ impl EarlyLintPass for NeedlessArbitrarySelfType {
}
},
TyKind::Ref(lifetime, mut_ty) => {
- if_chain! {
- if let TyKind::Path(None, path) = &mut_ty.ty.kind;
- if let PatKind::Ident(BindingAnnotation::NONE, _, _) = p.pat.kind;
- then {
- check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Ref(*lifetime), mut_ty.mutbl);
- }
+ if let TyKind::Path(None, path) = &mut_ty.ty.kind
+ && let PatKind::Ident(BindingAnnotation::NONE, _, _) = p.pat.kind
+ {
+ check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Ref(*lifetime), mut_ty.mutbl);
}
},
_ => {},
diff --git a/src/tools/clippy/clippy_lints/src/needless_bool.rs b/src/tools/clippy/clippy_lints/src/needless_bool.rs
index 02c177c92..218ca5e80 100644
--- a/src/tools/clippy/clippy_lints/src/needless_bool.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_bool.rs
@@ -13,7 +13,7 @@ use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, Node, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
index fdb91f0dc..4710a6944 100644
--- a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_then;
use rustc_errors::Applicability;
use rustc_hir::{BindingAnnotation, Mutability, Node, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs b/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs
index dcfb109a4..a32bca3d0 100644
--- a/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs
@@ -2,7 +2,7 @@ use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::mir::{enclosing_mir, expr_local, local_assignments, used_exactly_once, PossibleBorrowerMap};
use clippy_utils::source::snippet_with_context;
-use clippy_utils::ty::is_copy;
+use clippy_utils::ty::{implements_trait, is_copy};
use clippy_utils::{expr_use_ctxt, peel_n_hir_expr_refs, DefinedTy, ExprUseNode};
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
@@ -15,7 +15,7 @@ use rustc_middle::mir::{Rvalue, StatementKind};
use rustc_middle::ty::{
self, ClauseKind, EarlyBinder, FnSig, GenericArg, GenericArgKind, List, ParamTy, ProjectionPredicate, Ty,
};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_trait_selection::traits::{Obligation, ObligationCause};
@@ -50,7 +50,7 @@ declare_clippy_lint! {
/// let x = "foo";
/// f(x);
/// ```
- #[clippy::version = "pre 1.29.0"]
+ #[clippy::version = "1.74.0"]
pub NEEDLESS_BORROWS_FOR_GENERIC_ARGS,
style,
"taking a reference that is going to be automatically dereferenced"
@@ -169,6 +169,7 @@ fn needless_borrow_count<'tcx>(
) -> usize {
let destruct_trait_def_id = cx.tcx.lang_items().destruct_trait();
let sized_trait_def_id = cx.tcx.lang_items().sized_trait();
+ let drop_trait_def_id = cx.tcx.lang_items().drop_trait();
let fn_sig = cx.tcx.fn_sig(fn_id).instantiate_identity().skip_binder();
let predicates = cx.tcx.param_env(fn_id).caller_bounds();
@@ -223,7 +224,14 @@ fn needless_borrow_count<'tcx>(
// elements are modified each time `check_referent` is called.
let mut args_with_referent_ty = callee_args.to_vec();
- let mut check_reference_and_referent = |reference, referent| {
+ let mut check_reference_and_referent = |reference: &Expr<'tcx>, referent: &Expr<'tcx>| {
+ if let ExprKind::Field(base, _) = &referent.kind {
+ let base_ty = cx.typeck_results().expr_ty(base);
+ if drop_trait_def_id.map_or(false, |id| implements_trait(cx, base_ty, id, &[])) {
+ return false;
+ }
+ }
+
let referent_ty = cx.typeck_results().expr_ty(referent);
if !is_copy(cx, referent_ty)
diff --git a/src/tools/clippy/clippy_lints/src/needless_continue.rs b/src/tools/clippy/clippy_lints/src/needless_continue.rs
index cb2738947..4b9ab50e4 100644
--- a/src/tools/clippy/clippy_lints/src/needless_continue.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_continue.rs
@@ -37,7 +37,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::source::{indent_of, snippet, snippet_block};
use rustc_ast::ast;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -362,21 +362,19 @@ fn suggestion_snippet_for_continue_inside_else(cx: &EarlyContext<'_>, data: &Lin
}
fn check_and_warn(cx: &EarlyContext<'_>, expr: &ast::Expr) {
- if_chain! {
- if let ast::ExprKind::Loop(loop_block, ..) = &expr.kind;
- if let Some(last_stmt) = loop_block.stmts.last();
- if let ast::StmtKind::Expr(inner_expr) | ast::StmtKind::Semi(inner_expr) = &last_stmt.kind;
- if let ast::ExprKind::Continue(_) = inner_expr.kind;
- then {
- span_lint_and_help(
- cx,
- NEEDLESS_CONTINUE,
- last_stmt.span,
- MSG_REDUNDANT_CONTINUE_EXPRESSION,
- None,
- DROP_CONTINUE_EXPRESSION_MSG,
- );
- }
+ if let ast::ExprKind::Loop(loop_block, ..) = &expr.kind
+ && let Some(last_stmt) = loop_block.stmts.last()
+ && let ast::StmtKind::Expr(inner_expr) | ast::StmtKind::Semi(inner_expr) = &last_stmt.kind
+ && let ast::ExprKind::Continue(_) = inner_expr.kind
+ {
+ span_lint_and_help(
+ cx,
+ NEEDLESS_CONTINUE,
+ last_stmt.span,
+ MSG_REDUNDANT_CONTINUE_EXPRESSION,
+ None,
+ DROP_CONTINUE_EXPRESSION_MSG,
+ );
}
with_loop_block(expr, |loop_block, label| {
for (i, stmt) in loop_block.stmts.iter().enumerate() {
diff --git a/src/tools/clippy/clippy_lints/src/needless_else.rs b/src/tools/clippy/clippy_lints/src/needless_else.rs
index d881c13f8..b6aad69d1 100644
--- a/src/tools/clippy/clippy_lints/src/needless_else.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_else.rs
@@ -3,7 +3,7 @@ use clippy_utils::source::{snippet_opt, trim_span};
use rustc_ast::ast::{Expr, ExprKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs
index c71996131..84a07df1b 100644
--- a/src/tools/clippy/clippy_lints/src/needless_for_each.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs
@@ -2,11 +2,9 @@ use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{Closure, Expr, ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span, Symbol};
-use if_chain::if_chain;
-
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::is_trait_method;
use clippy_utils::source::snippet_with_applicability;
@@ -51,65 +49,63 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessForEach {
return;
};
- if_chain! {
+ if let ExprKind::MethodCall(method_name, for_each_recv, [for_each_arg], _) = expr.kind
// Check the method name is `for_each`.
- if let ExprKind::MethodCall(method_name, for_each_recv, [for_each_arg], _) = expr.kind;
- if method_name.ident.name == Symbol::intern("for_each");
+ && method_name.ident.name == Symbol::intern("for_each")
// Check `for_each` is an associated function of `Iterator`.
- if is_trait_method(cx, expr, sym::Iterator);
+ && is_trait_method(cx, expr, sym::Iterator)
// Checks the receiver of `for_each` is also a method call.
- if let ExprKind::MethodCall(_, iter_recv, [], _) = for_each_recv.kind;
+ && let ExprKind::MethodCall(_, iter_recv, [], _) = for_each_recv.kind
// Skip the lint if the call chain is too long. e.g. `v.field.iter().for_each()` or
// `v.foo().iter().for_each()` must be skipped.
- if matches!(
+ && matches!(
iter_recv.kind,
ExprKind::Array(..) | ExprKind::Call(..) | ExprKind::Path(..)
- );
+ )
// Checks the type of the `iter` method receiver is NOT a user defined type.
- if has_iter_method(cx, cx.typeck_results().expr_ty(iter_recv)).is_some();
+ && has_iter_method(cx, cx.typeck_results().expr_ty(iter_recv)).is_some()
// Skip the lint if the body is not block because this is simpler than `for` loop.
// e.g. `v.iter().for_each(f)` is simpler and clearer than using `for` loop.
- if let ExprKind::Closure(&Closure { body, .. }) = for_each_arg.kind;
- let body = cx.tcx.hir().body(body);
- if let ExprKind::Block(..) = body.value.kind;
- then {
- let mut ret_collector = RetCollector::default();
- ret_collector.visit_expr(body.value);
-
- // Skip the lint if `return` is used in `Loop` in order not to suggest using `'label`.
- if ret_collector.ret_in_loop {
- return;
- }
-
- let (mut applicability, ret_suggs) = if ret_collector.spans.is_empty() {
- (Applicability::MachineApplicable, None)
- } else {
- (
- Applicability::MaybeIncorrect,
- Some(
- ret_collector
- .spans
- .into_iter()
- .map(|span| (span, "continue".to_string()))
- .collect(),
- ),
- )
- };
+ && let ExprKind::Closure(&Closure { body, .. }) = for_each_arg.kind
+ && let body = cx.tcx.hir().body(body)
+ && let ExprKind::Block(..) = body.value.kind
+ {
+ let mut ret_collector = RetCollector::default();
+ ret_collector.visit_expr(body.value);
+
+ // Skip the lint if `return` is used in `Loop` in order not to suggest using `'label`.
+ if ret_collector.ret_in_loop {
+ return;
+ }
- let sugg = format!(
- "for {} in {} {}",
- snippet_with_applicability(cx, body.params[0].pat.span, "..", &mut applicability),
- snippet_with_applicability(cx, for_each_recv.span, "..", &mut applicability),
- snippet_with_applicability(cx, body.value.span, "..", &mut applicability),
- );
+ let (mut applicability, ret_suggs) = if ret_collector.spans.is_empty() {
+ (Applicability::MachineApplicable, None)
+ } else {
+ (
+ Applicability::MaybeIncorrect,
+ Some(
+ ret_collector
+ .spans
+ .into_iter()
+ .map(|span| (span, "continue".to_string()))
+ .collect(),
+ ),
+ )
+ };
+
+ let sugg = format!(
+ "for {} in {} {}",
+ snippet_with_applicability(cx, body.params[0].pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, for_each_recv.span, "..", &mut applicability),
+ snippet_with_applicability(cx, body.value.span, "..", &mut applicability),
+ );
- span_lint_and_then(cx, NEEDLESS_FOR_EACH, stmt.span, "needless use of `for_each`", |diag| {
- diag.span_suggestion(stmt.span, "try", sugg, applicability);
- if let Some(ret_suggs) = ret_suggs {
- diag.multipart_suggestion("...and replace `return` with `continue`", ret_suggs, applicability);
- }
- })
- }
+ span_lint_and_then(cx, NEEDLESS_FOR_EACH, stmt.span, "needless use of `for_each`", |diag| {
+ diag.span_suggestion(stmt.span, "try", sugg, applicability);
+ if let Some(ret_suggs) = ret_suggs {
+ diag.multipart_suggestion("...and replace `return` with `continue`", ret_suggs, applicability);
+ }
+ });
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_if.rs b/src/tools/clippy/clippy_lints/src/needless_if.rs
index 23aabc548..51bee4b51 100644
--- a/src/tools/clippy/clippy_lints/src/needless_if.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_if.rs
@@ -6,7 +6,7 @@ use rustc_errors::Applicability;
use rustc_hir::{ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/needless_late_init.rs b/src/tools/clippy/clippy_lints/src/needless_late_init.rs
index c8888c744..3e63c0a1d 100644
--- a/src/tools/clippy/clippy_lints/src/needless_late_init.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_late_init.rs
@@ -10,7 +10,7 @@ use rustc_hir::{
StmtKind,
};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -128,21 +128,18 @@ impl LocalAssign {
let assign = match expr.kind {
ExprKind::Block(Block { expr: Some(expr), .. }, _) => Self::from_expr(expr, expr.span),
ExprKind::Block(block, _) => {
- if_chain! {
- if let Some((last, other_stmts)) = block.stmts.split_last();
- if let StmtKind::Expr(expr) | StmtKind::Semi(expr) = last.kind;
+ if let Some((last, other_stmts)) = block.stmts.split_last()
+ && let StmtKind::Expr(expr) | StmtKind::Semi(expr) = last.kind
- let assign = Self::from_expr(expr, last.span)?;
+ && let assign = Self::from_expr(expr, last.span)?
// avoid visiting if not needed
- if assign.lhs_id == binding_id;
- if other_stmts.iter().all(|stmt| !contains_assign_expr(cx, stmt));
-
- then {
- Some(assign)
- } else {
- None
- }
+ && assign.lhs_id == binding_id
+ && other_stmts.iter().all(|stmt| !contains_assign_expr(cx, stmt))
+ {
+ Some(assign)
+ } else {
+ None
}
},
ExprKind::Assign(..) => Self::from_expr(expr, expr.span),
@@ -368,22 +365,20 @@ fn check<'tcx>(
impl<'tcx> LateLintPass<'tcx> for NeedlessLateInit {
fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
let mut parents = cx.tcx.hir().parent_iter(local.hir_id);
- if_chain! {
- if let Local {
- init: None,
- pat: &Pat {
+ if let Local {
+ init: None,
+ pat:
+ &Pat {
kind: PatKind::Binding(BindingAnnotation::NONE, binding_id, _, None),
..
},
- source: LocalSource::Normal,
- ..
- } = local;
- if let Some((_, Node::Stmt(local_stmt))) = parents.next();
- if let Some((_, Node::Block(block))) = parents.next();
-
- then {
- check(cx, local, local_stmt, block, binding_id);
- }
+ source: LocalSource::Normal,
+ ..
+ } = local
+ && let Some((_, Node::Stmt(local_stmt))) = parents.next()
+ && let Some((_, Node::Block(block))) = parents.next()
+ {
+ check(cx, local, local_stmt, block, binding_id);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
index 7bbf1fb4c..8a6210637 100644
--- a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -53,21 +53,23 @@ fn check_for_parens(cx: &LateContext<'_>, e: &Expr<'_>, is_start: bool) {
// don't check floating point literals on the start expression of a range
return;
}
- if_chain! {
- if let ExprKind::Lit(literal) = e.kind;
+ if let ExprKind::Lit(literal) = e.kind
// the indicator that parenthesis surround the literal is that the span of the expression and the literal differ
- if (literal.span.data().hi - literal.span.data().lo) != (e.span.data().hi - e.span.data().lo);
+ && (literal.span.data().hi - literal.span.data().lo) != (e.span.data().hi - e.span.data().lo)
// inspect the source code of the expression for parenthesis
- if snippet_enclosed_in_parenthesis(&snippet(cx, e.span, ""));
- then {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_then(cx, NEEDLESS_PARENS_ON_RANGE_LITERALS, e.span,
- "needless parenthesis on range literals can be removed",
- |diag| {
- let suggestion = snippet_with_applicability(cx, literal.span, "_", &mut applicability);
- diag.span_suggestion(e.span, "try", suggestion, applicability);
- });
- }
+ && snippet_enclosed_in_parenthesis(&snippet(cx, e.span, ""))
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_then(
+ cx,
+ NEEDLESS_PARENS_ON_RANGE_LITERALS,
+ e.span,
+ "needless parenthesis on range literals can be removed",
+ |diag| {
+ let suggestion = snippet_with_applicability(cx, literal.span, "_", &mut applicability);
+ diag.span_suggestion(e.span, "try", suggestion, applicability);
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs
index d610ba520..64ef709e2 100644
--- a/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs
@@ -17,7 +17,7 @@ use rustc_middle::hir::map::associated_body;
use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::{self, Ty, TyCtxt, UpvarId, UpvarPath};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::kw;
use rustc_span::Span;
@@ -113,8 +113,9 @@ fn check_closures<'tcx>(
}
ctx.prev_bind = None;
ctx.prev_move_to_closure.clear();
- if let Some(body) = hir
- .find_by_def_id(closure)
+ if let Some(body) = cx
+ .tcx
+ .opt_hir_node_by_def_id(closure)
.and_then(associated_body)
.map(|(_, body_id)| hir.body(body_id))
{
@@ -137,7 +138,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByRefMut<'tcx> {
return;
}
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(fn_def_id);
let is_async = match kind {
FnKind::ItemFn(.., header) => {
if header.is_unsafe() {
@@ -256,7 +257,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByRefMut<'tcx> {
span_lint_hir_and_then(
cx,
NEEDLESS_PASS_BY_REF_MUT,
- cx.tcx.hir().local_def_id_to_hir_id(*fn_def_id),
+ cx.tcx.local_def_id_to_hir_id(*fn_def_id),
sp,
"this argument is a mutable reference, but not used mutably",
|diag| {
@@ -412,7 +413,7 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> {
],
),
..
- }) = self.tcx.hir().get(cmt.hir_id)
+ }) = self.tcx.hir_node(cmt.hir_id)
{
self.async_closures.insert(*def_id);
}
@@ -521,7 +522,7 @@ impl<'tcx> Visitor<'tcx> for FnNeedsMutVisitor<'_, 'tcx> {
let Self { cx, used_fn_def_ids } = self;
// #11182; do not lint if mutability is required elsewhere
- if let Node::Expr(expr) = cx.tcx.hir().get(hir_id)
+ if let Node::Expr(expr) = cx.tcx.hir_node(hir_id)
&& let Some(parent) = get_parent_node(cx.tcx, expr.hir_id)
&& let ty::FnDef(def_id, _) = cx
.tcx
diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
index 8fa461ac1..2c5c3dcaa 100644
--- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
@@ -5,7 +5,6 @@ use clippy_utils::source::{snippet, snippet_opt};
use clippy_utils::ty::{
implements_trait, implements_trait_with_env_from_iter, is_copy, is_type_diagnostic_item, is_type_lang_item,
};
-use if_chain::if_chain;
use rustc_ast::ast::Attribute;
use rustc_errors::{Applicability, Diagnostic};
use rustc_hir::intravisit::FnKind;
@@ -18,7 +17,7 @@ use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::kw;
use rustc_span::{sym, Span};
@@ -87,7 +86,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
return;
}
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(fn_def_id);
match kind {
FnKind::ItemFn(.., header) => {
@@ -177,118 +176,120 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
)
};
- if_chain! {
- if !is_self(arg);
- if !ty.is_mutable_ptr();
- if !is_copy(cx, ty);
- if ty.is_sized(cx.tcx, cx.param_env);
- if !allowed_traits.iter().any(|&t| implements_trait_with_env_from_iter(
- cx.tcx,
- cx.param_env,
- ty,
- t,
- [Option::<ty::GenericArg<'tcx>>::None],
- ));
- if !implements_borrow_trait;
- if !all_borrowable_trait;
-
- if let PatKind::Binding(BindingAnnotation(_, Mutability::Not), canonical_id, ..) = arg.pat.kind;
- if !moved_vars.contains(&canonical_id);
- then {
- // Dereference suggestion
- let sugg = |diag: &mut Diagnostic| {
- if let ty::Adt(def, ..) = ty.kind() {
- if let Some(span) = cx.tcx.hir().span_if_local(def.did()) {
- if type_allowed_to_implement_copy(
- cx.tcx,
- cx.param_env,
- ty,
- traits::ObligationCause::dummy_with_span(span),
- ).is_ok() {
- diag.span_help(span, "consider marking this type as `Copy`");
- }
+ if !is_self(arg)
+ && !ty.is_mutable_ptr()
+ && !is_copy(cx, ty)
+ && ty.is_sized(cx.tcx, cx.param_env)
+ && !allowed_traits.iter().any(|&t| {
+ implements_trait_with_env_from_iter(
+ cx.tcx,
+ cx.param_env,
+ ty,
+ t,
+ None,
+ [Option::<ty::GenericArg<'tcx>>::None],
+ )
+ })
+ && !implements_borrow_trait
+ && !all_borrowable_trait
+ && let PatKind::Binding(BindingAnnotation(_, Mutability::Not), canonical_id, ..) = arg.pat.kind
+ && !moved_vars.contains(&canonical_id)
+ {
+ // Dereference suggestion
+ let sugg = |diag: &mut Diagnostic| {
+ if let ty::Adt(def, ..) = ty.kind() {
+ if let Some(span) = cx.tcx.hir().span_if_local(def.did()) {
+ if type_allowed_to_implement_copy(
+ cx.tcx,
+ cx.param_env,
+ ty,
+ traits::ObligationCause::dummy_with_span(span),
+ )
+ .is_ok()
+ {
+ diag.span_help(span, "consider marking this type as `Copy`");
}
}
+ }
- if_chain! {
- if is_type_diagnostic_item(cx, ty, sym::Vec);
- if let Some(clone_spans) =
- get_spans(cx, Some(body.id()), idx, &[("clone", ".to_owned()")]);
- if let TyKind::Path(QPath::Resolved(_, path)) = input.kind;
- if let Some(elem_ty) = path.segments.iter()
- .find(|seg| seg.ident.name == sym::Vec)
- .and_then(|ps| ps.args.as_ref())
- .map(|params| params.args.iter().find_map(|arg| match arg {
- GenericArg::Type(ty) => Some(ty),
- _ => None,
- }).unwrap());
- then {
- let slice_ty = format!("&[{}]", snippet(cx, elem_ty.span, "_"));
- diag.span_suggestion(
- input.span,
- "consider changing the type to",
- slice_ty,
- Applicability::Unspecified,
- );
-
- for (span, suggestion) in clone_spans {
- diag.span_suggestion(
- span,
- snippet_opt(cx, span)
- .map_or(
- "change the call to".into(),
- |x| format!("change `{x}` to"),
- ),
- suggestion,
- Applicability::Unspecified,
- );
- }
-
- // cannot be destructured, no need for `*` suggestion
- return;
- }
+ if is_type_diagnostic_item(cx, ty, sym::Vec)
+ && let Some(clone_spans) = get_spans(cx, Some(body.id()), idx, &[("clone", ".to_owned()")])
+ && let TyKind::Path(QPath::Resolved(_, path)) = input.kind
+ && let Some(elem_ty) = path
+ .segments
+ .iter()
+ .find(|seg| seg.ident.name == sym::Vec)
+ .and_then(|ps| ps.args.as_ref())
+ .map(|params| {
+ params
+ .args
+ .iter()
+ .find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+ .unwrap()
+ })
+ {
+ let slice_ty = format!("&[{}]", snippet(cx, elem_ty.span, "_"));
+ diag.span_suggestion(
+ input.span,
+ "consider changing the type to",
+ slice_ty,
+ Applicability::Unspecified,
+ );
+
+ for (span, suggestion) in clone_spans {
+ diag.span_suggestion(
+ span,
+ snippet_opt(cx, span)
+ .map_or("change the call to".into(), |x| format!("change `{x}` to")),
+ suggestion,
+ Applicability::Unspecified,
+ );
}
- if is_type_lang_item(cx, ty, LangItem::String) {
- if let Some(clone_spans) =
- get_spans(cx, Some(body.id()), idx, &[("clone", ".to_string()"), ("as_str", "")]) {
+ // cannot be destructured, no need for `*` suggestion
+ return;
+ }
+
+ if is_type_lang_item(cx, ty, LangItem::String) {
+ if let Some(clone_spans) =
+ get_spans(cx, Some(body.id()), idx, &[("clone", ".to_string()"), ("as_str", "")])
+ {
+ diag.span_suggestion(
+ input.span,
+ "consider changing the type to",
+ "&str",
+ Applicability::Unspecified,
+ );
+
+ for (span, suggestion) in clone_spans {
diag.span_suggestion(
- input.span,
- "consider changing the type to",
- "&str",
+ span,
+ snippet_opt(cx, span)
+ .map_or("change the call to".into(), |x| format!("change `{x}` to")),
+ suggestion,
Applicability::Unspecified,
);
-
- for (span, suggestion) in clone_spans {
- diag.span_suggestion(
- span,
- snippet_opt(cx, span)
- .map_or(
- "change the call to".into(),
- |x| format!("change `{x}` to")
- ),
- suggestion,
- Applicability::Unspecified,
- );
- }
-
- return;
}
+
+ return;
}
+ }
- let spans = vec![(input.span, format!("&{}", snippet(cx, input.span, "_")))];
+ let spans = vec![(input.span, format!("&{}", snippet(cx, input.span, "_")))];
- multispan_sugg(diag, "consider taking a reference instead", spans);
- };
+ multispan_sugg(diag, "consider taking a reference instead", spans);
+ };
- span_lint_and_then(
- cx,
- NEEDLESS_PASS_BY_VALUE,
- input.span,
- "this argument is passed by value, but not consumed in the function body",
- sugg,
- );
- }
+ span_lint_and_then(
+ cx,
+ NEEDLESS_PASS_BY_VALUE,
+ input.span,
+ "this argument is passed by value, but not consumed in the function body",
+ sugg,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
index 074c9fef1..a4d3aaf0d 100644
--- a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::path_res;
use clippy_utils::source::snippet;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Block, Body, CoroutineKind, CoroutineSource, Expr, ExprKind, LangItem, MatchSource, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -111,34 +110,32 @@ impl LateLintPass<'_> for NeedlessQuestionMark {
}
fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(path, [arg]) = expr.kind;
- if let Res::Def(DefKind::Ctor(..), ctor_id) = path_res(cx, path);
- if let Some(variant_id) = cx.tcx.opt_parent(ctor_id);
- let sugg_remove = if cx.tcx.lang_items().option_some_variant() == Some(variant_id) {
+ if let ExprKind::Call(path, [arg]) = expr.kind
+ && let Res::Def(DefKind::Ctor(..), ctor_id) = path_res(cx, path)
+ && let Some(variant_id) = cx.tcx.opt_parent(ctor_id)
+ && let sugg_remove = if cx.tcx.lang_items().option_some_variant() == Some(variant_id) {
"Some()"
} else if cx.tcx.lang_items().result_ok_variant() == Some(variant_id) {
"Ok()"
} else {
return;
- };
- if let ExprKind::Match(inner_expr_with_q, _, MatchSource::TryDesugar(_)) = &arg.kind;
- if let ExprKind::Call(called, [inner_expr]) = &inner_expr_with_q.kind;
- if let ExprKind::Path(QPath::LangItem(LangItem::TryTraitBranch, ..)) = &called.kind;
- if expr.span.eq_ctxt(inner_expr.span);
- let expr_ty = cx.typeck_results().expr_ty(expr);
- let inner_ty = cx.typeck_results().expr_ty(inner_expr);
- if expr_ty == inner_ty;
- then {
- span_lint_and_sugg(
- cx,
- NEEDLESS_QUESTION_MARK,
- expr.span,
- "question mark operator is useless here",
- &format!("try removing question mark and `{sugg_remove}`"),
- format!("{}", snippet(cx, inner_expr.span, r#""...""#)),
- Applicability::MachineApplicable,
- );
}
+ && let ExprKind::Match(inner_expr_with_q, _, MatchSource::TryDesugar(_)) = &arg.kind
+ && let ExprKind::Call(called, [inner_expr]) = &inner_expr_with_q.kind
+ && let ExprKind::Path(QPath::LangItem(LangItem::TryTraitBranch, ..)) = &called.kind
+ && expr.span.eq_ctxt(inner_expr.span)
+ && let expr_ty = cx.typeck_results().expr_ty(expr)
+ && let inner_ty = cx.typeck_results().expr_ty(inner_expr)
+ && expr_ty == inner_ty
+ {
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_QUESTION_MARK,
+ expr.span,
+ "question mark operator is useless here",
+ &format!("try removing question mark and `{sugg_remove}`"),
+ format!("{}", snippet(cx, inner_expr.span, r#""...""#)),
+ Applicability::MachineApplicable,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_update.rs b/src/tools/clippy/clippy_lints/src/needless_update.rs
index f8888d368..6a2893cef 100644
--- a/src/tools/clippy/clippy_lints/src/needless_update.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_update.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs b/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs
index 56c67406d..f7621822b 100644
--- a/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs
+++ b/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::implements_trait;
-use if_chain::if_chain;
use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -46,42 +45,39 @@ declare_lint_pass!(NoNegCompOpForPartialOrd => [NEG_CMP_OP_ON_PARTIAL_ORD]);
impl<'tcx> LateLintPass<'tcx> for NoNegCompOpForPartialOrd {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
- if !in_external_macro(cx.sess(), expr.span);
- if let ExprKind::Unary(UnOp::Not, inner) = expr.kind;
- if let ExprKind::Binary(ref op, left, _) = inner.kind;
- if let BinOpKind::Le | BinOpKind::Ge | BinOpKind::Lt | BinOpKind::Gt = op.node;
+ if !in_external_macro(cx.sess(), expr.span)
+ && let ExprKind::Unary(UnOp::Not, inner) = expr.kind
+ && let ExprKind::Binary(ref op, left, _) = inner.kind
+ && let BinOpKind::Le | BinOpKind::Ge | BinOpKind::Lt | BinOpKind::Gt = op.node
+ {
+ let ty = cx.typeck_results().expr_ty(left);
- then {
- let ty = cx.typeck_results().expr_ty(left);
-
- let implements_ord = {
- if let Some(id) = cx.tcx.get_diagnostic_item(sym::Ord) {
- implements_trait(cx, ty, id, &[])
- } else {
- return;
- }
- };
-
- let implements_partial_ord = {
- if let Some(id) = cx.tcx.lang_items().partial_ord_trait() {
- implements_trait(cx, ty, id, &[ty.into()])
- } else {
- return;
- }
- };
+ let implements_ord = {
+ if let Some(id) = cx.tcx.get_diagnostic_item(sym::Ord) {
+ implements_trait(cx, ty, id, &[])
+ } else {
+ return;
+ }
+ };
- if implements_partial_ord && !implements_ord {
- span_lint(
- cx,
- NEG_CMP_OP_ON_PARTIAL_ORD,
- expr.span,
- "the use of negated comparison operators on partially ordered \
- types produces code that is hard to read and refactor, please \
- consider using the `partial_cmp` method instead, to make it \
- clear that the two values could be incomparable",
- );
+ let implements_partial_ord = {
+ if let Some(id) = cx.tcx.lang_items().partial_ord_trait() {
+ implements_trait(cx, ty, id, &[ty.into()])
+ } else {
+ return;
}
+ };
+
+ if implements_partial_ord && !implements_ord {
+ span_lint(
+ cx,
+ NEG_CMP_OP_ON_PARTIAL_ORD,
+ expr.span,
+ "the use of negated comparison operators on partially ordered \
+ types produces code that is hard to read and refactor, please \
+ consider using the `partial_cmp` method instead, to make it \
+ clear that the two values could be incomparable",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/neg_multiply.rs b/src/tools/clippy/clippy_lints/src/neg_multiply.rs
index 8b69f94cb..f84d9fadb 100644
--- a/src/tools/clippy/clippy_lints/src/neg_multiply.rs
+++ b/src/tools/clippy/clippy_lints/src/neg_multiply.rs
@@ -2,12 +2,11 @@ use clippy_utils::consts::{self, Constant};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::has_enclosing_paren;
-use if_chain::if_chain;
use rustc_ast::util::parser::PREC_PREFIX;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -53,28 +52,25 @@ impl<'tcx> LateLintPass<'tcx> for NegMultiply {
}
fn check_mul(cx: &LateContext<'_>, span: Span, lit: &Expr<'_>, exp: &Expr<'_>) {
- if_chain! {
- if let ExprKind::Lit(l) = lit.kind;
- if consts::lit_to_mir_constant(&l.node, cx.typeck_results().expr_ty_opt(lit)) == Constant::Int(1);
- if cx.typeck_results().expr_ty(exp).is_integral();
-
- then {
- let mut applicability = Applicability::MachineApplicable;
- let (snip, from_macro) = snippet_with_context(cx, exp.span, span.ctxt(), "..", &mut applicability);
- let suggestion = if !from_macro && exp.precedence().order() < PREC_PREFIX && !has_enclosing_paren(&snip) {
- format!("-({snip})")
- } else {
- format!("-{snip}")
- };
- span_lint_and_sugg(
- cx,
- NEG_MULTIPLY,
- span,
- "this multiplication by -1 can be written more succinctly",
- "consider using",
- suggestion,
- applicability,
- );
- }
+ if let ExprKind::Lit(l) = lit.kind
+ && consts::lit_to_mir_constant(&l.node, cx.typeck_results().expr_ty_opt(lit)) == Constant::Int(1)
+ && cx.typeck_results().expr_ty(exp).is_integral()
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let (snip, from_macro) = snippet_with_context(cx, exp.span, span.ctxt(), "..", &mut applicability);
+ let suggestion = if !from_macro && exp.precedence().order() < PREC_PREFIX && !has_enclosing_paren(&snip) {
+ format!("-({snip})")
+ } else {
+ format!("-{snip}")
+ };
+ span_lint_and_sugg(
+ cx,
+ NEG_MULTIPLY,
+ span,
+ "this multiplication by -1 can be written more succinctly",
+ "consider using",
+ suggestion,
+ applicability,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs
index f7f9dccfb..9de6ad421 100644
--- a/src/tools/clippy/clippy_lints/src/new_without_default.rs
+++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs
@@ -2,13 +2,12 @@ use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::return_ty;
use clippy_utils::source::snippet;
use clippy_utils::sugg::DiagnosticExt;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::HirIdSet;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -93,73 +92,69 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault {
// an impl of `Default`
return;
}
- if_chain! {
- if sig.decl.inputs.is_empty();
- if name == sym::new;
- if cx.effective_visibilities.is_reachable(impl_item.owner_id.def_id);
- let self_def_id = cx.tcx.hir().get_parent_item(id.into());
- let self_ty = cx.tcx.type_of(self_def_id).instantiate_identity();
- if self_ty == return_ty(cx, id);
- if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default);
- then {
- if self.impling_types.is_none() {
- let mut impls = HirIdSet::default();
- cx.tcx.for_each_impl(default_trait_id, |d| {
- let ty = cx.tcx.type_of(d).instantiate_identity();
- if let Some(ty_def) = ty.ty_adt_def() {
- if let Some(local_def_id) = ty_def.did().as_local() {
- impls.insert(cx.tcx.hir().local_def_id_to_hir_id(local_def_id));
- }
+ if sig.decl.inputs.is_empty()
+ && name == sym::new
+ && cx.effective_visibilities.is_reachable(impl_item.owner_id.def_id)
+ && let self_def_id = cx.tcx.hir().get_parent_item(id.into())
+ && let self_ty = cx.tcx.type_of(self_def_id).instantiate_identity()
+ && self_ty == return_ty(cx, id)
+ && let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default)
+ {
+ if self.impling_types.is_none() {
+ let mut impls = HirIdSet::default();
+ cx.tcx.for_each_impl(default_trait_id, |d| {
+ let ty = cx.tcx.type_of(d).instantiate_identity();
+ if let Some(ty_def) = ty.ty_adt_def() {
+ if let Some(local_def_id) = ty_def.did().as_local() {
+ impls.insert(cx.tcx.local_def_id_to_hir_id(local_def_id));
}
- });
- self.impling_types = Some(impls);
- }
-
- // Check if a Default implementation exists for the Self type, regardless of
- // generics
- if_chain! {
- if let Some(ref impling_types) = self.impling_types;
- let self_def = cx.tcx.type_of(self_def_id).instantiate_identity();
- if let Some(self_def) = self_def.ty_adt_def();
- if let Some(self_local_did) = self_def.did().as_local();
- let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_local_did);
- if impling_types.contains(&self_id);
- then {
- return;
}
- }
+ });
+ self.impling_types = Some(impls);
+ }
- let generics_sugg = snippet(cx, generics.span, "");
- let where_clause_sugg = if generics.has_where_clause_predicates {
- format!("\n{}\n", snippet(cx, generics.where_clause_span, ""))
- } else {
- String::new()
- };
- let self_ty_fmt = self_ty.to_string();
- let self_type_snip = snippet(cx, impl_self_ty.span, &self_ty_fmt);
- span_lint_hir_and_then(
- cx,
- NEW_WITHOUT_DEFAULT,
- id.into(),
- impl_item.span,
- &format!(
- "you should consider adding a `Default` implementation for `{self_type_snip}`"
- ),
- |diag| {
- diag.suggest_prepend_item(
- cx,
- item.span,
- "try adding this",
- &create_new_without_default_suggest_msg(
- &self_type_snip,
- &generics_sugg,
- &where_clause_sugg
- ),
- Applicability::MachineApplicable,
- );
- },
- );
+ // Check if a Default implementation exists for the Self type, regardless of
+ // generics
+ if let Some(ref impling_types) = self.impling_types
+ && let self_def = cx.tcx.type_of(self_def_id).instantiate_identity()
+ && let Some(self_def) = self_def.ty_adt_def()
+ && let Some(self_local_did) = self_def.did().as_local()
+ && let self_id = cx.tcx.local_def_id_to_hir_id(self_local_did)
+ && impling_types.contains(&self_id)
+ {
+ return;
}
+
+ let generics_sugg = snippet(cx, generics.span, "");
+ let where_clause_sugg = if generics.has_where_clause_predicates {
+ format!("\n{}\n", snippet(cx, generics.where_clause_span, ""))
+ } else {
+ String::new()
+ };
+ let self_ty_fmt = self_ty.to_string();
+ let self_type_snip = snippet(cx, impl_self_ty.span, &self_ty_fmt);
+ span_lint_hir_and_then(
+ cx,
+ NEW_WITHOUT_DEFAULT,
+ id.into(),
+ impl_item.span,
+ &format!(
+ "you should consider adding a `Default` implementation for `{self_type_snip}`"
+ ),
+ |diag| {
+ diag.suggest_prepend_item(
+ cx,
+ item.span,
+ "try adding this",
+ &create_new_without_default_suggest_msg(
+ &self_type_snip,
+ &generics_sugg,
+ &where_clause_sugg,
+ ),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs
index 3a28e511f..5978da831 100644
--- a/src/tools/clippy/clippy_lints/src/no_effect.rs
+++ b/src/tools/clippy/clippy_lints/src/no_effect.rs
@@ -10,7 +10,7 @@ use rustc_hir::{
use rustc_infer::infer::TyCtxtInferExt as _;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::ops::Deref;
declare_clippy_lint! {
@@ -87,6 +87,17 @@ impl<'tcx> LateLintPass<'tcx> for NoEffect {
fn check_no_effect(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
if let StmtKind::Semi(expr) = stmt.kind {
+        // Check `expr.span.from_expansion()` first so statements from macro expansions are rejected early.
+ if expr.span.from_expansion() {
+ return false;
+ }
+ let expr = peel_blocks(expr);
+
+ if is_operator_overridden(cx, expr) {
+ // Return `true`, to prevent `check_unnecessary_operation` from
+ // linting on this statement as well.
+ return true;
+ }
if has_no_effect(cx, expr) {
span_lint_hir_and_then(
cx,
@@ -132,34 +143,47 @@ fn check_no_effect(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
return true;
}
} else if let StmtKind::Local(local) = stmt.kind {
- if_chain! {
- if !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id);
- if let Some(init) = local.init;
- if local.els.is_none();
- if !local.pat.span.from_expansion();
- if has_no_effect(cx, init);
- if let PatKind::Binding(_, _, ident, _) = local.pat.kind;
- if ident.name.to_ident_string().starts_with('_');
- then {
- span_lint_hir(
- cx,
- NO_EFFECT_UNDERSCORE_BINDING,
- init.hir_id,
- stmt.span,
- "binding to `_` prefixed variable with no side-effect"
- );
- return true;
- }
+ if !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id)
+ && let Some(init) = local.init
+ && local.els.is_none()
+ && !local.pat.span.from_expansion()
+ && has_no_effect(cx, init)
+ && let PatKind::Binding(_, _, ident, _) = local.pat.kind
+ && ident.name.to_ident_string().starts_with('_')
+ {
+ span_lint_hir(
+ cx,
+ NO_EFFECT_UNDERSCORE_BINDING,
+ init.hir_id,
+ stmt.span,
+ "binding to `_` prefixed variable with no side-effect",
+ );
+ return true;
}
}
false
}
-fn has_no_effect(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- if expr.span.from_expansion() {
- return false;
+fn is_operator_overridden(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+    // It is very hard or impossible for this lint to check whether an overridden operator has a side effect.
+    // So, this function assumes a user-defined operator is overridden with a side effect.
+    // The definition of a user-defined structure here is an ADT type.
+    // Although this weakens the ability of this lint, fewer erroneous lint fixes happen.
+ match expr.kind {
+ ExprKind::Binary(..) | ExprKind::Unary(..) => {
+            // No need to check the types of `lhs` and `rhs`,
+            // because if the operator is overridden, at least one operand is an ADT type.
+
+            // reference: rust/compiler/rustc_middle/src/ty/typeck_results.rs: `is_method_call`.
+            // Use this function to check whether the operator is overridden in `ExprKind::{Binary, Unary}`.
+ cx.typeck_results().is_method_call(expr)
+ },
+ _ => false,
}
- match peel_blocks(expr).kind {
+}
+
+fn has_no_effect(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ match expr.kind {
ExprKind::Lit(..) | ExprKind::Closure { .. } => true,
ExprKind::Path(..) => !has_drop(cx, cx.typeck_results().expr_ty(expr)),
ExprKind::Index(a, b, _) | ExprKind::Binary(_, a, b) => has_no_effect(cx, a) && has_no_effect(cx, b),
@@ -199,63 +223,60 @@ fn has_no_effect(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
}
fn check_unnecessary_operation(cx: &LateContext<'_>, stmt: &Stmt<'_>) {
- if_chain! {
- if let StmtKind::Semi(expr) = stmt.kind;
- let ctxt = stmt.span.ctxt();
- if expr.span.ctxt() == ctxt;
- if let Some(reduced) = reduce_expression(cx, expr);
- if !in_external_macro(cx.sess(), stmt.span);
- if reduced.iter().all(|e| e.span.ctxt() == ctxt);
- then {
- if let ExprKind::Index(..) = &expr.kind {
- let snippet = if let (Some(arr), Some(func)) =
- (snippet_opt(cx, reduced[0].span), snippet_opt(cx, reduced[1].span))
- {
+ if let StmtKind::Semi(expr) = stmt.kind
+ && let ctxt = stmt.span.ctxt()
+ && expr.span.ctxt() == ctxt
+ && let Some(reduced) = reduce_expression(cx, expr)
+ && !in_external_macro(cx.sess(), stmt.span)
+ && reduced.iter().all(|e| e.span.ctxt() == ctxt)
+ {
+ if let ExprKind::Index(..) = &expr.kind {
+ let snippet =
+ if let (Some(arr), Some(func)) = (snippet_opt(cx, reduced[0].span), snippet_opt(cx, reduced[1].span)) {
format!("assert!({}.len() > {});", &arr, &func)
} else {
return;
};
- span_lint_hir_and_then(
- cx,
- UNNECESSARY_OPERATION,
- expr.hir_id,
- stmt.span,
- "unnecessary operation",
- |diag| {
- diag.span_suggestion(
- stmt.span,
- "statement can be written as",
- snippet,
- Applicability::MaybeIncorrect,
- );
- },
- );
- } else {
- let mut snippet = String::new();
- for e in reduced {
- if let Some(snip) = snippet_opt(cx, e.span) {
- snippet.push_str(&snip);
- snippet.push(';');
- } else {
- return;
- }
+ span_lint_hir_and_then(
+ cx,
+ UNNECESSARY_OPERATION,
+ expr.hir_id,
+ stmt.span,
+ "unnecessary operation",
+ |diag| {
+ diag.span_suggestion(
+ stmt.span,
+ "statement can be written as",
+ snippet,
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ } else {
+ let mut snippet = String::new();
+ for e in reduced {
+ if let Some(snip) = snippet_opt(cx, e.span) {
+ snippet.push_str(&snip);
+ snippet.push(';');
+ } else {
+ return;
}
- span_lint_hir_and_then(
- cx,
- UNNECESSARY_OPERATION,
- expr.hir_id,
- stmt.span,
- "unnecessary operation",
- |diag| {
- diag.span_suggestion(
- stmt.span,
- "statement can be reduced to",
- snippet,
- Applicability::MachineApplicable,
- );
- },
- );
}
+ span_lint_hir_and_then(
+ cx,
+ UNNECESSARY_OPERATION,
+ expr.hir_id,
+ stmt.span,
+ "unnecessary operation",
+ |diag| {
+ diag.span_suggestion(
+ stmt.span,
+ "statement can be reduced to",
+ snippet,
+ Applicability::MachineApplicable,
+ );
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs b/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs
index 04d750148..8d5a523fd 100644
--- a/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs
+++ b/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs
@@ -3,7 +3,7 @@ use clippy_utils::source::snippet_with_applicability;
use rustc_errors::Applicability;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{BytePos, Pos};
use rustc_target::spec::abi::Abi;
diff --git a/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs b/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs
index 9689f63a0..63050080a 100644
--- a/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs
+++ b/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs
@@ -6,7 +6,7 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::{Expr, ExprKind, ImplItem, ImplItemKind, LangItem, Node, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::EarlyBinder;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
use rustc_span::symbol::kw;
diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs
index 54cec066b..4013cb345 100644
--- a/src/tools/clippy/clippy_lints/src/non_copy_const.rs
+++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs
@@ -7,7 +7,6 @@ use std::ptr;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::macros::macro_backtrace;
use clippy_utils::{def_path_def_ids, in_constant};
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
@@ -19,7 +18,7 @@ use rustc_middle::mir::interpret::{ErrorHandled, EvalToValTreeResult, GlobalId};
use rustc_middle::query::Key;
use rustc_middle::ty::adjustment::Adjust;
use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{sym, InnerSpan, Span};
use rustc_target::abi::VariantIdx;
@@ -386,15 +385,14 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
of_trait: Some(of_trait_ref),
..
}) => {
- if_chain! {
+ if let Some(of_trait_def_id) = of_trait_ref.trait_def_id()
// Lint a trait impl item only when the definition is a generic type,
// assuming an assoc const is not meant to be an interior mutable type.
- if let Some(of_trait_def_id) = of_trait_ref.trait_def_id();
- if let Some(of_assoc_item) = cx
+ && let Some(of_assoc_item) = cx
.tcx
.associated_item(impl_item.owner_id)
- .trait_item_def_id;
- if cx
+ .trait_item_def_id
+ && cx
.tcx
.layout_of(cx.tcx.param_env(of_trait_def_id).and(
// Normalize assoc types because ones originated from generic params
@@ -405,23 +403,17 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
cx.tcx.type_of(of_assoc_item).instantiate_identity(),
),
))
- .is_err();
+ .is_err()
// If there were a function like `has_frozen_variant` described above,
// we should use here as a frozen variant is a potential to be frozen
// similar to unknown layouts.
// e.g. `layout_of(...).is_err() || has_frozen_variant(...);`
- let ty = cx.tcx.type_of(impl_item.owner_id).instantiate_identity();
- let normalized = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
- if !self.is_ty_ignored(ty) && Self::is_unfrozen(cx, normalized);
- if self.is_value_unfrozen_poly(cx, *body_id, normalized);
- then {
- lint(
- cx,
- Source::Assoc {
- item: impl_item.span,
- },
- );
- }
+ && let ty = cx.tcx.type_of(impl_item.owner_id).instantiate_identity()
+ && let normalized = cx.tcx.normalize_erasing_regions(cx.param_env, ty)
+ && !self.is_ty_ignored(ty) && Self::is_unfrozen(cx, normalized)
+ && self.is_value_unfrozen_poly(cx, *body_id, normalized)
+ {
+ lint(cx, Source::Assoc { item: impl_item.span });
}
},
ItemKind::Impl(Impl { of_trait: None, .. }) => {
@@ -462,7 +454,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
if parent_id == cur_expr.hir_id {
break;
}
- if let Some(Node::Expr(parent_expr)) = cx.tcx.hir().find(parent_id) {
+ if let Some(Node::Expr(parent_expr)) = cx.tcx.opt_hir_node(parent_id) {
match &parent_expr.kind {
ExprKind::AddrOf(..) => {
// `&e` => `e` must be referenced.
diff --git a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
index 61622034d..ba9230dab 100644
--- a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
+++ b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
@@ -5,9 +5,9 @@ use rustc_ast::ast::{
use rustc_ast::visit::{walk_block, walk_expr, walk_pat, Visitor};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{sym, Span};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::{Ident, Symbol};
+use rustc_span::{sym, Span};
use std::cmp::Ordering;
declare_clippy_lint! {
@@ -341,7 +341,9 @@ impl<'a, 'tcx> Visitor<'tcx> for SimilarNamesLocalVisitor<'a, 'tcx> {
self.apply(|this| {
SimilarNamesNameVisitor(this).visit_pat(&arm.pat);
- this.apply(|this| walk_expr(this, &arm.body));
+ if let Some(body) = &arm.body {
+ this.apply(|this| walk_expr(this, body));
+ }
});
self.check_single_char_names();
diff --git a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
index e94e45899..49e9e2c00 100644
--- a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
+++ b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{snippet_opt, snippet_with_applicability};
use clippy_utils::{match_def_path, paths};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -44,38 +43,36 @@ impl<'tcx> LateLintPass<'tcx> for NonOctalUnixPermissions {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
match &expr.kind {
ExprKind::MethodCall(path, func, [param], _) => {
- if_chain! {
- if let Some(adt) = cx.typeck_results().expr_ty(func).peel_refs().ty_adt_def();
- if (path.ident.name == sym!(mode)
- && matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::FsOpenOptions | sym::DirBuilder)))
+ if let Some(adt) = cx.typeck_results().expr_ty(func).peel_refs().ty_adt_def()
+ && ((path.ident.name == sym!(mode)
+ && matches!(
+ cx.tcx.get_diagnostic_name(adt.did()),
+ Some(sym::FsOpenOptions | sym::DirBuilder)
+ ))
|| (path.ident.name == sym!(set_mode)
- && cx.tcx.is_diagnostic_item(sym::FsPermissions, adt.did()));
- if let ExprKind::Lit(_) = param.kind;
- if param.span.eq_ctxt(expr.span);
+ && cx.tcx.is_diagnostic_item(sym::FsPermissions, adt.did())))
+ && let ExprKind::Lit(_) = param.kind
+ && param.span.eq_ctxt(expr.span)
+ {
+ let Some(snip) = snippet_opt(cx, param.span) else {
+ return;
+ };
- then {
- let Some(snip) = snippet_opt(cx, param.span) else {
- return
- };
-
- if !snip.starts_with("0o") {
- show_error(cx, param);
- }
+ if !snip.starts_with("0o") {
+ show_error(cx, param);
}
}
},
ExprKind::Call(func, [param]) => {
- if_chain! {
- if let ExprKind::Path(ref path) = func.kind;
- if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id();
- if match_def_path(cx, def_id, &paths::PERMISSIONS_FROM_MODE);
- if let ExprKind::Lit(_) = param.kind;
- if param.span.eq_ctxt(expr.span);
- if let Some(snip) = snippet_opt(cx, param.span);
- if !snip.starts_with("0o");
- then {
- show_error(cx, param);
- }
+ if let ExprKind::Path(ref path) = func.kind
+ && let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id()
+ && match_def_path(cx, def_id, &paths::PERMISSIONS_FROM_MODE)
+ && let ExprKind::Lit(_) = param.kind
+ && param.span.eq_ctxt(expr.span)
+ && let Some(snip) = snippet_opt(cx, param.span)
+ && !snip.starts_with("0o")
+ {
+ show_error(cx, param);
}
},
_ => {},
diff --git a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
index 62ef48c8a..793a3a954 100644
--- a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
+++ b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
@@ -8,7 +8,7 @@ use rustc_hir::{FieldDef, Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, GenericArgKind, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -81,73 +81,73 @@ impl<'tcx> LateLintPass<'tcx> for NonSendFieldInSendTy {
// We start from `Send` impl instead of `check_field_def()` because
// single `AdtDef` may have multiple `Send` impls due to generic
// parameters, and the lint is much easier to implement in this way.
- if_chain! {
- if !in_external_macro(cx.tcx.sess, item.span);
- if let Some(send_trait) = cx.tcx.get_diagnostic_item(sym::Send);
- if let ItemKind::Impl(hir_impl) = &item.kind;
- if let Some(trait_ref) = &hir_impl.of_trait;
- if let Some(trait_id) = trait_ref.trait_def_id();
- if send_trait == trait_id;
- if hir_impl.polarity == ImplPolarity::Positive;
- if let Some(ty_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id);
- if let self_ty = ty_trait_ref.instantiate_identity().self_ty();
- if let ty::Adt(adt_def, impl_trait_args) = self_ty.kind();
- then {
- let mut non_send_fields = Vec::new();
-
- let hir_map = cx.tcx.hir();
- for variant in adt_def.variants() {
- for field in &variant.fields {
- if_chain! {
- if let Some(field_hir_id) = field
- .did
- .as_local()
- .map(|local_def_id| hir_map.local_def_id_to_hir_id(local_def_id));
- if !is_lint_allowed(cx, NON_SEND_FIELDS_IN_SEND_TY, field_hir_id);
- if let field_ty = field.ty(cx.tcx, impl_trait_args);
- if !ty_allowed_in_send(cx, field_ty, send_trait);
- if let Node::Field(field_def) = hir_map.get(field_hir_id);
- then {
- non_send_fields.push(NonSendField {
- def: field_def,
- ty: field_ty,
- generic_params: collect_generic_params(field_ty),
- })
- }
- }
+ if !in_external_macro(cx.tcx.sess, item.span)
+ && let Some(send_trait) = cx.tcx.get_diagnostic_item(sym::Send)
+ && let ItemKind::Impl(hir_impl) = &item.kind
+ && let Some(trait_ref) = &hir_impl.of_trait
+ && let Some(trait_id) = trait_ref.trait_def_id()
+ && send_trait == trait_id
+ && hir_impl.polarity == ImplPolarity::Positive
+ && let Some(ty_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id)
+ && let self_ty = ty_trait_ref.instantiate_identity().self_ty()
+ && let ty::Adt(adt_def, impl_trait_args) = self_ty.kind()
+ {
+ let mut non_send_fields = Vec::new();
+
+ for variant in adt_def.variants() {
+ for field in &variant.fields {
+ if let Some(field_hir_id) = field
+ .did
+ .as_local()
+ .map(|local_def_id| cx.tcx.local_def_id_to_hir_id(local_def_id))
+ && !is_lint_allowed(cx, NON_SEND_FIELDS_IN_SEND_TY, field_hir_id)
+ && let field_ty = field.ty(cx.tcx, impl_trait_args)
+ && !ty_allowed_in_send(cx, field_ty, send_trait)
+ && let Node::Field(field_def) = cx.tcx.hir_node(field_hir_id)
+ {
+ non_send_fields.push(NonSendField {
+ def: field_def,
+ ty: field_ty,
+ generic_params: collect_generic_params(field_ty),
+ });
}
}
+ }
- if !non_send_fields.is_empty() {
- span_lint_and_then(
- cx,
- NON_SEND_FIELDS_IN_SEND_TY,
- item.span,
- &format!(
- "some fields in `{}` are not safe to be sent to another thread",
- snippet(cx, hir_impl.self_ty.span, "Unknown")
- ),
- |diag| {
- for field in non_send_fields {
- diag.span_note(
- field.def.span,
- format!("it is not safe to send field `{}` to another thread", field.def.ident.name),
- );
-
- match field.generic_params.len() {
- 0 => diag.help("use a thread-safe type that implements `Send`"),
- 1 if is_ty_param(field.ty) => diag.help(format!("add `{}: Send` bound in `Send` impl", field.ty)),
- _ => diag.help(format!(
- "add bounds on type parameter{} `{}` that satisfy `{}: Send`",
- if field.generic_params.len() > 1 { "s" } else { "" },
- field.generic_params_string(),
- snippet(cx, field.def.ty.span, "Unknown"),
- )),
- };
- }
- },
- );
- }
+ if !non_send_fields.is_empty() {
+ span_lint_and_then(
+ cx,
+ NON_SEND_FIELDS_IN_SEND_TY,
+ item.span,
+ &format!(
+ "some fields in `{}` are not safe to be sent to another thread",
+ snippet(cx, hir_impl.self_ty.span, "Unknown")
+ ),
+ |diag| {
+ for field in non_send_fields {
+ diag.span_note(
+ field.def.span,
+ format!(
+ "it is not safe to send field `{}` to another thread",
+ field.def.ident.name
+ ),
+ );
+
+ match field.generic_params.len() {
+ 0 => diag.help("use a thread-safe type that implements `Send`"),
+ 1 if is_ty_param(field.ty) => {
+ diag.help(format!("add `{}: Send` bound in `Send` impl", field.ty))
+ },
+ _ => diag.help(format!(
+ "add bounds on type parameter{} `{}` that satisfy `{}: Send`",
+ if field.generic_params.len() > 1 { "s" } else { "" },
+ field.generic_params_string(),
+ snippet(cx, field.def.ty.span, "Unknown"),
+ )),
+ };
+ }
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
index 11c3a5417..1c6069e9c 100644
--- a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
+++ b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
@@ -1,13 +1,12 @@
use clippy_config::types::MacroMatcher;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_opt;
-use if_chain::if_chain;
use rustc_ast::ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::hygiene::{ExpnKind, MacroKind};
use rustc_span::Span;
@@ -34,17 +33,17 @@ declare_clippy_lint! {
}
/// The (callsite span, (open brace, close brace), source snippet)
-type MacroInfo<'a> = (Span, &'a (String, String), String);
+type MacroInfo = (Span, (char, char), String);
-#[derive(Clone, Debug, Default)]
+#[derive(Debug)]
pub struct MacroBraces {
- macro_braces: FxHashMap<String, (String, String)>,
+ macro_braces: FxHashMap<String, (char, char)>,
done: FxHashSet<Span>,
}
impl MacroBraces {
- pub fn new(conf: &FxHashSet<MacroMatcher>) -> Self {
- let macro_braces = macro_braces(conf.clone());
+ pub fn new(conf: &[MacroMatcher]) -> Self {
+ let macro_braces = macro_braces(conf);
Self {
macro_braces,
done: FxHashSet::default(),
@@ -84,7 +83,7 @@ impl EarlyLintPass for MacroBraces {
}
}
-fn is_offending_macro<'a>(cx: &EarlyContext<'_>, span: Span, mac_braces: &'a MacroBraces) -> Option<MacroInfo<'a>> {
+fn is_offending_macro(cx: &EarlyContext<'_>, span: Span, mac_braces: &MacroBraces) -> Option<MacroInfo> {
let unnested_or_local = || {
!span.ctxt().outer_expn_data().call_site.from_expansion()
|| span
@@ -93,28 +92,26 @@ fn is_offending_macro<'a>(cx: &EarlyContext<'_>, span: Span, mac_braces: &'a Mac
.map_or(false, |e| e.macro_def_id.map_or(false, DefId::is_local))
};
let span_call_site = span.ctxt().outer_expn_data().call_site;
- if_chain! {
- if let ExpnKind::Macro(MacroKind::Bang, mac_name) = span.ctxt().outer_expn_data().kind;
- let name = mac_name.as_str();
- if let Some(braces) = mac_braces.macro_braces.get(name);
- if let Some(snip) = snippet_opt(cx, span_call_site);
+ if let ExpnKind::Macro(MacroKind::Bang, mac_name) = span.ctxt().outer_expn_data().kind
+ && let name = mac_name.as_str()
+ && let Some(&braces) = mac_braces.macro_braces.get(name)
+ && let Some(snip) = snippet_opt(cx, span_call_site)
// we must check only invocation sites
// https://github.com/rust-lang/rust-clippy/issues/7422
- if snip.starts_with(&format!("{name}!"));
- if unnested_or_local();
+ && snip.starts_with(&format!("{name}!"))
+ && unnested_or_local()
// make formatting consistent
- let c = snip.replace(' ', "");
- if !c.starts_with(&format!("{name}!{}", braces.0));
- if !mac_braces.done.contains(&span_call_site);
- then {
- Some((span_call_site, braces, snip))
- } else {
- None
- }
+ && let c = snip.replace(' ', "")
+ && !c.starts_with(&format!("{name}!{}", braces.0))
+ && !mac_braces.done.contains(&span_call_site)
+ {
+ Some((span_call_site, braces, snip))
+ } else {
+ None
}
}
-fn emit_help(cx: &EarlyContext<'_>, snip: &str, braces: &(String, String), span: Span) {
+fn emit_help(cx: &EarlyContext<'_>, snip: &str, (open, close): (char, char), span: Span) {
if let Some((macro_name, macro_args_str)) = snip.split_once('!') {
let mut macro_args = macro_args_str.trim().to_string();
// now remove the wrong braces
@@ -126,67 +123,31 @@ fn emit_help(cx: &EarlyContext<'_>, snip: &str, braces: &(String, String), span:
span,
&format!("use of irregular braces for `{macro_name}!` macro"),
"consider writing",
- format!("{macro_name}!{}{macro_args}{}", braces.0, braces.1),
+ format!("{macro_name}!{open}{macro_args}{close}"),
Applicability::MachineApplicable,
);
}
}
-fn macro_braces(conf: FxHashSet<MacroMatcher>) -> FxHashMap<String, (String, String)> {
- let mut braces = vec![
- macro_matcher!(
- name: "print",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "println",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "eprint",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "eprintln",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "write",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "writeln",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "format",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "format_args",
- braces: ("(", ")"),
- ),
- macro_matcher!(
- name: "vec",
- braces: ("[", "]"),
- ),
- macro_matcher!(
- name: "matches",
- braces: ("(", ")"),
- ),
- ]
- .into_iter()
- .collect::<FxHashMap<_, _>>();
+fn macro_braces(conf: &[MacroMatcher]) -> FxHashMap<String, (char, char)> {
+ let mut braces = FxHashMap::from_iter(
+ [
+ ("print", ('(', ')')),
+ ("println", ('(', ')')),
+ ("eprint", ('(', ')')),
+ ("eprintln", ('(', ')')),
+ ("write", ('(', ')')),
+ ("writeln", ('(', ')')),
+ ("format", ('(', ')')),
+ ("format_args", ('(', ')')),
+ ("vec", ('[', ']')),
+ ("matches", ('(', ')')),
+ ]
+ .map(|(k, v)| (k.to_string(), v)),
+ );
// We want users items to override any existing items
for it in conf {
- braces.insert(it.name, it.braces);
+ braces.insert(it.name.clone(), it.braces);
}
braces
}
-
-macro_rules! macro_matcher {
- (name: $name:expr, braces: ($open:expr, $close:expr) $(,)?) => {
- ($name.to_owned(), ($open.to_owned(), $close.to_owned()))
- };
-}
-pub(crate) use macro_matcher;
diff --git a/src/tools/clippy/clippy_lints/src/octal_escapes.rs b/src/tools/clippy/clippy_lints/src/octal_escapes.rs
index 0faf4ce3d..8822dfeed 100644
--- a/src/tools/clippy/clippy_lints/src/octal_escapes.rs
+++ b/src/tools/clippy/clippy_lints/src/octal_escapes.rs
@@ -4,7 +4,7 @@ use rustc_ast::token::{Lit, LitKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
use std::fmt::Write;
diff --git a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
index ef7b36764..d621051ef 100644
--- a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
+++ b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
@@ -8,7 +8,7 @@ use rustc_hir::hir_id::HirIdMap;
use rustc_hir::{Body, Expr, ExprKind, HirId, ImplItem, ImplItemKind, Node, PatKind, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, ConstKind, EarlyBinder, GenericArgKind, GenericArgsRef};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
use std::iter;
diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
index 4c6462b77..c081dec9b 100644
--- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
@@ -7,9 +7,9 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
use rustc_session::impl_lint_pass;
-use rustc_span::{Span, Symbol};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::sym;
+use rustc_span::{Span, Symbol};
use {rustc_ast as ast, rustc_hir as hir};
const HARD_CODED_ALLOWED_BINARY: &[[&str; 2]] = &[["f32", "f32"], ["f64", "f64"], ["std::string::String", "str"]];
diff --git a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
index c4572a09d..2f85130fb 100644
--- a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
@@ -4,7 +4,6 @@ use clippy_utils::ty::implements_trait;
use clippy_utils::visitors::for_each_expr;
use clippy_utils::{binop_traits, eq_expr_value, trait_ref_of_method};
use core::ops::ControlFlow;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
@@ -25,43 +24,40 @@ pub(super) fn check<'tcx>(
let lint = |assignee: &hir::Expr<'_>, rhs: &hir::Expr<'_>| {
let ty = cx.typeck_results().expr_ty(assignee);
let rty = cx.typeck_results().expr_ty(rhs);
- if_chain! {
- if let Some((_, lang_item)) = binop_traits(op.node);
- if let Some(trait_id) = cx.tcx.lang_items().get(lang_item);
- let parent_fn = cx.tcx.hir().get_parent_item(e.hir_id).def_id;
- if trait_ref_of_method(cx, parent_fn)
- .map_or(true, |t| t.path.res.def_id() != trait_id);
- if implements_trait(cx, ty, trait_id, &[rty.into()]);
- then {
- // Primitive types execute assign-ops right-to-left. Every other type is left-to-right.
- if !(ty.is_primitive() && rty.is_primitive()) {
- // TODO: This will have false negatives as it doesn't check if the borrows are
- // actually live at the end of their respective expressions.
- let mut_borrows = mut_borrows_in_expr(cx, assignee);
- let imm_borrows = imm_borrows_in_expr(cx, rhs);
- if mut_borrows.iter().any(|id| imm_borrows.contains(id)) {
- return;
- }
+ if let Some((_, lang_item)) = binop_traits(op.node)
+ && let Some(trait_id) = cx.tcx.lang_items().get(lang_item)
+ && let parent_fn = cx.tcx.hir().get_parent_item(e.hir_id).def_id
+ && trait_ref_of_method(cx, parent_fn).map_or(true, |t| t.path.res.def_id() != trait_id)
+ && implements_trait(cx, ty, trait_id, &[rty.into()])
+ {
+ // Primitive types execute assign-ops right-to-left. Every other type is left-to-right.
+ if !(ty.is_primitive() && rty.is_primitive()) {
+ // TODO: This will have false negatives as it doesn't check if the borrows are
+ // actually live at the end of their respective expressions.
+ let mut_borrows = mut_borrows_in_expr(cx, assignee);
+ let imm_borrows = imm_borrows_in_expr(cx, rhs);
+ if mut_borrows.iter().any(|id| imm_borrows.contains(id)) {
+ return;
}
- span_lint_and_then(
- cx,
- ASSIGN_OP_PATTERN,
- expr.span,
- "manual implementation of an assign operation",
- |diag| {
- if let (Some(snip_a), Some(snip_r)) =
- (snippet_opt(cx, assignee.span), snippet_opt(cx, rhs.span))
- {
- diag.span_suggestion(
- expr.span,
- "replace it with",
- format!("{snip_a} {}= {snip_r}", op.node.as_str()),
- Applicability::MachineApplicable,
- );
- }
- },
- );
}
+ span_lint_and_then(
+ cx,
+ ASSIGN_OP_PATTERN,
+ expr.span,
+ "manual implementation of an assign operation",
+ |diag| {
+ if let (Some(snip_a), Some(snip_r)) =
+ (snippet_opt(cx, assignee.span), snippet_opt(cx, rhs.span))
+ {
+ diag.span_suggestion(
+ expr.span,
+ "replace it with",
+ format!("{snip_a} {}= {snip_r}", op.node.as_str()),
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ );
}
};
diff --git a/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs b/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs
index ec2bb8699..e278cf983 100644
--- a/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs
@@ -3,13 +3,12 @@
use std::cmp::Ordering;
use clippy_utils::consts::{constant, Constant};
-use if_chain::if_chain;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::{Ty, TypeckResults};
-use rustc_span::Span;
use rustc_span::source_map::Spanned;
+use rustc_span::Span;
use clippy_utils::diagnostics::span_lint_and_note;
use clippy_utils::source::snippet;
@@ -24,19 +23,17 @@ fn comparison_to_const<'tcx>(
typeck: &TypeckResults<'tcx>,
expr: &'tcx Expr<'tcx>,
) -> Option<(CmpOp, &'tcx Expr<'tcx>, &'tcx Expr<'tcx>, Constant<'tcx>, Ty<'tcx>)> {
- if_chain! {
- if let ExprKind::Binary(operator, left, right) = expr.kind;
- if let Ok(cmp_op) = CmpOp::try_from(operator.node);
- then {
- match (constant(cx, typeck, left), constant(cx, typeck, right)) {
- (Some(_), Some(_)) => None,
- (_, Some(con)) => Some((cmp_op, left, right, con, typeck.expr_ty(right))),
- (Some(con), _) => Some((cmp_op.reverse(), right, left, con, typeck.expr_ty(left))),
- _ => None,
- }
- } else {
- None
+ if let ExprKind::Binary(operator, left, right) = expr.kind
+ && let Ok(cmp_op) = CmpOp::try_from(operator.node)
+ {
+ match (constant(cx, typeck, left), constant(cx, typeck, right)) {
+ (Some(_), Some(_)) => None,
+ (_, Some(con)) => Some((cmp_op, left, right, con, typeck.expr_ty(right))),
+ (Some(con), _) => Some((cmp_op.reverse(), right, left, con, typeck.expr_ty(left))),
+ _ => None,
}
+ } else {
+ None
}
}
@@ -47,87 +44,89 @@ pub(super) fn check<'tcx>(
right_cond: &'tcx Expr<'tcx>,
span: Span,
) {
- if_chain! {
+ if and_op.node == BinOpKind::And
// Ensure that the binary operator is &&
- if and_op.node == BinOpKind::And;
// Check that both operands to '&&' are themselves a binary operation
// The `comparison_to_const` step also checks this, so this step is just an optimization
- if let ExprKind::Binary(_, _, _) = left_cond.kind;
- if let ExprKind::Binary(_, _, _) = right_cond.kind;
+ && let ExprKind::Binary(_, _, _) = left_cond.kind
+ && let ExprKind::Binary(_, _, _) = right_cond.kind
- let typeck = cx.typeck_results();
+ && let typeck = cx.typeck_results()
// Check that both operands to '&&' compare a non-literal to a literal
- if let Some((left_cmp_op, left_expr, left_const_expr, left_const, left_type)) =
- comparison_to_const(cx, typeck, left_cond);
- if let Some((right_cmp_op, right_expr, right_const_expr, right_const, right_type)) =
- comparison_to_const(cx, typeck, right_cond);
+ && let Some((left_cmp_op, left_expr, left_const_expr, left_const, left_type)) =
+ comparison_to_const(cx, typeck, left_cond)
+ && let Some((right_cmp_op, right_expr, right_const_expr, right_const, right_type)) =
+ comparison_to_const(cx, typeck, right_cond)
- if left_type == right_type;
+ && left_type == right_type
// Check that the same expression is compared in both comparisons
- if SpanlessEq::new(cx).eq_expr(left_expr, right_expr);
+ && SpanlessEq::new(cx).eq_expr(left_expr, right_expr)
- if !left_expr.can_have_side_effects();
+ && !left_expr.can_have_side_effects()
// Compare the two constant expressions
- if let Some(ordering) = Constant::partial_cmp(cx.tcx(), left_type, &left_const, &right_const);
+ && let Some(ordering) = Constant::partial_cmp(cx.tcx(), left_type, &left_const, &right_const)
// Rule out the `x >= 42 && x <= 42` corner case immediately
// Mostly to simplify the implementation, but it is also covered by `clippy::double_comparisons`
- if !matches!(
+ && !matches!(
(&left_cmp_op, &right_cmp_op, ordering),
(CmpOp::Le | CmpOp::Ge, CmpOp::Le | CmpOp::Ge, Ordering::Equal)
- );
-
- then {
- if left_cmp_op.direction() == right_cmp_op.direction() {
- let lhs_str = snippet(cx, left_cond.span, "<lhs>");
- let rhs_str = snippet(cx, right_cond.span, "<rhs>");
- // We already know that either side of `&&` has no effect,
- // but emit a different error message depending on which side it is
- if left_side_is_useless(left_cmp_op, ordering) {
- span_lint_and_note(
- cx,
- REDUNDANT_COMPARISONS,
- span,
- "left-hand side of `&&` operator has no effect",
- Some(left_cond.span.until(right_cond.span)),
- &format!("`if `{rhs_str}` evaluates to true, {lhs_str}` will always evaluate to true as well"),
- );
- } else {
- span_lint_and_note(
- cx,
- REDUNDANT_COMPARISONS,
- span,
- "right-hand side of `&&` operator has no effect",
- Some(and_op.span.to(right_cond.span)),
- &format!("`if `{lhs_str}` evaluates to true, {rhs_str}` will always evaluate to true as well"),
- );
- }
- // We could autofix this error but choose not to,
- // because code triggering this lint probably not behaving correctly in the first place
- }
- else if !comparison_is_possible(left_cmp_op.direction(), ordering) {
- let expr_str = snippet(cx, left_expr.span, "..");
- let lhs_str = snippet(cx, left_const_expr.span, "<lhs>");
- let rhs_str = snippet(cx, right_const_expr.span, "<rhs>");
- let note = match ordering {
- Ordering::Less => format!("since `{lhs_str}` < `{rhs_str}`, the expression evaluates to false for any value of `{expr_str}`"),
- Ordering::Equal => format!("`{expr_str}` cannot simultaneously be greater than and less than `{lhs_str}`"),
- Ordering::Greater => format!("since `{lhs_str}` > `{rhs_str}`, the expression evaluates to false for any value of `{expr_str}`"),
- };
+ )
+ {
+ if left_cmp_op.direction() == right_cmp_op.direction() {
+ let lhs_str = snippet(cx, left_cond.span, "<lhs>");
+ let rhs_str = snippet(cx, right_cond.span, "<rhs>");
+ // We already know that either side of `&&` has no effect,
+ // but emit a different error message depending on which side it is
+ if left_side_is_useless(left_cmp_op, ordering) {
span_lint_and_note(
cx,
- IMPOSSIBLE_COMPARISONS,
+ REDUNDANT_COMPARISONS,
span,
- "boolean expression will never evaluate to 'true'",
- None,
- &note,
+ "left-hand side of `&&` operator has no effect",
+ Some(left_cond.span.until(right_cond.span)),
+ &format!("`if `{rhs_str}` evaluates to true, {lhs_str}` will always evaluate to true as well"),
);
+ } else {
+ span_lint_and_note(
+ cx,
+ REDUNDANT_COMPARISONS,
+ span,
+ "right-hand side of `&&` operator has no effect",
+ Some(and_op.span.to(right_cond.span)),
+ &format!("`if `{lhs_str}` evaluates to true, {rhs_str}` will always evaluate to true as well"),
+ );
+ }
+ // We could autofix this error but choose not to,
+            // because code triggering this lint is probably not behaving correctly in the first place
+ } else if !comparison_is_possible(left_cmp_op.direction(), ordering) {
+ let expr_str = snippet(cx, left_expr.span, "..");
+ let lhs_str = snippet(cx, left_const_expr.span, "<lhs>");
+ let rhs_str = snippet(cx, right_const_expr.span, "<rhs>");
+ let note = match ordering {
+ Ordering::Less => format!(
+ "since `{lhs_str}` < `{rhs_str}`, the expression evaluates to false for any value of `{expr_str}`"
+ ),
+ Ordering::Equal => {
+ format!("`{expr_str}` cannot simultaneously be greater than and less than `{lhs_str}`")
+ },
+ Ordering::Greater => format!(
+ "since `{lhs_str}` > `{rhs_str}`, the expression evaluates to false for any value of `{expr_str}`"
+ ),
};
- }
+ span_lint_and_note(
+ cx,
+ IMPOSSIBLE_COMPARISONS,
+ span,
+ "boolean expression will never evaluate to 'true'",
+ None,
+ &note,
+ );
+ };
}
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
index fd3502ad8..01dd418c3 100644
--- a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
@@ -36,7 +36,7 @@ pub(crate) fn check<'tcx>(
left: &'tcx Expr<'_>,
right: &'tcx Expr<'_>,
) {
- if is_useless_with_eq_exprs(op.into()) && eq_expr_value(cx, left, right) && !is_in_test_function(cx.tcx, e.hir_id) {
+ if is_useless_with_eq_exprs(op) && eq_expr_value(cx, left, right) && !is_in_test_function(cx.tcx, e.hir_id) {
span_lint_and_then(
cx,
EQ_OP,
diff --git a/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs b/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs
index bce6bdcaf..0561739d1 100644
--- a/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs
@@ -2,7 +2,6 @@ use clippy_utils::consts::{constant_with_source, Constant};
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::get_item_name;
use clippy_utils::sugg::Sugg;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
use rustc_lint::LateContext;
@@ -105,14 +104,12 @@ fn is_signum(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
return is_signum(cx, child_expr);
}
- if_chain! {
- if let ExprKind::MethodCall(method_name, self_arg, ..) = expr.kind;
- if sym!(signum) == method_name.ident.name;
- // Check that the receiver of the signum() is a float (expressions[0] is the receiver of
- // the method call)
- then {
- return is_float(cx, self_arg);
- }
+ if let ExprKind::MethodCall(method_name, self_arg, ..) = expr.kind
+ && sym!(signum) == method_name.ident.name
+ // Check that the receiver of the signum() is a float (expressions[0] is the receiver of
+ // the method call)
+ {
+ return is_float(cx, self_arg);
}
false
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs b/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs
index a0a8b6aab..cace85a24 100644
--- a/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::{match_def_path, paths, sugg};
-use if_chain::if_chain;
use rustc_ast::util::parser::AssocOp;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
@@ -24,48 +23,43 @@ pub(crate) fn check<'tcx>(
_ => return,
};
- if_chain! {
+ if let ExprKind::Binary(
// left hand side is a subtraction
- if let ExprKind::Binary(
Spanned {
node: BinOpKind::Sub,
..
},
val_l,
val_r,
- ) = lhs.kind;
+ ) = lhs.kind
// right hand side matches either f32::EPSILON or f64::EPSILON
- if let ExprKind::Path(ref epsilon_path) = rhs.kind;
- if let Res::Def(DefKind::AssocConst, def_id) = cx.qpath_res(epsilon_path, rhs.hir_id);
- if match_def_path(cx, def_id, &paths::F32_EPSILON) || match_def_path(cx, def_id, &paths::F64_EPSILON);
+ && let ExprKind::Path(ref epsilon_path) = rhs.kind
+ && let Res::Def(DefKind::AssocConst, def_id) = cx.qpath_res(epsilon_path, rhs.hir_id)
+ && (match_def_path(cx, def_id, &paths::F32_EPSILON) || match_def_path(cx, def_id, &paths::F64_EPSILON))
// values of the subtractions on the left hand side are of the type float
- let t_val_l = cx.typeck_results().expr_ty(val_l);
- let t_val_r = cx.typeck_results().expr_ty(val_r);
- if let ty::Float(_) = t_val_l.kind();
- if let ty::Float(_) = t_val_r.kind();
-
- then {
- let sug_l = sugg::Sugg::hir(cx, val_l, "..");
- let sug_r = sugg::Sugg::hir(cx, val_r, "..");
- // format the suggestion
- let suggestion = format!("{}.abs()", sugg::make_assoc(AssocOp::Subtract, &sug_l, &sug_r).maybe_par());
- // spans the lint
- span_lint_and_then(
- cx,
- FLOAT_EQUALITY_WITHOUT_ABS,
- expr.span,
- "float equality check without `.abs()`",
- | diag | {
- diag.span_suggestion(
- lhs.span,
- "add `.abs()`",
- suggestion,
- Applicability::MaybeIncorrect,
- );
- }
- );
- }
+ && let t_val_l = cx.typeck_results().expr_ty(val_l)
+ && let t_val_r = cx.typeck_results().expr_ty(val_r)
+ && let ty::Float(_) = t_val_l.kind()
+ && let ty::Float(_) = t_val_r.kind()
+ {
+ let sug_l = sugg::Sugg::hir(cx, val_l, "..");
+ let sug_r = sugg::Sugg::hir(cx, val_r, "..");
+ // format the suggestion
+ let suggestion = format!(
+ "{}.abs()",
+ sugg::make_assoc(AssocOp::Subtract, &sug_l, &sug_r).maybe_par()
+ );
+ // spans the lint
+ span_lint_and_then(
+ cx,
+ FLOAT_EQUALITY_WITHOUT_ABS,
+ expr.span,
+ "float equality check without `.abs()`",
+ |diag| {
+ diag.span_suggestion(lhs.span, "add `.abs()`", suggestion, Applicability::MaybeIncorrect);
+ },
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
index 5eabb349e..fecc5a857 100644
--- a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
@@ -46,7 +46,7 @@ fn lint_misrefactored_assign_op(
if let (Some(snip_a), Some(snip_r)) = (snippet_opt(cx, assignee.span), snippet_opt(cx, rhs_other.span)) {
let a = &sugg::Sugg::hir(cx, assignee, "..");
let r = &sugg::Sugg::hir(cx, rhs, "..");
- let long = format!("{snip_a} = {}", sugg::make_binop(op.into(), a, r));
+ let long = format!("{snip_a} = {}", sugg::make_binop(op, a, r));
diag.span_suggestion(
expr.span,
format!(
diff --git a/src/tools/clippy/clippy_lints/src/operators/mod.rs b/src/tools/clippy/clippy_lints/src/operators/mod.rs
index ee79ea276..4c09c4eea 100644
--- a/src/tools/clippy/clippy_lints/src/operators/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/mod.rs
@@ -25,7 +25,7 @@ pub(crate) mod arithmetic_side_effects;
use rustc_hir::{Body, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs b/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs
index a2c3a4d8b..cb3916484 100644
--- a/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs
@@ -1,7 +1,6 @@
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::sext;
-use if_chain::if_chain;
use rustc_hir::{BinOpKind, Expr};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
@@ -19,15 +18,12 @@ pub(super) fn check<'tcx>(
if op == BinOpKind::Rem {
let lhs_operand = analyze_operand(lhs, cx, e);
let rhs_operand = analyze_operand(rhs, cx, e);
- if_chain! {
- if let Some(lhs_operand) = lhs_operand;
- if let Some(rhs_operand) = rhs_operand;
- then {
- check_const_operands(cx, e, &lhs_operand, &rhs_operand);
- }
- else {
- check_non_const_operands(cx, e, lhs);
- }
+ if let Some(lhs_operand) = lhs_operand
+ && let Some(rhs_operand) = rhs_operand
+ {
+ check_const_operands(cx, e, &lhs_operand, &rhs_operand);
+ } else {
+ check_non_const_operands(cx, e, lhs);
}
};
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
index 932dd470f..7d8aa3f56 100644
--- a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::get_enclosing_block;
use clippy_utils::source::snippet;
use clippy_utils::ty::{implements_trait, is_copy};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::def_id::DefId;
@@ -180,41 +179,33 @@ fn in_impl<'tcx>(
e: &'tcx Expr<'_>,
bin_op: DefId,
) -> Option<(&'tcx rustc_hir::Ty<'tcx>, &'tcx rustc_hir::Ty<'tcx>)> {
- if_chain! {
- if let Some(block) = get_enclosing_block(cx, e.hir_id);
- if let Some(impl_def_id) = cx.tcx.impl_of_method(block.hir_id.owner.to_def_id());
- let item = cx.tcx.hir().expect_item(impl_def_id.expect_local());
- if let ItemKind::Impl(item) = &item.kind;
- if let Some(of_trait) = &item.of_trait;
- if let Some(seg) = of_trait.path.segments.last();
- if let Res::Def(_, trait_id) = seg.res;
- if trait_id == bin_op;
- if let Some(generic_args) = seg.args;
- if let Some(GenericArg::Type(other_ty)) = generic_args.args.last();
-
- then {
- Some((item.self_ty, other_ty))
- }
- else {
- None
- }
+ if let Some(block) = get_enclosing_block(cx, e.hir_id)
+ && let Some(impl_def_id) = cx.tcx.impl_of_method(block.hir_id.owner.to_def_id())
+ && let item = cx.tcx.hir().expect_item(impl_def_id.expect_local())
+ && let ItemKind::Impl(item) = &item.kind
+ && let Some(of_trait) = &item.of_trait
+ && let Some(seg) = of_trait.path.segments.last()
+ && let Res::Def(_, trait_id) = seg.res
+ && trait_id == bin_op
+ && let Some(generic_args) = seg.args
+ && let Some(GenericArg::Type(other_ty)) = generic_args.args.last()
+ {
+ Some((item.self_ty, other_ty))
+ } else {
+ None
}
}
fn are_equal(cx: &LateContext<'_>, middle_ty: Ty<'_>, hir_ty: &rustc_hir::Ty<'_>) -> bool {
- if_chain! {
- if let ty::Adt(adt_def, _) = middle_ty.kind();
- if let Some(local_did) = adt_def.did().as_local();
- let item = cx.tcx.hir().expect_item(local_did);
- let middle_ty_id = item.owner_id.to_def_id();
- if let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind;
- if let Res::Def(_, hir_ty_id) = path.res;
-
- then {
- hir_ty_id == middle_ty_id
- }
- else {
- false
- }
+ if let ty::Adt(adt_def, _) = middle_ty.kind()
+ && let Some(local_did) = adt_def.did().as_local()
+ && let item = cx.tcx.hir().expect_item(local_did)
+ && let middle_ty_id = item.owner_id.to_def_id()
+ && let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind
+ && let Res::Def(_, hir_ty_id) = path.res
+ {
+ hir_ty_id == middle_ty_id
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs b/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs
index 1229c202f..9db2e2463 100644
--- a/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_opt;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
@@ -22,22 +21,20 @@ pub(super) fn check<'tcx>(
_ => (left, right),
};
- if_chain! {
- if let Some(left_var) = expr_as_cast_to_raw_pointer(cx, left);
- if let Some(right_var) = expr_as_cast_to_raw_pointer(cx, right);
- if let Some(left_snip) = snippet_opt(cx, left_var.span);
- if let Some(right_snip) = snippet_opt(cx, right_var.span);
- then {
- span_lint_and_sugg(
- cx,
- PTR_EQ,
- expr.span,
- LINT_MSG,
- "try",
- format!("std::ptr::eq({left_snip}, {right_snip})"),
- Applicability::MachineApplicable,
- );
- }
+ if let Some(left_var) = expr_as_cast_to_raw_pointer(cx, left)
+ && let Some(right_var) = expr_as_cast_to_raw_pointer(cx, right)
+ && let Some(left_snip) = snippet_opt(cx, left_var.span)
+ && let Some(right_snip) = snippet_opt(cx, right_var.span)
+ {
+ span_lint_and_sugg(
+ cx,
+ PTR_EQ,
+ expr.span,
+ LINT_MSG,
+ "try",
+ format!("std::ptr::eq({left_snip}, {right_snip})"),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs b/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
index 7792efe6a..4bfb26209 100644
--- a/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::is_direct_expn_of;
use rustc_ast::ast::{Expr, ExprKind, MethodCall};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
index d7cbbe13a..89e4e3c74 100644
--- a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
+++ b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
@@ -4,13 +4,12 @@ use clippy_utils::{
can_move_expr_to_closure, eager_or_lazy, higher, in_constant, is_else_clause, is_res_lang_ctor, peel_blocks,
peel_hir_expr_while, CaptureKind,
};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk};
use rustc_hir::{Arm, BindingAnnotation, Expr, ExprKind, MatchSource, Mutability, Pat, PatKind, Path, QPath, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::SyntaxContext;
declare_clippy_lint! {
@@ -122,73 +121,97 @@ fn try_get_option_occurrence<'tcx>(
_ => expr,
};
let (inner_pat, is_result) = try_get_inner_pat_and_is_result(cx, pat)?;
- if_chain! {
- if let PatKind::Binding(bind_annotation, _, id, None) = inner_pat.kind;
- if let Some(some_captures) = can_move_expr_to_closure(cx, if_then);
- if let Some(none_captures) = can_move_expr_to_closure(cx, if_else);
- if some_captures
+ if let PatKind::Binding(bind_annotation, _, id, None) = inner_pat.kind
+ && let Some(some_captures) = can_move_expr_to_closure(cx, if_then)
+ && let Some(none_captures) = can_move_expr_to_closure(cx, if_else)
+ && some_captures
.iter()
.filter_map(|(id, &c)| none_captures.get(id).map(|&c2| (c, c2)))
- .all(|(x, y)| x.is_imm_ref() && y.is_imm_ref());
- then {
- let capture_mut = if bind_annotation == BindingAnnotation::MUT { "mut " } else { "" };
- let some_body = peel_blocks(if_then);
- let none_body = peel_blocks(if_else);
- let method_sugg = if eager_or_lazy::switch_to_eager_eval(cx, none_body) { "map_or" } else { "map_or_else" };
- let capture_name = id.name.to_ident_string();
- let (as_ref, as_mut) = match &expr.kind {
- ExprKind::AddrOf(_, Mutability::Not, _) => (true, false),
- ExprKind::AddrOf(_, Mutability::Mut, _) => (false, true),
- _ if let Some(mutb) = cx.typeck_results().expr_ty(expr).ref_mutability() => {
- (mutb == Mutability::Not, mutb == Mutability::Mut)
- }
- _ => (bind_annotation == BindingAnnotation::REF, bind_annotation == BindingAnnotation::REF_MUT),
- };
+ .all(|(x, y)| x.is_imm_ref() && y.is_imm_ref())
+ {
+ let capture_mut = if bind_annotation == BindingAnnotation::MUT {
+ "mut "
+ } else {
+ ""
+ };
+ let some_body = peel_blocks(if_then);
+ let none_body = peel_blocks(if_else);
+ let method_sugg = if eager_or_lazy::switch_to_eager_eval(cx, none_body) {
+ "map_or"
+ } else {
+ "map_or_else"
+ };
+ let capture_name = id.name.to_ident_string();
+ let (as_ref, as_mut) = match &expr.kind {
+ ExprKind::AddrOf(_, Mutability::Not, _) => (true, false),
+ ExprKind::AddrOf(_, Mutability::Mut, _) => (false, true),
+ _ if let Some(mutb) = cx.typeck_results().expr_ty(expr).ref_mutability() => {
+ (mutb == Mutability::Not, mutb == Mutability::Mut)
+ },
+ _ => (
+ bind_annotation == BindingAnnotation::REF,
+ bind_annotation == BindingAnnotation::REF_MUT,
+ ),
+ };
- // Check if captures the closure will need conflict with borrows made in the scrutinee.
- // TODO: check all the references made in the scrutinee expression. This will require interacting
- // with the borrow checker. Currently only `<local>[.<field>]*` is checked for.
- if as_ref || as_mut {
- let e = peel_hir_expr_while(cond_expr, |e| match e.kind {
- ExprKind::Field(e, _) | ExprKind::AddrOf(_, _, e) => Some(e),
- _ => None,
- });
- if let ExprKind::Path(QPath::Resolved(None, Path { res: Res::Local(local_id), .. })) = e.kind {
- match some_captures.get(local_id)
- .or_else(|| (method_sugg == "map_or_else").then_some(()).and_then(|()| none_captures.get(local_id)))
- {
- Some(CaptureKind::Value | CaptureKind::Ref(Mutability::Mut)) => return None,
- Some(CaptureKind::Ref(Mutability::Not)) if as_mut => return None,
- Some(CaptureKind::Ref(Mutability::Not)) | None => (),
- }
+        // Check if the captures the closure will need will conflict with borrows made in the scrutinee.
+ // TODO: check all the references made in the scrutinee expression. This will require interacting
+ // with the borrow checker. Currently only `<local>[.<field>]*` is checked for.
+ if as_ref || as_mut {
+ let e = peel_hir_expr_while(cond_expr, |e| match e.kind {
+ ExprKind::Field(e, _) | ExprKind::AddrOf(_, _, e) => Some(e),
+ _ => None,
+ });
+ if let ExprKind::Path(QPath::Resolved(
+ None,
+ Path {
+ res: Res::Local(local_id),
+ ..
+ },
+ )) = e.kind
+ {
+ match some_captures.get(local_id).or_else(|| {
+ (method_sugg == "map_or_else")
+ .then_some(())
+ .and_then(|()| none_captures.get(local_id))
+ }) {
+ Some(CaptureKind::Value | CaptureKind::Ref(Mutability::Mut)) => return None,
+ Some(CaptureKind::Ref(Mutability::Not)) if as_mut => return None,
+ Some(CaptureKind::Ref(Mutability::Not)) | None => (),
}
}
+ }
- let mut app = Applicability::Unspecified;
+ let mut app = Applicability::Unspecified;
- let (none_body, is_argless_call) = match none_body.kind {
- ExprKind::Call(call_expr, []) if !none_body.span.from_expansion() => (call_expr, true),
- _ => (none_body, false),
- };
+ let (none_body, is_argless_call) = match none_body.kind {
+ ExprKind::Call(call_expr, []) if !none_body.span.from_expansion() => (call_expr, true),
+ _ => (none_body, false),
+ };
- return Some(OptionOccurrence {
- option: format_option_in_sugg(
- Sugg::hir_with_context(cx, cond_expr, ctxt, "..", &mut app),
- as_ref,
- as_mut,
- ),
- method_sugg: method_sugg.to_string(),
- some_expr: format!(
- "|{capture_mut}{capture_name}| {}",
- Sugg::hir_with_context(cx, some_body, ctxt, "..", &mut app),
- ),
- none_expr: format!(
- "{}{}",
- if method_sugg == "map_or" || is_argless_call { "" } else if is_result { "|_| " } else { "|| "},
- Sugg::hir_with_context(cx, none_body, ctxt, "..", &mut app),
- ),
- });
- }
+ return Some(OptionOccurrence {
+ option: format_option_in_sugg(
+ Sugg::hir_with_context(cx, cond_expr, ctxt, "..", &mut app),
+ as_ref,
+ as_mut,
+ ),
+ method_sugg: method_sugg.to_string(),
+ some_expr: format!(
+ "|{capture_mut}{capture_name}| {}",
+ Sugg::hir_with_context(cx, some_body, ctxt, "..", &mut app),
+ ),
+ none_expr: format!(
+ "{}{}",
+ if method_sugg == "map_or" || is_argless_call {
+ ""
+ } else if is_result {
+ "|_| "
+ } else {
+ "|| "
+ },
+ Sugg::hir_with_context(cx, none_body, ctxt, "..", &mut app),
+ ),
+ });
}
None
@@ -216,21 +239,24 @@ fn detect_option_if_let_else<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) ->
if_then,
if_else: Some(if_else),
}) = higher::IfLet::hir(cx, expr)
+ && !cx.typeck_results().expr_ty(expr).is_unit()
+ && !is_else_clause(cx.tcx, expr)
{
- if !is_else_clause(cx.tcx, expr) {
- return try_get_option_occurrence(cx, expr.span.ctxt(), let_pat, let_expr, if_then, if_else);
- }
+ try_get_option_occurrence(cx, expr.span.ctxt(), let_pat, let_expr, if_then, if_else)
+ } else {
+ None
}
- None
}
fn detect_option_match<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) -> Option<OptionOccurrence> {
- if let ExprKind::Match(ex, arms, MatchSource::Normal) = expr.kind {
- if let Some((let_pat, if_then, if_else)) = try_convert_match(cx, arms) {
- return try_get_option_occurrence(cx, expr.span.ctxt(), let_pat, ex, if_then, if_else);
- }
+ if let ExprKind::Match(ex, arms, MatchSource::Normal) = expr.kind
+ && !cx.typeck_results().expr_ty(expr).is_unit()
+ && let Some((let_pat, if_then, if_else)) = try_convert_match(cx, arms)
+ {
+ try_get_option_occurrence(cx, expr.span.ctxt(), let_pat, ex, if_then, if_else)
+ } else {
+ None
}
- None
}
fn try_convert_match<'tcx>(
diff --git a/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs b/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs
index 38cd5043a..de7898793 100644
--- a/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs
+++ b/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::SpanlessEq;
-use if_chain::if_chain;
use rustc_hir::{BinOpKind, Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -34,41 +33,37 @@ impl<'tcx> LateLintPass<'tcx> for OverflowCheckConditional {
// a + b < a, a > a + b, a < a - b, a - b > a
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
let eq = |l, r| SpanlessEq::new(cx).eq_path_segment(l, r);
- if_chain! {
- if let ExprKind::Binary(ref op, first, second) = expr.kind;
- if let ExprKind::Binary(ref op2, ident1, ident2) = first.kind;
- if let ExprKind::Path(QPath::Resolved(_, path1)) = ident1.kind;
- if let ExprKind::Path(QPath::Resolved(_, path2)) = ident2.kind;
- if let ExprKind::Path(QPath::Resolved(_, path3)) = second.kind;
- if eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]);
- if cx.typeck_results().expr_ty(ident1).is_integral();
- if cx.typeck_results().expr_ty(ident2).is_integral();
- then {
- if op.node == BinOpKind::Lt && op2.node == BinOpKind::Add {
- span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, OVERFLOW_MSG);
- }
- if op.node == BinOpKind::Gt && op2.node == BinOpKind::Sub {
- span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, UNDERFLOW_MSG);
- }
+ if let ExprKind::Binary(ref op, first, second) = expr.kind
+ && let ExprKind::Binary(ref op2, ident1, ident2) = first.kind
+ && let ExprKind::Path(QPath::Resolved(_, path1)) = ident1.kind
+ && let ExprKind::Path(QPath::Resolved(_, path2)) = ident2.kind
+ && let ExprKind::Path(QPath::Resolved(_, path3)) = second.kind
+ && (eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]))
+ && cx.typeck_results().expr_ty(ident1).is_integral()
+ && cx.typeck_results().expr_ty(ident2).is_integral()
+ {
+ if op.node == BinOpKind::Lt && op2.node == BinOpKind::Add {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, OVERFLOW_MSG);
+ }
+ if op.node == BinOpKind::Gt && op2.node == BinOpKind::Sub {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, UNDERFLOW_MSG);
}
}
- if_chain! {
- if let ExprKind::Binary(ref op, first, second) = expr.kind;
- if let ExprKind::Binary(ref op2, ident1, ident2) = second.kind;
- if let ExprKind::Path(QPath::Resolved(_, path1)) = ident1.kind;
- if let ExprKind::Path(QPath::Resolved(_, path2)) = ident2.kind;
- if let ExprKind::Path(QPath::Resolved(_, path3)) = first.kind;
- if eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]);
- if cx.typeck_results().expr_ty(ident1).is_integral();
- if cx.typeck_results().expr_ty(ident2).is_integral();
- then {
- if op.node == BinOpKind::Gt && op2.node == BinOpKind::Add {
- span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, OVERFLOW_MSG);
- }
- if op.node == BinOpKind::Lt && op2.node == BinOpKind::Sub {
- span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, UNDERFLOW_MSG);
- }
+ if let ExprKind::Binary(ref op, first, second) = expr.kind
+ && let ExprKind::Binary(ref op2, ident1, ident2) = second.kind
+ && let ExprKind::Path(QPath::Resolved(_, path1)) = ident1.kind
+ && let ExprKind::Path(QPath::Resolved(_, path2)) = ident2.kind
+ && let ExprKind::Path(QPath::Resolved(_, path3)) = first.kind
+ && (eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]))
+ && cx.typeck_results().expr_ty(ident1).is_integral()
+ && cx.typeck_results().expr_ty(ident2).is_integral()
+ {
+ if op.node == BinOpKind::Gt && op2.node == BinOpKind::Add {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, OVERFLOW_MSG);
+ }
+ if op.node == BinOpKind::Lt && op2.node == BinOpKind::Sub {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, UNDERFLOW_MSG);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
index 6a760f9fe..f821a4efe 100644
--- a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
@@ -7,7 +7,7 @@ use core::ops::ControlFlow;
use rustc_hir as hir;
use rustc_hir::intravisit::FnKind;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
@@ -55,7 +55,7 @@ impl<'tcx> LateLintPass<'tcx> for PanicInResultFn {
if matches!(fn_kind, FnKind::Closure) {
return;
}
- let owner = cx.tcx.hir().local_def_id_to_hir_id(def_id).expect_owner();
+ let owner = cx.tcx.local_def_id_to_hir_id(def_id).expect_owner();
if is_type_diagnostic_item(cx, return_ty(cx, owner), sym::Result) {
lint_impl_body(cx, span, body);
}
diff --git a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
index f4f1f6ddb..ef51a9a9a 100644
--- a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
+++ b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint;
use clippy_utils::macros::{is_panic, root_macro_call_first_node};
use rustc_hir::Expr;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs b/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs
index 99ba55b6b..ffa403e27 100644
--- a/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs
+++ b/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{Item, ItemKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
index 68d3d00ac..18e6aad9c 100644
--- a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
@@ -1,8 +1,7 @@
use clippy_utils::diagnostics::span_lint_hir;
-use if_chain::if_chain;
use rustc_hir::{Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -34,22 +33,24 @@ declare_lint_pass!(PartialEqNeImpl => [PARTIALEQ_NE_IMPL]);
impl<'tcx> LateLintPass<'tcx> for PartialEqNeImpl {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
- if_chain! {
- if let ItemKind::Impl(Impl { of_trait: Some(ref trait_ref), items: impl_items, .. }) = item.kind;
- if !cx.tcx.has_attr(item.owner_id, sym::automatically_derived);
- if let Some(eq_trait) = cx.tcx.lang_items().eq_trait();
- if trait_ref.path.res.def_id() == eq_trait;
- then {
- for impl_item in *impl_items {
- if impl_item.ident.name == sym::ne {
- span_lint_hir(
- cx,
- PARTIALEQ_NE_IMPL,
- impl_item.id.hir_id(),
- impl_item.span,
- "re-implementing `PartialEq::ne` is unnecessary",
- );
- }
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ items: impl_items,
+ ..
+ }) = item.kind
+ && !cx.tcx.has_attr(item.owner_id, sym::automatically_derived)
+ && let Some(eq_trait) = cx.tcx.lang_items().eq_trait()
+ && trait_ref.path.res.def_id() == eq_trait
+ {
+ for impl_item in *impl_items {
+ if impl_item.ident.name == sym::ne {
+ span_lint_hir(
+ cx,
+ PARTIALEQ_NE_IMPL,
+ impl_item.id.hir_id(),
+ impl_item.span,
+ "re-implementing `PartialEq::ne` is unnecessary",
+ );
}
}
};
diff --git a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs
index 11e9a2bc3..6d4216970 100644
--- a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs
+++ b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs
@@ -4,7 +4,7 @@ use clippy_utils::{is_res_lang_ctor, path_res, peel_hir_expr_refs, peel_ref_oper
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, LangItem};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
index 4d7a055da..57d37067e 100644
--- a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
+++ b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
@@ -5,7 +5,6 @@ use clippy_utils::source::snippet;
use clippy_utils::ty::{for_each_top_level_late_bound_region, is_copy};
use clippy_utils::{is_self, is_self_ty};
use core::ops::ControlFlow;
-use if_chain::if_chain;
use rustc_ast::attr;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
@@ -16,11 +15,10 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::adjustment::{Adjust, PointerCoercion};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, RegionKind};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
use rustc_target::spec::abi::Abi;
-use rustc_target::spec::Target;
declare_clippy_lint! {
/// ### What it does
@@ -117,10 +115,10 @@ impl<'tcx> PassByRefOrValue {
ref_min_size: Option<u64>,
value_max_size: u64,
avoid_breaking_exported_api: bool,
- target: &Target,
+ pointer_width: u32,
) -> Self {
let ref_min_size = ref_min_size.unwrap_or_else(|| {
- let bit_width = u64::from(target.pointer_width);
+ let bit_width = u64::from(pointer_width);
// Cap the calculated bit width at 32-bits to reduce
// portability problems between 32 and 64-bit targets
let bit_width = cmp::min(bit_width, 32);
@@ -168,18 +166,18 @@ impl<'tcx> PassByRefOrValue {
match *ty.skip_binder().kind() {
ty::Ref(lt, ty, Mutability::Not) => {
match lt.kind() {
- RegionKind::ReLateBound(index, region)
+ RegionKind::ReBound(index, region)
if index.as_u32() == 0 && output_regions.contains(&region) =>
{
continue;
},
// Early bound regions on functions are either from the containing item, are bounded by another
// lifetime, or are used as a bound for a type or lifetime.
- RegionKind::ReEarlyBound(..) => continue,
+ RegionKind::ReEarlyParam(..) => continue,
_ => (),
}
- let ty = cx.tcx.erase_late_bound_regions(fn_sig.rebind(ty));
+ let ty = cx.tcx.instantiate_bound_regions_with_erased(fn_sig.rebind(ty));
if is_copy(cx, ty)
&& let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes())
&& size <= self.ref_min_size
@@ -227,24 +225,25 @@ impl<'tcx> PassByRefOrValue {
_ => continue,
}
}
- let ty = cx.tcx.erase_late_bound_regions(ty);
+ let ty = cx.tcx.instantiate_bound_regions_with_erased(ty);
- if_chain! {
- if is_copy(cx, ty);
- if !is_self_ty(input);
- if let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes());
- if size > self.value_max_size;
- then {
- span_lint_and_sugg(
- cx,
- LARGE_TYPES_PASSED_BY_VALUE,
- input.span,
- &format!("this argument ({size} byte) is passed by value, but might be more efficient if passed by reference (limit: {} byte)", self.value_max_size),
- "consider passing by reference instead",
- format!("&{}", snippet(cx, input.span, "_")),
- Applicability::MaybeIncorrect,
- );
- }
+ if is_copy(cx, ty)
+ && !is_self_ty(input)
+ && let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes())
+ && size > self.value_max_size
+ {
+ span_lint_and_sugg(
+ cx,
+ LARGE_TYPES_PASSED_BY_VALUE,
+ input.span,
+ &format!(
+ "this argument ({size} byte) is passed by value, but might be more efficient if passed by reference (limit: {} byte)",
+ self.value_max_size
+ ),
+ "consider passing by reference instead",
+ format!("&{}", snippet(cx, input.span, "_")),
+ Applicability::MaybeIncorrect,
+ );
}
},
@@ -280,7 +279,7 @@ impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue {
return;
}
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
match kind {
FnKind::ItemFn(.., header) => {
if header.abi != Abi::Rust {
diff --git a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs
index dcd1e7af0..60ced9c12 100644
--- a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs
+++ b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs
@@ -3,7 +3,7 @@ use rustc_hir::{intravisit, Body, Expr, ExprKind, FnDecl, Let, LocalSource, Muta
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs b/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs
index b98005d59..704acdc10 100644
--- a/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs
+++ b/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs
@@ -3,7 +3,7 @@ use clippy_utils::ty::is_type_diagnostic_item;
use rustc_ast::ast::LitKind;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/precedence.rs b/src/tools/clippy/clippy_lints/src/precedence.rs
index 057b7e306..ff83725da 100644
--- a/src/tools/clippy/clippy_lints/src/precedence.rs
+++ b/src/tools/clippy/clippy_lints/src/precedence.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
-use if_chain::if_chain;
use rustc_ast::ast::{BinOpKind, Expr, ExprKind, MethodCall, UnOp};
use rustc_ast::token;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
const ALLOWED_ODD_FUNCTIONS: [&str; 14] = [
@@ -79,7 +78,7 @@ impl EarlyLintPass for Precedence {
let sugg = format!(
"({}) {} ({})",
snippet_with_applicability(cx, left.span, "..", &mut applicability),
- op.to_string(),
+ op.as_str(),
snippet_with_applicability(cx, right.span, "..", &mut applicability)
);
span_sugg(expr, sugg, applicability);
@@ -88,7 +87,7 @@ impl EarlyLintPass for Precedence {
let sugg = format!(
"({}) {} {}",
snippet_with_applicability(cx, left.span, "..", &mut applicability),
- op.to_string(),
+ op.as_str(),
snippet_with_applicability(cx, right.span, "..", &mut applicability)
);
span_sugg(expr, sugg, applicability);
@@ -97,7 +96,7 @@ impl EarlyLintPass for Precedence {
let sugg = format!(
"{} {} ({})",
snippet_with_applicability(cx, left.span, "..", &mut applicability),
- op.to_string(),
+ op.as_str(),
snippet_with_applicability(cx, right.span, "..", &mut applicability)
);
span_sugg(expr, sugg, applicability);
@@ -118,25 +117,23 @@ impl EarlyLintPass for Precedence {
arg = receiver;
}
- if_chain! {
- if !all_odd;
- if let ExprKind::Lit(lit) = &arg.kind;
- if let token::LitKind::Integer | token::LitKind::Float = &lit.kind;
- then {
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- PRECEDENCE,
- expr.span,
- "unary minus has lower precedence than method call",
- "consider adding parentheses to clarify your intent",
- format!(
- "-({})",
- snippet_with_applicability(cx, operand.span, "..", &mut applicability)
- ),
- applicability,
- );
- }
+ if !all_odd
+ && let ExprKind::Lit(lit) = &arg.kind
+ && let token::LitKind::Integer | token::LitKind::Float = &lit.kind
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ PRECEDENCE,
+ expr.span,
+ "unary minus has lower precedence than method call",
+ "consider adding parentheses to clarify your intent",
+ format!(
+ "-({})",
+ snippet_with_applicability(cx, operand.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs
index 83863b92c..2587b3881 100644
--- a/src/tools/clippy/clippy_lints/src/ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/ptr.rs
@@ -20,14 +20,16 @@ use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{self, Binder, ClauseKind, ExistentialPredicate, List, PredicateKind, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{sym, Span};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Symbol;
+use rustc_span::{sym, Span};
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
use std::{fmt, iter};
+use crate::vec::is_allowed_vec_method;
+
declare_clippy_lint! {
/// ### What it does
/// This lint checks for function arguments of type `&String`, `&Vec`,
@@ -465,9 +467,9 @@ fn check_fn_args<'cx, 'tcx: 'cx>(
.walk()
.filter_map(|arg| {
arg.as_region().and_then(|lifetime| match lifetime.kind() {
- ty::ReEarlyBound(r) => Some(r.def_id),
- ty::ReLateBound(_, r) => r.kind.get_id(),
- ty::ReFree(r) => r.bound_region.get_id(),
+ ty::ReEarlyParam(r) => Some(r.def_id),
+ ty::ReBound(_, r) => r.kind.get_id(),
+ ty::ReLateParam(r) => r.bound_region.get_id(),
ty::ReStatic
| ty::ReVar(_)
| ty::RePlaceholder(_)
@@ -660,7 +662,7 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, args:
},
// If the types match check for methods which exist on both types. e.g. `Vec::len` and
// `slice::len`
- ty::Adt(def, _) if def.did() == args.ty_did => {
+ ty::Adt(def, _) if def.did() == args.ty_did && !is_allowed_vec_method(self.cx, e) => {
set_skip_flag();
},
_ => (),
@@ -712,23 +714,25 @@ fn matches_preds<'tcx>(
preds: &'tcx [ty::PolyExistentialPredicate<'tcx>],
) -> bool {
let infcx = cx.tcx.infer_ctxt().build();
- preds.iter().all(|&p| match cx.tcx.erase_late_bound_regions(p) {
- ExistentialPredicate::Trait(p) => infcx
- .type_implements_trait(p.def_id, [ty.into()].into_iter().chain(p.args.iter()), cx.param_env)
- .must_apply_modulo_regions(),
- ExistentialPredicate::Projection(p) => infcx.predicate_must_hold_modulo_regions(&Obligation::new(
- cx.tcx,
- ObligationCause::dummy(),
- cx.param_env,
- cx.tcx
- .mk_predicate(Binder::dummy(PredicateKind::Clause(ClauseKind::Projection(
- p.with_self_ty(cx.tcx, ty),
- )))),
- )),
- ExistentialPredicate::AutoTrait(p) => infcx
- .type_implements_trait(p, [ty], cx.param_env)
- .must_apply_modulo_regions(),
- })
+ preds
+ .iter()
+ .all(|&p| match cx.tcx.instantiate_bound_regions_with_erased(p) {
+ ExistentialPredicate::Trait(p) => infcx
+ .type_implements_trait(p.def_id, [ty.into()].into_iter().chain(p.args.iter()), cx.param_env)
+ .must_apply_modulo_regions(),
+ ExistentialPredicate::Projection(p) => infcx.predicate_must_hold_modulo_regions(&Obligation::new(
+ cx.tcx,
+ ObligationCause::dummy(),
+ cx.param_env,
+ cx.tcx
+ .mk_predicate(Binder::dummy(PredicateKind::Clause(ClauseKind::Projection(
+ p.with_self_ty(cx.tcx, ty),
+ )))),
+ )),
+ ExistentialPredicate::AutoTrait(p) => infcx
+ .type_implements_trait(p, [ty], cx.param_env)
+ .must_apply_modulo_regions(),
+ })
}
fn get_ref_lm<'tcx>(ty: &'tcx hir::Ty<'tcx>) -> Option<(&'tcx Lifetime, Mutability, Span)> {
diff --git a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
index 66d869bc4..ff8ec2ad5 100644
--- a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
+++ b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
@@ -3,7 +3,7 @@ use clippy_utils::source::snippet_opt;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
use std::fmt;
diff --git a/src/tools/clippy/clippy_lints/src/pub_use.rs b/src/tools/clippy/clippy_lints/src/pub_use.rs
index 316a72988..c0e999e76 100644
--- a/src/tools/clippy/clippy_lints/src/pub_use.rs
+++ b/src/tools/clippy/clippy_lints/src/pub_use.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{Item, ItemKind, VisibilityKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/question_mark.rs b/src/tools/clippy/clippy_lints/src/question_mark.rs
index b133635e8..fc5835408 100644
--- a/src/tools/clippy/clippy_lints/src/question_mark.rs
+++ b/src/tools/clippy/clippy_lints/src/question_mark.rs
@@ -10,7 +10,6 @@ use clippy_utils::{
is_res_lang_ctor, pat_and_expr_can_be_question_mark, path_to_local, path_to_local_id, peel_blocks,
peel_blocks_with_stmt,
};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::LangItem::{self, OptionNone, OptionSome, ResultErr, ResultOk};
@@ -19,7 +18,7 @@ use rustc_hir::{
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
use rustc_span::symbol::Symbol;
@@ -179,17 +178,15 @@ fn expr_return_none_or_err(
_ => false,
},
ExprKind::Call(call_expr, args_expr) => {
- if_chain! {
- if smbl == sym::Result;
- if let ExprKind::Path(QPath::Resolved(_, path)) = &call_expr.kind;
- if let Some(segment) = path.segments.first();
- if let Some(err_sym) = err_sym;
- if let Some(arg) = args_expr.first();
- if let ExprKind::Path(QPath::Resolved(_, arg_path)) = &arg.kind;
- if let Some(PathSegment { ident, .. }) = arg_path.segments.first();
- then {
- return segment.ident.name == sym::Err && err_sym == ident.name;
- }
+ if smbl == sym::Result
+ && let ExprKind::Path(QPath::Resolved(_, path)) = &call_expr.kind
+ && let Some(segment) = path.segments.first()
+ && let Some(err_sym) = err_sym
+ && let Some(arg) = args_expr.first()
+ && let ExprKind::Path(QPath::Resolved(_, arg_path)) = &arg.kind
+ && let Some(PathSegment { ident, .. }) = arg_path.segments.first()
+ {
+ return segment.ident.name == sym::Err && err_sym == ident.name;
}
false
},
@@ -218,81 +215,85 @@ impl QuestionMark {
///
/// If it matches, it will suggest to use the question mark operator instead
fn check_is_none_or_err_and_early_return<'tcx>(&self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
- if_chain! {
- if !self.inside_try_block();
- if let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr);
- if !is_else_clause(cx.tcx, expr);
- if let ExprKind::MethodCall(segment, caller, ..) = &cond.kind;
- let caller_ty = cx.typeck_results().expr_ty(caller);
- let if_block = IfBlockType::IfIs(caller, caller_ty, segment.ident.name, then, r#else);
- if is_early_return(sym::Option, cx, &if_block) || is_early_return(sym::Result, cx, &if_block);
- then {
- let mut applicability = Applicability::MachineApplicable;
- let receiver_str = snippet_with_applicability(cx, caller.span, "..", &mut applicability);
- let by_ref = !caller_ty.is_copy_modulo_regions(cx.tcx, cx.param_env) &&
- !matches!(caller.kind, ExprKind::Call(..) | ExprKind::MethodCall(..));
- let sugg = if let Some(else_inner) = r#else {
- if eq_expr_value(cx, caller, peel_blocks(else_inner)) {
- format!("Some({receiver_str}?)")
- } else {
- return;
- }
+ if !self.inside_try_block()
+ && let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr)
+ && !is_else_clause(cx.tcx, expr)
+ && let ExprKind::MethodCall(segment, caller, ..) = &cond.kind
+ && let caller_ty = cx.typeck_results().expr_ty(caller)
+ && let if_block = IfBlockType::IfIs(caller, caller_ty, segment.ident.name, then, r#else)
+ && (is_early_return(sym::Option, cx, &if_block) || is_early_return(sym::Result, cx, &if_block))
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let receiver_str = snippet_with_applicability(cx, caller.span, "..", &mut applicability);
+ let by_ref = !caller_ty.is_copy_modulo_regions(cx.tcx, cx.param_env)
+ && !matches!(caller.kind, ExprKind::Call(..) | ExprKind::MethodCall(..));
+ let sugg = if let Some(else_inner) = r#else {
+ if eq_expr_value(cx, caller, peel_blocks(else_inner)) {
+ format!("Some({receiver_str}?)")
} else {
- format!("{receiver_str}{}?;", if by_ref { ".as_ref()" } else { "" })
- };
+ return;
+ }
+ } else {
+ format!("{receiver_str}{}?;", if by_ref { ".as_ref()" } else { "" })
+ };
- span_lint_and_sugg(
- cx,
- QUESTION_MARK,
- expr.span,
- "this block may be rewritten with the `?` operator",
- "replace it with",
- sugg,
- applicability,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ QUESTION_MARK,
+ expr.span,
+ "this block may be rewritten with the `?` operator",
+ "replace it with",
+ sugg,
+ applicability,
+ );
}
}
fn check_if_let_some_or_err_and_early_return<'tcx>(&self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
- if_chain! {
- if !self.inside_try_block();
- if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else }) = higher::IfLet::hir(cx, expr);
- if !is_else_clause(cx.tcx, expr);
- if let PatKind::TupleStruct(ref path1, [field], ddpos) = let_pat.kind;
- if ddpos.as_opt_usize().is_none();
- if let PatKind::Binding(BindingAnnotation(by_ref, _), bind_id, ident, None) = field.kind;
- let caller_ty = cx.typeck_results().expr_ty(let_expr);
- let if_block = IfBlockType::IfLet(
+ if !self.inside_try_block()
+ && let Some(higher::IfLet {
+ let_pat,
+ let_expr,
+ if_then,
+ if_else,
+ }) = higher::IfLet::hir(cx, expr)
+ && !is_else_clause(cx.tcx, expr)
+ && let PatKind::TupleStruct(ref path1, [field], ddpos) = let_pat.kind
+ && ddpos.as_opt_usize().is_none()
+ && let PatKind::Binding(BindingAnnotation(by_ref, _), bind_id, ident, None) = field.kind
+ && let caller_ty = cx.typeck_results().expr_ty(let_expr)
+ && let if_block = IfBlockType::IfLet(
cx.qpath_res(path1, let_pat.hir_id),
caller_ty,
ident.name,
let_expr,
if_then,
- if_else
+ if_else,
+ )
+ && ((is_early_return(sym::Option, cx, &if_block) && path_to_local_id(peel_blocks(if_then), bind_id))
+ || is_early_return(sym::Result, cx, &if_block))
+ && if_else
+ .map(|e| eq_expr_value(cx, let_expr, peel_blocks(e)))
+ .filter(|e| *e)
+ .is_none()
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let receiver_str = snippet_with_applicability(cx, let_expr.span, "..", &mut applicability);
+ let requires_semi = matches!(get_parent_node(cx.tcx, expr.hir_id), Some(Node::Stmt(_)));
+ let sugg = format!(
+ "{receiver_str}{}?{}",
+ if by_ref == ByRef::Yes { ".as_ref()" } else { "" },
+ if requires_semi { ";" } else { "" }
+ );
+ span_lint_and_sugg(
+ cx,
+ QUESTION_MARK,
+ expr.span,
+ "this block may be rewritten with the `?` operator",
+ "replace it with",
+ sugg,
+ applicability,
);
- if (is_early_return(sym::Option, cx, &if_block) && path_to_local_id(peel_blocks(if_then), bind_id))
- || is_early_return(sym::Result, cx, &if_block);
- if if_else.map(|e| eq_expr_value(cx, let_expr, peel_blocks(e))).filter(|e| *e).is_none();
- then {
- let mut applicability = Applicability::MachineApplicable;
- let receiver_str = snippet_with_applicability(cx, let_expr.span, "..", &mut applicability);
- let requires_semi = matches!(get_parent_node(cx.tcx, expr.hir_id), Some(Node::Stmt(_)));
- let sugg = format!(
- "{receiver_str}{}?{}",
- if by_ref == ByRef::Yes { ".as_ref()" } else { "" },
- if requires_semi { ";" } else { "" }
- );
- span_lint_and_sugg(
- cx,
- QUESTION_MARK,
- expr.span,
- "this block may be rewritten with the `?` operator",
- "replace it with",
- sugg,
- applicability,
- );
- }
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/question_mark_used.rs b/src/tools/clippy/clippy_lints/src/question_mark_used.rs
index d0de33e3c..ddfc53083 100644
--- a/src/tools/clippy/clippy_lints/src/question_mark_used.rs
+++ b/src/tools/clippy/clippy_lints/src/question_mark_used.rs
@@ -3,7 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::macros::span_is_local;
use rustc_hir::{Expr, ExprKind, MatchSource};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs
index 1b3081abc..6b54258dd 100644
--- a/src/tools/clippy/clippy_lints/src/ranges.rs
+++ b/src/tools/clippy/clippy_lints/src/ranges.rs
@@ -4,15 +4,14 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_the
use clippy_utils::source::{snippet, snippet_opt, snippet_with_applicability};
use clippy_utils::sugg::Sugg;
use clippy_utils::{get_parent_expr, higher, in_constant, is_integer_const, path_to_local};
-use if_chain::if_chain;
use rustc_ast::ast::RangeLimits;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, HirId};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::Span;
+use rustc_session::impl_lint_pass;
use rustc_span::source_map::Spanned;
+use rustc_span::Span;
use std::cmp::Ordering;
declare_clippy_lint! {
@@ -283,16 +282,14 @@ fn check_possible_range_contains(
// If the LHS is the same operator, we have to recurse to get the "real" RHS, since they have
// the same operator precedence
- if_chain! {
- if let ExprKind::Binary(ref lhs_op, _left, new_lhs) = left.kind;
- if op == lhs_op.node;
- let new_span = Span::new(new_lhs.span.lo(), right.span.hi(), expr.span.ctxt(), expr.span.parent());
- if let Some(snip) = &snippet_opt(cx, new_span);
+ if let ExprKind::Binary(ref lhs_op, _left, new_lhs) = left.kind
+ && op == lhs_op.node
+ && let new_span = Span::new(new_lhs.span.lo(), right.span.hi(), expr.span.ctxt(), expr.span.parent())
+ && let Some(snip) = &snippet_opt(cx, new_span)
// Do not continue if we have mismatched number of parens, otherwise the suggestion is wrong
- if snip.matches('(').count() == snip.matches(')').count();
- then {
- check_possible_range_contains(cx, op, new_lhs, right, expr, new_span);
- }
+ && snip.matches('(').count() == snip.matches(')').count()
+ {
+ check_possible_range_contains(cx, op, new_lhs, right, expr, new_span);
}
}
@@ -349,71 +346,66 @@ fn check_range_bounds<'a, 'tcx>(cx: &'a LateContext<'tcx>, ex: &'a Expr<'_>) ->
// exclusive range plus one: `x..(y+1)`
fn check_exclusive_range_plus_one(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if expr.span.can_be_used_for_suggestions();
- if let Some(higher::Range {
+ if expr.span.can_be_used_for_suggestions()
+ && let Some(higher::Range {
start,
end: Some(end),
- limits: RangeLimits::HalfOpen
- }) = higher::Range::hir(expr);
- if let Some(y) = y_plus_one(cx, end);
- then {
- let span = expr.span;
- span_lint_and_then(
- cx,
- RANGE_PLUS_ONE,
- span,
- "an inclusive range would be more readable",
- |diag| {
- let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string());
- let end = Sugg::hir(cx, y, "y").maybe_par();
- if let Some(is_wrapped) = &snippet_opt(cx, span) {
- if is_wrapped.starts_with('(') && is_wrapped.ends_with(')') {
- diag.span_suggestion(
- span,
- "use",
- format!("({start}..={end})"),
- Applicability::MaybeIncorrect,
- );
- } else {
- diag.span_suggestion(
- span,
- "use",
- format!("{start}..={end}"),
- Applicability::MachineApplicable, // snippet
- );
- }
+ limits: RangeLimits::HalfOpen,
+ }) = higher::Range::hir(expr)
+ && let Some(y) = y_plus_one(cx, end)
+ {
+ let span = expr.span;
+ span_lint_and_then(
+ cx,
+ RANGE_PLUS_ONE,
+ span,
+ "an inclusive range would be more readable",
+ |diag| {
+ let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string());
+ let end = Sugg::hir(cx, y, "y").maybe_par();
+ if let Some(is_wrapped) = &snippet_opt(cx, span) {
+ if is_wrapped.starts_with('(') && is_wrapped.ends_with(')') {
+ diag.span_suggestion(span, "use", format!("({start}..={end})"), Applicability::MaybeIncorrect);
+ } else {
+ diag.span_suggestion(
+ span,
+ "use",
+ format!("{start}..={end}"),
+ Applicability::MachineApplicable, // snippet
+ );
}
- },
- );
- }
+ }
+ },
+ );
}
}
// inclusive range minus one: `x..=(y-1)`
fn check_inclusive_range_minus_one(cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if expr.span.can_be_used_for_suggestions();
- if let Some(higher::Range { start, end: Some(end), limits: RangeLimits::Closed }) = higher::Range::hir(expr);
- if let Some(y) = y_minus_one(cx, end);
- then {
- span_lint_and_then(
- cx,
- RANGE_MINUS_ONE,
- expr.span,
- "an exclusive range would be more readable",
- |diag| {
- let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string());
- let end = Sugg::hir(cx, y, "y").maybe_par();
- diag.span_suggestion(
- expr.span,
- "use",
- format!("{start}..{end}"),
- Applicability::MachineApplicable, // snippet
- );
- },
- );
- }
+ if expr.span.can_be_used_for_suggestions()
+ && let Some(higher::Range {
+ start,
+ end: Some(end),
+ limits: RangeLimits::Closed,
+ }) = higher::Range::hir(expr)
+ && let Some(y) = y_minus_one(cx, end)
+ {
+ span_lint_and_then(
+ cx,
+ RANGE_MINUS_ONE,
+ expr.span,
+ "an exclusive range would be more readable",
+ |diag| {
+ let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string());
+ let end = Sugg::hir(cx, y, "y").maybe_par();
+ diag.span_suggestion(
+ expr.span,
+ "use",
+ format!("{start}..{end}"),
+ Applicability::MachineApplicable, // snippet
+ );
+ },
+ );
}
}
@@ -447,52 +439,54 @@ fn check_reversed_empty_range(cx: &LateContext<'_>, expr: &Expr<'_>) {
}
}
- if_chain! {
- if let Some(higher::Range { start: Some(start), end: Some(end), limits }) = higher::Range::hir(expr);
- let ty = cx.typeck_results().expr_ty(start);
- if let ty::Int(_) | ty::Uint(_) = ty.kind();
- if let Some(start_idx) = constant(cx, cx.typeck_results(), start);
- if let Some(end_idx) = constant(cx, cx.typeck_results(), end);
- if let Some(ordering) = Constant::partial_cmp(cx.tcx, ty, &start_idx, &end_idx);
- if is_empty_range(limits, ordering);
- then {
- if inside_indexing_expr(cx, expr) {
- // Avoid linting `N..N` as it has proven to be useful, see #5689 and #5628 ...
- if ordering != Ordering::Equal {
- span_lint(
- cx,
- REVERSED_EMPTY_RANGES,
- expr.span,
- "this range is reversed and using it to index a slice will panic at run-time",
- );
- }
- // ... except in for loop arguments for backwards compatibility with `reverse_range_loop`
- } else if ordering != Ordering::Equal || is_for_loop_arg(cx, expr) {
- span_lint_and_then(
+ if let Some(higher::Range {
+ start: Some(start),
+ end: Some(end),
+ limits,
+ }) = higher::Range::hir(expr)
+ && let ty = cx.typeck_results().expr_ty(start)
+ && let ty::Int(_) | ty::Uint(_) = ty.kind()
+ && let Some(start_idx) = constant(cx, cx.typeck_results(), start)
+ && let Some(end_idx) = constant(cx, cx.typeck_results(), end)
+ && let Some(ordering) = Constant::partial_cmp(cx.tcx, ty, &start_idx, &end_idx)
+ && is_empty_range(limits, ordering)
+ {
+ if inside_indexing_expr(cx, expr) {
+ // Avoid linting `N..N` as it has proven to be useful, see #5689 and #5628 ...
+ if ordering != Ordering::Equal {
+ span_lint(
cx,
REVERSED_EMPTY_RANGES,
expr.span,
- "this range is empty so it will yield no values",
- |diag| {
- if ordering != Ordering::Equal {
- let start_snippet = snippet(cx, start.span, "_");
- let end_snippet = snippet(cx, end.span, "_");
- let dots = match limits {
- RangeLimits::HalfOpen => "..",
- RangeLimits::Closed => "..="
- };
-
- diag.span_suggestion(
- expr.span,
- "consider using the following if you are attempting to iterate over this \
- range in reverse",
- format!("({end_snippet}{dots}{start_snippet}).rev()"),
- Applicability::MaybeIncorrect,
- );
- }
- },
+ "this range is reversed and using it to index a slice will panic at run-time",
);
}
+ // ... except in for loop arguments for backwards compatibility with `reverse_range_loop`
+ } else if ordering != Ordering::Equal || is_for_loop_arg(cx, expr) {
+ span_lint_and_then(
+ cx,
+ REVERSED_EMPTY_RANGES,
+ expr.span,
+ "this range is empty so it will yield no values",
+ |diag| {
+ if ordering != Ordering::Equal {
+ let start_snippet = snippet(cx, start.span, "_");
+ let end_snippet = snippet(cx, end.span, "_");
+ let dots = match limits {
+ RangeLimits::HalfOpen => "..",
+ RangeLimits::Closed => "..=",
+ };
+
+ diag.span_suggestion(
+ expr.span,
+ "consider using the following if you are attempting to iterate over this \
+ range in reverse",
+ format!("({end_snippet}{dots}{start_snippet}).rev()"),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/raw_strings.rs b/src/tools/clippy/clippy_lints/src/raw_strings.rs
index 391c77dbf..ac29d2730 100644
--- a/src/tools/clippy/clippy_lints/src/raw_strings.rs
+++ b/src/tools/clippy/clippy_lints/src/raw_strings.rs
@@ -8,7 +8,7 @@ use rustc_ast::token::LitKind;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{BytePos, Pos, Span};
declare_clippy_lint! {
@@ -56,7 +56,7 @@ declare_clippy_lint! {
impl_lint_pass!(RawStrings => [NEEDLESS_RAW_STRINGS, NEEDLESS_RAW_STRING_HASHES]);
pub struct RawStrings {
- pub needless_raw_string_hashes_allow_one: bool,
+ pub allow_one_hash_in_raw_strings: bool,
}
impl EarlyLintPass for RawStrings {
diff --git a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
index 59ce289e7..d0b45b595 100644
--- a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
+++ b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span, Symbol};
declare_clippy_lint! {
@@ -118,26 +118,24 @@ fn emit_lint(cx: &LateContext<'_>, symbol: Symbol, lint_span: Span, elem: &Expr<
/// Checks whether the given `expr` is a call to `Arc::new`, `Rc::new`, or evaluates to a `Weak`
fn ref_init(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<(Symbol, Span)> {
- if_chain! {
- if let ExprKind::Call(func, _args) = expr.kind;
- if let ExprKind::Path(ref func_path @ QPath::TypeRelative(ty, _)) = func.kind;
- if let TyKind::Path(ref ty_path) = ty.kind;
- if let Some(def_id) = cx.qpath_res(ty_path, ty.hir_id).opt_def_id();
-
- then {
- if last_path_segment(func_path).ident.name == sym::new
- && let Some(symbol) = cx
- .tcx
- .get_diagnostic_name(def_id)
- .filter(|symbol| symbol == &sym::Arc || symbol == &sym::Rc) {
- return Some((symbol, func.span));
- }
+ if let ExprKind::Call(func, _args) = expr.kind
+ && let ExprKind::Path(ref func_path @ QPath::TypeRelative(ty, _)) = func.kind
+ && let TyKind::Path(ref ty_path) = ty.kind
+ && let Some(def_id) = cx.qpath_res(ty_path, ty.hir_id).opt_def_id()
+ {
+ if last_path_segment(func_path).ident.name == sym::new
+ && let Some(symbol) = cx
+ .tcx
+ .get_diagnostic_name(def_id)
+ .filter(|symbol| symbol == &sym::Arc || symbol == &sym::Rc)
+ {
+ return Some((symbol, func.span));
+ }
- if let ty::Adt(adt, _) = *cx.typeck_results().expr_ty(expr).kind()
- && matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::RcWeak | sym::ArcWeak))
- {
- return Some((Symbol::intern("Weak"), func.span));
- }
+ if let ty::Adt(adt, _) = *cx.typeck_results().expr_ty(expr).kind()
+ && matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::RcWeak | sym::ArcWeak))
+ {
+ return Some((Symbol::intern("Weak"), func.span));
}
}
diff --git a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
index b27d4cc6e..62f3c09aa 100644
--- a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
+++ b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
@@ -7,7 +7,7 @@ use hir::{Expr, ExprKind, Local, PatKind, PathSegment, QPath, StmtKind};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
index 90297ca8b..19d9d64b3 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
@@ -9,7 +9,7 @@ use rustc_hir::{Closure, CoroutineKind, CoroutineSource, Expr, ExprKind, MatchSo
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::UpvarCapture;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/redundant_clone.rs b/src/tools/clippy/clippy_lints/src/redundant_clone.rs
index 8daf085a4..c62c351e7 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_clone.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_clone.rs
@@ -3,14 +3,13 @@ use clippy_utils::mir::{visit_local_usage, LocalUsage, PossibleBorrowerMap};
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::{has_drop, is_copy, is_type_diagnostic_item, is_type_lang_item, walk_ptrs_ty_depth};
use clippy_utils::{fn_has_unsatisfiable_preds, match_def_path, paths};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{def_id, Body, FnDecl, LangItem};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir;
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, BytePos, Span};
@@ -145,18 +144,16 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone {
let pred_terminator = mir[ps[0]].terminator();
// receiver of the `deref()` call
- let (pred_arg, deref_clone_ret) = if_chain! {
- if let Some((pred_fn_def_id, pred_arg, pred_arg_ty, res)) =
- is_call_with_ref_arg(cx, mir, &pred_terminator.kind);
- if res == cloned;
- if cx.tcx.is_diagnostic_item(sym::deref_method, pred_fn_def_id);
- if is_type_diagnostic_item(cx, pred_arg_ty, sym::PathBuf)
- || is_type_diagnostic_item(cx, pred_arg_ty, sym::OsString);
- then {
- (pred_arg, res)
- } else {
- continue;
- }
+ let (pred_arg, deref_clone_ret) = if let Some((pred_fn_def_id, pred_arg, pred_arg_ty, res)) =
+ is_call_with_ref_arg(cx, mir, &pred_terminator.kind)
+ && res == cloned
+ && cx.tcx.is_diagnostic_item(sym::deref_method, pred_fn_def_id)
+ && (is_type_diagnostic_item(cx, pred_arg_ty, sym::PathBuf)
+ || is_type_diagnostic_item(cx, pred_arg_ty, sym::OsString))
+ {
+ (pred_arg, res)
+ } else {
+ continue;
};
let (local, cannot_move_out) =
@@ -211,45 +208,37 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone {
.assert_crate_local()
.lint_root;
- if_chain! {
- if let Some(snip) = snippet_opt(cx, span);
- if let Some(dot) = snip.rfind('.');
- then {
- let sugg_span = span.with_lo(
- span.lo() + BytePos(u32::try_from(dot).unwrap())
- );
- let mut app = Applicability::MaybeIncorrect;
-
- let call_snip = &snip[dot + 1..];
- // Machine applicable when `call_snip` looks like `foobar()`
- if let Some(call_snip) = call_snip.strip_suffix("()").map(str::trim) {
- if call_snip.as_bytes().iter().all(|b| b.is_ascii_alphabetic() || *b == b'_') {
- app = Applicability::MachineApplicable;
- }
+ if let Some(snip) = snippet_opt(cx, span)
+ && let Some(dot) = snip.rfind('.')
+ {
+ let sugg_span = span.with_lo(span.lo() + BytePos(u32::try_from(dot).unwrap()));
+ let mut app = Applicability::MaybeIncorrect;
+
+ let call_snip = &snip[dot + 1..];
+ // Machine applicable when `call_snip` looks like `foobar()`
+ if let Some(call_snip) = call_snip.strip_suffix("()").map(str::trim) {
+ if call_snip
+ .as_bytes()
+ .iter()
+ .all(|b| b.is_ascii_alphabetic() || *b == b'_')
+ {
+ app = Applicability::MachineApplicable;
}
+ }
- span_lint_hir_and_then(cx, REDUNDANT_CLONE, node, sugg_span, "redundant clone", |diag| {
- diag.span_suggestion(
- sugg_span,
- "remove this",
- "",
- app,
+ span_lint_hir_and_then(cx, REDUNDANT_CLONE, node, sugg_span, "redundant clone", |diag| {
+ diag.span_suggestion(sugg_span, "remove this", "", app);
+ if clone_usage.cloned_used {
+ diag.span_note(span, "cloned value is neither consumed nor mutated");
+ } else {
+ diag.span_note(
+ span.with_hi(span.lo() + BytePos(u32::try_from(dot).unwrap())),
+ "this value is dropped without further use",
);
- if clone_usage.cloned_used {
- diag.span_note(
- span,
- "cloned value is neither consumed nor mutated",
- );
- } else {
- diag.span_note(
- span.with_hi(span.lo() + BytePos(u32::try_from(dot).unwrap())),
- "this value is dropped without further use",
- );
- }
- });
- } else {
- span_lint_hir(cx, REDUNDANT_CLONE, node, span, "redundant clone");
- }
+ }
+ });
+ } else {
+ span_lint_hir(cx, REDUNDANT_CLONE, node, span, "redundant clone");
}
}
}
@@ -261,18 +250,21 @@ fn is_call_with_ref_arg<'tcx>(
mir: &'tcx mir::Body<'tcx>,
kind: &'tcx mir::TerminatorKind<'tcx>,
) -> Option<(def_id::DefId, mir::Local, Ty<'tcx>, mir::Local)> {
- if_chain! {
- if let mir::TerminatorKind::Call { func, args, destination, .. } = kind;
- if args.len() == 1;
- if let mir::Operand::Move(mir::Place { local, .. }) = &args[0];
- if let ty::FnDef(def_id, _) = *func.ty(mir, cx.tcx).kind();
- if let (inner_ty, 1) = walk_ptrs_ty_depth(args[0].ty(mir, cx.tcx));
- if !is_copy(cx, inner_ty);
- then {
- Some((def_id, *local, inner_ty, destination.as_local()?))
- } else {
- None
- }
+ if let mir::TerminatorKind::Call {
+ func,
+ args,
+ destination,
+ ..
+ } = kind
+ && args.len() == 1
+ && let mir::Operand::Move(mir::Place { local, .. }) = &args[0]
+ && let ty::FnDef(def_id, _) = *func.ty(mir, cx.tcx).kind()
+ && let (inner_ty, 1) = walk_ptrs_ty_depth(args[0].ty(mir, cx.tcx))
+ && !is_copy(cx, inner_ty)
+ {
+ Some((def_id, *local, inner_ty, destination.as_local()?))
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
index e679fab53..8bac2e40e 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
@@ -2,16 +2,15 @@ use crate::rustc_lint::LintContext;
use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
use clippy_utils::get_parent_expr;
use clippy_utils::sugg::Sugg;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
-use rustc_hir::intravisit as hir_visit;
use rustc_hir::intravisit::{Visitor as HirVisitor, Visitor};
+use rustc_hir::{intravisit as hir_visit, CoroutineKind, CoroutineSource, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -61,11 +60,14 @@ impl<'tcx> Visitor<'tcx> for ReturnVisitor {
}
}
-/// Checks if the body is owned by an async closure
-fn is_async_closure(body: &hir::Body<'_>) -> bool {
- if let hir::ExprKind::Closure(closure) = body.value.kind
- && let [resume_ty] = closure.fn_decl.inputs
- && let hir::TyKind::Path(hir::QPath::LangItem(hir::LangItem::ResumeTy, ..)) = resume_ty.kind
+/// Checks if the body is owned by an async closure.
+/// Returns true for `async || whatever_expression`, but false for `|| async { whatever_expression
+/// }`.
+fn is_async_closure(cx: &LateContext<'_>, body: &hir::Body<'_>) -> bool {
+ if let hir::ExprKind::Closure(innermost_closure_generated_by_desugar) = body.value.kind
+ && let desugared_inner_closure_body = cx.tcx.hir().body(innermost_closure_generated_by_desugar.body)
+ // checks whether it is `async || whatever_expression`
+ && let Some(CoroutineKind::Async(CoroutineSource::Closure)) = desugared_inner_closure_body.coroutine_kind
{
true
} else {
@@ -101,7 +103,7 @@ fn find_innermost_closure<'tcx>(
data = Some((
body.value,
closure.fn_decl,
- if is_async_closure(body) {
+ if is_async_closure(cx, body) {
ty::Asyncness::Yes
} else {
ty::Asyncness::No
@@ -141,7 +143,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClosureCall {
}
if let hir::ExprKind::Call(recv, _) = expr.kind
- // don't lint if the receiver is a call, too.
+ // don't lint if the receiver is a call, too.
// we do this in order to prevent linting multiple times; consider:
// `(|| || 1)()()`
// ^^ we only want to lint for this call (but we walk up the calls to consider both calls).
@@ -174,12 +176,18 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClosureCall {
hint = hint.asyncify();
}
- diag.span_suggestion(
- full_expr.span,
- "try doing something like",
- hint.maybe_par(),
- applicability,
- );
+ let is_in_fn_call_arg =
+ clippy_utils::get_parent_node(cx.tcx, expr.hir_id).is_some_and(|x| match x {
+ Node::Expr(expr) => matches!(expr.kind, hir::ExprKind::Call(_, _)),
+ _ => false,
+ });
+
+ // avoid clippy::double_parens
+ if !is_in_fn_call_arg {
+ hint = hint.maybe_par();
+ };
+
+ diag.span_suggestion(full_expr.span, "try doing something like", hint, applicability);
}
},
);
@@ -201,14 +209,12 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClosureCall {
type NestedFilter = nested_filter::OnlyBodies;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
- if_chain! {
- if let hir::ExprKind::Call(closure, _) = expr.kind;
- if let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = closure.kind;
- if self.path.segments[0].ident == path.segments[0].ident;
- if self.path.res == path.res;
- then {
- self.count += 1;
- }
+ if let hir::ExprKind::Call(closure, _) = expr.kind
+ && let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = closure.kind
+ && self.path.segments[0].ident == path.segments[0].ident
+ && self.path.res == path.res
+ {
+ self.count += 1;
}
hir_visit::walk_expr(self, expr);
}
@@ -223,25 +229,23 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClosureCall {
}
for w in block.stmts.windows(2) {
- if_chain! {
- if let hir::StmtKind::Local(local) = w[0].kind;
- if let Option::Some(t) = local.init;
- if let hir::ExprKind::Closure { .. } = t.kind;
- if let hir::PatKind::Binding(_, _, ident, _) = local.pat.kind;
- if let hir::StmtKind::Semi(second) = w[1].kind;
- if let hir::ExprKind::Assign(_, call, _) = second.kind;
- if let hir::ExprKind::Call(closure, _) = call.kind;
- if let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = closure.kind;
- if ident == path.segments[0].ident;
- if count_closure_usage(cx, block, path) == 1;
- then {
- span_lint(
- cx,
- REDUNDANT_CLOSURE_CALL,
- second.span,
- "closure called just once immediately after it was declared",
- );
- }
+ if let hir::StmtKind::Local(local) = w[0].kind
+ && let Option::Some(t) = local.init
+ && let hir::ExprKind::Closure { .. } = t.kind
+ && let hir::PatKind::Binding(_, _, ident, _) = local.pat.kind
+ && let hir::StmtKind::Semi(second) = w[1].kind
+ && let hir::ExprKind::Assign(_, call, _) = second.kind
+ && let hir::ExprKind::Call(closure, _) = call.kind
+ && let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = closure.kind
+ && ident == path.segments[0].ident
+ && count_closure_usage(cx, block, path) == 1
+ {
+ span_lint(
+ cx,
+ REDUNDANT_CLOSURE_CALL,
+ second.span,
+ "closure called just once immediately after it was declared",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_else.rs b/src/tools/clippy/clippy_lints/src/redundant_else.rs
index 221aa317e..001686c84 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_else.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_else.rs
@@ -3,7 +3,7 @@ use rustc_ast::ast::{Block, Expr, ExprKind, Stmt, StmtKind};
use rustc_ast::visit::{walk_expr, Visitor};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -105,7 +105,9 @@ impl<'ast> Visitor<'ast> for BreakVisitor {
fn visit_expr(&mut self, expr: &'ast Expr) {
self.is_break = match expr.kind {
ExprKind::Break(..) | ExprKind::Continue(..) | ExprKind::Ret(..) => true,
- ExprKind::Match(_, ref arms) => arms.iter().all(|arm| self.check_expr(&arm.body)),
+ ExprKind::Match(_, ref arms) => arms.iter().all(|arm|
+ arm.body.is_none() || arm.body.as_deref().is_some_and(|body| self.check_expr(body))
+ ),
ExprKind::If(_, ref then, Some(ref els)) => self.check_block(then) && self.check_expr(els),
ExprKind::If(_, _, None)
// ignore loops for simplicity
diff --git a/src/tools/clippy/clippy_lints/src/redundant_field_names.rs b/src/tools/clippy/clippy_lints/src/redundant_field_names.rs
index b8e606df7..fb000cd71 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_field_names.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_field_names.rs
@@ -4,7 +4,7 @@ use rustc_ast::ast::{Expr, ExprKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/redundant_locals.rs b/src/tools/clippy/clippy_lints/src/redundant_locals.rs
index 6bc0d0618..2c511ee0b 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_locals.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_locals.rs
@@ -6,7 +6,7 @@ use rustc_hir::def::Res;
use rustc_hir::{BindingAnnotation, ByRef, ExprKind, HirId, Local, Node, Pat, PatKind, QPath};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Ident;
use rustc_span::DesugaringKind;
@@ -46,40 +46,38 @@ declare_lint_pass!(RedundantLocals => [REDUNDANT_LOCALS]);
impl<'tcx> LateLintPass<'tcx> for RedundantLocals {
fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
- if_chain! {
- if !local.span.is_desugaring(DesugaringKind::Async);
+ if !local.span.is_desugaring(DesugaringKind::Async)
// the pattern is a single by-value binding
- if let PatKind::Binding(BindingAnnotation(ByRef::No, mutability), _, ident, None) = local.pat.kind;
+ && let PatKind::Binding(BindingAnnotation(ByRef::No, mutability), _, ident, None) = local.pat.kind
// the binding is not type-ascribed
- if local.ty.is_none();
+ && local.ty.is_none()
// the expression is a resolved path
- if let Some(expr) = local.init;
- if let ExprKind::Path(qpath @ QPath::Resolved(None, path)) = expr.kind;
+ && let Some(expr) = local.init
+ && let ExprKind::Path(qpath @ QPath::Resolved(None, path)) = expr.kind
// the path is a single segment equal to the local's name
- if let [last_segment] = path.segments;
- if last_segment.ident == ident;
+ && let [last_segment] = path.segments
+ && last_segment.ident == ident
// resolve the path to its defining binding pattern
- if let Res::Local(binding_id) = cx.qpath_res(&qpath, expr.hir_id);
- if let Node::Pat(binding_pat) = cx.tcx.hir().get(binding_id);
+ && let Res::Local(binding_id) = cx.qpath_res(&qpath, expr.hir_id)
+ && let Node::Pat(binding_pat) = cx.tcx.hir_node(binding_id)
// the previous binding has the same mutability
- if find_binding(binding_pat, ident).is_some_and(|bind| bind.1 == mutability);
+ && find_binding(binding_pat, ident).is_some_and(|bind| bind.1 == mutability)
// the local does not change the effect of assignments to the binding. see #11290
- if !affects_assignments(cx, mutability, binding_id, local.hir_id);
+ && !affects_assignments(cx, mutability, binding_id, local.hir_id)
// the local does not affect the code's drop behavior
- if !needs_ordered_drop(cx, cx.typeck_results().expr_ty(expr));
+ && !needs_ordered_drop(cx, cx.typeck_results().expr_ty(expr))
// the local is user-controlled
- if !in_external_macro(cx.sess(), local.span);
- if !is_from_proc_macro(cx, expr);
- then {
- span_lint_and_help(
- cx,
- REDUNDANT_LOCALS,
- local.span,
- &format!("redundant redefinition of a binding `{ident}`"),
- Some(binding_pat.span),
- &format!("`{ident}` is initially defined here"),
- );
- }
+ && !in_external_macro(cx.sess(), local.span)
+ && !is_from_proc_macro(cx, expr)
+ {
+ span_lint_and_help(
+ cx,
+ REDUNDANT_LOCALS,
+ local.span,
+ &format!("redundant redefinition of a binding `{ident}`"),
+ Some(binding_pat.span),
+ &format!("`{ident}` is initially defined here"),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs b/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs
index 03673eb27..0e43e4a7e 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs
@@ -4,7 +4,7 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::CRATE_DEF_ID;
use rustc_span::hygiene::MacroKind;
@@ -45,28 +45,27 @@ impl_lint_pass!(RedundantPubCrate => [REDUNDANT_PUB_CRATE]);
impl<'tcx> LateLintPass<'tcx> for RedundantPubCrate {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
- if_chain! {
- if cx.tcx.visibility(item.owner_id.def_id) == ty::Visibility::Restricted(CRATE_DEF_ID.to_def_id());
- if !cx.effective_visibilities.is_exported(item.owner_id.def_id) && self.is_exported.last() == Some(&false);
- if is_not_macro_export(item);
- then {
- let span = item.span.with_hi(item.ident.span.hi());
- let descr = cx.tcx.def_kind(item.owner_id).descr(item.owner_id.to_def_id());
- span_lint_and_then(
- cx,
- REDUNDANT_PUB_CRATE,
- span,
- &format!("pub(crate) {descr} inside private module"),
- |diag| {
- diag.span_suggestion(
- item.vis_span,
- "consider using",
- "pub".to_string(),
- Applicability::MachineApplicable,
- );
- },
- );
- }
+ if cx.tcx.visibility(item.owner_id.def_id) == ty::Visibility::Restricted(CRATE_DEF_ID.to_def_id())
+ && !cx.effective_visibilities.is_exported(item.owner_id.def_id)
+ && self.is_exported.last() == Some(&false)
+ && is_not_macro_export(item)
+ {
+ let span = item.span.with_hi(item.ident.span.hi());
+ let descr = cx.tcx.def_kind(item.owner_id).descr(item.owner_id.to_def_id());
+ span_lint_and_then(
+ cx,
+ REDUNDANT_PUB_CRATE,
+ span,
+ &format!("pub(crate) {descr} inside private module"),
+ |diag| {
+ diag.span_suggestion(
+ item.vis_span,
+ "consider using",
+ "pub".to_string(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
}
if let ItemKind::Mod { .. } = item.kind {
diff --git a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
index 7adbd6791..c99b657c2 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
@@ -2,14 +2,13 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::get_parent_expr;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{is_type_lang_item, peel_mid_ty_refs};
-use if_chain::if_chain;
use rustc_ast::util::parser::PREC_PREFIX;
use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, LangItem, Mutability};
use rustc_lint::{LateContext, LateLintPass, Lint};
use rustc_middle::ty::adjustment::{Adjust, AutoBorrow, AutoBorrowMutability};
use rustc_middle::ty::{GenericArg, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -78,92 +77,83 @@ impl<'tcx> LateLintPass<'tcx> for RedundantSlicing {
}
let ctxt = expr.span.ctxt();
- if_chain! {
- if let ExprKind::AddrOf(BorrowKind::Ref, mutability, addressee) = expr.kind;
- if addressee.span.ctxt() == ctxt;
- if let ExprKind::Index(indexed, range, _) = addressee.kind;
- if is_type_lang_item(cx, cx.typeck_results().expr_ty_adjusted(range), LangItem::RangeFull);
- then {
- let (expr_ty, expr_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(expr));
- let (indexed_ty, indexed_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(indexed));
- let parent_expr = get_parent_expr(cx, expr);
- let needs_parens_for_prefix = parent_expr.map_or(false, |parent| {
- parent.precedence().order() > PREC_PREFIX
- });
- let mut app = Applicability::MachineApplicable;
+ if let ExprKind::AddrOf(BorrowKind::Ref, mutability, addressee) = expr.kind
+ && addressee.span.ctxt() == ctxt
+ && let ExprKind::Index(indexed, range, _) = addressee.kind
+ && is_type_lang_item(cx, cx.typeck_results().expr_ty_adjusted(range), LangItem::RangeFull)
+ {
+ let (expr_ty, expr_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(expr));
+ let (indexed_ty, indexed_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(indexed));
+ let parent_expr = get_parent_expr(cx, expr);
+ let needs_parens_for_prefix = parent_expr.map_or(false, |parent| parent.precedence().order() > PREC_PREFIX);
+ let mut app = Applicability::MachineApplicable;
- let ((lint, msg), help, sugg) = if expr_ty == indexed_ty {
- if expr_ref_count > indexed_ref_count {
- // Indexing takes self by reference and can't return a reference to that
- // reference as it's a local variable. The only way this could happen is if
- // `self` contains a reference to the `Self` type. If this occurs then the
- // lint no longer applies as it's essentially a field access, which is not
- // redundant.
- return;
- }
- let deref_count = indexed_ref_count - expr_ref_count;
+ let ((lint, msg), help, sugg) = if expr_ty == indexed_ty {
+ if expr_ref_count > indexed_ref_count {
+ // Indexing takes self by reference and can't return a reference to that
+ // reference as it's a local variable. The only way this could happen is if
+ // `self` contains a reference to the `Self` type. If this occurs then the
+ // lint no longer applies as it's essentially a field access, which is not
+ // redundant.
+ return;
+ }
+ let deref_count = indexed_ref_count - expr_ref_count;
- let (lint, reborrow_str, help_str) = if mutability == Mutability::Mut {
- // The slice was used to reborrow the mutable reference.
- (DEREF_BY_SLICING_LINT, "&mut *", "reborrow the original value instead")
- } else if matches!(
- parent_expr,
- Some(Expr {
- kind: ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _),
- ..
- })
- ) || cx.typeck_results().expr_adjustments(expr).first().map_or(false, |a| {
- matches!(a.kind, Adjust::Borrow(AutoBorrow::Ref(_, AutoBorrowMutability::Mut { .. })))
- }) {
- // The slice was used to make a temporary reference.
- (DEREF_BY_SLICING_LINT, "&*", "reborrow the original value instead")
- } else if deref_count != 0 {
- (DEREF_BY_SLICING_LINT, "", "dereference the original value instead")
- } else {
- (REDUNDANT_SLICING_LINT, "", "use the original value instead")
- };
+ let (lint, reborrow_str, help_str) = if mutability == Mutability::Mut {
+ // The slice was used to reborrow the mutable reference.
+ (DEREF_BY_SLICING_LINT, "&mut *", "reborrow the original value instead")
+ } else if matches!(
+ parent_expr,
+ Some(Expr {
+ kind: ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _),
+ ..
+ })
+ ) || cx.typeck_results().expr_adjustments(expr).first().map_or(false, |a| {
+ matches!(
+ a.kind,
+ Adjust::Borrow(AutoBorrow::Ref(_, AutoBorrowMutability::Mut { .. }))
+ )
+ }) {
+ // The slice was used to make a temporary reference.
+ (DEREF_BY_SLICING_LINT, "&*", "reborrow the original value instead")
+ } else if deref_count != 0 {
+ (DEREF_BY_SLICING_LINT, "", "dereference the original value instead")
+ } else {
+ (REDUNDANT_SLICING_LINT, "", "use the original value instead")
+ };
- let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
- let sugg = if (deref_count != 0 || !reborrow_str.is_empty()) && needs_parens_for_prefix {
- format!("({reborrow_str}{}{snip})", "*".repeat(deref_count))
- } else {
- format!("{reborrow_str}{}{snip}", "*".repeat(deref_count))
- };
+ let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
+ let sugg = if (deref_count != 0 || !reborrow_str.is_empty()) && needs_parens_for_prefix {
+ format!("({reborrow_str}{}{snip})", "*".repeat(deref_count))
+ } else {
+ format!("{reborrow_str}{}{snip}", "*".repeat(deref_count))
+ };
- (lint, help_str, sugg)
- } else if let Some(target_id) = cx.tcx.lang_items().deref_target() {
- if let Ok(deref_ty) = cx.tcx.try_normalize_erasing_regions(
- cx.param_env,
- Ty::new_projection(cx.tcx,target_id, cx.tcx.mk_args(&[GenericArg::from(indexed_ty)])),
- ) {
- if deref_ty == expr_ty {
- let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
- let sugg = if needs_parens_for_prefix {
- format!("(&{}{}*{snip})", mutability.prefix_str(), "*".repeat(indexed_ref_count))
- } else {
- format!("&{}{}*{snip}", mutability.prefix_str(), "*".repeat(indexed_ref_count))
- };
- (DEREF_BY_SLICING_LINT, "dereference the original value instead", sugg)
+ (lint, help_str, sugg)
+ } else if let Some(target_id) = cx.tcx.lang_items().deref_target() {
+ if let Ok(deref_ty) = cx.tcx.try_normalize_erasing_regions(
+ cx.param_env,
+ Ty::new_projection(cx.tcx, target_id, cx.tcx.mk_args(&[GenericArg::from(indexed_ty)])),
+ ) {
+ if deref_ty == expr_ty {
+ let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
+ let sugg = if needs_parens_for_prefix {
+ format!("(&{}{}*{snip})", mutability.prefix_str(), "*".repeat(indexed_ref_count))
} else {
- return;
- }
+ format!("&{}{}*{snip}", mutability.prefix_str(), "*".repeat(indexed_ref_count))
+ };
+ (DEREF_BY_SLICING_LINT, "dereference the original value instead", sugg)
} else {
return;
}
} else {
return;
- };
+ }
+ } else {
+ return;
+ };
- span_lint_and_sugg(
- cx,
- lint,
- expr.span,
- msg,
- help,
- sugg,
- app,
- );
- }
+ span_lint_and_sugg(cx, lint, expr.span, msg, help, sugg, app);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
index a70b831a8..07b604f23 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
@@ -4,7 +4,7 @@ use clippy_utils::source::snippet;
use rustc_ast::ast::{ConstItem, Item, ItemKind, StaticItem, Ty, TyKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::kw;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/redundant_type_annotations.rs b/src/tools/clippy/clippy_lints/src/redundant_type_annotations.rs
index f6af9cac3..07fcb69af 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_type_annotations.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_type_annotations.rs
@@ -5,7 +5,7 @@ use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/ref_option_ref.rs b/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
index c984a8286..19ce08bde 100644
--- a/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::last_path_segment;
use clippy_utils::source::snippet;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{GenericArg, GenericArgsParentheses, Mutability, Ty, TyKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -38,34 +37,30 @@ declare_lint_pass!(RefOptionRef => [REF_OPTION_REF]);
impl<'tcx> LateLintPass<'tcx> for RefOptionRef {
fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx Ty<'tcx>) {
- if_chain! {
- if let TyKind::Ref(_, ref mut_ty) = ty.kind;
- if mut_ty.mutbl == Mutability::Not;
- if let TyKind::Path(ref qpath) = &mut_ty.ty.kind;
- let last = last_path_segment(qpath);
- if let Some(def_id) = last.res.opt_def_id();
-
- if cx.tcx.is_diagnostic_item(sym::Option, def_id);
- if let Some(params) = last_path_segment(qpath).args ;
- if params.parenthesized == GenericArgsParentheses::No;
- if let Some(inner_ty) = params.args.iter().find_map(|arg| match arg {
+ if let TyKind::Ref(_, ref mut_ty) = ty.kind
+ && mut_ty.mutbl == Mutability::Not
+ && let TyKind::Path(ref qpath) = &mut_ty.ty.kind
+ && let last = last_path_segment(qpath)
+ && let Some(def_id) = last.res.opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::Option, def_id)
+ && let Some(params) = last_path_segment(qpath).args
+ && params.parenthesized == GenericArgsParentheses::No
+ && let Some(inner_ty) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(inner_ty) => Some(inner_ty),
_ => None,
- });
- if let TyKind::Ref(_, ref inner_mut_ty) = inner_ty.kind;
- if inner_mut_ty.mutbl == Mutability::Not;
-
- then {
- span_lint_and_sugg(
- cx,
- REF_OPTION_REF,
- ty.span,
- "since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`",
- "try",
- format!("Option<{}>", &snippet(cx, inner_ty.span, "..")),
- Applicability::MaybeIncorrect,
- );
- }
+ })
+ && let TyKind::Ref(_, ref inner_mut_ty) = inner_ty.kind
+ && inner_mut_ty.mutbl == Mutability::Not
+ {
+ span_lint_and_sugg(
+ cx,
+ REF_OPTION_REF,
+ ty.span,
+ "since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`",
+ "try",
+ format!("Option<{}>", &snippet(cx, inner_ty.span, "..")),
+ Applicability::MaybeIncorrect,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/ref_patterns.rs b/src/tools/clippy/clippy_lints/src/ref_patterns.rs
index 8b3dabde9..a4be78b31 100644
--- a/src/tools/clippy/clippy_lints/src/ref_patterns.rs
+++ b/src/tools/clippy/clippy_lints/src/ref_patterns.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{BindingAnnotation, Pat, PatKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/reference.rs b/src/tools/clippy/clippy_lints/src/reference.rs
index 12da29f11..16086ba66 100644
--- a/src/tools/clippy/clippy_lints/src/reference.rs
+++ b/src/tools/clippy/clippy_lints/src/reference.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{snippet_opt, snippet_with_applicability};
-use if_chain::if_chain;
use rustc_ast::ast::{Expr, ExprKind, Mutability, UnOp};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::BytePos;
declare_clippy_lint! {
@@ -47,58 +46,62 @@ fn without_parens(mut e: &Expr) -> &Expr {
impl EarlyLintPass for DerefAddrOf {
fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &Expr) {
- if_chain! {
- if let ExprKind::Unary(UnOp::Deref, ref deref_target) = e.kind;
- if let ExprKind::AddrOf(_, ref mutability, ref addrof_target) = without_parens(deref_target).kind;
- if deref_target.span.eq_ctxt(e.span);
- if !addrof_target.span.from_expansion();
- then {
- let mut applicability = Applicability::MachineApplicable;
- let sugg = if e.span.from_expansion() {
- if let Some(macro_source) = snippet_opt(cx, e.span) {
- // Remove leading whitespace from the given span
- // e.g: ` $visitor` turns into `$visitor`
- let trim_leading_whitespaces = |span| {
- snippet_opt(cx, span).and_then(|snip| {
+ if let ExprKind::Unary(UnOp::Deref, ref deref_target) = e.kind
+ && let ExprKind::AddrOf(_, ref mutability, ref addrof_target) = without_parens(deref_target).kind
+ && deref_target.span.eq_ctxt(e.span)
+ && !addrof_target.span.from_expansion()
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let sugg = if e.span.from_expansion() {
+ if let Some(macro_source) = snippet_opt(cx, e.span) {
+ // Remove leading whitespace from the given span
+ // e.g: ` $visitor` turns into `$visitor`
+ let trim_leading_whitespaces = |span| {
+ snippet_opt(cx, span)
+ .and_then(|snip| {
#[expect(clippy::cast_possible_truncation)]
- snip.find(|c: char| !c.is_whitespace()).map(|pos| {
- span.lo() + BytePos(pos as u32)
- })
- }).map_or(span, |start_no_whitespace| e.span.with_lo(start_no_whitespace))
- };
-
- let mut generate_snippet = |pattern: &str| {
- #[expect(clippy::cast_possible_truncation)]
- macro_source.rfind(pattern).map(|pattern_pos| {
- let rpos = pattern_pos + pattern.len();
- let span_after_ref = e.span.with_lo(BytePos(e.span.lo().0 + rpos as u32));
- let span = trim_leading_whitespaces(span_after_ref);
- snippet_with_applicability(cx, span, "_", &mut applicability)
+ snip.find(|c: char| !c.is_whitespace())
+ .map(|pos| span.lo() + BytePos(pos as u32))
})
- };
+ .map_or(span, |start_no_whitespace| e.span.with_lo(start_no_whitespace))
+ };
+
+ let mut generate_snippet = |pattern: &str| {
+ #[expect(clippy::cast_possible_truncation)]
+ macro_source.rfind(pattern).map(|pattern_pos| {
+ let rpos = pattern_pos + pattern.len();
+ let span_after_ref = e.span.with_lo(BytePos(e.span.lo().0 + rpos as u32));
+ let span = trim_leading_whitespaces(span_after_ref);
+ snippet_with_applicability(cx, span, "_", &mut applicability)
+ })
+ };
- if *mutability == Mutability::Mut {
- generate_snippet("mut")
- } else {
- generate_snippet("&")
- }
+ if *mutability == Mutability::Mut {
+ generate_snippet("mut")
} else {
- Some(snippet_with_applicability(cx, e.span, "_", &mut applicability))
+ generate_snippet("&")
}
} else {
- Some(snippet_with_applicability(cx, addrof_target.span, "_", &mut applicability))
- };
- if let Some(sugg) = sugg {
- span_lint_and_sugg(
- cx,
- DEREF_ADDROF,
- e.span,
- "immediately dereferencing a reference",
- "try",
- sugg.to_string(),
- applicability,
- );
+ Some(snippet_with_applicability(cx, e.span, "_", &mut applicability))
}
+ } else {
+ Some(snippet_with_applicability(
+ cx,
+ addrof_target.span,
+ "_",
+ &mut applicability,
+ ))
+ };
+ if let Some(sugg) = sugg {
+ span_lint_and_sugg(
+ cx,
+ DEREF_ADDROF,
+ e.span,
+ "immediately dereferencing a reference",
+ "try",
+ sugg.to_string(),
+ applicability,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/regex.rs b/src/tools/clippy/clippy_lints/src/regex.rs
index cb78eec9e..687bad35a 100644
--- a/src/tools/clippy/clippy_lints/src/regex.rs
+++ b/src/tools/clippy/clippy_lints/src/regex.rs
@@ -8,7 +8,7 @@ use rustc_ast::ast::{LitKind, StrStyle};
use rustc_hir::def_id::DefIdMap;
use rustc_hir::{BorrowKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{BytePos, Span};
declare_clippy_lint! {
@@ -191,13 +191,11 @@ fn is_trivial_regex(s: &regex_syntax::hir::Hir) -> Option<&'static str> {
}
fn check_set<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, utf8: bool) {
- if_chain! {
- if let ExprKind::AddrOf(BorrowKind::Ref, _, expr) = expr.kind;
- if let ExprKind::Array(exprs) = expr.kind;
- then {
- for expr in exprs {
- check_regex(cx, expr, utf8);
- }
+ if let ExprKind::AddrOf(BorrowKind::Ref, _, expr) = expr.kind
+ && let ExprKind::Array(exprs) = expr.kind
+ {
+ for expr in exprs {
+ check_regex(cx, expr, utf8);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/renamed_lints.rs b/src/tools/clippy/clippy_lints/src/renamed_lints.rs
index 613f1ecc6..85979903b 100644
--- a/src/tools/clippy/clippy_lints/src/renamed_lints.rs
+++ b/src/tools/clippy/clippy_lints/src/renamed_lints.rs
@@ -4,8 +4,9 @@
pub static RENAMED_LINTS: &[(&str, &str)] = &[
("clippy::almost_complete_letter_range", "clippy::almost_complete_range"),
("clippy::blacklisted_name", "clippy::disallowed_names"),
- ("clippy::block_in_if_condition_expr", "clippy::blocks_in_if_conditions"),
- ("clippy::block_in_if_condition_stmt", "clippy::blocks_in_if_conditions"),
+ ("clippy::block_in_if_condition_expr", "clippy::blocks_in_conditions"),
+ ("clippy::block_in_if_condition_stmt", "clippy::blocks_in_conditions"),
+ ("clippy::blocks_in_if_conditions", "clippy::blocks_in_conditions"),
("clippy::box_vec", "clippy::box_collection"),
("clippy::const_static_lifetime", "clippy::redundant_static_lifetimes"),
("clippy::cyclomatic_complexity", "clippy::cognitive_complexity"),
@@ -58,4 +59,5 @@ pub static RENAMED_LINTS: &[(&str, &str)] = &[
("clippy::undropped_manually_drops", "undropped_manually_drops"),
("clippy::unknown_clippy_lints", "unknown_lints"),
("clippy::unused_label", "unused_labels"),
+ ("clippy::vtable_address_comparisons", "ambiguous_wide_pointer_comparisons"),
];
diff --git a/src/tools/clippy/clippy_lints/src/repeat_vec_with_capacity.rs b/src/tools/clippy/clippy_lints/src/repeat_vec_with_capacity.rs
new file mode 100644
index 000000000..5a4933a3f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/repeat_vec_with_capacity.rs
@@ -0,0 +1,114 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher::VecArgs;
+use clippy_utils::macros::root_macro_call;
+use clippy_utils::source::snippet;
+use clippy_utils::{expr_or_init, fn_def_id, match_def_path, paths};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::declare_lint_pass;
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Looks for patterns such as `vec![Vec::with_capacity(x); n]` or `iter::repeat(Vec::with_capacity(x))`.
+ ///
+ /// ### Why is this bad?
+ /// These constructs work by cloning the element, but cloning a `Vec<_>` does not
+ /// respect the old vector's capacity and effectively discards it.
+ ///
+ /// This makes `iter::repeat(Vec::with_capacity(x))` especially suspicious because the user most certainly
+ /// expected that the yielded `Vec<_>` will have the requested capacity, otherwise one can simply write
+ /// `iter::repeat(Vec::new())` instead and it will have the same effect.
+ ///
+ /// Similarly for `vec![x; n]`, the element `x` is cloned to fill the vec.
+ /// Unlike `iter::repeat` however, the vec repeat macro does not have to clone the value `n` times
+ /// but just `n - 1` times, because it can reuse the passed value for the last slot.
+ /// That means that the last `Vec<_>` gets the requested capacity but all other ones do not.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::iter;
+ ///
+ /// let _: Vec<Vec<u8>> = vec![Vec::with_capacity(42); 123];
+ /// let _: Vec<Vec<u8>> = iter::repeat(Vec::with_capacity(42)).take(123).collect();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::iter;
+ ///
+ /// let _: Vec<Vec<u8>> = iter::repeat_with(|| Vec::with_capacity(42)).take(123).collect();
+ /// // ^^^ this closure executes 123 times
+ /// // and the vecs will have the expected capacity
+ /// ```
+ #[clippy::version = "1.74.0"]
+ pub REPEAT_VEC_WITH_CAPACITY,
+ suspicious,
+ "repeating a `Vec::with_capacity` expression which does not retain capacity"
+}
+
+declare_lint_pass!(RepeatVecWithCapacity => [REPEAT_VEC_WITH_CAPACITY]);
+
+fn emit_lint(cx: &LateContext<'_>, span: Span, kind: &str, note: &'static str, sugg_msg: &'static str, sugg: String) {
+ span_lint_and_then(
+ cx,
+ REPEAT_VEC_WITH_CAPACITY,
+ span,
+ &format!("repeating `Vec::with_capacity` using `{kind}`, which does not retain capacity"),
+ |diag| {
+ diag.note(note);
+ diag.span_suggestion_verbose(span, sugg_msg, sugg, Applicability::MaybeIncorrect);
+ },
+ );
+}
+
+/// Checks `vec![Vec::with_capacity(x); n]`
+fn check_vec_macro(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if let Some(mac_call) = root_macro_call(expr.span)
+ && cx.tcx.is_diagnostic_item(sym::vec_macro, mac_call.def_id)
+ && let Some(VecArgs::Repeat(repeat_expr, len_expr)) = VecArgs::hir(cx, expr)
+ && fn_def_id(cx, repeat_expr).is_some_and(|did| match_def_path(cx, did, &paths::VEC_WITH_CAPACITY))
+ && !len_expr.span.from_expansion()
+ && let Some(Constant::Int(2..)) = constant(cx, cx.typeck_results(), expr_or_init(cx, len_expr))
+ {
+ emit_lint(
+ cx,
+ expr.span.source_callsite(),
+ "vec![x; n]",
+ "only the last `Vec` will have the capacity",
+ "if you intended to initialize multiple `Vec`s with an initial capacity, try",
+ format!(
+ "(0..{}).map(|_| {}).collect::<Vec<_>>()",
+ snippet(cx, len_expr.span, ""),
+ snippet(cx, repeat_expr.span, "..")
+ ),
+ );
+ }
+}
+
+/// Checks `iter::repeat(Vec::with_capacity(x))`
+fn check_repeat_fn(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if !expr.span.from_expansion()
+ && fn_def_id(cx, expr).is_some_and(|did| cx.tcx.is_diagnostic_item(sym::iter_repeat, did))
+ && let ExprKind::Call(_, [repeat_expr]) = expr.kind
+ && fn_def_id(cx, repeat_expr).is_some_and(|did| match_def_path(cx, did, &paths::VEC_WITH_CAPACITY))
+ && !repeat_expr.span.from_expansion()
+ {
+ emit_lint(
+ cx,
+ expr.span,
+ "iter::repeat",
+ "none of the yielded `Vec`s will have the requested capacity",
+ "if you intended to create an iterator that yields `Vec`s with an initial capacity, try",
+ format!("std::iter::repeat_with(|| {})", snippet(cx, repeat_expr.span, "..")),
+ );
+ }
+}
+
+impl LateLintPass<'_> for RepeatVecWithCapacity {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ check_vec_macro(cx, expr);
+ check_repeat_fn(cx, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/reserve_after_initialization.rs b/src/tools/clippy/clippy_lints/src/reserve_after_initialization.rs
index 1975946c6..ca7a0c7c8 100644
--- a/src/tools/clippy/clippy_lints/src/reserve_after_initialization.rs
+++ b/src/tools/clippy/clippy_lints/src/reserve_after_initialization.rs
@@ -7,7 +7,7 @@ use rustc_hir::def::Res;
use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, Local, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -26,7 +26,7 @@ declare_clippy_lint! {
/// ```no_run
/// let mut v: Vec<usize> = Vec::with_capacity(10);
/// ```
- #[clippy::version = "1.73.0"]
+ #[clippy::version = "1.74.0"]
pub RESERVE_AFTER_INITIALIZATION,
complexity,
"`reserve` called immediately after `Vec` creation"
diff --git a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs
index 245029a06..5962e8be9 100644
--- a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs
+++ b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs
@@ -6,7 +6,7 @@ use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, FnDecl, OwnerId, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
@@ -69,35 +69,32 @@ declare_clippy_lint! {
declare_lint_pass!(ReturnSelfNotMustUse => [RETURN_SELF_NOT_MUST_USE]);
fn check_method(cx: &LateContext<'_>, decl: &FnDecl<'_>, fn_def: LocalDefId, span: Span, owner_id: OwnerId) {
- if_chain! {
+ if !in_external_macro(cx.sess(), span)
// If it comes from an external macro, better ignore it.
- if !in_external_macro(cx.sess(), span);
- if decl.implicit_self.has_implicit_self();
+ && decl.implicit_self.has_implicit_self()
// We only show this warning for public exported methods.
- if cx.effective_visibilities.is_exported(fn_def);
+ && cx.effective_visibilities.is_exported(fn_def)
// We don't want to emit this lint if the `#[must_use]` attribute is already there.
- if !cx.tcx.hir().attrs(owner_id.into()).iter().any(|attr| attr.has_name(sym::must_use));
- if cx.tcx.visibility(fn_def.to_def_id()).is_public();
- let ret_ty = return_ty(cx, owner_id);
- let self_arg = nth_arg(cx, owner_id, 0);
+ && !cx.tcx.hir().attrs(owner_id.into()).iter().any(|attr| attr.has_name(sym::must_use))
+ && cx.tcx.visibility(fn_def.to_def_id()).is_public()
+ && let ret_ty = return_ty(cx, owner_id)
+ && let self_arg = nth_arg(cx, owner_id, 0)
// If `Self` has the same type as the returned type, then we want to warn.
//
// For this check, we don't want to remove the reference on the returned type because if
// there is one, we shouldn't emit a warning!
- if self_arg.peel_refs() == ret_ty;
+ && self_arg.peel_refs() == ret_ty
// If `Self` is already marked as `#[must_use]`, no need for the attribute here.
- if !is_must_use_ty(cx, ret_ty);
-
- then {
- span_lint_and_help(
- cx,
- RETURN_SELF_NOT_MUST_USE,
- span,
- "missing `#[must_use]` attribute on a method returning `Self`",
- None,
- "consider adding the `#[must_use]` attribute to the method or directly to the `Self` type"
- );
- }
+ && !is_must_use_ty(cx, ret_ty)
+ {
+ span_lint_and_help(
+ cx,
+ RETURN_SELF_NOT_MUST_USE,
+ span,
+ "missing `#[must_use]` attribute on a method returning `Self`",
+ None,
+ "consider adding the `#[must_use]` attribute to the method or directly to the `Self` type",
+ );
}
}
@@ -111,18 +108,15 @@ impl<'tcx> LateLintPass<'tcx> for ReturnSelfNotMustUse {
span: Span,
fn_def: LocalDefId,
) {
- if_chain! {
+ if matches!(kind, FnKind::Method(_, _))
// We are only interested in methods, not in functions or associated functions.
- if matches!(kind, FnKind::Method(_, _));
- if let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id());
+ && let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id())
// We don't want this method to be te implementation of a trait because the
// `#[must_use]` should be put on the trait definition directly.
- if cx.tcx.trait_id_of_impl(impl_def).is_none();
-
- then {
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def);
- check_method(cx, decl, fn_def, span, hir_id.expect_owner());
- }
+ && cx.tcx.trait_id_of_impl(impl_def).is_none()
+ {
+ let hir_id = cx.tcx.local_def_id_to_hir_id(fn_def);
+ check_method(cx, decl, fn_def, span, hir_id.expect_owner());
}
}
diff --git a/src/tools/clippy/clippy_lints/src/returns.rs b/src/tools/clippy/clippy_lints/src/returns.rs
index f2a3dc509..2293b53b4 100644
--- a/src/tools/clippy/clippy_lints/src/returns.rs
+++ b/src/tools/clippy/clippy_lints/src/returns.rs
@@ -2,18 +2,19 @@ use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then, span_lin
use clippy_utils::source::{snippet_opt, snippet_with_context};
use clippy_utils::sugg::has_enclosing_paren;
use clippy_utils::visitors::{for_each_expr_with_closures, Descend};
-use clippy_utils::{fn_def_id, is_from_proc_macro, path_to_local_id, span_find_starting_semi};
+use clippy_utils::{fn_def_id, is_from_proc_macro, is_inside_let_else, path_to_local_id, span_find_starting_semi};
use core::ops::ControlFlow;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{
- Block, Body, Expr, ExprKind, FnDecl, ItemKind, LangItem, MatchSource, OwnerNode, PatKind, QPath, Stmt, StmtKind,
+ Block, Body, Expr, ExprKind, FnDecl, HirId, ItemKind, LangItem, MatchSource, Node, OwnerNode, PatKind, QPath, Stmt,
+ StmtKind,
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::adjustment::Adjust;
use rustc_middle::ty::{self, GenericArgKind, Ty};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{BytePos, Pos, Span};
use std::borrow::Cow;
@@ -159,6 +160,22 @@ impl<'tcx> ToString for RetReplacement<'tcx> {
declare_lint_pass!(Return => [LET_AND_RETURN, NEEDLESS_RETURN, NEEDLESS_RETURN_WITH_QUESTION_MARK]);
+/// Checks if a return statement is "needed" in the middle of a block, or if it can be removed. This
+/// is the case when the enclosing block expression is coerced to some other type, which only works
+/// because of the never-ness of `return` expressions
+fn stmt_needs_never_type(cx: &LateContext<'_>, stmt_hir_id: HirId) -> bool {
+ cx.tcx
+ .hir()
+ .parent_iter(stmt_hir_id)
+ .find_map(|(_, node)| if let Node::Expr(expr) = node { Some(expr) } else { None })
+ .is_some_and(|e| {
+ cx.typeck_results()
+ .expr_adjustments(e)
+ .iter()
+ .any(|adjust| adjust.target != cx.tcx.types.unit && matches!(adjust.kind, Adjust::NeverToAny))
+ })
+}
+
impl<'tcx> LateLintPass<'tcx> for Return {
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
if !in_external_macro(cx.sess(), stmt.span)
@@ -170,9 +187,11 @@ impl<'tcx> LateLintPass<'tcx> for Return {
&& let ItemKind::Fn(_, _, body) = item.kind
&& let block = cx.tcx.hir().body(body).value
&& let ExprKind::Block(block, _) = block.kind
+ && !is_inside_let_else(cx.tcx, expr)
&& let [.., final_stmt] = block.stmts
&& final_stmt.hir_id != stmt.hir_id
&& !is_from_proc_macro(cx, expr)
+ && !stmt_needs_never_type(cx, stmt.hir_id)
{
span_lint_and_sugg(
cx,
@@ -188,50 +207,45 @@ impl<'tcx> LateLintPass<'tcx> for Return {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'_>) {
// we need both a let-binding stmt and an expr
- if_chain! {
- if let Some(retexpr) = block.expr;
- if let Some(stmt) = block.stmts.iter().last();
- if let StmtKind::Local(local) = &stmt.kind;
- if local.ty.is_none();
- if cx.tcx.hir().attrs(local.hir_id).is_empty();
- if let Some(initexpr) = &local.init;
- if let PatKind::Binding(_, local_id, _, _) = local.pat.kind;
- if path_to_local_id(retexpr, local_id);
- if !last_statement_borrows(cx, initexpr);
- if !in_external_macro(cx.sess(), initexpr.span);
- if !in_external_macro(cx.sess(), retexpr.span);
- if !local.span.from_expansion();
- then {
- span_lint_hir_and_then(
- cx,
- LET_AND_RETURN,
- retexpr.hir_id,
- retexpr.span,
- "returning the result of a `let` binding from a block",
- |err| {
- err.span_label(local.span, "unnecessary `let` binding");
+ if let Some(retexpr) = block.expr
+ && let Some(stmt) = block.stmts.iter().last()
+ && let StmtKind::Local(local) = &stmt.kind
+ && local.ty.is_none()
+ && cx.tcx.hir().attrs(local.hir_id).is_empty()
+ && let Some(initexpr) = &local.init
+ && let PatKind::Binding(_, local_id, _, _) = local.pat.kind
+ && path_to_local_id(retexpr, local_id)
+ && !last_statement_borrows(cx, initexpr)
+ && !in_external_macro(cx.sess(), initexpr.span)
+ && !in_external_macro(cx.sess(), retexpr.span)
+ && !local.span.from_expansion()
+ {
+ span_lint_hir_and_then(
+ cx,
+ LET_AND_RETURN,
+ retexpr.hir_id,
+ retexpr.span,
+ "returning the result of a `let` binding from a block",
+ |err| {
+ err.span_label(local.span, "unnecessary `let` binding");
- if let Some(mut snippet) = snippet_opt(cx, initexpr.span) {
- if !cx.typeck_results().expr_adjustments(retexpr).is_empty() {
- if !has_enclosing_paren(&snippet) {
- snippet = format!("({snippet})");
- }
- snippet.push_str(" as _");
+ if let Some(mut snippet) = snippet_opt(cx, initexpr.span) {
+ if !cx.typeck_results().expr_adjustments(retexpr).is_empty() {
+ if !has_enclosing_paren(&snippet) {
+ snippet = format!("({snippet})");
}
- err.multipart_suggestion(
- "return the expression directly",
- vec![
- (local.span, String::new()),
- (retexpr.span, snippet),
- ],
- Applicability::MachineApplicable,
- );
- } else {
- err.span_help(initexpr.span, "this expression can be directly returned");
+ snippet.push_str(" as _");
}
- },
- );
- }
+ err.multipart_suggestion(
+ "return the expression directly",
+ vec![(local.span, String::new()), (retexpr.span, snippet)],
+ Applicability::MachineApplicable,
+ );
+ } else {
+ err.span_help(initexpr.span, "this expression can be directly returned");
+ }
+ },
+ );
}
}
@@ -314,7 +328,7 @@ fn check_final_expr<'tcx>(
let replacement = if let Some(inner_expr) = inner {
// if desugar of `do yeet`, don't lint
if let ExprKind::Call(path_expr, _) = inner_expr.kind
- && let ExprKind::Path(QPath::LangItem(LangItem::TryTraitFromYeet, _, _)) = path_expr.kind
+ && let ExprKind::Path(QPath::LangItem(LangItem::TryTraitFromYeet, ..)) = path_expr.kind
{
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/same_name_method.rs b/src/tools/clippy/clippy_lints/src/same_name_method.rs
index fd1f3d390..7a351dab2 100644
--- a/src/tools/clippy/clippy_lints/src/same_name_method.rs
+++ b/src/tools/clippy/clippy_lints/src/same_name_method.rs
@@ -4,7 +4,7 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{HirId, Impl, ItemKind, Node, Path, QPath, TraitRef, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::AssocKind;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Symbol;
use rustc_span::Span;
use std::collections::{BTreeMap, BTreeSet};
@@ -55,11 +55,11 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod {
if matches!(cx.tcx.def_kind(id.owner_id), DefKind::Impl { .. })
&& let item = cx.tcx.hir().item(id)
&& let ItemKind::Impl(Impl {
- items,
- of_trait,
- self_ty,
- ..
- }) = &item.kind
+ items,
+ of_trait,
+ self_ty,
+ ..
+ }) = &item.kind
&& let TyKind::Path(QPath::Resolved(_, Path { res, .. })) = self_ty.kind
{
if !map.contains_key(res) {
@@ -75,11 +75,11 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod {
match of_trait {
Some(trait_ref) => {
- let mut methods_in_trait: BTreeSet<Symbol> = if_chain! {
+ let mut methods_in_trait: BTreeSet<Symbol> =
if let Some(Node::TraitRef(TraitRef { path, .. })) =
- cx.tcx.hir().find(trait_ref.hir_ref_id);
- if let Res::Def(DefKind::Trait, did) = path.res;
- then{
+ cx.tcx.opt_hir_node(trait_ref.hir_ref_id)
+ && let Res::Def(DefKind::Trait, did) = path.res
+ {
// FIXME: if
// `rustc_middle::ty::assoc::AssocItems::items` is public,
// we can iterate its keys instead of `in_definition_order`,
@@ -87,15 +87,12 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod {
cx.tcx
.associated_items(did)
.in_definition_order()
- .filter(|assoc_item| {
- matches!(assoc_item.kind, AssocKind::Fn)
- })
+ .filter(|assoc_item| matches!(assoc_item.kind, AssocKind::Fn))
.map(|assoc_item| assoc_item.name)
.collect()
- }else{
+ } else {
BTreeSet::new()
- }
- };
+ };
let mut check_trait_method = |method_name: Symbol, trait_method_span: Span| {
if let Some((impl_span, hir_id)) = existing_name.impl_methods.get(&method_name) {
diff --git a/src/tools/clippy/clippy_lints/src/self_named_constructors.rs b/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
index b92014f68..98f3235af 100644
--- a/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
+++ b/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
@@ -3,7 +3,7 @@ use clippy_utils::return_ty;
use clippy_utils::ty::contains_adt_constructor;
use rustc_hir::{Impl, ImplItem, ImplItemKind, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -70,22 +70,20 @@ impl<'tcx> LateLintPass<'tcx> for SelfNamedConstructors {
return;
}
- if_chain! {
- if let Some(self_def) = self_ty.ty_adt_def();
- if let Some(self_local_did) = self_def.did().as_local();
- let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_local_did);
- if let Some(Node::Item(x)) = cx.tcx.hir().find(self_id);
- let type_name = x.ident.name.as_str().to_lowercase();
- if impl_item.ident.name.as_str() == type_name || impl_item.ident.name.as_str().replace('_', "") == type_name;
-
- then {
- span_lint(
- cx,
- SELF_NAMED_CONSTRUCTORS,
- impl_item.span,
- &format!("constructor `{}` has the same name as the type", impl_item.ident.name),
- );
- }
+ if let Some(self_def) = self_ty.ty_adt_def()
+ && let Some(self_local_did) = self_def.did().as_local()
+ && let self_id = cx.tcx.local_def_id_to_hir_id(self_local_did)
+ && let Some(Node::Item(x)) = cx.tcx.opt_hir_node(self_id)
+ && let type_name = x.ident.name.as_str().to_lowercase()
+ && (impl_item.ident.name.as_str() == type_name
+ || impl_item.ident.name.as_str().replace('_', "") == type_name)
+ {
+ span_lint(
+ cx,
+ SELF_NAMED_CONSTRUCTORS,
+ impl_item.span,
+ &format!("constructor `{}` has the same name as the type", impl_item.ident.name),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/semicolon_block.rs b/src/tools/clippy/clippy_lints/src/semicolon_block.rs
index b0601bba4..0b3adfb7a 100644
--- a/src/tools/clippy/clippy_lints/src/semicolon_block.rs
+++ b/src/tools/clippy/clippy_lints/src/semicolon_block.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and
use rustc_errors::Applicability;
use rustc_hir::{Block, Expr, ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
index ccf8b9977..2cd3e57f8 100644
--- a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
+++ b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
@@ -1,11 +1,10 @@
use crate::rustc_lint::LintContext;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Block, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -38,30 +37,29 @@ declare_lint_pass!(SemicolonIfNothingReturned => [SEMICOLON_IF_NOTHING_RETURNED]
impl<'tcx> LateLintPass<'tcx> for SemicolonIfNothingReturned {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) {
- if_chain! {
- if !block.span.from_expansion();
- if let Some(expr) = block.expr;
- let t_expr = cx.typeck_results().expr_ty(expr);
- if t_expr.is_unit();
- let mut app = Applicability::MachineApplicable;
- if let snippet = snippet_with_context(cx, expr.span, block.span.ctxt(), "}", &mut app).0;
- if !snippet.ends_with('}') && !snippet.ends_with(';');
- if cx.sess().source_map().is_multiline(block.span);
- then {
- // filter out the desugared `for` loop
- if let ExprKind::DropTemps(..) = &expr.kind {
- return;
- }
- span_lint_and_sugg(
- cx,
- SEMICOLON_IF_NOTHING_RETURNED,
- expr.span.source_callsite(),
- "consider adding a `;` to the last statement for consistent formatting",
- "add a `;` here",
- format!("{snippet};"),
- app,
- );
+ if !block.span.from_expansion()
+ && let Some(expr) = block.expr
+ && let t_expr = cx.typeck_results().expr_ty(expr)
+ && t_expr.is_unit()
+ && let mut app = Applicability::MachineApplicable
+ && let snippet = snippet_with_context(cx, expr.span, block.span.ctxt(), "}", &mut app).0
+ && !snippet.ends_with('}')
+ && !snippet.ends_with(';')
+ && cx.sess().source_map().is_multiline(block.span)
+ {
+ // filter out the desugared `for` loop
+ if let ExprKind::DropTemps(..) = &expr.kind {
+ return;
}
+ span_lint_and_sugg(
+ cx,
+ SEMICOLON_IF_NOTHING_RETURNED,
+ expr.span.source_callsite(),
+ "consider adding a `;` to the last statement for consistent formatting",
+ "add a `;` here",
+ format!("{snippet};"),
+ app,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/serde_api.rs b/src/tools/clippy/clippy_lints/src/serde_api.rs
index fc1c2af92..90834d784 100644
--- a/src/tools/clippy/clippy_lints/src/serde_api.rs
+++ b/src/tools/clippy/clippy_lints/src/serde_api.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint;
use clippy_utils::{get_trait_def_id, paths};
use rustc_hir::{Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs
index 41c10b34a..c74364d89 100644
--- a/src/tools/clippy/clippy_lints/src/shadow.rs
+++ b/src/tools/clippy/clippy_lints/src/shadow.rs
@@ -7,7 +7,7 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::hir_id::ItemLocalId;
use rustc_hir::{Block, Body, BodyOwnerKind, Expr, ExprKind, HirId, Let, Node, Pat, PatKind, QPath, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{Span, Symbol};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
index 57bcee1a8..6c99ccda7 100644
--- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
+++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
@@ -8,7 +8,7 @@ use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{self as hir};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::{GenericArgKind, Ty, TypeAndMut};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::Ident;
use rustc_span::{sym, Span, DUMMY_SP};
use std::borrow::Cow;
diff --git a/src/tools/clippy/clippy_lints/src/single_call_fn.rs b/src/tools/clippy/clippy_lints/src/single_call_fn.rs
index 0492df68d..8e181c3cc 100644
--- a/src/tools/clippy/clippy_lints/src/single_call_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/single_call_fn.rs
@@ -7,7 +7,7 @@ use rustc_hir::{Body, Expr, ExprKind, FnDecl};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs b/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs
index 74ee8ce2d..42f1564db 100644
--- a/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs
+++ b/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_help;
use rustc_ast::ast::{GenericParam, GenericParamKind};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
index 9c21d70c8..18fbbdb40 100644
--- a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
@@ -5,7 +5,7 @@ use rustc_ast::visit::{walk_expr, Visitor};
use rustc_ast::{Crate, Expr, ExprKind, Item, ItemKind, MacroDef, ModKind, Ty, TyKind, UseTreeKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::edition::Edition;
use rustc_span::symbol::kw;
use rustc_span::{Span, Symbol};
diff --git a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs
index 099743d22..95b4a11a7 100644
--- a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs
+++ b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs
@@ -8,7 +8,7 @@ use rustc_ast::{LitIntType, LitKind, UintTy};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::fmt::{self, Display, Formatter};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
index b940cac60..756e47cbd 100644
--- a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
+++ b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
@@ -2,11 +2,10 @@
//! expecting a count of T
use clippy_utils::diagnostics::span_lint_and_help;
-use if_chain::if_chain;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty, TypeAndMut};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -39,16 +38,17 @@ declare_lint_pass!(SizeOfInElementCount => [SIZE_OF_IN_ELEMENT_COUNT]);
fn get_size_of_ty<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, inverted: bool) -> Option<Ty<'tcx>> {
match expr.kind {
ExprKind::Call(count_func, _func_args) => {
- if_chain! {
- if !inverted;
- if let ExprKind::Path(ref count_func_qpath) = count_func.kind;
- if let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id();
- if matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::mem_size_of | sym::mem_size_of_val));
- then {
- cx.typeck_results().node_args(count_func.hir_id).types().next()
- } else {
- None
- }
+ if !inverted
+ && let ExprKind::Path(ref count_func_qpath) = count_func.kind
+ && let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id()
+ && matches!(
+ cx.tcx.get_diagnostic_name(def_id),
+ Some(sym::mem_size_of | sym::mem_size_of_val)
+ )
+ {
+ cx.typeck_results().node_args(count_func.hir_id).types().next()
+ } else {
+ None
}
},
ExprKind::Binary(op, left, right) if BinOpKind::Mul == op.node => {
@@ -80,13 +80,12 @@ fn get_pointee_ty_and_count_expr<'tcx>(
"wrapping_offset",
];
- if_chain! {
+ if let ExprKind::Call(func, [.., count]) = expr.kind
// Find calls to ptr::{copy, copy_nonoverlapping}
// and ptr::{swap_nonoverlapping, write_bytes},
- if let ExprKind::Call(func, [.., count]) = expr.kind;
- if let ExprKind::Path(ref func_qpath) = func.kind;
- if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
- if matches!(cx.tcx.get_diagnostic_name(def_id), Some(
+ && let ExprKind::Path(ref func_qpath) = func.kind
+ && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
+ && matches!(cx.tcx.get_diagnostic_name(def_id), Some(
sym::ptr_copy
| sym::ptr_copy_nonoverlapping
| sym::ptr_slice_from_raw_parts
@@ -95,26 +94,23 @@ fn get_pointee_ty_and_count_expr<'tcx>(
| sym::ptr_write_bytes
| sym::slice_from_raw_parts
| sym::slice_from_raw_parts_mut
- ));
+ ))
// Get the pointee type
- if let Some(pointee_ty) = cx.typeck_results().node_args(func.hir_id).types().next();
- then {
- return Some((pointee_ty, count));
- }
+ && let Some(pointee_ty) = cx.typeck_results().node_args(func.hir_id).types().next()
+ {
+ return Some((pointee_ty, count));
};
- if_chain! {
+ if let ExprKind::MethodCall(method_path, ptr_self, [.., count], _) = expr.kind
// Find calls to copy_{from,to}{,_nonoverlapping} and write_bytes methods
- if let ExprKind::MethodCall(method_path, ptr_self, [.., count], _) = expr.kind;
- let method_ident = method_path.ident.as_str();
- if METHODS.iter().any(|m| *m == method_ident);
+ && let method_ident = method_path.ident.as_str()
+ && METHODS.iter().any(|m| *m == method_ident)
// Get the pointee type
- if let ty::RawPtr(TypeAndMut { ty: pointee_ty, .. }) =
- cx.typeck_results().expr_ty(ptr_self).kind();
- then {
- return Some((*pointee_ty, count));
- }
+ && let ty::RawPtr(TypeAndMut { ty: pointee_ty, .. }) =
+ cx.typeck_results().expr_ty(ptr_self).kind()
+ {
+ return Some((*pointee_ty, count));
};
None
}
@@ -127,25 +123,16 @@ impl<'tcx> LateLintPass<'tcx> for SizeOfInElementCount {
const LINT_MSG: &str = "found a count of bytes \
instead of a count of elements of `T`";
- if_chain! {
+ if let Some((pointee_ty, count_expr)) = get_pointee_ty_and_count_expr(cx, expr)
// Find calls to functions with an element count parameter and get
// the pointee type and count parameter expression
- if let Some((pointee_ty, count_expr)) = get_pointee_ty_and_count_expr(cx, expr);
// Find a size_of call in the count parameter expression and
// check that it's the same type
- if let Some(ty_used_for_size_of) = get_size_of_ty(cx, count_expr, false);
- if pointee_ty == ty_used_for_size_of;
- then {
- span_lint_and_help(
- cx,
- SIZE_OF_IN_ELEMENT_COUNT,
- count_expr.span,
- LINT_MSG,
- None,
- HELP_MSG
- );
- }
+ && let Some(ty_used_for_size_of) = get_size_of_ty(cx, count_expr, false)
+ && pointee_ty == ty_used_for_size_of
+ {
+ span_lint_and_help(cx, SIZE_OF_IN_ELEMENT_COUNT, count_expr.span, LINT_MSG, None, HELP_MSG);
};
}
}
diff --git a/src/tools/clippy/clippy_lints/src/size_of_ref.rs b/src/tools/clippy/clippy_lints/src/size_of_ref.rs
index 7de029b7b..14ca7a3f0 100644
--- a/src/tools/clippy/clippy_lints/src/size_of_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/size_of_ref.rs
@@ -3,7 +3,7 @@ use clippy_utils::path_def_id;
use clippy_utils::ty::peel_mid_ty_refs;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
index 2244eab96..c4a5e48e8 100644
--- a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
+++ b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
@@ -5,12 +5,11 @@ use clippy_utils::{
get_enclosing_block, is_expr_path_def_path, is_integer_literal, is_path_diagnostic_item, path_to_local,
path_to_local_id, paths, SpanlessEq,
};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_block, walk_expr, walk_stmt, Visitor};
use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, PatKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -103,41 +102,35 @@ enum InitializationType<'tcx> {
impl<'tcx> LateLintPass<'tcx> for SlowVectorInit {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// Matches initialization on reassignments. For example: `vec = Vec::with_capacity(100)`
- if_chain! {
- if let ExprKind::Assign(left, right, _) = expr.kind;
- if let Some(local_id) = path_to_local(left);
- if let Some(size_expr) = Self::as_vec_initializer(cx, right);
-
- then {
- let vi = VecAllocation {
- local_id,
- allocation_expr: right,
- size_expr,
- };
-
- Self::search_initialization(cx, vi, expr.hir_id);
- }
+ if let ExprKind::Assign(left, right, _) = expr.kind
+ && let Some(local_id) = path_to_local(left)
+ && let Some(size_expr) = Self::as_vec_initializer(cx, right)
+ {
+ let vi = VecAllocation {
+ local_id,
+ allocation_expr: right,
+ size_expr,
+ };
+
+ Self::search_initialization(cx, vi, expr.hir_id);
}
}
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
// Matches statements which initializes vectors. For example: `let mut vec = Vec::with_capacity(10)`
// or `Vec::new()`
- if_chain! {
- if let StmtKind::Local(local) = stmt.kind;
- if let PatKind::Binding(BindingAnnotation::MUT, local_id, _, None) = local.pat.kind;
- if let Some(init) = local.init;
- if let Some(size_expr) = Self::as_vec_initializer(cx, init);
-
- then {
- let vi = VecAllocation {
- local_id,
- allocation_expr: init,
- size_expr,
- };
-
- Self::search_initialization(cx, vi, stmt.hir_id);
- }
+ if let StmtKind::Local(local) = stmt.kind
+ && let PatKind::Binding(BindingAnnotation::MUT, local_id, _, None) = local.pat.kind
+ && let Some(init) = local.init
+ && let Some(size_expr) = Self::as_vec_initializer(cx, init)
+ {
+ let vi = VecAllocation {
+ local_id,
+ allocation_expr: init,
+ size_expr,
+ };
+
+ Self::search_initialization(cx, vi, stmt.hir_id);
}
}
}
@@ -242,16 +235,13 @@ struct VectorInitializationVisitor<'a, 'tcx> {
impl<'a, 'tcx> VectorInitializationVisitor<'a, 'tcx> {
/// Checks if the given expression is extending a vector with `repeat(0).take(..)`
fn search_slow_extend_filling(&mut self, expr: &'tcx Expr<'_>) {
- if_chain! {
- if self.initialization_found;
- if let ExprKind::MethodCall(path, self_arg, [extend_arg], _) = expr.kind;
- if path_to_local_id(self_arg, self.vec_alloc.local_id);
- if path.ident.name == sym!(extend);
- if self.is_repeat_take(extend_arg);
-
- then {
- self.slow_expression = Some(InitializationType::Extend(expr));
- }
+ if self.initialization_found
+ && let ExprKind::MethodCall(path, self_arg, [extend_arg], _) = expr.kind
+ && path_to_local_id(self_arg, self.vec_alloc.local_id)
+ && path.ident.name == sym!(extend)
+ && self.is_repeat_take(extend_arg)
+ {
+ self.slow_expression = Some(InitializationType::Extend(expr));
}
}
@@ -281,21 +271,19 @@ impl<'a, 'tcx> VectorInitializationVisitor<'a, 'tcx> {
/// Returns `true` if give expression is `repeat(0).take(...)`
fn is_repeat_take(&mut self, expr: &'tcx Expr<'tcx>) -> bool {
- if_chain! {
- if let ExprKind::MethodCall(take_path, recv, [len_arg, ..], _) = expr.kind;
- if take_path.ident.name == sym!(take);
+ if let ExprKind::MethodCall(take_path, recv, [len_arg, ..], _) = expr.kind
+ && take_path.ident.name == sym!(take)
// Check that take is applied to `repeat(0)`
- if self.is_repeat_zero(recv);
- then {
- if let InitializedSize::Initialized(size_expr) = self.vec_alloc.size_expr {
- // Check that len expression is equals to `with_capacity` expression
- return SpanlessEq::new(self.cx).eq_expr(len_arg, size_expr)
- || matches!(len_arg.kind, ExprKind::MethodCall(path, ..) if path.ident.as_str() == "capacity")
- }
-
- self.vec_alloc.size_expr = InitializedSize::Initialized(len_arg);
- return true;
+ && self.is_repeat_zero(recv)
+ {
+ if let InitializedSize::Initialized(size_expr) = self.vec_alloc.size_expr {
+                // Check that len expression is equal to `with_capacity` expression
+ return SpanlessEq::new(self.cx).eq_expr(len_arg, size_expr)
+ || matches!(len_arg.kind, ExprKind::MethodCall(path, ..) if path.ident.as_str() == "capacity");
}
+
+ self.vec_alloc.size_expr = InitializedSize::Initialized(len_arg);
+ return true;
}
false
@@ -303,15 +291,13 @@ impl<'a, 'tcx> VectorInitializationVisitor<'a, 'tcx> {
/// Returns `true` if given expression is `repeat(0)`
fn is_repeat_zero(&self, expr: &Expr<'_>) -> bool {
- if_chain! {
- if let ExprKind::Call(fn_expr, [repeat_arg]) = expr.kind;
- if is_path_diagnostic_item(self.cx, fn_expr, sym::iter_repeat);
- if is_integer_literal(repeat_arg, 0);
- then {
- true
- } else {
- false
- }
+ if let ExprKind::Call(fn_expr, [repeat_arg]) = expr.kind
+ && is_path_diagnostic_item(self.cx, fn_expr, sym::iter_repeat)
+ && is_integer_literal(repeat_arg, 0)
+ {
+ true
+ } else {
+ false
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
index d07a44770..38fd54a0f 100644
--- a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
+++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
@@ -6,7 +6,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::{HirId, Path, PathSegment};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::kw;
use rustc_span::{sym, Span};
diff --git a/src/tools/clippy/clippy_lints/src/strings.rs b/src/tools/clippy/clippy_lints/src/strings.rs
index a44adc938..13ae1ff52 100644
--- a/src/tools/clippy/clippy_lints/src/strings.rs
+++ b/src/tools/clippy/clippy_lints/src/strings.rs
@@ -5,14 +5,13 @@ use clippy_utils::{
get_expr_use_or_unification_node, get_parent_expr, is_lint_allowed, is_path_diagnostic_item, method_calls,
peel_blocks, SpanlessEq,
};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, LangItem, Node, QPath};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::sym;
@@ -251,128 +250,115 @@ const MAX_LENGTH_BYTE_STRING_LIT: usize = 32;
declare_lint_pass!(StringLitAsBytes => [STRING_LIT_AS_BYTES, STRING_FROM_UTF8_AS_BYTES]);
impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes {
- #[expect(clippy::too_many_lines)]
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
use rustc_ast::LitKind;
- if_chain! {
+ if let ExprKind::Call(fun, args) = e.kind
// Find std::str::converts::from_utf8
- if let ExprKind::Call(fun, args) = e.kind;
- if is_path_diagnostic_item(cx, fun, sym::str_from_utf8);
+ && is_path_diagnostic_item(cx, fun, sym::str_from_utf8)
// Find string::as_bytes
- if let ExprKind::AddrOf(BorrowKind::Ref, _, args) = args[0].kind;
- if let ExprKind::Index(left, right, _) = args.kind;
- let (method_names, expressions, _) = method_calls(left, 1);
- if method_names.len() == 1;
- if expressions.len() == 1;
- if expressions[0].1.is_empty();
- if method_names[0] == sym!(as_bytes);
+ && let ExprKind::AddrOf(BorrowKind::Ref, _, args) = args[0].kind
+ && let ExprKind::Index(left, right, _) = args.kind
+ && let (method_names, expressions, _) = method_calls(left, 1)
+ && method_names.len() == 1
+ && expressions.len() == 1
+ && expressions[0].1.is_empty()
+ && method_names[0] == sym!(as_bytes)
// Check for slicer
- if let ExprKind::Struct(QPath::LangItem(LangItem::Range, ..), _, _) = right.kind;
+ && let ExprKind::Struct(QPath::LangItem(LangItem::Range, ..), _, _) = right.kind
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let string_expression = &expressions[0].0;
- then {
- let mut applicability = Applicability::MachineApplicable;
- let string_expression = &expressions[0].0;
+ let snippet_app = snippet_with_applicability(cx, string_expression.span, "..", &mut applicability);
- let snippet_app = snippet_with_applicability(
- cx,
- string_expression.span, "..",
- &mut applicability,
- );
+ span_lint_and_sugg(
+ cx,
+ STRING_FROM_UTF8_AS_BYTES,
+ e.span,
+ "calling a slice of `as_bytes()` with `from_utf8` should be not necessary",
+ "try",
+ format!("Some(&{snippet_app}[{}])", snippet(cx, right.span, "..")),
+ applicability,
+ );
+ }
+ if !in_external_macro(cx.sess(), e.span)
+ && let ExprKind::MethodCall(path, receiver, ..) = &e.kind
+ && path.ident.name == sym!(as_bytes)
+ && let ExprKind::Lit(lit) = &receiver.kind
+ && let LitKind::Str(lit_content, _) = &lit.node
+ {
+ let callsite = snippet(cx, receiver.span.source_callsite(), r#""foo""#);
+ let mut applicability = Applicability::MachineApplicable;
+ if callsite.starts_with("include_str!") {
span_lint_and_sugg(
cx,
- STRING_FROM_UTF8_AS_BYTES,
+ STRING_LIT_AS_BYTES,
e.span,
- "calling a slice of `as_bytes()` with `from_utf8` should be not necessary",
- "try",
- format!("Some(&{snippet_app}[{}])", snippet(cx, right.span, "..")),
- applicability
- )
- }
- }
-
- if_chain! {
- if !in_external_macro(cx.sess(), e.span);
- if let ExprKind::MethodCall(path, receiver, ..) = &e.kind;
- if path.ident.name == sym!(as_bytes);
- if let ExprKind::Lit(lit) = &receiver.kind;
- if let LitKind::Str(lit_content, _) = &lit.node;
- then {
- let callsite = snippet(cx, receiver.span.source_callsite(), r#""foo""#);
- let mut applicability = Applicability::MachineApplicable;
- if callsite.starts_with("include_str!") {
+ "calling `as_bytes()` on `include_str!(..)`",
+ "consider using `include_bytes!(..)` instead",
+ snippet_with_applicability(cx, receiver.span, r#""foo""#, &mut applicability).replacen(
+ "include_str",
+ "include_bytes",
+ 1,
+ ),
+ applicability,
+ );
+ } else if lit_content.as_str().is_ascii()
+ && lit_content.as_str().len() <= MAX_LENGTH_BYTE_STRING_LIT
+ && !receiver.span.from_expansion()
+ {
+ if let Some((parent, id)) = get_expr_use_or_unification_node(cx.tcx, e)
+ && let Node::Expr(parent) = parent
+ && let ExprKind::Match(scrutinee, ..) = parent.kind
+ && scrutinee.hir_id == id
+ {
+ // Don't lint. Byte strings produce `&[u8; N]` whereas `as_bytes()` produces
+ // `&[u8]`. This change would prevent matching with different sized slices.
+ } else if !callsite.starts_with("env!") {
span_lint_and_sugg(
cx,
STRING_LIT_AS_BYTES,
e.span,
- "calling `as_bytes()` on `include_str!(..)`",
- "consider using `include_bytes!(..)` instead",
- snippet_with_applicability(cx, receiver.span, r#""foo""#, &mut applicability).replacen(
- "include_str",
- "include_bytes",
- 1,
+ "calling `as_bytes()` on a string literal",
+ "consider using a byte string literal instead",
+ format!(
+ "b{}",
+ snippet_with_applicability(cx, receiver.span, r#""foo""#, &mut applicability)
),
applicability,
);
- } else if lit_content.as_str().is_ascii()
- && lit_content.as_str().len() <= MAX_LENGTH_BYTE_STRING_LIT
- && !receiver.span.from_expansion()
- {
- if let Some((parent, id)) = get_expr_use_or_unification_node(cx.tcx, e)
- && let Node::Expr(parent) = parent
- && let ExprKind::Match(scrutinee, ..) = parent.kind
- && scrutinee.hir_id == id
- {
- // Don't lint. Byte strings produce `&[u8; N]` whereas `as_bytes()` produces
- // `&[u8]`. This change would prevent matching with different sized slices.
- } else if !callsite.starts_with("env!") {
- span_lint_and_sugg(
- cx,
- STRING_LIT_AS_BYTES,
- e.span,
- "calling `as_bytes()` on a string literal",
- "consider using a byte string literal instead",
- format!(
- "b{}",
- snippet_with_applicability(cx, receiver.span, r#""foo""#, &mut applicability)
- ),
- applicability,
- );
- }
}
}
}
- if_chain! {
- if let ExprKind::MethodCall(path, recv, [], _) = &e.kind;
- if path.ident.name == sym!(into_bytes);
- if let ExprKind::MethodCall(path, recv, [], _) = &recv.kind;
- if matches!(path.ident.name.as_str(), "to_owned" | "to_string");
- if let ExprKind::Lit(lit) = &recv.kind;
- if let LitKind::Str(lit_content, _) = &lit.node;
-
- if lit_content.as_str().is_ascii();
- if lit_content.as_str().len() <= MAX_LENGTH_BYTE_STRING_LIT;
- if !recv.span.from_expansion();
- then {
- let mut applicability = Applicability::MachineApplicable;
+ if let ExprKind::MethodCall(path, recv, [], _) = &e.kind
+ && path.ident.name == sym!(into_bytes)
+ && let ExprKind::MethodCall(path, recv, [], _) = &recv.kind
+ && matches!(path.ident.name.as_str(), "to_owned" | "to_string")
+ && let ExprKind::Lit(lit) = &recv.kind
+ && let LitKind::Str(lit_content, _) = &lit.node
+ && lit_content.as_str().is_ascii()
+ && lit_content.as_str().len() <= MAX_LENGTH_BYTE_STRING_LIT
+ && !recv.span.from_expansion()
+ {
+ let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- STRING_LIT_AS_BYTES,
- e.span,
- "calling `into_bytes()` on a string literal",
- "consider using a byte string literal instead",
- format!(
- "b{}.to_vec()",
- snippet_with_applicability(cx, recv.span, r#""..""#, &mut applicability)
- ),
- applicability,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ STRING_LIT_AS_BYTES,
+ e.span,
+ "calling `into_bytes()` on a string literal",
+ "consider using a byte string literal instead",
+ format!(
+ "b{}.to_vec()",
+ snippet_with_applicability(cx, recv.span, r#""..""#, &mut applicability)
+ ),
+ applicability,
+ );
}
}
}
@@ -406,22 +392,20 @@ declare_lint_pass!(StrToString => [STR_TO_STRING]);
impl<'tcx> LateLintPass<'tcx> for StrToString {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::MethodCall(path, self_arg, ..) = &expr.kind;
- if path.ident.name == sym::to_string;
- let ty = cx.typeck_results().expr_ty(self_arg);
- if let ty::Ref(_, ty, ..) = ty.kind();
- if ty.is_str();
- then {
- span_lint_and_help(
- cx,
- STR_TO_STRING,
- expr.span,
- "`to_string()` called on a `&str`",
- None,
- "consider using `.to_owned()`",
- );
- }
+ if let ExprKind::MethodCall(path, self_arg, ..) = &expr.kind
+ && path.ident.name == sym::to_string
+ && let ty = cx.typeck_results().expr_ty(self_arg)
+ && let ty::Ref(_, ty, ..) = ty.kind()
+ && ty.is_str()
+ {
+ span_lint_and_help(
+ cx,
+ STR_TO_STRING,
+ expr.span,
+ "`to_string()` called on a `&str`",
+ None,
+ "consider using `.to_owned()`",
+ );
}
}
}
@@ -456,21 +440,19 @@ declare_lint_pass!(StringToString => [STRING_TO_STRING]);
impl<'tcx> LateLintPass<'tcx> for StringToString {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
- if_chain! {
- if let ExprKind::MethodCall(path, self_arg, ..) = &expr.kind;
- if path.ident.name == sym::to_string;
- let ty = cx.typeck_results().expr_ty(self_arg);
- if is_type_lang_item(cx, ty, LangItem::String);
- then {
- span_lint_and_help(
- cx,
- STRING_TO_STRING,
- expr.span,
- "`to_string()` called on a `String`",
- None,
- "consider using `.clone()`",
- );
- }
+ if let ExprKind::MethodCall(path, self_arg, ..) = &expr.kind
+ && path.ident.name == sym::to_string
+ && let ty = cx.typeck_results().expr_ty(self_arg)
+ && is_type_lang_item(cx, ty, LangItem::String)
+ {
+ span_lint_and_help(
+ cx,
+ STRING_TO_STRING,
+ expr.span,
+ "`to_string()` called on a `String`",
+ None,
+ "consider using `.clone()`",
+ );
}
}
}
@@ -500,26 +482,24 @@ declare_lint_pass!(TrimSplitWhitespace => [TRIM_SPLIT_WHITESPACE]);
impl<'tcx> LateLintPass<'tcx> for TrimSplitWhitespace {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
let tyckres = cx.typeck_results();
- if_chain! {
- if let ExprKind::MethodCall(path, split_recv, [], split_ws_span) = expr.kind;
- if path.ident.name == sym!(split_whitespace);
- if let Some(split_ws_def_id) = tyckres.type_dependent_def_id(expr.hir_id);
- if cx.tcx.is_diagnostic_item(sym::str_split_whitespace, split_ws_def_id);
- if let ExprKind::MethodCall(path, _trim_recv, [], trim_span) = split_recv.kind;
- if let trim_fn_name @ ("trim" | "trim_start" | "trim_end") = path.ident.name.as_str();
- if let Some(trim_def_id) = tyckres.type_dependent_def_id(split_recv.hir_id);
- if is_one_of_trim_diagnostic_items(cx, trim_def_id);
- then {
- span_lint_and_sugg(
- cx,
- TRIM_SPLIT_WHITESPACE,
- trim_span.with_hi(split_ws_span.lo()),
- &format!("found call to `str::{trim_fn_name}` before `str::split_whitespace`"),
- &format!("remove `{trim_fn_name}()`"),
- String::new(),
- Applicability::MachineApplicable,
- );
- }
+ if let ExprKind::MethodCall(path, split_recv, [], split_ws_span) = expr.kind
+ && path.ident.name == sym!(split_whitespace)
+ && let Some(split_ws_def_id) = tyckres.type_dependent_def_id(expr.hir_id)
+ && cx.tcx.is_diagnostic_item(sym::str_split_whitespace, split_ws_def_id)
+ && let ExprKind::MethodCall(path, _trim_recv, [], trim_span) = split_recv.kind
+ && let trim_fn_name @ ("trim" | "trim_start" | "trim_end") = path.ident.name.as_str()
+ && let Some(trim_def_id) = tyckres.type_dependent_def_id(split_recv.hir_id)
+ && is_one_of_trim_diagnostic_items(cx, trim_def_id)
+ {
+ span_lint_and_sugg(
+ cx,
+ TRIM_SPLIT_WHITESPACE,
+ trim_span.with_hi(split_ws_span.lo()),
+ &format!("found call to `str::{trim_fn_name}` before `str::split_whitespace`"),
+ &format!("remove `{trim_fn_name}()`"),
+ String::new(),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
index b3db5e9a4..8cf4715ee 100644
--- a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
+++ b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
@@ -3,11 +3,10 @@ use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
use clippy_utils::visitors::is_expr_unsafe;
use clippy_utils::{get_parent_node, match_libc_symbol};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Block, BlockCheckMode, Expr, ExprKind, LangItem, Node, UnsafeSource};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -41,48 +40,45 @@ declare_lint_pass!(StrlenOnCStrings => [STRLEN_ON_C_STRINGS]);
impl<'tcx> LateLintPass<'tcx> for StrlenOnCStrings {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
- if !expr.span.from_expansion();
- if let ExprKind::Call(func, [recv]) = expr.kind;
- if let ExprKind::Path(path) = &func.kind;
- if let Some(did) = cx.qpath_res(path, func.hir_id).opt_def_id();
- if match_libc_symbol(cx, did, "strlen");
- if let ExprKind::MethodCall(path, self_arg, [], _) = recv.kind;
- if !recv.span.from_expansion();
- if path.ident.name == sym::as_ptr;
- then {
- let ctxt = expr.span.ctxt();
- let span = match get_parent_node(cx.tcx, expr.hir_id) {
- Some(Node::Block(&Block {
- rules: BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided), span, ..
- }))
- if span.ctxt() == ctxt && !is_expr_unsafe(cx, self_arg) => {
- span
- }
- _ => expr.span,
- };
+ if !expr.span.from_expansion()
+ && let ExprKind::Call(func, [recv]) = expr.kind
+ && let ExprKind::Path(path) = &func.kind
+ && let Some(did) = cx.qpath_res(path, func.hir_id).opt_def_id()
+ && match_libc_symbol(cx, did, "strlen")
+ && let ExprKind::MethodCall(path, self_arg, [], _) = recv.kind
+ && !recv.span.from_expansion()
+ && path.ident.name == sym::as_ptr
+ {
+ let ctxt = expr.span.ctxt();
+ let span = match get_parent_node(cx.tcx, expr.hir_id) {
+ Some(Node::Block(&Block {
+ rules: BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided),
+ span,
+ ..
+ })) if span.ctxt() == ctxt && !is_expr_unsafe(cx, self_arg) => span,
+ _ => expr.span,
+ };
- let ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
- let mut app = Applicability::MachineApplicable;
- let val_name = snippet_with_context(cx, self_arg.span, ctxt, "..", &mut app).0;
- let method_name = if is_type_diagnostic_item(cx, ty, sym::cstring_type) {
- "as_bytes"
- } else if is_type_lang_item(cx, ty, LangItem::CStr) {
- "to_bytes"
- } else {
- return;
- };
+ let ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
+ let mut app = Applicability::MachineApplicable;
+ let val_name = snippet_with_context(cx, self_arg.span, ctxt, "..", &mut app).0;
+ let method_name = if is_type_diagnostic_item(cx, ty, sym::cstring_type) {
+ "as_bytes"
+ } else if is_type_lang_item(cx, ty, LangItem::CStr) {
+ "to_bytes"
+ } else {
+ return;
+ };
- span_lint_and_sugg(
- cx,
- STRLEN_ON_C_STRINGS,
- span,
- "using `libc::strlen` on a `CString` or `CStr` value",
- "try",
- format!("{val_name}.{method_name}().len()"),
- app,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ STRLEN_ON_C_STRINGS,
+ span,
+ "using `libc::strlen` on a `CString` or `CStr` value",
+ "try",
+ format!("{val_name}.{method_name}().len()"),
+ app,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
deleted file mode 100644
index 0abc199da..000000000
--- a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_then};
-use if_chain::if_chain;
-use rustc_ast::token::CommentKind;
-use rustc_ast::{AttrKind, AttrStyle, Attribute, Item};
-use rustc_errors::Applicability;
-use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::Span;
-
-declare_clippy_lint! {
- /// ### What it does
- /// Detects the use of outer doc comments (`///`, `/**`) followed by a bang (`!`): `///!`
- ///
- /// ### Why is this bad?
- /// Triple-slash comments (known as "outer doc comments") apply to items that follow it.
- /// An outer doc comment followed by a bang (i.e. `///!`) has no specific meaning.
- ///
- /// The user most likely meant to write an inner doc comment (`//!`, `/*!`), which
- /// applies to the parent item (i.e. the item that the comment is contained in,
- /// usually a module or crate).
- ///
- /// ### Known problems
- /// Inner doc comments can only appear before items, so there are certain cases where the suggestion
- /// made by this lint is not valid code. For example:
- /// ```rs
- /// fn foo() {}
- /// ///!
- /// fn bar() {}
- /// ```
- /// This lint detects the doc comment and suggests changing it to `//!`, but an inner doc comment
- /// is not valid at that position.
- ///
- /// ### Example
- /// In this example, the doc comment is attached to the *function*, rather than the *module*.
- /// ```no_run
- /// pub mod util {
- /// ///! This module contains utility functions.
- ///
- /// pub fn dummy() {}
- /// }
- /// ```
- ///
- /// Use instead:
- /// ```no_run
- /// pub mod util {
- /// //! This module contains utility functions.
- ///
- /// pub fn dummy() {}
- /// }
- /// ```
- #[clippy::version = "1.70.0"]
- pub SUSPICIOUS_DOC_COMMENTS,
- suspicious,
- "suspicious usage of (outer) doc comments"
-}
-declare_lint_pass!(SuspiciousDocComments => [SUSPICIOUS_DOC_COMMENTS]);
-
-const WARNING: &str = "this is an outer doc comment and does not apply to the parent module or crate";
-const HELP: &str = "use an inner doc comment to document the parent module or crate";
-
-impl EarlyLintPass for SuspiciousDocComments {
- fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
- let replacements = collect_doc_comment_replacements(&item.attrs);
-
- if let Some(((lo_span, _), (hi_span, _))) = replacements.first().zip(replacements.last()) {
- let span = lo_span.to(*hi_span);
-
- span_lint_and_then(cx, SUSPICIOUS_DOC_COMMENTS, span, WARNING, |diag| {
- multispan_sugg_with_applicability(diag, HELP, Applicability::MaybeIncorrect, replacements);
- });
- }
- }
-}
-
-fn collect_doc_comment_replacements(attrs: &[Attribute]) -> Vec<(Span, String)> {
- attrs
- .iter()
- .filter_map(|attr| {
- if_chain! {
- if let AttrKind::DocComment(com_kind, sym) = attr.kind;
- if let AttrStyle::Outer = attr.style;
- if let Some(com) = sym.as_str().strip_prefix('!');
- then {
- let sugg = match com_kind {
- CommentKind::Line => format!("//!{com}"),
- CommentKind::Block => format!("/*!{com}*/")
- };
- Some((attr.span, sugg))
- } else {
- None
- }
- }
- })
- .collect()
-}
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
index bb8cde5b9..8b9d9bade 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
@@ -2,12 +2,11 @@ use clippy_utils::ast_utils::{eq_id, is_useless_with_eq_exprs, IdentIter};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use core::ops::{Add, AddAssign};
-use if_chain::if_chain;
use rustc_ast::ast::{BinOpKind, Expr, ExprKind, StmtKind};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::Ident;
use rustc_span::Span;
@@ -155,34 +154,22 @@ fn check_binops(cx: &EarlyContext<'_>, binops: &[&BinaryOp<'_>]) {
match (no_difference_info, double_difference_info) {
(Some(i), None) => attempt_to_emit_no_difference_lint(cx, binops, i, expected_loc),
(None, Some((double_difference_index, ident_loc1, ident_loc2))) => {
- if_chain! {
- if one_ident_difference_count == binop_count - 1;
- if let Some(binop) = binops.get(double_difference_index);
- then {
- let changed_loc = if ident_loc1 == expected_loc {
- ident_loc2
- } else if ident_loc2 == expected_loc {
- ident_loc1
- } else {
- // This expression doesn't match the form we're
- // looking for.
- return;
- };
-
- if let Some(sugg) = ident_swap_sugg(
- cx,
- &paired_identifiers,
- binop,
- changed_loc,
- &mut applicability,
- ) {
- emit_suggestion(
- cx,
- binop.span,
- sugg,
- applicability,
- );
- }
+ if one_ident_difference_count == binop_count - 1
+ && let Some(binop) = binops.get(double_difference_index)
+ {
+ let changed_loc = if ident_loc1 == expected_loc {
+ ident_loc2
+ } else if ident_loc2 == expected_loc {
+ ident_loc1
+ } else {
+ // This expression doesn't match the form we're
+ // looking for.
+ return;
+ };
+
+ if let Some(sugg) = ident_swap_sugg(cx, &paired_identifiers, binop, changed_loc, &mut applicability)
+ {
+ emit_suggestion(cx, binop.span, sugg, applicability);
}
}
},
@@ -212,48 +199,32 @@ fn attempt_to_emit_no_difference_lint(
let old_right_ident = get_ident(binop.right, expected_loc);
for b in skip_index(binops.iter(), i) {
- if_chain! {
- if let (Some(old_ident), Some(new_ident)) =
- (old_left_ident, get_ident(b.left, expected_loc));
- if old_ident != new_ident;
- if let Some(sugg) = suggestion_with_swapped_ident(
+ if let (Some(old_ident), Some(new_ident)) = (old_left_ident, get_ident(b.left, expected_loc))
+ && old_ident != new_ident
+ && let Some(sugg) =
+ suggestion_with_swapped_ident(cx, binop.left, expected_loc, new_ident, &mut applicability)
+ {
+ emit_suggestion(
cx,
- binop.left,
- expected_loc,
- new_ident,
- &mut applicability,
+ binop.span,
+ replace_left_sugg(cx, binop, &sugg, &mut applicability),
+ applicability,
);
- then {
- emit_suggestion(
- cx,
- binop.span,
- replace_left_sugg(cx, binop, &sugg, &mut applicability),
- applicability,
- );
- return;
- }
+ return;
}
- if_chain! {
- if let (Some(old_ident), Some(new_ident)) =
- (old_right_ident, get_ident(b.right, expected_loc));
- if old_ident != new_ident;
- if let Some(sugg) = suggestion_with_swapped_ident(
+ if let (Some(old_ident), Some(new_ident)) = (old_right_ident, get_ident(b.right, expected_loc))
+ && old_ident != new_ident
+ && let Some(sugg) =
+ suggestion_with_swapped_ident(cx, binop.right, expected_loc, new_ident, &mut applicability)
+ {
+ emit_suggestion(
cx,
- binop.right,
- expected_loc,
- new_ident,
- &mut applicability,
+ binop.span,
+ replace_right_sugg(cx, binop, &sugg, &mut applicability),
+ applicability,
);
- then {
- emit_suggestion(
- cx,
- binop.span,
- replace_right_sugg(cx, binop, &sugg, &mut applicability),
- applicability,
- );
- return;
- }
+ return;
}
}
}
@@ -327,7 +298,7 @@ fn replace_left_sugg(
) -> String {
format!(
"{left_suggestion} {} {}",
- binop.op.to_string(),
+ binop.op.as_str(),
snippet_with_applicability(cx, binop.right.span, "..", applicability),
)
}
@@ -341,7 +312,7 @@ fn replace_right_sugg(
format!(
"{} {} {right_suggestion}",
snippet_with_applicability(cx, binop.left.span, "..", applicability),
- binop.op.to_string(),
+ binop.op.as_str(),
)
}
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs b/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs
index 6271ea027..8eab3f587 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs
@@ -2,10 +2,9 @@ use clippy_utils::diagnostics::span_lint;
use clippy_utils::visitors::for_each_expr;
use clippy_utils::{binop_traits, trait_ref_of_method, BINOP_TRAITS, OP_ASSIGN_TRAITS};
use core::ops::ControlFlow;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -57,37 +56,39 @@ declare_lint_pass!(SuspiciousImpl => [SUSPICIOUS_ARITHMETIC_IMPL, SUSPICIOUS_OP_
impl<'tcx> LateLintPass<'tcx> for SuspiciousImpl {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
- if_chain! {
- if let hir::ExprKind::Binary(binop, _, _) | hir::ExprKind::AssignOp(binop, ..) = expr.kind;
- if let Some((binop_trait_lang, op_assign_trait_lang)) = binop_traits(binop.node);
- if let Some(binop_trait_id) = cx.tcx.lang_items().get(binop_trait_lang);
- if let Some(op_assign_trait_id) = cx.tcx.lang_items().get(op_assign_trait_lang);
+ if let hir::ExprKind::Binary(binop, _, _) | hir::ExprKind::AssignOp(binop, ..) = expr.kind
+ && let Some((binop_trait_lang, op_assign_trait_lang)) = binop_traits(binop.node)
+ && let Some(binop_trait_id) = cx.tcx.lang_items().get(binop_trait_lang)
+ && let Some(op_assign_trait_id) = cx.tcx.lang_items().get(op_assign_trait_lang)
// Check for more than one binary operation in the implemented function
// Linting when multiple operations are involved can result in false positives
- let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id).def_id;
- if let hir::Node::ImplItem(impl_item) = cx.tcx.hir().get_by_def_id(parent_fn);
- if let hir::ImplItemKind::Fn(_, body_id) = impl_item.kind;
- let body = cx.tcx.hir().body(body_id);
- let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id).def_id;
- if let Some(trait_ref) = trait_ref_of_method(cx, parent_fn);
- let trait_id = trait_ref.path.res.def_id();
- if ![binop_trait_id, op_assign_trait_id].contains(&trait_id);
- if let Some(&(_, lint)) = [
+ && let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id).def_id
+ && let hir::Node::ImplItem(impl_item) = cx.tcx.hir_node_by_def_id(parent_fn)
+ && let hir::ImplItemKind::Fn(_, body_id) = impl_item.kind
+ && let body = cx.tcx.hir().body(body_id)
+ && let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id).def_id
+ && let Some(trait_ref) = trait_ref_of_method(cx, parent_fn)
+ && let trait_id = trait_ref.path.res.def_id()
+ && ![binop_trait_id, op_assign_trait_id].contains(&trait_id)
+ && let Some(&(_, lint)) = [
(&BINOP_TRAITS, SUSPICIOUS_ARITHMETIC_IMPL),
(&OP_ASSIGN_TRAITS, SUSPICIOUS_OP_ASSIGN_IMPL),
]
.iter()
- .find(|&(ts, _)| ts.iter().any(|&t| Some(trait_id) == cx.tcx.lang_items().get(t)));
- if count_binops(body.value) == 1;
- then {
- span_lint(
- cx,
- lint,
- binop.span,
- &format!("suspicious use of `{}` in `{}` impl", binop.node.as_str(), cx.tcx.item_name(trait_id)),
- );
- }
+ .find(|&(ts, _)| ts.iter().any(|&t| Some(trait_id) == cx.tcx.lang_items().get(t)))
+ && count_binops(body.value) == 1
+ {
+ span_lint(
+ cx,
+ lint,
+ binop.span,
+ &format!(
+ "suspicious use of `{}` in `{}` impl",
+ binop.node.as_str(),
+ cx.tcx.item_name(trait_id)
+ ),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs b/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs
index 4340c23f8..1cc27670f 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs
@@ -6,7 +6,7 @@ use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/swap.rs b/src/tools/clippy/clippy_lints/src/swap.rs
index 660e6835e..daa6fe871 100644
--- a/src/tools/clippy/clippy_lints/src/swap.rs
+++ b/src/tools/clippy/clippy_lints/src/swap.rs
@@ -3,13 +3,12 @@ use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{can_mut_borrow_both, eq_expr_value, in_constant, std_or_core};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::Ident;
use rustc_span::{sym, Span, SyntaxContext};
@@ -149,36 +148,33 @@ fn check_manual_swap(cx: &LateContext<'_>, block: &Block<'_>) {
}
for [s1, s2, s3] in block.stmts.array_windows::<3>() {
- if_chain! {
+ if let StmtKind::Local(tmp) = s1.kind
// let t = foo();
- if let StmtKind::Local(tmp) = s1.kind;
- if let Some(tmp_init) = tmp.init;
- if let PatKind::Binding(.., ident, None) = tmp.pat.kind;
+ && let Some(tmp_init) = tmp.init
+ && let PatKind::Binding(.., ident, None) = tmp.pat.kind
// foo() = bar();
- if let StmtKind::Semi(first) = s2.kind;
- if let ExprKind::Assign(lhs1, rhs1, _) = first.kind;
+ && let StmtKind::Semi(first) = s2.kind
+ && let ExprKind::Assign(lhs1, rhs1, _) = first.kind
// bar() = t;
- if let StmtKind::Semi(second) = s3.kind;
- if let ExprKind::Assign(lhs2, rhs2, _) = second.kind;
- if let ExprKind::Path(QPath::Resolved(None, rhs2)) = rhs2.kind;
- if rhs2.segments.len() == 1;
+ && let StmtKind::Semi(second) = s3.kind
+ && let ExprKind::Assign(lhs2, rhs2, _) = second.kind
+ && let ExprKind::Path(QPath::Resolved(None, rhs2)) = rhs2.kind
+ && rhs2.segments.len() == 1
- if ident.name == rhs2.segments[0].ident.name;
- if eq_expr_value(cx, tmp_init, lhs1);
- if eq_expr_value(cx, rhs1, lhs2);
+ && ident.name == rhs2.segments[0].ident.name
+ && eq_expr_value(cx, tmp_init, lhs1)
+ && eq_expr_value(cx, rhs1, lhs2)
- let ctxt = s1.span.ctxt();
- if s2.span.ctxt() == ctxt;
- if s3.span.ctxt() == ctxt;
- if first.span.ctxt() == ctxt;
- if second.span.ctxt() == ctxt;
-
- then {
- let span = s1.span.to(s3.span);
- generate_swap_warning(cx, lhs1, lhs2, span, false);
- }
+ && let ctxt = s1.span.ctxt()
+ && s2.span.ctxt() == ctxt
+ && s3.span.ctxt() == ctxt
+ && first.span.ctxt() == ctxt
+ && second.span.ctxt() == ctxt
+ {
+ let span = s1.span.to(s3.span);
+ generate_swap_warning(cx, lhs1, lhs2, span, false);
}
}
}
@@ -261,20 +257,18 @@ fn parse<'a, 'hir>(stmt: &'a Stmt<'hir>) -> Option<(ExprOrIdent<'hir>, &'a Expr<
fn check_xor_swap(cx: &LateContext<'_>, block: &Block<'_>) {
for [s1, s2, s3] in block.stmts.array_windows::<3>() {
let ctxt = s1.span.ctxt();
- if_chain! {
- if let Some((lhs0, rhs0)) = extract_sides_of_xor_assign(s1, ctxt);
- if let Some((lhs1, rhs1)) = extract_sides_of_xor_assign(s2, ctxt);
- if let Some((lhs2, rhs2)) = extract_sides_of_xor_assign(s3, ctxt);
- if eq_expr_value(cx, lhs0, rhs1);
- if eq_expr_value(cx, lhs2, rhs1);
- if eq_expr_value(cx, lhs1, rhs0);
- if eq_expr_value(cx, lhs1, rhs2);
- if s2.span.ctxt() == ctxt;
- if s3.span.ctxt() == ctxt;
- then {
- let span = s1.span.to(s3.span);
- generate_swap_warning(cx, lhs0, rhs0, span, true);
- }
+ if let Some((lhs0, rhs0)) = extract_sides_of_xor_assign(s1, ctxt)
+ && let Some((lhs1, rhs1)) = extract_sides_of_xor_assign(s2, ctxt)
+ && let Some((lhs2, rhs2)) = extract_sides_of_xor_assign(s3, ctxt)
+ && eq_expr_value(cx, lhs0, rhs1)
+ && eq_expr_value(cx, lhs2, rhs1)
+ && eq_expr_value(cx, lhs1, rhs0)
+ && eq_expr_value(cx, lhs1, rhs2)
+ && s2.span.ctxt() == ctxt
+ && s3.span.ctxt() == ctxt
+ {
+ let span = s1.span.to(s3.span);
+ generate_swap_warning(cx, lhs0, rhs0, span, true);
};
}
}
diff --git a/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs b/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs
index 6a6c94425..20e9608a1 100644
--- a/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs
@@ -4,7 +4,7 @@ use clippy_utils::source::snippet_with_context;
use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span, SyntaxContext};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs b/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs
index dcf1fac02..af9e13dba 100644
--- a/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs
+++ b/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{BytePos, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/temporary_assignment.rs b/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
index c717ccc35..8151dd8f2 100644
--- a/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
+++ b/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_adjusted;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
index 0cfb1c125..da5575826 100644
--- a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
+++ b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
@@ -3,7 +3,7 @@ use clippy_utils::{is_in_cfg_test, is_in_test_function};
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, FnDecl};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
@@ -55,20 +55,18 @@ impl LateLintPass<'_> for TestsOutsideTestModule {
sp: Span,
_: LocalDefId,
) {
- if_chain! {
- if !matches!(kind, FnKind::Closure);
- if is_in_test_function(cx.tcx, body.id().hir_id);
- if !is_in_cfg_test(cx.tcx, body.id().hir_id);
- then {
- span_lint_and_note(
- cx,
- TESTS_OUTSIDE_TEST_MODULE,
- sp,
- "this function marked with #[test] is outside a #[cfg(test)] module",
- None,
- "move it to a testing module marked with #[cfg(test)]",
- );
- }
+ if !matches!(kind, FnKind::Closure)
+ && is_in_test_function(cx.tcx, body.id().hir_id)
+ && !is_in_cfg_test(cx.tcx, body.id().hir_id)
+ {
+ span_lint_and_note(
+ cx,
+ TESTS_OUTSIDE_TEST_MODULE,
+ sp,
+ "this function marked with #[test] is outside a #[cfg(test)] module",
+ None,
+ "move it to a testing module marked with #[cfg(test)]",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
index a171d225f..dafe9e388 100644
--- a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
+++ b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::match_def_path;
use clippy_utils::source::snippet_with_applicability;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -38,59 +37,57 @@ declare_lint_pass!(ToDigitIsSome => [TO_DIGIT_IS_SOME]);
impl<'tcx> LateLintPass<'tcx> for ToDigitIsSome {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
- if_chain! {
- if let hir::ExprKind::MethodCall(is_some_path, to_digit_expr, [], _) = &expr.kind;
- if is_some_path.ident.name.as_str() == "is_some";
- then {
- let match_result = match &to_digit_expr.kind {
- hir::ExprKind::MethodCall(to_digits_path, char_arg, [radix_arg], _) => {
- if_chain! {
- if to_digits_path.ident.name.as_str() == "to_digit";
- let char_arg_ty = cx.typeck_results().expr_ty_adjusted(char_arg);
- if *char_arg_ty.kind() == ty::Char;
- then {
- Some((true, *char_arg, radix_arg))
- } else {
- None
- }
- }
+ if let hir::ExprKind::MethodCall(is_some_path, to_digit_expr, [], _) = &expr.kind
+ && is_some_path.ident.name.as_str() == "is_some"
+ {
+ let match_result = match &to_digit_expr.kind {
+ hir::ExprKind::MethodCall(to_digits_path, char_arg, [radix_arg], _) => {
+ if to_digits_path.ident.name.as_str() == "to_digit"
+ && let char_arg_ty = cx.typeck_results().expr_ty_adjusted(char_arg)
+ && *char_arg_ty.kind() == ty::Char
+ {
+ Some((true, *char_arg, radix_arg))
+ } else {
+ None
}
- hir::ExprKind::Call(to_digits_call, to_digit_args) => {
- if_chain! {
- if let [char_arg, radix_arg] = *to_digit_args;
- if let hir::ExprKind::Path(to_digits_path) = &to_digits_call.kind;
- if let to_digits_call_res = cx.qpath_res(to_digits_path, to_digits_call.hir_id);
- if let Some(to_digits_def_id) = to_digits_call_res.opt_def_id();
- if match_def_path(cx, to_digits_def_id, &["core", "char", "methods", "<impl char>", "to_digit"]);
- then {
- Some((false, char_arg, radix_arg))
- } else {
- None
- }
- }
+ },
+ hir::ExprKind::Call(to_digits_call, to_digit_args) => {
+ if let [char_arg, radix_arg] = *to_digit_args
+ && let hir::ExprKind::Path(to_digits_path) = &to_digits_call.kind
+ && let to_digits_call_res = cx.qpath_res(to_digits_path, to_digits_call.hir_id)
+ && let Some(to_digits_def_id) = to_digits_call_res.opt_def_id()
+ && match_def_path(
+ cx,
+ to_digits_def_id,
+ &["core", "char", "methods", "<impl char>", "to_digit"],
+ )
+ {
+ Some((false, char_arg, radix_arg))
+ } else {
+ None
}
- _ => None
- };
+ },
+ _ => None,
+ };
- if let Some((is_method_call, char_arg, radix_arg)) = match_result {
- let mut applicability = Applicability::MachineApplicable;
- let char_arg_snip = snippet_with_applicability(cx, char_arg.span, "_", &mut applicability);
- let radix_snip = snippet_with_applicability(cx, radix_arg.span, "_", &mut applicability);
+ if let Some((is_method_call, char_arg, radix_arg)) = match_result {
+ let mut applicability = Applicability::MachineApplicable;
+ let char_arg_snip = snippet_with_applicability(cx, char_arg.span, "_", &mut applicability);
+ let radix_snip = snippet_with_applicability(cx, radix_arg.span, "_", &mut applicability);
- span_lint_and_sugg(
- cx,
- TO_DIGIT_IS_SOME,
- expr.span,
- "use of `.to_digit(..).is_some()`",
- "try",
- if is_method_call {
- format!("{char_arg_snip}.is_digit({radix_snip})")
- } else {
- format!("char::is_digit({char_arg_snip}, {radix_snip})")
- },
- applicability,
- );
- }
+ span_lint_and_sugg(
+ cx,
+ TO_DIGIT_IS_SOME,
+ expr.span,
+ "use of `.to_digit(..).is_some()`",
+ "try",
+ if is_method_call {
+ format!("{char_arg_snip}.is_digit({radix_snip})")
+ } else {
+ format!("char::is_digit({char_arg_snip}, {radix_snip})")
+ },
+ applicability,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs b/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs
index 87181adc2..cbdf31c93 100644
--- a/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs
+++ b/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs
@@ -3,7 +3,7 @@ use clippy_utils::has_repr_attr;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Const;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -54,20 +54,18 @@ impl<'tcx> LateLintPass<'tcx> for TrailingEmptyArray {
}
fn is_struct_with_trailing_zero_sized_array(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
- if_chain! {
+ if let ItemKind::Struct(data, _) = &item.kind
// First check if last field is an array
- if let ItemKind::Struct(data, _) = &item.kind;
- if let Some(last_field) = data.fields().last();
- if let rustc_hir::TyKind::Array(_, rustc_hir::ArrayLen::Body(length)) = last_field.ty.kind;
+ && let Some(last_field) = data.fields().last()
+ && let rustc_hir::TyKind::Array(_, rustc_hir::ArrayLen::Body(length)) = last_field.ty.kind
// Then check if that array is zero-sized
- let length = Const::from_anon_const(cx.tcx, length.def_id);
- let length = length.try_eval_target_usize(cx.tcx, cx.param_env);
- if let Some(length) = length;
- then {
- length == 0
- } else {
- false
- }
+ && let length = Const::from_anon_const(cx.tcx, length.def_id)
+ && let length = length.try_eval_target_usize(cx.tcx, cx.param_env)
+ && let Some(length) = length
+ {
+ length == 0
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/trait_bounds.rs b/src/tools/clippy/clippy_lints/src/trait_bounds.rs
index f065d215e..e4054393d 100644
--- a/src/tools/clippy/clippy_lints/src/trait_bounds.rs
+++ b/src/tools/clippy/clippy_lints/src/trait_bounds.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
use clippy_utils::source::{snippet, snippet_opt, snippet_with_applicability};
use clippy_utils::{is_from_proc_macro, SpanlessEq, SpanlessHash};
use core::hash::{Hash, Hasher};
-use if_chain::if_chain;
use itertools::Itertools;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::unhash::UnhashMap;
@@ -14,7 +13,7 @@ use rustc_hir::{
TraitBoundModifier, TraitItem, TraitRef, Ty, TyKind, WherePredicate,
};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{BytePos, Span};
use std::collections::hash_map::Entry;
@@ -124,103 +123,100 @@ impl<'tcx> LateLintPass<'tcx> for TraitBounds {
let mut self_bounds_map = FxHashMap::default();
for predicate in item.generics.predicates {
- if_chain! {
- if let WherePredicate::BoundPredicate(ref bound_predicate) = predicate;
- if bound_predicate.origin != PredicateOrigin::ImplTrait;
- if !bound_predicate.span.from_expansion();
- if let TyKind::Path(QPath::Resolved(_, Path { segments, .. })) = bound_predicate.bounded_ty.kind;
- if let Some(PathSegment {
- res: Res::SelfTyParam { trait_: def_id }, ..
- }) = segments.first();
- if let Some(
- Node::Item(
- Item {
- kind: ItemKind::Trait(_, _, _, self_bounds, _),
- .. }
- )
- ) = cx.tcx.hir().get_if_local(*def_id);
- then {
- if self_bounds_map.is_empty() {
- for bound in *self_bounds {
- let Some((self_res, self_segments, _)) = get_trait_info_from_bound(bound) else { continue };
- self_bounds_map.insert(self_res, self_segments);
- }
+ if let WherePredicate::BoundPredicate(ref bound_predicate) = predicate
+ && bound_predicate.origin != PredicateOrigin::ImplTrait
+ && !bound_predicate.span.from_expansion()
+ && let TyKind::Path(QPath::Resolved(_, Path { segments, .. })) = bound_predicate.bounded_ty.kind
+ && let Some(PathSegment {
+ res: Res::SelfTyParam { trait_: def_id },
+ ..
+ }) = segments.first()
+ && let Some(Node::Item(Item {
+ kind: ItemKind::Trait(_, _, _, self_bounds, _),
+ ..
+ })) = cx.tcx.hir().get_if_local(*def_id)
+ {
+ if self_bounds_map.is_empty() {
+ for bound in *self_bounds {
+ let Some((self_res, self_segments, _)) = get_trait_info_from_bound(bound) else {
+ continue;
+ };
+ self_bounds_map.insert(self_res, self_segments);
}
+ }
- bound_predicate
- .bounds
- .iter()
- .filter_map(get_trait_info_from_bound)
- .for_each(|(trait_item_res, trait_item_segments, span)| {
- if let Some(self_segments) = self_bounds_map.get(&trait_item_res) {
- if SpanlessEq::new(cx).eq_path_segments(self_segments, trait_item_segments) {
- span_lint_and_help(
- cx,
- TRAIT_DUPLICATION_IN_BOUNDS,
- span,
- "this trait bound is already specified in trait declaration",
- None,
- "consider removing this trait bound",
- );
- }
+ bound_predicate
+ .bounds
+ .iter()
+ .filter_map(get_trait_info_from_bound)
+ .for_each(|(trait_item_res, trait_item_segments, span)| {
+ if let Some(self_segments) = self_bounds_map.get(&trait_item_res) {
+ if SpanlessEq::new(cx).eq_path_segments(self_segments, trait_item_segments) {
+ span_lint_and_help(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ span,
+ "this trait bound is already specified in trait declaration",
+ None,
+ "consider removing this trait bound",
+ );
}
- });
- }
+ }
+ });
}
}
}
fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx Ty<'tcx>) {
- if_chain! {
- if let TyKind::Ref(.., mut_ty) = &ty.kind;
- if let TyKind::TraitObject(bounds, ..) = mut_ty.ty.kind;
- if bounds.len() > 2;
- then {
-
- // Build up a hash of every trait we've seen
- // When we see a trait for the first time, add it to unique_traits
- // so we can later use it to build a string of all traits exactly once, without duplicates
+ if let TyKind::Ref(.., mut_ty) = &ty.kind
+ && let TyKind::TraitObject(bounds, ..) = mut_ty.ty.kind
+ && bounds.len() > 2
+ {
+ // Build up a hash of every trait we've seen
+ // When we see a trait for the first time, add it to unique_traits
+ // so we can later use it to build a string of all traits exactly once, without duplicates
- let mut seen_def_ids = FxHashSet::default();
- let mut unique_traits = Vec::new();
+ let mut seen_def_ids = FxHashSet::default();
+ let mut unique_traits = Vec::new();
- // Iterate the bounds and add them to our seen hash
- // If we haven't yet seen it, add it to the fixed traits
- for bound in bounds {
- let Some(def_id) = bound.trait_ref.trait_def_id() else { continue; };
+ // Iterate the bounds and add them to our seen hash
+ // If we haven't yet seen it, add it to the fixed traits
+ for bound in bounds {
+ let Some(def_id) = bound.trait_ref.trait_def_id() else {
+ continue;
+ };
- let new_trait = seen_def_ids.insert(def_id);
+ let new_trait = seen_def_ids.insert(def_id);
- if new_trait {
- unique_traits.push(bound);
- }
+ if new_trait {
+ unique_traits.push(bound);
}
+ }
- // If the number of unique traits isn't the same as the number of traits in the bounds,
- // there must be 1 or more duplicates
- if bounds.len() != unique_traits.len() {
- let mut bounds_span = bounds[0].span;
-
- for bound in bounds.iter().skip(1) {
- bounds_span = bounds_span.to(bound.span);
- }
-
- let fixed_trait_snippet = unique_traits
- .iter()
- .filter_map(|b| snippet_opt(cx, b.span))
- .collect::<Vec<_>>()
- .join(" + ");
+ // If the number of unique traits isn't the same as the number of traits in the bounds,
+ // there must be 1 or more duplicates
+ if bounds.len() != unique_traits.len() {
+ let mut bounds_span = bounds[0].span;
- span_lint_and_sugg(
- cx,
- TRAIT_DUPLICATION_IN_BOUNDS,
- bounds_span,
- "this trait bound is already specified in trait declaration",
- "try",
- fixed_trait_snippet,
- Applicability::MaybeIncorrect,
- );
+ for bound in bounds.iter().skip(1) {
+ bounds_span = bounds_span.to(bound.span);
}
+
+ let fixed_trait_snippet = unique_traits
+ .iter()
+ .filter_map(|b| snippet_opt(cx, b.span))
+ .collect::<Vec<_>>()
+ .join(" + ");
+
+ span_lint_and_sugg(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ bounds_span,
+ "this trait bound is already specified in trait declaration",
+ "try",
+ fixed_trait_snippet,
+ Applicability::MaybeIncorrect,
+ );
}
}
}
@@ -267,36 +263,38 @@ impl TraitBounds {
let mut map: UnhashMap<SpanlessTy<'_, '_>, Vec<&GenericBound<'_>>> = UnhashMap::default();
let mut applicability = Applicability::MaybeIncorrect;
for bound in gen.predicates {
- if_chain! {
- if let WherePredicate::BoundPredicate(ref p) = bound;
- if p.origin != PredicateOrigin::ImplTrait;
- if p.bounds.len() as u64 <= self.max_trait_bounds;
- if !p.span.from_expansion();
- let bounds = p.bounds.iter().filter(|b| !self.cannot_combine_maybe_bound(cx, b)).collect::<Vec<_>>();
- if !bounds.is_empty();
- if let Some(ref v) = map.insert(SpanlessTy { ty: p.bounded_ty, cx }, bounds);
- if !is_from_proc_macro(cx, p.bounded_ty);
- then {
- let trait_bounds = v
- .iter()
- .copied()
- .chain(p.bounds.iter())
- .filter_map(get_trait_info_from_bound)
- .map(|(_, _, span)| snippet_with_applicability(cx, span, "..", &mut applicability))
- .join(" + ");
- let hint_string = format!(
- "consider combining the bounds: `{}: {trait_bounds}`",
- snippet(cx, p.bounded_ty.span, "_"),
- );
- span_lint_and_help(
- cx,
- TYPE_REPETITION_IN_BOUNDS,
- p.span,
- "this type has already been used as a bound predicate",
- None,
- &hint_string,
- );
- }
+ if let WherePredicate::BoundPredicate(ref p) = bound
+ && p.origin != PredicateOrigin::ImplTrait
+ && p.bounds.len() as u64 <= self.max_trait_bounds
+ && !p.span.from_expansion()
+ && let bounds = p
+ .bounds
+ .iter()
+ .filter(|b| !self.cannot_combine_maybe_bound(cx, b))
+ .collect::<Vec<_>>()
+ && !bounds.is_empty()
+ && let Some(ref v) = map.insert(SpanlessTy { ty: p.bounded_ty, cx }, bounds)
+ && !is_from_proc_macro(cx, p.bounded_ty)
+ {
+ let trait_bounds = v
+ .iter()
+ .copied()
+ .chain(p.bounds.iter())
+ .filter_map(get_trait_info_from_bound)
+ .map(|(_, _, span)| snippet_with_applicability(cx, span, "..", &mut applicability))
+ .join(" + ");
+ let hint_string = format!(
+ "consider combining the bounds: `{}: {trait_bounds}`",
+ snippet(cx, p.bounded_ty.span, "_"),
+ );
+ span_lint_and_help(
+ cx,
+ TYPE_REPETITION_IN_BOUNDS,
+ p.span,
+ "this type has already been used as a bound predicate",
+ None,
+ &hint_string,
+ );
}
}
}
@@ -318,15 +316,19 @@ fn check_trait_bound_duplication(cx: &LateContext<'_>, gen: &'_ Generics<'_>) {
.predicates
.iter()
.filter_map(|pred| {
- if_chain! {
- if pred.in_where_clause();
- if let WherePredicate::BoundPredicate(bound_predicate) = pred;
- if let TyKind::Path(QPath::Resolved(_, path)) = bound_predicate.bounded_ty.kind;
- then {
- return Some(
- rollup_traits(cx, bound_predicate.bounds, "these where clauses contain repeated elements")
- .into_iter().map(|(trait_ref, _)| (path.res, trait_ref)))
- }
+ if pred.in_where_clause()
+ && let WherePredicate::BoundPredicate(bound_predicate) = pred
+ && let TyKind::Path(QPath::Resolved(_, path)) = bound_predicate.bounded_ty.kind
+ {
+ return Some(
+ rollup_traits(
+ cx,
+ bound_predicate.bounds,
+ "these where clauses contain repeated elements",
+ )
+ .into_iter()
+ .map(|(trait_ref, _)| (path.res, trait_ref)),
+ );
}
None
})
@@ -340,25 +342,23 @@ fn check_trait_bound_duplication(cx: &LateContext<'_>, gen: &'_ Generics<'_>) {
// compare trait bounds keyed by generic name and comparable trait to collected where
// predicates eg. (T, Clone)
for predicate in gen.predicates.iter().filter(|pred| !pred.in_where_clause()) {
- if_chain! {
- if let WherePredicate::BoundPredicate(bound_predicate) = predicate;
- if bound_predicate.origin != PredicateOrigin::ImplTrait;
- if !bound_predicate.span.from_expansion();
- if let TyKind::Path(QPath::Resolved(_, path)) = bound_predicate.bounded_ty.kind;
- then {
- let traits = rollup_traits(cx, bound_predicate.bounds, "these bounds contain repeated elements");
- for (trait_ref, span) in traits {
- let key = (path.res, trait_ref);
- if where_predicates.contains(&key) {
- span_lint_and_help(
- cx,
- TRAIT_DUPLICATION_IN_BOUNDS,
- span,
- "this trait bound is already specified in the where clause",
- None,
- "consider removing this trait bound",
- );
- }
+ if let WherePredicate::BoundPredicate(bound_predicate) = predicate
+ && bound_predicate.origin != PredicateOrigin::ImplTrait
+ && !bound_predicate.span.from_expansion()
+ && let TyKind::Path(QPath::Resolved(_, path)) = bound_predicate.bounded_ty.kind
+ {
+ let traits = rollup_traits(cx, bound_predicate.bounds, "these bounds contain repeated elements");
+ for (trait_ref, span) in traits {
+ let key = (path.res, trait_ref);
+ if where_predicates.contains(&key) {
+ span_lint_and_help(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ span,
+ "this trait bound is already specified in the where clause",
+ None,
+ "consider removing this trait bound",
+ );
}
}
}
@@ -401,10 +401,10 @@ fn into_comparable_trait_ref(trait_ref: &TraitRef<'_>) -> ComparableTraitRef {
.filter_map(|segment| {
// get trait bound type arguments
Some(segment.args?.args.iter().filter_map(|arg| {
- if_chain! {
- if let GenericArg::Type(ty) = arg;
- if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind;
- then { return Some(path.res) }
+ if let GenericArg::Type(ty) = arg
+ && let TyKind::Path(QPath::Resolved(_, path)) = ty.kind
+ {
+ return Some(path.res);
}
None
}))
@@ -444,27 +444,24 @@ fn rollup_traits(cx: &LateContext<'_>, bounds: &[GenericBound<'_>], msg: &str) -
comparable_bounds[i] = (k, v);
}
- if_chain! {
- if repeated_res;
- if let [first_trait, .., last_trait] = bounds;
- then {
- let all_trait_span = first_trait.span().to(last_trait.span());
-
- let traits = comparable_bounds.iter()
- .filter_map(|&(_, span)| snippet_opt(cx, span))
- .collect::<Vec<_>>();
- let traits = traits.join(" + ");
-
- span_lint_and_sugg(
- cx,
- TRAIT_DUPLICATION_IN_BOUNDS,
- all_trait_span,
- msg,
- "try",
- traits,
- Applicability::MachineApplicable
- );
- }
+ if repeated_res && let [first_trait, .., last_trait] = bounds {
+ let all_trait_span = first_trait.span().to(last_trait.span());
+
+ let traits = comparable_bounds
+ .iter()
+ .filter_map(|&(_, span)| snippet_opt(cx, span))
+ .collect::<Vec<_>>();
+ let traits = traits.join(" + ");
+
+ span_lint_and_sugg(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ all_trait_span,
+ msg,
+ "try",
+ traits,
+ Applicability::MachineApplicable,
+ );
}
comparable_bounds
diff --git a/src/tools/clippy/clippy_lints/src/transmute/mod.rs b/src/tools/clippy/clippy_lints/src/transmute/mod.rs
index 6eec40cb5..95a92afea 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/mod.rs
@@ -19,10 +19,9 @@ mod wrong_transmute;
use clippy_config::msrvs::Msrv;
use clippy_utils::in_constant;
-use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -494,51 +493,47 @@ impl Transmute {
}
impl<'tcx> LateLintPass<'tcx> for Transmute {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(path_expr, [arg]) = e.kind;
- if let ExprKind::Path(QPath::Resolved(None, path)) = path_expr.kind;
- if let Some(def_id) = path.res.opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::transmute, def_id);
- then {
- // Avoid suggesting non-const operations in const contexts:
- // - from/to bits (https://github.com/rust-lang/rust/issues/73736)
- // - dereferencing raw pointers (https://github.com/rust-lang/rust/issues/51911)
- // - char conversions (https://github.com/rust-lang/rust/issues/89259)
- let const_context = in_constant(cx, e.hir_id);
+ if let ExprKind::Call(path_expr, [arg]) = e.kind
+ && let ExprKind::Path(QPath::Resolved(None, path)) = path_expr.kind
+ && let Some(def_id) = path.res.opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::transmute, def_id)
+ {
+ // Avoid suggesting non-const operations in const contexts:
+ // - from/to bits (https://github.com/rust-lang/rust/issues/73736)
+ // - dereferencing raw pointers (https://github.com/rust-lang/rust/issues/51911)
+ // - char conversions (https://github.com/rust-lang/rust/issues/89259)
+ let const_context = in_constant(cx, e.hir_id);
- let (from_ty, from_ty_adjusted) = match cx.typeck_results().expr_adjustments(arg) {
- [] => (cx.typeck_results().expr_ty(arg), false),
- [.., a] => (a.target, true),
- };
- // Adjustments for `to_ty` happen after the call to `transmute`, so don't use them.
- let to_ty = cx.typeck_results().expr_ty(e);
+ let (from_ty, from_ty_adjusted) = match cx.typeck_results().expr_adjustments(arg) {
+ [] => (cx.typeck_results().expr_ty(arg), false),
+ [.., a] => (a.target, true),
+ };
+ // Adjustments for `to_ty` happen after the call to `transmute`, so don't use them.
+ let to_ty = cx.typeck_results().expr_ty(e);
- // If useless_transmute is triggered, the other lints can be skipped.
- if useless_transmute::check(cx, e, from_ty, to_ty, arg) {
- return;
- }
+ // If useless_transmute is triggered, the other lints can be skipped.
+ if useless_transmute::check(cx, e, from_ty, to_ty, arg) {
+ return;
+ }
- let linted = wrong_transmute::check(cx, e, from_ty, to_ty)
- | crosspointer_transmute::check(cx, e, from_ty, to_ty)
- | transmuting_null::check(cx, e, arg, to_ty)
- | transmute_null_to_fn::check(cx, e, arg, to_ty)
- | transmute_ptr_to_ref::check(cx, e, from_ty, to_ty, arg, path, &self.msrv)
- | transmute_int_to_char::check(cx, e, from_ty, to_ty, arg, const_context)
- | transmute_ref_to_ref::check(cx, e, from_ty, to_ty, arg, const_context)
- | transmute_ptr_to_ptr::check(cx, e, from_ty, to_ty, arg)
- | transmute_int_to_bool::check(cx, e, from_ty, to_ty, arg)
- | transmute_int_to_float::check(cx, e, from_ty, to_ty, arg, const_context)
- | transmute_int_to_non_zero::check(cx, e, from_ty, to_ty, arg)
- | transmute_float_to_int::check(cx, e, from_ty, to_ty, arg, const_context)
- | transmute_num_to_bytes::check(cx, e, from_ty, to_ty, arg, const_context)
- | (
- unsound_collection_transmute::check(cx, e, from_ty, to_ty)
- || transmute_undefined_repr::check(cx, e, from_ty, to_ty)
- );
+ let linted = wrong_transmute::check(cx, e, from_ty, to_ty)
+ | crosspointer_transmute::check(cx, e, from_ty, to_ty)
+ | transmuting_null::check(cx, e, arg, to_ty)
+ | transmute_null_to_fn::check(cx, e, arg, to_ty)
+ | transmute_ptr_to_ref::check(cx, e, from_ty, to_ty, arg, path, &self.msrv)
+ | transmute_int_to_char::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_ref_to_ref::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_ptr_to_ptr::check(cx, e, from_ty, to_ty, arg)
+ | transmute_int_to_bool::check(cx, e, from_ty, to_ty, arg)
+ | transmute_int_to_float::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_int_to_non_zero::check(cx, e, from_ty, to_ty, arg)
+ | transmute_float_to_int::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_num_to_bytes::check(cx, e, from_ty, to_ty, arg, const_context)
+ | (unsound_collection_transmute::check(cx, e, from_ty, to_ty)
+ || transmute_undefined_repr::check(cx, e, from_ty, to_ty));
- if !linted {
- transmutes_expressible_as_ptr_casts::check(cx, e, from_ty, from_ty_adjusted, to_ty, arg);
- }
+ if !linted {
+ transmutes_expressible_as_ptr_casts::check(cx, e, from_ty, from_ty_adjusted, to_ty, arg);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs
index 5ecba512b..aef520923 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs
@@ -1,7 +1,6 @@
use super::TRANSMUTE_FLOAT_TO_INT;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::sugg;
-use if_chain::if_chain;
use rustc_ast as ast;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, UnOp};
@@ -32,17 +31,15 @@ pub(super) fn check<'tcx>(
arg = inner_expr;
}
- if_chain! {
+ if let ExprKind::Lit(lit) = &arg.kind
// if the expression is a float literal and it is unsuffixed then
// add a suffix so the suggestion is valid and unambiguous
- if let ExprKind::Lit(lit) = &arg.kind;
- if let ast::LitKind::Float(_, ast::LitFloatType::Unsuffixed) = lit.node;
- then {
- let op = format!("{sugg}{}", float_ty.name_str()).into();
- match sugg {
- sugg::Sugg::MaybeParen(_) => sugg = sugg::Sugg::MaybeParen(op),
- _ => sugg = sugg::Sugg::NonParen(op)
- }
+ && let ast::LitKind::Float(_, ast::LitFloatType::Unsuffixed) = lit.node
+ {
+ let op = format!("{sugg}{}", float_ty.name_str()).into();
+ match sugg {
+ sugg::Sugg::MaybeParen(_) => sugg = sugg::Sugg::MaybeParen(op),
+ _ => sugg = sugg::Sugg::NonParen(op),
}
}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs
index ea9ad9961..98e9ea2d7 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs
@@ -2,7 +2,6 @@ use super::{TRANSMUTE_BYTES_TO_STR, TRANSMUTE_PTR_TO_PTR};
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::sugg;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, Mutability};
use rustc_lint::LateContext;
@@ -21,68 +20,59 @@ pub(super) fn check<'tcx>(
let mut triggered = false;
if let (ty::Ref(_, ty_from, from_mutbl), ty::Ref(_, ty_to, to_mutbl)) = (&from_ty.kind(), &to_ty.kind()) {
- if_chain! {
- if let ty::Slice(slice_ty) = *ty_from.kind();
- if ty_to.is_str();
- if let ty::Uint(ty::UintTy::U8) = slice_ty.kind();
- if from_mutbl == to_mutbl;
- then {
- let postfix = if *from_mutbl == Mutability::Mut {
- "_mut"
- } else {
- ""
- };
+ if let ty::Slice(slice_ty) = *ty_from.kind()
+ && ty_to.is_str()
+ && let ty::Uint(ty::UintTy::U8) = slice_ty.kind()
+ && from_mutbl == to_mutbl
+ {
+ let postfix = if *from_mutbl == Mutability::Mut { "_mut" } else { "" };
- let snippet = snippet(cx, arg.span, "..");
+ let snippet = snippet(cx, arg.span, "..");
- span_lint_and_sugg(
- cx,
- TRANSMUTE_BYTES_TO_STR,
- e.span,
- &format!("transmute from a `{from_ty}` to a `{to_ty}`"),
- "consider using",
- if const_context {
- format!("std::str::from_utf8_unchecked{postfix}({snippet})")
- } else {
- format!("std::str::from_utf8{postfix}({snippet}).unwrap()")
- },
- Applicability::MaybeIncorrect,
- );
- triggered = true;
- } else {
- if (cx.tcx.erase_regions(from_ty) != cx.tcx.erase_regions(to_ty))
- && !const_context {
- span_lint_and_then(
- cx,
- TRANSMUTE_PTR_TO_PTR,
- e.span,
- "transmute from a reference to a reference",
- |diag| if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
- let ty_from_and_mut = ty::TypeAndMut {
- ty: *ty_from,
- mutbl: *from_mutbl
- };
- let ty_to_and_mut = ty::TypeAndMut { ty: *ty_to, mutbl: *to_mutbl };
- let sugg_paren = arg
- .as_ty(Ty::new_ptr(cx.tcx,ty_from_and_mut))
- .as_ty(Ty::new_ptr(cx.tcx,ty_to_and_mut));
- let sugg = if *to_mutbl == Mutability::Mut {
- sugg_paren.mut_addr_deref()
- } else {
- sugg_paren.addr_deref()
- };
- diag.span_suggestion(
- e.span,
- "try",
- sugg,
- Applicability::Unspecified,
- );
- },
- );
+ span_lint_and_sugg(
+ cx,
+ TRANSMUTE_BYTES_TO_STR,
+ e.span,
+ &format!("transmute from a `{from_ty}` to a `{to_ty}`"),
+ "consider using",
+ if const_context {
+ format!("std::str::from_utf8_unchecked{postfix}({snippet})")
+ } else {
+ format!("std::str::from_utf8{postfix}({snippet}).unwrap()")
+ },
+ Applicability::MaybeIncorrect,
+ );
+ triggered = true;
+ } else if (cx.tcx.erase_regions(from_ty) != cx.tcx.erase_regions(to_ty)) && !const_context {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_PTR_TO_PTR,
+ e.span,
+ "transmute from a reference to a reference",
+ |diag| {
+ if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
+ let ty_from_and_mut = ty::TypeAndMut {
+ ty: *ty_from,
+ mutbl: *from_mutbl,
+ };
+ let ty_to_and_mut = ty::TypeAndMut {
+ ty: *ty_to,
+ mutbl: *to_mutbl,
+ };
+ let sugg_paren = arg
+ .as_ty(Ty::new_ptr(cx.tcx, ty_from_and_mut))
+ .as_ty(Ty::new_ptr(cx.tcx, ty_to_and_mut));
+ let sugg = if *to_mutbl == Mutability::Mut {
+ sugg_paren.mut_addr_deref()
+ } else {
+ sugg_paren.addr_deref()
+ };
+ diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified);
+ }
+ },
+ );
- triggered = true;
- }
- }
+ triggered = true;
}
}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
index 7c2223ca3..a65bc0ce4 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
@@ -299,14 +299,12 @@ fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx>
}
fn is_zero_sized_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
- if_chain! {
- if let Ok(ty) = cx.tcx.try_normalize_erasing_regions(cx.param_env, ty);
- if let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(ty));
- then {
- layout.layout.size().bytes() == 0
- } else {
- false
- }
+ if let Ok(ty) = cx.tcx.try_normalize_erasing_regions(cx.param_env, ty)
+ && let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(ty))
+ {
+ layout.layout.size().bytes() == 0
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs b/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs
index 642e39e82..e1cd82e18 100644
--- a/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs
+++ b/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs
@@ -8,7 +8,7 @@ use rustc_hir::{Expr, ExprKind, Node, PatKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use std::iter::once;
use std::ops::ControlFlow;
diff --git a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
index 306ca5724..801e88626 100644
--- a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
+++ b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{
self as hir, GenericArg, GenericBounds, GenericParamKind, HirId, Lifetime, MutTy, Mutability, Node, QPath, TyKind,
@@ -15,66 +14,64 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, lt: &Lifetime, m
TyKind::Path(ref qpath) => {
let hir_id = mut_ty.ty.hir_id;
let def = cx.qpath_res(qpath, hir_id);
- if_chain! {
- if let Some(def_id) = def.opt_def_id();
- if Some(def_id) == cx.tcx.lang_items().owned_box();
- if let QPath::Resolved(None, path) = *qpath;
- if let [ref bx] = *path.segments;
- if let Some(params) = bx.args;
- if params.parenthesized == hir::GenericArgsParentheses::No;
- if let Some(inner) = params.args.iter().find_map(|arg| match arg {
+ if let Some(def_id) = def.opt_def_id()
+ && Some(def_id) == cx.tcx.lang_items().owned_box()
+ && let QPath::Resolved(None, path) = *qpath
+ && let [ref bx] = *path.segments
+ && let Some(params) = bx.args
+ && params.parenthesized == hir::GenericArgsParentheses::No
+ && let Some(inner) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
- });
- then {
- if is_any_trait(cx, inner) {
- // Ignore `Box<Any>` types; see issue #1884 for details.
- return false;
- }
-
- let ltopt = if lt.is_anonymous() {
- String::new()
- } else {
- format!("{} ", lt.ident.as_str())
- };
+ })
+ {
+ if is_any_trait(cx, inner) {
+ // Ignore `Box<Any>` types; see issue #1884 for details.
+ return false;
+ }
- if mut_ty.mutbl == Mutability::Mut {
- // Ignore `&mut Box<T>` types; see issue #2907 for
- // details.
- return false;
- }
+ let ltopt = if lt.is_anonymous() {
+ String::new()
+ } else {
+ format!("{} ", lt.ident.as_str())
+ };
- // When trait objects or opaque types have lifetime or auto-trait bounds,
- // we need to add parentheses to avoid a syntax error due to its ambiguity.
- // Originally reported as the issue #3128.
- let inner_snippet = snippet(cx, inner.span, "..");
- let suggestion = match &inner.kind {
- TyKind::TraitObject(bounds, lt_bound, _) if bounds.len() > 1 || !lt_bound.is_elided() => {
- format!("&{ltopt}({})", &inner_snippet)
- },
- TyKind::Path(qpath)
- if get_bounds_if_impl_trait(cx, qpath, inner.hir_id)
- .map_or(false, |bounds| bounds.len() > 1) =>
- {
- format!("&{ltopt}({})", &inner_snippet)
- },
- _ => format!("&{ltopt}{}", &inner_snippet),
- };
- span_lint_and_sugg(
- cx,
- BORROWED_BOX,
- hir_ty.span,
- "you seem to be trying to use `&Box<T>`. Consider using just `&T`",
- "try",
- suggestion,
- // To make this `MachineApplicable`, at least one needs to check if it isn't a trait item
- // because the trait impls of it will break otherwise;
- // and there may be other cases that result in invalid code.
- // For example, type coercion doesn't work nicely.
- Applicability::Unspecified,
- );
- return true;
+ if mut_ty.mutbl == Mutability::Mut {
+ // Ignore `&mut Box<T>` types; see issue #2907 for
+ // details.
+ return false;
}
+
+ // When trait objects or opaque types have lifetime or auto-trait bounds,
+ // we need to add parentheses to avoid a syntax error due to its ambiguity.
+ // Originally reported as the issue #3128.
+ let inner_snippet = snippet(cx, inner.span, "..");
+ let suggestion = match &inner.kind {
+ TyKind::TraitObject(bounds, lt_bound, _) if bounds.len() > 1 || !lt_bound.is_elided() => {
+ format!("&{ltopt}({})", &inner_snippet)
+ },
+ TyKind::Path(qpath)
+ if get_bounds_if_impl_trait(cx, qpath, inner.hir_id)
+ .map_or(false, |bounds| bounds.len() > 1) =>
+ {
+ format!("&{ltopt}({})", &inner_snippet)
+ },
+ _ => format!("&{ltopt}{}", &inner_snippet),
+ };
+ span_lint_and_sugg(
+ cx,
+ BORROWED_BOX,
+ hir_ty.span,
+ "you seem to be trying to use `&Box<T>`. Consider using just `&T`",
+ "try",
+ suggestion,
+ // To make this `MachineApplicable`, at least one needs to check if it isn't a trait item
+ // because the trait impls of it will break otherwise;
+ // and there may be other cases that result in invalid code.
+ // For example, type coercion doesn't work nicely.
+ Applicability::Unspecified,
+ );
+ return true;
};
false
},
@@ -84,33 +81,29 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, lt: &Lifetime, m
// Returns true if given type is `Any` trait.
fn is_any_trait(cx: &LateContext<'_>, t: &hir::Ty<'_>) -> bool {
- if_chain! {
- if let TyKind::TraitObject(traits, ..) = t.kind;
- if !traits.is_empty();
- if let Some(trait_did) = traits[0].trait_ref.trait_def_id();
+ if let TyKind::TraitObject(traits, ..) = t.kind
+ && !traits.is_empty()
+ && let Some(trait_did) = traits[0].trait_ref.trait_def_id()
// Only Send/Sync can be used as additional traits, so it is enough to
// check only the first trait.
- if cx.tcx.is_diagnostic_item(sym::Any, trait_did);
- then {
- return true;
- }
+ && cx.tcx.is_diagnostic_item(sym::Any, trait_did)
+ {
+ return true;
}
false
}
fn get_bounds_if_impl_trait<'tcx>(cx: &LateContext<'tcx>, qpath: &QPath<'_>, id: HirId) -> Option<GenericBounds<'tcx>> {
- if_chain! {
- if let Some(did) = cx.qpath_res(qpath, id).opt_def_id();
- if let Some(Node::GenericParam(generic_param)) = cx.tcx.hir().get_if_local(did);
- if let GenericParamKind::Type { synthetic, .. } = generic_param.kind;
- if synthetic;
- if let Some(generics) = cx.tcx.hir().get_generics(id.owner.def_id);
- if let Some(pred) = generics.bounds_for_param(did.expect_local()).next();
- then {
- Some(pred.bounds)
- } else {
- None
- }
+ if let Some(did) = cx.qpath_res(qpath, id).opt_def_id()
+ && let Some(Node::GenericParam(generic_param)) = cx.tcx.hir().get_if_local(did)
+ && let GenericParamKind::Type { synthetic, .. } = generic_param.kind
+ && synthetic
+ && let Some(generics) = cx.tcx.hir().get_generics(id.owner.def_id)
+ && let Some(pred) = generics.bounds_for_param(did.expect_local()).next()
+ {
+ Some(pred.bounds)
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/types/box_collection.rs b/src/tools/clippy/clippy_lints/src/types/box_collection.rs
index 4a5a94f26..fc3420af0 100644
--- a/src/tools/clippy/clippy_lints/src/types/box_collection.rs
+++ b/src/tools/clippy/clippy_lints/src/types/box_collection.rs
@@ -8,30 +8,26 @@ use rustc_span::{sym, Symbol};
use super::BOX_COLLECTION;
pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
- if_chain! {
- if Some(def_id) == cx.tcx.lang_items().owned_box();
- if let Some(item_type) = get_std_collection(cx, qpath);
- then {
- let generic = match item_type {
- sym::String => "",
- _ => "<..>",
- };
+ if Some(def_id) == cx.tcx.lang_items().owned_box()
+ && let Some(item_type) = get_std_collection(cx, qpath)
+ {
+ let generic = match item_type {
+ sym::String => "",
+ _ => "<..>",
+ };
- let box_content = format!("{item_type}{generic}");
- span_lint_and_help(
- cx,
- BOX_COLLECTION,
- hir_ty.span,
- &format!(
- "you seem to be trying to use `Box<{box_content}>`. Consider using just `{box_content}`"),
- None,
- &format!(
- "`{box_content}` is already on the heap, `Box<{box_content}>` makes an extra allocation")
- );
- true
- } else {
- false
- }
+ let box_content = format!("{item_type}{generic}");
+ span_lint_and_help(
+ cx,
+ BOX_COLLECTION,
+ hir_ty.span,
+ &format!("you seem to be trying to use `Box<{box_content}>`. Consider using just `{box_content}`"),
+ None,
+ &format!("`{box_content}` is already on the heap, `Box<{box_content}>` makes an extra allocation"),
+ );
+ true
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/types/mod.rs b/src/tools/clippy/clippy_lints/src/types/mod.rs
index 6a6160c49..81efec653 100644
--- a/src/tools/clippy/clippy_lints/src/types/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/types/mod.rs
@@ -16,7 +16,7 @@ use rustc_hir::{
TraitItemKind, TyKind,
};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
@@ -321,10 +321,10 @@ impl<'tcx> LateLintPass<'tcx> for Types {
_: Span,
def_id: LocalDefId,
) {
- let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx.tcx.hir().find_by_def_id(
+ let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx.tcx.opt_hir_node_by_def_id(
cx.tcx
.hir()
- .get_parent_item(cx.tcx.hir().local_def_id_to_hir_id(def_id))
+ .get_parent_item(cx.tcx.local_def_id_to_hir_id(def_id))
.def_id,
) {
matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
@@ -368,8 +368,7 @@ impl<'tcx> LateLintPass<'tcx> for Types {
ImplItemKind::Const(ty, _) => {
let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx
.tcx
- .hir()
- .find_by_def_id(cx.tcx.hir().get_parent_item(item.hir_id()).def_id)
+ .opt_hir_node_by_def_id(cx.tcx.hir().get_parent_item(item.hir_id()).def_id)
{
matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
} else {
@@ -490,7 +489,7 @@ impl Types {
// All lints that are being checked in this block are guarded by
// the `avoid_breaking_exported_api` configuration. When adding a
// new lint, please also add the name to the configuration documentation
- // in `clippy_lints::utils::conf.rs`
+ // in `clippy_config::conf`
let mut triggered = false;
triggered |= box_collection::check(cx, hir_ty, qpath, def_id);
diff --git a/src/tools/clippy/clippy_lints/src/types/option_option.rs b/src/tools/clippy/clippy_lints/src/types/option_option.rs
index 60622903a..d12d14f2b 100644
--- a/src/tools/clippy/clippy_lints/src/types/option_option.rs
+++ b/src/tools/clippy/clippy_lints/src/types/option_option.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::{path_def_id, qpath_generic_tys};
-use if_chain::if_chain;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, QPath};
use rustc_lint::LateContext;
@@ -9,21 +8,19 @@ use rustc_span::symbol::sym;
use super::OPTION_OPTION;
pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
- if_chain! {
- if cx.tcx.is_diagnostic_item(sym::Option, def_id);
- if let Some(arg) = qpath_generic_tys(qpath).next();
- if path_def_id(cx, arg) == Some(def_id);
- then {
- span_lint(
- cx,
- OPTION_OPTION,
- hir_ty.span,
- "consider using `Option<T>` instead of `Option<Option<T>>` or a custom \
- enum if you need to distinguish all 3 cases",
- );
- true
- } else {
- false
- }
+ if cx.tcx.is_diagnostic_item(sym::Option, def_id)
+ && let Some(arg) = qpath_generic_tys(qpath).next()
+ && path_def_id(cx, arg) == Some(def_id)
+ {
+ span_lint(
+ cx,
+ OPTION_OPTION,
+ hir_ty.span,
+ "consider using `Option<T>` instead of `Option<Option<T>>` or a custom \
+ enum if you need to distinguish all 3 cases",
+ );
+ true
+ } else {
+ false
}
}
diff --git a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
index a616c3e4e..afc319217 100644
--- a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
+++ b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::{path_def_id, qpath_generic_tys};
-use if_chain::if_chain;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, QPath};
use rustc_lint::LateContext;
@@ -9,22 +8,20 @@ use rustc_span::symbol::sym;
use super::RC_MUTEX;
pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
- if_chain! {
- if cx.tcx.is_diagnostic_item(sym::Rc, def_id) ;
- if let Some(arg) = qpath_generic_tys(qpath).next();
- if let Some(id) = path_def_id(cx, arg);
- if cx.tcx.is_diagnostic_item(sym::Mutex, id);
- then {
- span_lint_and_help(
- cx,
- RC_MUTEX,
- hir_ty.span,
- "usage of `Rc<Mutex<_>>`",
- None,
- "consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead",
- );
- return true;
- }
+ if cx.tcx.is_diagnostic_item(sym::Rc, def_id)
+ && let Some(arg) = qpath_generic_tys(qpath).next()
+ && let Some(id) = path_def_id(cx, arg)
+ && cx.tcx.is_diagnostic_item(sym::Mutex, id)
+ {
+ span_lint_and_help(
+ cx,
+ RC_MUTEX,
+ hir_ty.span,
+ "usage of `Rc<Mutex<_>>`",
+ None,
+ "consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead",
+ );
+ return true;
}
false
diff --git a/src/tools/clippy/clippy_lints/src/types/utils.rs b/src/tools/clippy/clippy_lints/src/types/utils.rs
index 39469841b..0bca56b8d 100644
--- a/src/tools/clippy/clippy_lints/src/types/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/types/utils.rs
@@ -1,22 +1,19 @@
use clippy_utils::last_path_segment;
-use if_chain::if_chain;
use rustc_hir::{GenericArg, GenericArgsParentheses, QPath, TyKind};
use rustc_lint::LateContext;
use rustc_span::Span;
pub(super) fn match_borrows_parameter(_cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<Span> {
let last = last_path_segment(qpath);
- if_chain! {
- if let Some(params) = last.args;
- if params.parenthesized == GenericArgsParentheses::No;
- if let Some(ty) = params.args.iter().find_map(|arg| match arg {
+ if let Some(params) = last.args
+ && params.parenthesized == GenericArgsParentheses::No
+ && let Some(ty) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
- });
- if let TyKind::Ref(..) = ty.kind;
- then {
- return Some(ty.span);
- }
+ })
+ && let TyKind::Ref(..) = ty.kind
+ {
+ return Some(ty.span);
}
None
}
diff --git a/src/tools/clippy/clippy_lints/src/types/vec_box.rs b/src/tools/clippy/clippy_lints/src/types/vec_box.rs
index decc183ad..9d5066199 100644
--- a/src/tools/clippy/clippy_lints/src/types/vec_box.rs
+++ b/src/tools/clippy/clippy_lints/src/types/vec_box.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::last_path_segment;
+use clippy_utils::paths::ALLOCATOR_GLOBAL;
use clippy_utils::source::snippet;
-use if_chain::if_chain;
+use clippy_utils::{last_path_segment, match_def_path};
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, GenericArg, QPath, TyKind};
@@ -21,43 +21,57 @@ pub(super) fn check(
box_size_threshold: u64,
) -> bool {
if cx.tcx.is_diagnostic_item(sym::Vec, def_id) {
- if_chain! {
+ if let Some(last) = last_path_segment(qpath).args
// Get the _ part of Vec<_>
- if let Some(last) = last_path_segment(qpath).args;
- if let Some(ty) = last.args.iter().find_map(|arg| match arg {
- GenericArg::Type(ty) => Some(ty),
- _ => None,
- });
+ && let Some(GenericArg::Type(ty)) = last.args.first()
+ // extract allocator from the Vec for later
+ && let vec_alloc_ty = last.args.get(1)
// ty is now _ at this point
- if let TyKind::Path(ref ty_qpath) = ty.kind;
- let res = cx.qpath_res(ty_qpath, ty.hir_id);
- if let Some(def_id) = res.opt_def_id();
- if Some(def_id) == cx.tcx.lang_items().owned_box();
+ && let TyKind::Path(ref ty_qpath) = ty.kind
+ && let res = cx.qpath_res(ty_qpath, ty.hir_id)
+ && let Some(def_id) = res.opt_def_id()
+ && Some(def_id) == cx.tcx.lang_items().owned_box()
// At this point, we know ty is Box<T>, now get T
- if let Some(last) = last_path_segment(ty_qpath).args;
- if let Some(boxed_ty) = last.args.iter().find_map(|arg| match arg {
- GenericArg::Type(ty) => Some(ty),
- _ => None,
- });
- let ty_ty = hir_ty_to_ty(cx.tcx, boxed_ty);
- if !ty_ty.has_escaping_bound_vars();
- if ty_ty.is_sized(cx.tcx, cx.param_env);
- if let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes());
- if ty_ty_size < box_size_threshold;
- then {
- span_lint_and_sugg(
- cx,
- VEC_BOX,
- hir_ty.span,
- "`Vec<T>` is already on the heap, the boxing is unnecessary",
- "try",
- format!("Vec<{}>", snippet(cx, boxed_ty.span, "..")),
- Applicability::MachineApplicable,
- );
- true
- } else {
- false
+ && let Some(last) = last_path_segment(ty_qpath).args
+ && let Some(GenericArg::Type(boxed_ty)) = last.args.first()
+ // extract allocator from the Box for later
+ && let boxed_alloc_ty = last.args.get(1)
+ && let ty_ty = hir_ty_to_ty(cx.tcx, boxed_ty)
+ && !ty_ty.has_escaping_bound_vars()
+ && ty_ty.is_sized(cx.tcx, cx.param_env)
+ && let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes())
+ && ty_ty_size < box_size_threshold
+ // https://github.com/rust-lang/rust-clippy/issues/7114
+ && match (vec_alloc_ty, boxed_alloc_ty) {
+ (None, None) => true,
+ // this is in the event that we have something like
+ // Vec<_, Global>, in which case is equivalent to
+ // Vec<_>
+ (None, Some(GenericArg::Type(inner))) | (Some(GenericArg::Type(inner)), None) => {
+ if let TyKind::Path(path) = inner.kind
+ && let Some(did) = cx.qpath_res(&path, inner.hir_id).opt_def_id() {
+ match_def_path(cx, did, &ALLOCATOR_GLOBAL)
+ } else {
+ false
+ }
+ },
+ (Some(GenericArg::Type(l)), Some(GenericArg::Type(r))) =>
+ hir_ty_to_ty(cx.tcx, l) == hir_ty_to_ty(cx.tcx, r),
+ _ => false
}
+ {
+ span_lint_and_sugg(
+ cx,
+ VEC_BOX,
+ hir_ty.span,
+ "`Vec<T>` is already on the heap, the boxing is unnecessary",
+ "try",
+ format!("Vec<{}>", snippet(cx, boxed_ty.span, "..")),
+ Applicability::Unspecified,
+ );
+ true
+ } else {
+ false
}
} else {
false
diff --git a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
index 32aebdd8c..7a6549a7c 100644
--- a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
+++ b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
@@ -11,7 +11,7 @@ use rustc_hir::{Block, BlockCheckMode, ItemKind, Node, UnsafeSource};
use rustc_lexer::{tokenize, TokenKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{BytePos, Pos, RelativeBytePos, Span, SyntaxContext};
declare_clippy_lint! {
@@ -349,7 +349,7 @@ fn block_parents_have_safety_comment(
span,
owner_id,
..
- })) => (*span, cx.tcx.hir().local_def_id_to_hir_id(owner_id.def_id)),
+ })) => (*span, cx.tcx.local_def_id_to_hir_id(owner_id.def_id)),
_ => {
if is_branchy(expr) {
return false;
@@ -370,7 +370,7 @@ fn block_parents_have_safety_comment(
span,
owner_id,
..
- }) => (*span, cx.tcx.hir().local_def_id_to_hir_id(owner_id.def_id)),
+ }) => (*span, cx.tcx.local_def_id_to_hir_id(owner_id.def_id)),
_ => return false,
};
// if unsafe block is part of a let/const/static statement,
diff --git a/src/tools/clippy/clippy_lints/src/unicode.rs b/src/tools/clippy/clippy_lints/src/unicode.rs
index b824deac2..3d319b9fe 100644
--- a/src/tools/clippy/clippy_lints/src/unicode.rs
+++ b/src/tools/clippy/clippy_lints/src/unicode.rs
@@ -6,7 +6,7 @@ use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, HirId};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
use unicode_normalization::UnicodeNormalization;
diff --git a/src/tools/clippy/clippy_lints/src/uninhabited_references.rs b/src/tools/clippy/clippy_lints/src/uninhabited_references.rs
new file mode 100644
index 000000000..903593ecf
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/uninhabited_references.rs
@@ -0,0 +1,84 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, Expr, ExprKind, FnDecl, FnRetTy, TyKind, UnOp};
+use rustc_hir_analysis::hir_ty_to_ty;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::declare_lint_pass;
+use rustc_span::def_id::LocalDefId;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It detects references to uninhabited types, such as `!` and
+ /// warns when those are either dereferenced or returned from a function.
+ ///
+ /// ### Why is this bad?
+ /// Dereferencing a reference to an uninhabited type would create
+ /// an instance of such a type, which cannot exist. This constitutes
+ /// undefined behaviour. Such a reference could have been created
+ /// by `unsafe` code.
+ ///
+ /// ### Example
+ /// The following function can return a reference to an uninhabited type
+ /// (`Infallible`) because it uses `unsafe` code to create it. However,
+ /// the user of such a function could dereference the return value and
+ /// trigger an undefined behavior from safe code.
+ ///
+ /// ```no_run
+ /// fn create_ref() -> &'static std::convert::Infallible {
+ /// unsafe { std::mem::transmute(&()) }
+ /// }
+ /// ```
+ #[clippy::version = "1.76.0"]
+ pub UNINHABITED_REFERENCES,
+ nursery,
+ "reference to uninhabited type"
+}
+
+declare_lint_pass!(UninhabitedReferences => [UNINHABITED_REFERENCES]);
+
+impl LateLintPass<'_> for UninhabitedReferences {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) {
+ if in_external_macro(cx.tcx.sess, expr.span) {
+ return;
+ }
+
+ if let ExprKind::Unary(UnOp::Deref, _) = expr.kind {
+ let ty = cx.typeck_results().expr_ty_adjusted(expr);
+ if ty.is_privately_uninhabited(cx.tcx, cx.param_env) {
+ span_lint(
+ cx,
+ UNINHABITED_REFERENCES,
+ expr.span,
+ "dereferencing a reference to an uninhabited type is undefined behavior",
+ );
+ }
+ }
+ }
+
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'_>,
+ kind: FnKind<'_>,
+ fndecl: &'_ FnDecl<'_>,
+ _: &'_ Body<'_>,
+ span: Span,
+ _: LocalDefId,
+ ) {
+ if in_external_macro(cx.tcx.sess, span) || matches!(kind, FnKind::Closure) {
+ return;
+ }
+ if let FnRetTy::Return(hir_ty) = fndecl.output
+ && let TyKind::Ref(_, mut_ty) = hir_ty.kind
+ && hir_ty_to_ty(cx.tcx, mut_ty.ty).is_privately_uninhabited(cx.tcx, cx.param_env)
+ {
+ span_lint(
+ cx,
+ UNINHABITED_REFERENCES,
+ hir_ty.span,
+ "dereferencing a reference to an uninhabited type would be undefined behavior",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/uninit_vec.rs b/src/tools/clippy/clippy_lints/src/uninit_vec.rs
index 72569e10f..fc8519d56 100644
--- a/src/tools/clippy/clippy_lints/src/uninit_vec.rs
+++ b/src/tools/clippy/clippy_lints/src/uninit_vec.rs
@@ -6,7 +6,7 @@ use rustc_hir::{Block, Expr, ExprKind, HirId, PatKind, PathSegment, Stmt, StmtKi
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
// TODO: add `ReadBuf` (RFC 2930) in "How to fix" once it is available in std
@@ -83,41 +83,39 @@ fn handle_uninit_vec_pair<'tcx>(
maybe_init_or_reserve: &'tcx Stmt<'tcx>,
maybe_set_len: &'tcx Expr<'tcx>,
) {
- if_chain! {
- if let Some(vec) = extract_init_or_reserve_target(cx, maybe_init_or_reserve);
- if let Some((set_len_self, call_span)) = extract_set_len_self(cx, maybe_set_len);
- if vec.location.eq_expr(cx, set_len_self);
- if let ty::Ref(_, vec_ty, _) = cx.typeck_results().expr_ty_adjusted(set_len_self).kind();
- if let ty::Adt(_, args) = vec_ty.kind();
+ if let Some(vec) = extract_init_or_reserve_target(cx, maybe_init_or_reserve)
+ && let Some((set_len_self, call_span)) = extract_set_len_self(cx, maybe_set_len)
+ && vec.location.eq_expr(cx, set_len_self)
+ && let ty::Ref(_, vec_ty, _) = cx.typeck_results().expr_ty_adjusted(set_len_self).kind()
+ && let ty::Adt(_, args) = vec_ty.kind()
// `#[allow(...)]` attribute can be set on enclosing unsafe block of `set_len()`
- if !is_lint_allowed(cx, UNINIT_VEC, maybe_set_len.hir_id);
- then {
- if vec.has_capacity() {
- // with_capacity / reserve -> set_len
+ && !is_lint_allowed(cx, UNINIT_VEC, maybe_set_len.hir_id)
+ {
+ if vec.has_capacity() {
+ // with_capacity / reserve -> set_len
- // Check T of Vec<T>
- if !is_uninit_value_valid_for_ty(cx, args.type_at(0)) {
- // FIXME: #7698, false positive of the internal lints
- #[expect(clippy::collapsible_span_lint_calls)]
- span_lint_and_then(
- cx,
- UNINIT_VEC,
- vec![call_span, maybe_init_or_reserve.span],
- "calling `set_len()` immediately after reserving a buffer creates uninitialized values",
- |diag| {
- diag.help("initialize the buffer or wrap the content in `MaybeUninit`");
- },
- );
- }
- } else {
- // new / default -> set_len
- span_lint(
+ // Check T of Vec<T>
+ if !is_uninit_value_valid_for_ty(cx, args.type_at(0)) {
+ // FIXME: #7698, false positive of the internal lints
+ #[expect(clippy::collapsible_span_lint_calls)]
+ span_lint_and_then(
cx,
UNINIT_VEC,
vec![call_span, maybe_init_or_reserve.span],
- "calling `set_len()` on empty `Vec` creates out-of-bound values",
+ "calling `set_len()` immediately after reserving a buffer creates uninitialized values",
+ |diag| {
+ diag.help("initialize the buffer or wrap the content in `MaybeUninit`");
+ },
);
}
+ } else {
+ // new / default -> set_len
+ span_lint(
+ cx,
+ UNINIT_VEC,
+ vec![call_span, maybe_init_or_reserve.span],
+ "calling `set_len()` on empty `Vec` creates out-of-bound values",
+ );
}
}
}
@@ -156,16 +154,14 @@ impl<'tcx> VecLocation<'tcx> {
fn extract_init_or_reserve_target<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'tcx>) -> Option<TargetVec<'tcx>> {
match stmt.kind {
StmtKind::Local(local) => {
- if_chain! {
- if let Some(init_expr) = local.init;
- if let PatKind::Binding(_, hir_id, _, None) = local.pat.kind;
- if let Some(init_kind) = get_vec_init_kind(cx, init_expr);
- then {
- return Some(TargetVec {
- location: VecLocation::Local(hir_id),
- init_kind: Some(init_kind),
- })
- }
+ if let Some(init_expr) = local.init
+ && let PatKind::Binding(_, hir_id, _, None) = local.pat.kind
+ && let Some(init_kind) = get_vec_init_kind(cx, init_expr)
+ {
+ return Some(TargetVec {
+ location: VecLocation::Local(hir_id),
+ init_kind: Some(init_kind),
+ });
}
},
StmtKind::Expr(expr) | StmtKind::Semi(expr) => match expr.kind {
diff --git a/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs b/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
index e76cc65fd..729972de6 100644
--- a/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
-use if_chain::if_chain;
use rustc_hir::def_id::DefId;
use rustc_hir::{Closure, Expr, ExprKind, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_middle::ty::{ClauseKind, GenericPredicates, ProjectionPredicate, TraitPredicate};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, BytePos, Span};
declare_clippy_lint! {
@@ -44,14 +43,14 @@ fn get_trait_predicates_for_trait_id<'tcx>(
) -> Vec<TraitPredicate<'tcx>> {
let mut preds = Vec::new();
for (pred, _) in generics.predicates {
- if_chain! {
- if let ClauseKind::Trait(poly_trait_pred) = pred.kind().skip_binder();
- let trait_pred = cx.tcx.erase_late_bound_regions(pred.kind().rebind(poly_trait_pred));
- if let Some(trait_def_id) = trait_id;
- if trait_def_id == trait_pred.trait_ref.def_id;
- then {
- preds.push(trait_pred);
- }
+ if let ClauseKind::Trait(poly_trait_pred) = pred.kind().skip_binder()
+ && let trait_pred = cx
+ .tcx
+ .instantiate_bound_regions_with_erased(pred.kind().rebind(poly_trait_pred))
+ && let Some(trait_def_id) = trait_id
+ && trait_def_id == trait_pred.trait_ref.def_id
+ {
+ preds.push(trait_pred);
}
}
preds
@@ -64,7 +63,9 @@ fn get_projection_pred<'tcx>(
) -> Option<ProjectionPredicate<'tcx>> {
generics.predicates.iter().find_map(|(proj_pred, _)| {
if let ClauseKind::Projection(pred) = proj_pred.kind().skip_binder() {
- let projection_pred = cx.tcx.erase_late_bound_regions(proj_pred.kind().rebind(pred));
+ let projection_pred = cx
+ .tcx
+ .instantiate_bound_regions_with_erased(proj_pred.kind().rebind(pred));
if projection_pred.projection_ty.args == trait_pred.trait_ref.args {
return Some(projection_pred);
}
@@ -82,10 +83,10 @@ fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Ve
let ord_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.get_diagnostic_item(sym::Ord));
let partial_ord_preds =
get_trait_predicates_for_trait_id(cx, generics, cx.tcx.lang_items().partial_ord_trait());
- // Trying to call erase_late_bound_regions on fn_sig.inputs() gives the following error
+ // Trying to call instantiate_bound_regions_with_erased on fn_sig.inputs() gives the following error
// The trait `rustc::ty::TypeFoldable<'_>` is not implemented for
// `&[rustc_middle::ty::Ty<'_>]`
- let inputs_output = cx.tcx.erase_late_bound_regions(fn_sig.inputs_and_output());
+ let inputs_output = cx.tcx.instantiate_bound_regions_with_erased(fn_sig.inputs_and_output());
inputs_output
.iter()
.rev()
@@ -94,21 +95,19 @@ fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Ve
.enumerate()
.for_each(|(i, inp)| {
for trait_pred in &fn_mut_preds {
- if_chain! {
- if trait_pred.self_ty() == inp;
- if let Some(return_ty_pred) = get_projection_pred(cx, generics, *trait_pred);
- then {
- if ord_preds
- .iter()
- .any(|ord| Some(ord.self_ty()) == return_ty_pred.term.ty())
- {
- args_to_check.push((i, "Ord".to_string()));
- } else if partial_ord_preds
- .iter()
- .any(|pord| pord.self_ty() == return_ty_pred.term.ty().unwrap())
- {
- args_to_check.push((i, "PartialOrd".to_string()));
- }
+ if trait_pred.self_ty() == inp
+ && let Some(return_ty_pred) = get_projection_pred(cx, generics, *trait_pred)
+ {
+ if ord_preds
+ .iter()
+ .any(|ord| Some(ord.self_ty()) == return_ty_pred.term.ty())
+ {
+ args_to_check.push((i, "Ord".to_string()));
+ } else if partial_ord_preds
+ .iter()
+ .any(|pord| pord.self_ty() == return_ty_pred.term.ty().unwrap())
+ {
+ args_to_check.push((i, "PartialOrd".to_string()));
}
}
}
@@ -118,30 +117,26 @@ fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Ve
}
fn check_arg<'tcx>(cx: &LateContext<'tcx>, arg: &'tcx Expr<'tcx>) -> Option<(Span, Option<Span>)> {
- if_chain! {
- if let ExprKind::Closure(&Closure { body, fn_decl_span, .. }) = arg.kind;
- if let ty::Closure(_def_id, args) = &cx.typeck_results().node_type(arg.hir_id).kind();
- let ret_ty = args.as_closure().sig().output();
- let ty = cx.tcx.erase_late_bound_regions(ret_ty);
- if ty.is_unit();
- then {
- let body = cx.tcx.hir().body(body);
- if_chain! {
- if let ExprKind::Block(block, _) = body.value.kind;
- if block.expr.is_none();
- if let Some(stmt) = block.stmts.last();
- if let StmtKind::Semi(_) = stmt.kind;
- then {
- let data = stmt.span.data();
- // Make a span out of the semicolon for the help message
- Some((fn_decl_span, Some(data.with_lo(data.hi-BytePos(1)))))
- } else {
- Some((fn_decl_span, None))
- }
- }
+ if let ExprKind::Closure(&Closure { body, fn_decl_span, .. }) = arg.kind
+ && let ty::Closure(_def_id, args) = &cx.typeck_results().node_type(arg.hir_id).kind()
+ && let ret_ty = args.as_closure().sig().output()
+ && let ty = cx.tcx.instantiate_bound_regions_with_erased(ret_ty)
+ && ty.is_unit()
+ {
+ let body = cx.tcx.hir().body(body);
+ if let ExprKind::Block(block, _) = body.value.kind
+ && block.expr.is_none()
+ && let Some(stmt) = block.stmts.last()
+ && let StmtKind::Semi(_) = stmt.kind
+ {
+ let data = stmt.span.data();
+ // Make a span out of the semicolon for the help message
+ Some((fn_decl_span, Some(data.with_lo(data.hi - BytePos(1)))))
} else {
- None
+ Some((fn_decl_span, None))
}
+ } else {
+ None
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/mod.rs b/src/tools/clippy/clippy_lints/src/unit_types/mod.rs
index 884c6ca4d..0abd48e64 100644
--- a/src/tools/clippy/clippy_lints/src/unit_types/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_types/mod.rs
@@ -5,7 +5,7 @@ mod utils;
use rustc_hir::{Expr, Local};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
index 462b1aa81..44cff78a7 100644
--- a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::is_from_proc_macro;
use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{self as hir, Block, Expr, ExprKind, MatchSource, Node, StmtKind};
use rustc_lint::LateContext;
@@ -22,12 +21,10 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
}
let map = &cx.tcx.hir();
let opt_parent_node = map.find_parent(expr.hir_id);
- if_chain! {
- if let Some(hir::Node::Expr(parent_expr)) = opt_parent_node;
- if is_questionmark_desugar_marked_call(parent_expr);
- then {
- return;
- }
+ if let Some(hir::Node::Expr(parent_expr)) = opt_parent_node
+ && is_questionmark_desugar_marked_call(parent_expr)
+ {
+ return;
}
let args: Vec<_> = match expr.kind {
@@ -80,21 +77,15 @@ fn lint_unit_args(cx: &LateContext<'_>, expr: &Expr<'_>, args_to_recover: &[&Exp
args_to_recover
.iter()
.filter_map(|arg| {
- if_chain! {
- if let ExprKind::Block(block, _) = arg.kind;
- if block.expr.is_none();
- if let Some(last_stmt) = block.stmts.iter().last();
- if let StmtKind::Semi(last_expr) = last_stmt.kind;
- if let Some(snip) = snippet_opt(cx, last_expr.span);
- then {
- Some((
- last_stmt.span,
- snip,
- ))
- }
- else {
- None
- }
+ if let ExprKind::Block(block, _) = arg.kind
+ && block.expr.is_none()
+ && let Some(last_stmt) = block.stmts.iter().last()
+ && let StmtKind::Semi(last_expr) = last_stmt.kind
+ && let Some(snip) = snippet_opt(cx, last_expr.span)
+ {
+ Some((last_stmt.span, snip))
+ } else {
+ None
}
})
.for_each(|(span, sugg)| {
diff --git a/src/tools/clippy/clippy_lints/src/unnamed_address.rs b/src/tools/clippy/clippy_lints/src/unnamed_address.rs
index e7355f923..cd2dacc9f 100644
--- a/src/tools/clippy/clippy_lints/src/unnamed_address.rs
+++ b/src/tools/clippy/clippy_lints/src/unnamed_address.rs
@@ -1,10 +1,8 @@
-use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
-use if_chain::if_chain;
+use clippy_utils::diagnostics::span_lint;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::sym;
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -30,31 +28,7 @@ declare_clippy_lint! {
"comparison with an address of a function item"
}
-declare_clippy_lint! {
- /// ### What it does
- /// Checks for comparisons with an address of a trait vtable.
- ///
- /// ### Why is this bad?
- /// Comparing trait objects pointers compares an vtable addresses which
- /// are not guaranteed to be unique and could vary between different code generation units.
- /// Furthermore vtables for different types could have the same address after being merged
- /// together.
- ///
- /// ### Example
- /// ```rust,ignore
- /// let a: Rc<dyn Trait> = ...
- /// let b: Rc<dyn Trait> = ...
- /// if Rc::ptr_eq(&a, &b) {
- /// ...
- /// }
- /// ```
- #[clippy::version = "1.44.0"]
- pub VTABLE_ADDRESS_COMPARISONS,
- correctness,
- "comparison with an address of a trait vtable"
-}
-
-declare_lint_pass!(UnnamedAddress => [FN_ADDRESS_COMPARISONS, VTABLE_ADDRESS_COMPARISONS]);
+declare_lint_pass!(UnnamedAddress => [FN_ADDRESS_COMPARISONS]);
impl LateLintPass<'_> for UnnamedAddress {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
@@ -65,66 +39,22 @@ impl LateLintPass<'_> for UnnamedAddress {
)
}
- fn is_trait_ptr(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- match cx.typeck_results().expr_ty_adjusted(expr).kind() {
- ty::RawPtr(ty::TypeAndMut { ty, .. }) => ty.is_trait(),
- _ => false,
- }
- }
-
fn is_fn_def(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
matches!(cx.typeck_results().expr_ty(expr).kind(), ty::FnDef(..))
}
- if_chain! {
- if let ExprKind::Binary(binop, left, right) = expr.kind;
- if is_comparison(binop.node);
- if is_trait_ptr(cx, left) && is_trait_ptr(cx, right);
- then {
- span_lint_and_help(
- cx,
- VTABLE_ADDRESS_COMPARISONS,
- expr.span,
- "comparing trait object pointers compares a non-unique vtable address",
- None,
- "consider extracting and comparing data pointers only",
- );
- }
- }
-
- if_chain! {
- if let ExprKind::Call(func, [ref _left, ref _right]) = expr.kind;
- if let ExprKind::Path(ref func_qpath) = func.kind;
- if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
- if cx.tcx.is_diagnostic_item(sym::ptr_eq, def_id);
- let ty_param = cx.typeck_results().node_args(func.hir_id).type_at(0);
- if ty_param.is_trait();
- then {
- span_lint_and_help(
- cx,
- VTABLE_ADDRESS_COMPARISONS,
- expr.span,
- "comparing trait object pointers compares a non-unique vtable address",
- None,
- "consider extracting and comparing data pointers only",
- );
- }
- }
-
- if_chain! {
- if let ExprKind::Binary(binop, left, right) = expr.kind;
- if is_comparison(binop.node);
- if cx.typeck_results().expr_ty_adjusted(left).is_fn_ptr();
- if cx.typeck_results().expr_ty_adjusted(right).is_fn_ptr();
- if is_fn_def(cx, left) || is_fn_def(cx, right);
- then {
- span_lint(
- cx,
- FN_ADDRESS_COMPARISONS,
- expr.span,
- "comparing with a non-unique address of a function item",
- );
- }
+ if let ExprKind::Binary(binop, left, right) = expr.kind
+ && is_comparison(binop.node)
+ && cx.typeck_results().expr_ty_adjusted(left).is_fn_ptr()
+ && cx.typeck_results().expr_ty_adjusted(right).is_fn_ptr()
+ && (is_fn_def(cx, left) || is_fn_def(cx, right))
+ {
+ span_lint(
+ cx,
+ FN_ADDRESS_COMPARISONS,
+ expr.span,
+ "comparing with a non-unique address of a function item",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
index ca159eb4d..f5af540fa 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
@@ -4,7 +4,7 @@ use rustc_errors::Applicability;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::{FnDecl, FnRetTy, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Symbol;
declare_clippy_lint! {
@@ -71,7 +71,7 @@ impl UnnecessaryBoxReturns {
let return_ty = cx
.tcx
- .erase_late_bound_regions(cx.tcx.fn_sig(def_id).skip_binder())
+ .instantiate_bound_regions_with_erased(cx.tcx.fn_sig(def_id).skip_binder())
.output();
if !return_ty.is_box() {
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_map_on_constructor.rs b/src/tools/clippy/clippy_lints/src/unnecessary_map_on_constructor.rs
index 9107b2b99..2b0d2d61d 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_map_on_constructor.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_map_on_constructor.rs
@@ -4,26 +4,29 @@ use clippy_utils::ty::get_type_diagnostic_name;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
- /// Suggest removing the use of a may (or map_err) method when an Option or Result is being constructed.
+ /// Suggests removing the use of a `map()` (or `map_err()`) method when an `Option` or `Result`
+ /// is being constructed.
///
/// ### Why is this bad?
- /// It introduces unnecessary complexity. In this case the function can be used directly and
- /// construct the Option or Result from the output.
+ /// It introduces unnecessary complexity. Instead, the function can be called before
+ /// constructing the `Option` or `Result` from its return value.
///
/// ### Example
/// ```no_run
- /// Some(4).map(i32::swap_bytes);
+ /// Some(4).map(i32::swap_bytes)
+ /// # ;
/// ```
/// Use instead:
/// ```no_run
- /// Some(i32::swap_bytes(4));
+ /// Some(i32::swap_bytes(4))
+ /// # ;
/// ```
- #[clippy::version = "1.73.0"]
+ #[clippy::version = "1.74.0"]
pub UNNECESSARY_MAP_ON_CONSTRUCTOR,
complexity,
"using `map`/`map_err` on `Option` or `Result` constructors"
@@ -59,11 +62,11 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryMapOnConstructor {
}
},
hir::QPath::TypeRelative(_, path) => path.ident.name,
- hir::QPath::LangItem(_, _, _) => return,
+ hir::QPath::LangItem(..) => return,
};
match constructor_symbol {
sym::Some | sym::Ok if path.ident.name == rustc_span::sym::map => (),
- sym::Err if path.ident.name == sym!(map_err) => (),
+ sym::Err if path.ident.name == sym::map_err => (),
_ => return,
}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
index 28ea02e4d..6b5e6c6ab 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
@@ -1,13 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::ty::is_type_lang_item;
use clippy_utils::{match_def_path, paths};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, LangItem, Mutability};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::sym;
declare_clippy_lint! {
@@ -36,46 +35,40 @@ declare_lint_pass!(UnnecessaryOwnedEmptyStrings => [UNNECESSARY_OWNED_EMPTY_STRI
impl<'tcx> LateLintPass<'tcx> for UnnecessaryOwnedEmptyStrings {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
- if_chain! {
- if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner_expr) = expr.kind;
- if let ExprKind::Call(fun, args) = inner_expr.kind;
- if let ExprKind::Path(ref qpath) = fun.kind;
- if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
- if let ty::Ref(_, inner_str, _) = cx.typeck_results().expr_ty_adjusted(expr).kind();
- if inner_str.is_str();
- then {
- if match_def_path(cx, fun_def_id, &paths::STRING_NEW) {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_OWNED_EMPTY_STRINGS,
- expr.span,
- "usage of `&String::new()` for a function expecting a `&str` argument",
- "try",
- "\"\"".to_owned(),
- Applicability::MachineApplicable,
- );
- } else {
- if_chain! {
- if cx.tcx.is_diagnostic_item(sym::from_fn, fun_def_id);
- if let [.., last_arg] = args;
- if let ExprKind::Lit(spanned) = &last_arg.kind;
- if let LitKind::Str(symbol, _) = spanned.node;
- if symbol.is_empty();
- let inner_expr_type = cx.typeck_results().expr_ty(inner_expr);
- if is_type_lang_item(cx, inner_expr_type, LangItem::String);
- then {
- span_lint_and_sugg(
- cx,
- UNNECESSARY_OWNED_EMPTY_STRINGS,
- expr.span,
- "usage of `&String::from(\"\")` for a function expecting a `&str` argument",
- "try",
- "\"\"".to_owned(),
- Applicability::MachineApplicable,
- );
- }
- }
- }
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner_expr) = expr.kind
+ && let ExprKind::Call(fun, args) = inner_expr.kind
+ && let ExprKind::Path(ref qpath) = fun.kind
+ && let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id()
+ && let ty::Ref(_, inner_str, _) = cx.typeck_results().expr_ty_adjusted(expr).kind()
+ && inner_str.is_str()
+ {
+ if match_def_path(cx, fun_def_id, &paths::STRING_NEW) {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_OWNED_EMPTY_STRINGS,
+ expr.span,
+ "usage of `&String::new()` for a function expecting a `&str` argument",
+ "try",
+ "\"\"".to_owned(),
+ Applicability::MachineApplicable,
+ );
+ } else if cx.tcx.is_diagnostic_item(sym::from_fn, fun_def_id)
+ && let [.., last_arg] = args
+ && let ExprKind::Lit(spanned) = &last_arg.kind
+ && let LitKind::Str(symbol, _) = spanned.node
+ && symbol.is_empty()
+ && let inner_expr_type = cx.typeck_results().expr_ty(inner_expr)
+ && is_type_lang_item(cx, inner_expr_type, LangItem::String)
+ {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_OWNED_EMPTY_STRINGS,
+ expr.span,
+ "usage of `&String::from(\"\")` for a function expecting a `&str` argument",
+ "try",
+ "\"\"".to_owned(),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs b/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs
index a1083a0a6..ddee06b59 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use if_chain::if_chain;
use rustc_ast::{Item, ItemKind, UseTreeKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::kw;
declare_clippy_lint! {
@@ -36,35 +35,36 @@ declare_lint_pass!(UnnecessarySelfImports => [UNNECESSARY_SELF_IMPORTS]);
impl EarlyLintPass for UnnecessarySelfImports {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
- if_chain! {
- if let ItemKind::Use(use_tree) = &item.kind;
- if let UseTreeKind::Nested(nodes) = &use_tree.kind;
- if let [(self_tree, _)] = &**nodes;
- if let [self_seg] = &*self_tree.prefix.segments;
- if self_seg.ident.name == kw::SelfLower;
- if let Some(last_segment) = use_tree.prefix.segments.last();
-
- then {
- span_lint_and_then(
- cx,
- UNNECESSARY_SELF_IMPORTS,
- item.span,
- "import ending with `::{self}`",
- |diag| {
- diag.span_suggestion(
- last_segment.span().with_hi(item.span.hi()),
- "consider omitting `::{self}`",
- format!(
- "{}{};",
- last_segment.ident,
- if let UseTreeKind::Simple(Some(alias)) = self_tree.kind { format!(" as {alias}") } else { String::new() },
- ),
- Applicability::MaybeIncorrect,
- );
- diag.note("this will slightly change semantics; any non-module items at the same path will also be imported");
- },
- );
- }
+ if let ItemKind::Use(use_tree) = &item.kind
+ && let UseTreeKind::Nested(nodes) = &use_tree.kind
+ && let [(self_tree, _)] = &**nodes
+ && let [self_seg] = &*self_tree.prefix.segments
+ && self_seg.ident.name == kw::SelfLower
+ && let Some(last_segment) = use_tree.prefix.segments.last()
+ {
+ span_lint_and_then(
+ cx,
+ UNNECESSARY_SELF_IMPORTS,
+ item.span,
+ "import ending with `::{self}`",
+ |diag| {
+ diag.span_suggestion(
+ last_segment.span().with_hi(item.span.hi()),
+ "consider omitting `::{self}`",
+ format!(
+ "{}{};",
+ last_segment.ident,
+ if let UseTreeKind::Simple(Some(alias)) = self_tree.kind {
+ format!(" as {alias}")
+ } else {
+ String::new()
+ },
+ ),
+ Applicability::MaybeIncorrect,
+ );
+ diag.note("this will slightly change semantics; any non-module items at the same path will also be imported");
+ },
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
index c35a2afab..333ea0c82 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
@@ -4,7 +4,7 @@ use clippy_utils::ty::is_copy;
use clippy_utils::{get_parent_expr, path_to_local};
use rustc_hir::{BindingAnnotation, Expr, ExprKind, Node, PatKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -82,7 +82,7 @@ impl LateLintPass<'_> for UnnecessaryStruct {
fn is_mutable(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
if let Some(hir_id) = path_to_local(expr)
- && let Node::Pat(pat) = cx.tcx.hir().get(hir_id)
+ && let Node::Pat(pat) = cx.tcx.hir_node(hir_id)
{
matches!(pat.kind, PatKind::Binding(BindingAnnotation::MUT, ..))
} else {
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
index ab8de17b0..446160f8e 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
@@ -2,14 +2,13 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet;
use clippy_utils::visitors::find_all_ret_expressions;
use clippy_utils::{contains_return, is_res_lang_ctor, path_res, return_ty};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::LangItem::{OptionSome, ResultOk};
use rustc_hir::{Body, ExprKind, FnDecl, Impl, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -92,7 +91,7 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps {
}
// Abort if the method is implementing a trait or of it a trait method.
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
if let Some(Node::Item(item)) = cx.tcx.hir().find_parent(hir_id) {
if matches!(
item.kind,
@@ -119,28 +118,24 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps {
// Check if all return expression respect the following condition and collect them.
let mut suggs = Vec::new();
let can_sugg = find_all_ret_expressions(cx, body.value, |ret_expr| {
- if_chain! {
- if !ret_expr.span.from_expansion();
+ if !ret_expr.span.from_expansion()
// Check if a function call.
- if let ExprKind::Call(func, [arg]) = ret_expr.kind;
- if is_res_lang_ctor(cx, path_res(cx, func), lang_item);
+ && let ExprKind::Call(func, [arg]) = ret_expr.kind
+ && is_res_lang_ctor(cx, path_res(cx, func), lang_item)
// Make sure the function argument does not contain a return expression.
- if !contains_return(arg);
- then {
- suggs.push(
- (
- ret_expr.span,
- if inner_type.is_unit() {
- String::new()
- } else {
- snippet(cx, arg.span.source_callsite(), "..").to_string()
- }
- )
- );
- true
- } else {
- false
- }
+ && !contains_return(arg)
+ {
+ suggs.push((
+ ret_expr.span,
+ if inner_type.is_unit() {
+ String::new()
+ } else {
+ snippet(cx, arg.span.source_callsite(), "..").to_string()
+ },
+ ));
+ true
+ } else {
+ false
}
});
diff --git a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
index 8ff088a20..65600009c 100644
--- a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
+++ b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
@@ -11,7 +11,7 @@ use rustc_ast::{self as ast, Mutability, Pat, PatKind, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::DUMMY_SP;
use std::cell::Cell;
use std::mem;
@@ -226,7 +226,7 @@ fn transform_with_focus_on_idx(alternatives: &mut ThinVec<P<Pat>>, focus_idx: us
// Therefore they are not some form of constructor `C`,
// with which a pattern `C(p_0)` may be formed,
// which we would want to join with other `C(p_j)`s.
- Ident(.., None) | Lit(_) | Wild | Path(..) | Range(..) | Rest | MacCall(_)
+ Ident(.., None) | Lit(_) | Wild | Never | Path(..) | Range(..) | Rest | MacCall(_)
// Skip immutable refs, as grouping them saves few characters,
// and almost always requires adding parens (increasing noisiness).
// In the case of only two patterns, replacement adds net characters.
diff --git a/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs b/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs
index c43d5dc94..3f2f765f7 100644
--- a/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs
+++ b/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs
@@ -1,9 +1,9 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::{Item, ItemKind, UseTree, UseTreeKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::Span;
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Ident;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/unused_async.rs b/src/tools/clippy/clippy_lints/src/unused_async.rs
index aea72c798..9c8c44c0a 100644
--- a/src/tools/clippy/clippy_lints/src/unused_async.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_async.rs
@@ -5,7 +5,7 @@ use rustc_hir::intravisit::{walk_body, walk_expr, walk_fn, FnKind, Visitor};
use rustc_hir::{Body, Expr, ExprKind, FnDecl, Node, YieldSource};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::def_id::{LocalDefId, LocalDefIdSet};
use rustc_span::Span;
@@ -148,7 +148,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAsync {
// statements, so don't lint at all if there are any such paths.
if let Some(def_id) = path.res.opt_def_id()
&& let Some(local_def_id) = def_id.as_local()
- && let Some(DefKind::Fn) = cx.tcx.opt_def_kind(def_id)
+ && cx.tcx.def_kind(def_id) == DefKind::Fn
&& cx.tcx.asyncness(def_id).is_async()
&& !is_node_func_call(cx.tcx.hir().get_parent(hir_id), path.span)
{
diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
index 0fcb62017..1de9adfcb 100644
--- a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::{is_trait_method, is_try, match_trait_method, paths};
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/unused_peekable.rs b/src/tools/clippy/clippy_lints/src/unused_peekable.rs
index 0473ecaab..ba72b3450 100644
--- a/src/tools/clippy/clippy_lints/src/unused_peekable.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_peekable.rs
@@ -6,7 +6,7 @@ use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{Block, Expr, ExprKind, HirId, Local, Node, PatKind, PathSegment, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter::OnlyBodies;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/unused_rounding.rs b/src/tools/clippy/clippy_lints/src/unused_rounding.rs
index fbb36bea0..d5ca844b9 100644
--- a/src/tools/clippy/clippy_lints/src/unused_rounding.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_rounding.rs
@@ -3,7 +3,7 @@ use clippy_utils::source::snippet;
use rustc_ast::ast::{Expr, ExprKind, MethodCall};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/unused_self.rs b/src/tools/clippy/clippy_lints/src/unused_self.rs
index f864c5203..a67f53f00 100644
--- a/src/tools/clippy/clippy_lints/src/unused_self.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_self.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::macros::root_macro_call_first_node;
use clippy_utils::visitors::is_local_used;
-use if_chain::if_chain;
use rustc_hir::{Body, Impl, ImplItem, ImplItemKind, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use std::ops::ControlFlow;
declare_clippy_lint! {
@@ -73,25 +72,23 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf {
})
.is_some()
};
- if_chain! {
- if let ItemKind::Impl(Impl { of_trait: None, .. }) = parent_item.kind;
- if assoc_item.fn_has_self_parameter;
- if let ImplItemKind::Fn(.., body_id) = &impl_item.kind;
- if !cx.effective_visibilities.is_exported(impl_item.owner_id.def_id) || !self.avoid_breaking_exported_api;
- let body = cx.tcx.hir().body(*body_id);
- if let [self_param, ..] = body.params;
- if !is_local_used(cx, body, self_param.pat.hir_id);
- if !contains_todo(cx, body);
- then {
- span_lint_and_help(
- cx,
- UNUSED_SELF,
- self_param.span,
- "unused `self` argument",
- None,
- "consider refactoring to an associated function",
- );
- }
+ if let ItemKind::Impl(Impl { of_trait: None, .. }) = parent_item.kind
+ && assoc_item.fn_has_self_parameter
+ && let ImplItemKind::Fn(.., body_id) = &impl_item.kind
+ && (!cx.effective_visibilities.is_exported(impl_item.owner_id.def_id) || !self.avoid_breaking_exported_api)
+ && let body = cx.tcx.hir().body(*body_id)
+ && let [self_param, ..] = body.params
+ && !is_local_used(cx, body, self_param.pat.hir_id)
+ && !contains_todo(cx, body)
+ {
+ span_lint_and_help(
+ cx,
+ UNUSED_SELF,
+ self_param.span,
+ "unused `self` argument",
+ None,
+ "consider refactoring to an associated function",
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unused_unit.rs b/src/tools/clippy/clippy_lints/src/unused_unit.rs
index adbf82813..0a73da202 100644
--- a/src/tools/clippy/clippy_lints/src/unused_unit.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_unit.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{position_before_rarrow, snippet_opt};
-use if_chain::if_chain;
use rustc_ast::visit::FnKind;
use rustc_ast::{ast, ClosureBinder};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{BytePos, Span};
declare_clippy_lint! {
@@ -37,40 +36,39 @@ declare_lint_pass!(UnusedUnit => [UNUSED_UNIT]);
impl EarlyLintPass for UnusedUnit {
fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, span: Span, _: ast::NodeId) {
- if_chain! {
- if let ast::FnRetTy::Ty(ref ty) = kind.decl().output;
- if let ast::TyKind::Tup(ref vals) = ty.kind;
- if vals.is_empty() && !ty.span.from_expansion() && get_def(span) == get_def(ty.span);
- then {
- // implicit types in closure signatures are forbidden when `for<...>` is present
- if let FnKind::Closure(&ClosureBinder::For { .. }, ..) = kind {
- return;
- }
-
- lint_unneeded_unit_return(cx, ty, span);
+ if let ast::FnRetTy::Ty(ref ty) = kind.decl().output
+ && let ast::TyKind::Tup(ref vals) = ty.kind
+ && vals.is_empty()
+ && !ty.span.from_expansion()
+ && get_def(span) == get_def(ty.span)
+ {
+ // implicit types in closure signatures are forbidden when `for<...>` is present
+ if let FnKind::Closure(&ClosureBinder::For { .. }, ..) = kind {
+ return;
}
+
+ lint_unneeded_unit_return(cx, ty, span);
}
}
fn check_block(&mut self, cx: &EarlyContext<'_>, block: &ast::Block) {
- if_chain! {
- if let Some(stmt) = block.stmts.last();
- if let ast::StmtKind::Expr(ref expr) = stmt.kind;
- if is_unit_expr(expr);
- let ctxt = block.span.ctxt();
- if stmt.span.ctxt() == ctxt && expr.span.ctxt() == ctxt;
- then {
- let sp = expr.span;
- span_lint_and_sugg(
- cx,
- UNUSED_UNIT,
- sp,
- "unneeded unit expression",
- "remove the final `()`",
- String::new(),
- Applicability::MachineApplicable,
- );
- }
+ if let Some(stmt) = block.stmts.last()
+ && let ast::StmtKind::Expr(ref expr) = stmt.kind
+ && is_unit_expr(expr)
+ && let ctxt = block.span.ctxt()
+ && stmt.span.ctxt() == ctxt
+ && expr.span.ctxt() == ctxt
+ {
+ let sp = expr.span;
+ span_lint_and_sugg(
+ cx,
+ UNUSED_UNIT,
+ sp,
+ "unneeded unit expression",
+ "remove the final `()`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
}
}
@@ -96,16 +94,14 @@ impl EarlyLintPass for UnusedUnit {
fn check_poly_trait_ref(&mut self, cx: &EarlyContext<'_>, poly: &ast::PolyTraitRef) {
let segments = &poly.trait_ref.path.segments;
- if_chain! {
- if segments.len() == 1;
- if ["Fn", "FnMut", "FnOnce"].contains(&segments[0].ident.name.as_str());
- if let Some(args) = &segments[0].args;
- if let ast::GenericArgs::Parenthesized(generic_args) = &**args;
- if let ast::FnRetTy::Ty(ty) = &generic_args.output;
- if ty.kind.is_unit();
- then {
- lint_unneeded_unit_return(cx, ty, generic_args.span);
- }
+ if segments.len() == 1
+ && ["Fn", "FnMut", "FnOnce"].contains(&segments[0].ident.name.as_str())
+ && let Some(args) = &segments[0].args
+ && let ast::GenericArgs::Parenthesized(generic_args) = &**args
+ && let ast::FnRetTy::Ty(ty) = &generic_args.output
+ && ty.kind.is_unit()
+ {
+ lint_unneeded_unit_return(cx, ty, generic_args.span);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unwrap.rs b/src/tools/clippy/clippy_lints/src/unwrap.rs
index cdfcb8500..ae2ac38cf 100644
--- a/src/tools/clippy/clippy_lints/src/unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/unwrap.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::usage::is_potentially_local_place;
use clippy_utils::{higher, path_to_local};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, Visitor};
use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, Node, PathSegment, UnOp};
@@ -13,7 +12,7 @@ use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
@@ -155,41 +154,35 @@ fn collect_unwrap_info<'tcx>(
}
} else if let ExprKind::Unary(UnOp::Not, expr) = &expr.kind {
return collect_unwrap_info(cx, if_expr, expr, branch, !invert, false);
- } else {
- if_chain! {
- if let ExprKind::MethodCall(method_name, receiver, args, _) = &expr.kind;
- if let Some(local_id) = path_to_local(receiver);
- let ty = cx.typeck_results().expr_ty(receiver);
- let name = method_name.ident.as_str();
- if is_relevant_option_call(cx, ty, name) || is_relevant_result_call(cx, ty, name);
- then {
- assert!(args.is_empty());
- let unwrappable = match name {
- "is_some" | "is_ok" => true,
- "is_err" | "is_none" => false,
- _ => unreachable!(),
- };
- let safe_to_unwrap = unwrappable != invert;
- let kind = if is_type_diagnostic_item(cx, ty, sym::Option) {
- UnwrappableKind::Option
- } else {
- UnwrappableKind::Result
- };
+ } else if let ExprKind::MethodCall(method_name, receiver, args, _) = &expr.kind
+ && let Some(local_id) = path_to_local(receiver)
+ && let ty = cx.typeck_results().expr_ty(receiver)
+ && let name = method_name.ident.as_str()
+ && (is_relevant_option_call(cx, ty, name) || is_relevant_result_call(cx, ty, name))
+ {
+ assert!(args.is_empty());
+ let unwrappable = match name {
+ "is_some" | "is_ok" => true,
+ "is_err" | "is_none" => false,
+ _ => unreachable!(),
+ };
+ let safe_to_unwrap = unwrappable != invert;
+ let kind = if is_type_diagnostic_item(cx, ty, sym::Option) {
+ UnwrappableKind::Option
+ } else {
+ UnwrappableKind::Result
+ };
- return vec![
- UnwrapInfo {
- local_id,
- if_expr,
- check: expr,
- check_name: method_name,
- branch,
- safe_to_unwrap,
- kind,
- is_entire_condition,
- }
- ]
- }
- }
+ return vec![UnwrapInfo {
+ local_id,
+ if_expr,
+ check: expr,
+ check_name: method_name,
+ branch,
+ safe_to_unwrap,
+ kind,
+ is_entire_condition,
+ }];
}
Vec::new()
}
@@ -319,73 +312,72 @@ impl<'a, 'tcx> Visitor<'tcx> for UnwrappableVariablesVisitor<'a, 'tcx> {
}
} else {
// find `unwrap[_err]()` calls:
- if_chain! {
- if let ExprKind::MethodCall(method_name, self_arg, ..) = expr.kind;
- let (self_arg, as_ref_kind) = consume_option_as_ref(self_arg);
- if let Some(id) = path_to_local(self_arg);
- if [sym::unwrap, sym::expect, sym!(unwrap_err)].contains(&method_name.ident.name);
- let call_to_unwrap = [sym::unwrap, sym::expect].contains(&method_name.ident.name);
- if let Some(unwrappable) = self.unwrappables.iter()
- .find(|u| u.local_id == id);
+ if let ExprKind::MethodCall(method_name, self_arg, ..) = expr.kind
+ && let (self_arg, as_ref_kind) = consume_option_as_ref(self_arg)
+ && let Some(id) = path_to_local(self_arg)
+ && [sym::unwrap, sym::expect, sym!(unwrap_err)].contains(&method_name.ident.name)
+ && let call_to_unwrap = [sym::unwrap, sym::expect].contains(&method_name.ident.name)
+ && let Some(unwrappable) = self.unwrappables.iter()
+ .find(|u| u.local_id == id)
// Span contexts should not differ with the conditional branch
- let span_ctxt = expr.span.ctxt();
- if unwrappable.branch.span.ctxt() == span_ctxt;
- if unwrappable.check.span.ctxt() == span_ctxt;
- then {
- if call_to_unwrap == unwrappable.safe_to_unwrap {
- let is_entire_condition = unwrappable.is_entire_condition;
- let unwrappable_variable_name = self.cx.tcx.hir().name(unwrappable.local_id);
- let suggested_pattern = if call_to_unwrap {
- unwrappable.kind.success_variant_pattern()
- } else {
- unwrappable.kind.error_variant_pattern()
- };
-
- span_lint_hir_and_then(
- self.cx,
- UNNECESSARY_UNWRAP,
- expr.hir_id,
- expr.span,
- &format!(
- "called `{}` on `{unwrappable_variable_name}` after checking its variant with `{}`",
- method_name.ident.name,
- unwrappable.check_name.ident.as_str(),
- ),
- |diag| {
- if is_entire_condition {
- diag.span_suggestion(
- unwrappable.check.span.with_lo(unwrappable.if_expr.span.lo()),
- "try",
- format!(
- "if let {suggested_pattern} = {borrow_prefix}{unwrappable_variable_name}",
- borrow_prefix = match as_ref_kind {
- Some(AsRefKind::AsRef) => "&",
- Some(AsRefKind::AsMut) => "&mut ",
- None => "",
- },
- ),
- // We don't track how the unwrapped value is used inside the
- // block or suggest deleting the unwrap, so we can't offer a
- // fixable solution.
- Applicability::Unspecified,
- );
- } else {
- diag.span_label(unwrappable.check.span, "the check is happening here");
- diag.help("try using `if let` or `match`");
- }
- },
- );
+ && let span_ctxt = expr.span.ctxt()
+ && unwrappable.branch.span.ctxt() == span_ctxt
+ && unwrappable.check.span.ctxt() == span_ctxt
+ {
+ if call_to_unwrap == unwrappable.safe_to_unwrap {
+ let is_entire_condition = unwrappable.is_entire_condition;
+ let unwrappable_variable_name = self.cx.tcx.hir().name(unwrappable.local_id);
+ let suggested_pattern = if call_to_unwrap {
+ unwrappable.kind.success_variant_pattern()
} else {
- span_lint_hir_and_then(
- self.cx,
- PANICKING_UNWRAP,
- expr.hir_id,
- expr.span,
- &format!("this call to `{}()` will always panic",
- method_name.ident.name),
- |diag| { diag.span_label(unwrappable.check.span, "because of this check"); },
- );
- }
+ unwrappable.kind.error_variant_pattern()
+ };
+
+ span_lint_hir_and_then(
+ self.cx,
+ UNNECESSARY_UNWRAP,
+ expr.hir_id,
+ expr.span,
+ &format!(
+ "called `{}` on `{unwrappable_variable_name}` after checking its variant with `{}`",
+ method_name.ident.name,
+ unwrappable.check_name.ident.as_str(),
+ ),
+ |diag| {
+ if is_entire_condition {
+ diag.span_suggestion(
+ unwrappable.check.span.with_lo(unwrappable.if_expr.span.lo()),
+ "try",
+ format!(
+ "if let {suggested_pattern} = {borrow_prefix}{unwrappable_variable_name}",
+ borrow_prefix = match as_ref_kind {
+ Some(AsRefKind::AsRef) => "&",
+ Some(AsRefKind::AsMut) => "&mut ",
+ None => "",
+ },
+ ),
+ // We don't track how the unwrapped value is used inside the
+ // block or suggest deleting the unwrap, so we can't offer a
+ // fixable solution.
+ Applicability::Unspecified,
+ );
+ } else {
+ diag.span_label(unwrappable.check.span, "the check is happening here");
+ diag.help("try using `if let` or `match`");
+ }
+ },
+ );
+ } else {
+ span_lint_hir_and_then(
+ self.cx,
+ PANICKING_UNWRAP,
+ expr.hir_id,
+ expr.span,
+ &format!("this call to `{}()` will always panic", method_name.ident.name),
+ |diag| {
+ diag.span_label(unwrappable.check.span, "because of this check");
+ },
+ );
}
}
walk_expr(self, expr);
diff --git a/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs b/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs
index 21592abbf..a615ef116 100644
--- a/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs
+++ b/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs
@@ -3,11 +3,10 @@ use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::visitors::for_each_expr;
use clippy_utils::{method_chain_args, return_ty};
use core::ops::ControlFlow;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_hir::ImplItemKind;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::{sym, Span};
declare_clippy_lint! {
@@ -60,15 +59,13 @@ declare_lint_pass!(UnwrapInResult=> [UNWRAP_IN_RESULT]);
impl<'tcx> LateLintPass<'tcx> for UnwrapInResult {
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx hir::ImplItem<'_>) {
- if_chain! {
+ if let hir::ImplItemKind::Fn(ref _signature, _) = impl_item.kind
// first check if it's a method or function
- if let hir::ImplItemKind::Fn(ref _signature, _) = impl_item.kind;
// checking if its return type is `result` or `option`
- if is_type_diagnostic_item(cx, return_ty(cx, impl_item.owner_id), sym::Result)
- || is_type_diagnostic_item(cx, return_ty(cx, impl_item.owner_id), sym::Option);
- then {
- lint_impl_body(cx, impl_item.span, impl_item);
- }
+ && (is_type_diagnostic_item(cx, return_ty(cx, impl_item.owner_id), sym::Result)
+ || is_type_diagnostic_item(cx, return_ty(cx, impl_item.owner_id), sym::Option))
+ {
+ lint_impl_body(cx, impl_item.span, impl_item);
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs b/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs
index de6a75b79..d2a1d42f2 100644
--- a/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs
+++ b/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs
@@ -1,10 +1,10 @@
-use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::diagnostics::span_lint_hir_and_then;
use itertools::Itertools;
use rustc_errors::Applicability;
-use rustc_hir::{Item, ItemKind};
+use rustc_hir::{HirId, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::Ident;
declare_clippy_lint! {
@@ -77,7 +77,7 @@ fn correct_ident(ident: &str) -> String {
ident
}
-fn check_ident(cx: &LateContext<'_>, ident: &Ident, be_aggressive: bool) {
+fn check_ident(cx: &LateContext<'_>, ident: &Ident, hir_id: HirId, be_aggressive: bool) {
let span = ident.span;
let ident = ident.as_str();
let corrected = correct_ident(ident);
@@ -89,14 +89,20 @@ fn check_ident(cx: &LateContext<'_>, ident: &Ident, be_aggressive: bool) {
// upper-case-acronyms-aggressive config option enabled
|| (be_aggressive && ident != corrected)
{
- span_lint_and_sugg(
+ span_lint_hir_and_then(
cx,
UPPER_CASE_ACRONYMS,
+ hir_id,
span,
&format!("name `{ident}` contains a capitalized acronym"),
- "consider making the acronym lowercase, except the initial letter",
- corrected,
- Applicability::MaybeIncorrect,
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "consider making the acronym lowercase, except the initial letter",
+ corrected,
+ Applicability::MaybeIncorrect,
+ );
+ },
);
}
}
@@ -111,16 +117,15 @@ impl LateLintPass<'_> for UpperCaseAcronyms {
}
match it.kind {
ItemKind::TyAlias(..) | ItemKind::Struct(..) | ItemKind::Trait(..) => {
- check_ident(cx, &it.ident, self.upper_case_acronyms_aggressive);
+ check_ident(cx, &it.ident, it.hir_id(), self.upper_case_acronyms_aggressive);
},
ItemKind::Enum(ref enumdef, _) => {
- check_ident(cx, &it.ident, self.upper_case_acronyms_aggressive);
+ check_ident(cx, &it.ident, it.hir_id(), self.upper_case_acronyms_aggressive);
// check enum variants separately because again we only want to lint on private enums and
// the fn check_variant does not know about the vis of the enum of its variants
- enumdef
- .variants
- .iter()
- .for_each(|variant| check_ident(cx, &variant.ident, self.upper_case_acronyms_aggressive));
+ enumdef.variants.iter().for_each(|variant| {
+ check_ident(cx, &variant.ident, variant.hir_id, self.upper_case_acronyms_aggressive);
+ });
},
_ => {},
}
diff --git a/src/tools/clippy/clippy_lints/src/use_self.rs b/src/tools/clippy/clippy_lints/src/use_self.rs
index c3fe16ad5..fa033838e 100644
--- a/src/tools/clippy/clippy_lints/src/use_self.rs
+++ b/src/tools/clippy/clippy_lints/src/use_self.rs
@@ -2,7 +2,6 @@ use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_from_proc_macro;
use clippy_utils::ty::same_type_and_consts;
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::{CtorOf, DefKind, Res};
@@ -14,7 +13,7 @@ use rustc_hir::{
};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
@@ -93,32 +92,40 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
// relevant for linting, since this is the self type of the `impl` we're currently in. To
// avoid linting on nested items, we push `StackItem::NoCheck` on the stack to signal, that
// we're in an `impl` or nested item, that we don't want to lint
- let stack_item = if_chain! {
- if let ItemKind::Impl(Impl { self_ty, generics,.. }) = item.kind;
- if let TyKind::Path(QPath::Resolved(_, item_path)) = self_ty.kind;
- let parameters = &item_path.segments.last().expect(SEGMENTS_MSG).args;
- if parameters.as_ref().map_or(true, |params| {
- params.parenthesized == GenericArgsParentheses::No
+ let stack_item = if let ItemKind::Impl(Impl { self_ty, generics, .. }) = item.kind
+ && let TyKind::Path(QPath::Resolved(_, item_path)) = self_ty.kind
+ && let parameters = &item_path.segments.last().expect(SEGMENTS_MSG).args
+ && parameters.as_ref().map_or(true, |params| {
+ params.parenthesized == GenericArgsParentheses::No
&& !params.args.iter().any(|arg| matches!(arg, GenericArg::Lifetime(_)))
- });
- if !item.span.from_expansion();
- if !is_from_proc_macro(cx, item); // expensive, should be last check
- then {
- // Self cannot be used inside const generic parameters
- let types_to_skip = generics.params.iter().filter_map(|param| {
- match param {
- GenericParam { kind: GenericParamKind::Const { ty: Ty { hir_id, ..}, ..}, ..} => Some(*hir_id),
- _ => None,
- }
- }).chain(std::iter::once(self_ty.hir_id)).collect();
- StackItem::Check {
- impl_id: item.owner_id.def_id,
- in_body: 0,
- types_to_skip,
- }
- } else {
- StackItem::NoCheck
+ })
+ && !item.span.from_expansion()
+ && !is_from_proc_macro(cx, item)
+ // expensive, should be last check
+ {
+ // Self cannot be used inside const generic parameters
+ let types_to_skip = generics
+ .params
+ .iter()
+ .filter_map(|param| match param {
+ GenericParam {
+ kind:
+ GenericParamKind::Const {
+ ty: Ty { hir_id, .. }, ..
+ },
+ ..
+ } => Some(*hir_id),
+ _ => None,
+ })
+ .chain(std::iter::once(self_ty.hir_id))
+ .collect();
+ StackItem::Check {
+ impl_id: item.owner_id.def_id,
+ in_body: 0,
+ types_to_skip,
}
+ } else {
+ StackItem::NoCheck
};
self.stack.push(stack_item);
}
@@ -132,56 +139,54 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &hir::ImplItem<'_>) {
// We want to skip types in trait `impl`s that aren't declared as `Self` in the trait
// declaration. The collection of those types is all this method implementation does.
- if_chain! {
- if let ImplItemKind::Fn(FnSig { decl, .. }, ..) = impl_item.kind;
- if let Some(&mut StackItem::Check {
+ if let ImplItemKind::Fn(FnSig { decl, .. }, ..) = impl_item.kind
+ && let Some(&mut StackItem::Check {
impl_id,
ref mut types_to_skip,
..
- }) = self.stack.last_mut();
- if let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(impl_id);
- then {
- // `self_ty` is the semantic self type of `impl <trait> for <type>`. This cannot be
- // `Self`.
- let self_ty = impl_trait_ref.instantiate_identity().self_ty();
+ }) = self.stack.last_mut()
+ && let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(impl_id)
+ {
+ // `self_ty` is the semantic self type of `impl <trait> for <type>`. This cannot be
+ // `Self`.
+ let self_ty = impl_trait_ref.instantiate_identity().self_ty();
- // `trait_method_sig` is the signature of the function, how it is declared in the
- // trait, not in the impl of the trait.
- let trait_method = cx
- .tcx
- .associated_item(impl_item.owner_id)
- .trait_item_def_id
- .expect("impl method matches a trait method");
- let trait_method_sig = cx.tcx.fn_sig(trait_method).instantiate_identity();
- let trait_method_sig = cx.tcx.erase_late_bound_regions(trait_method_sig);
+ // `trait_method_sig` is the signature of the function, how it is declared in the
+ // trait, not in the impl of the trait.
+ let trait_method = cx
+ .tcx
+ .associated_item(impl_item.owner_id)
+ .trait_item_def_id
+ .expect("impl method matches a trait method");
+ let trait_method_sig = cx.tcx.fn_sig(trait_method).instantiate_identity();
+ let trait_method_sig = cx.tcx.instantiate_bound_regions_with_erased(trait_method_sig);
- // `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the
- // implementation of the trait.
- let output_hir_ty = if let FnRetTy::Return(ty) = &decl.output {
- Some(&**ty)
- } else {
- None
- };
- let impl_inputs_outputs = decl.inputs.iter().chain(output_hir_ty);
+ // `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the
+ // implementation of the trait.
+ let output_hir_ty = if let FnRetTy::Return(ty) = &decl.output {
+ Some(&**ty)
+ } else {
+ None
+ };
+ let impl_inputs_outputs = decl.inputs.iter().chain(output_hir_ty);
- // `impl_hir_ty` (of type `hir::Ty`) represents the type written in the signature.
- //
- // `trait_sem_ty` (of type `ty::Ty`) is the semantic type for the signature in the
- // trait declaration. This is used to check if `Self` was used in the trait
- // declaration.
- //
- // If `any`where in the `trait_sem_ty` the `self_ty` was used verbatim (as opposed
- // to `Self`), we want to skip linting that type and all subtypes of it. This
- // avoids suggestions to e.g. replace `Vec<u8>` with `Vec<Self>`, in an `impl Trait
- // for u8`, when the trait always uses `Vec<u8>`.
- //
- // See also https://github.com/rust-lang/rust-clippy/issues/2894.
- for (impl_hir_ty, trait_sem_ty) in impl_inputs_outputs.zip(trait_method_sig.inputs_and_output) {
- if trait_sem_ty.walk().any(|inner| inner == self_ty.into()) {
- let mut visitor = SkipTyCollector::default();
- visitor.visit_ty(impl_hir_ty);
- types_to_skip.extend(visitor.types_to_skip);
- }
+ // `impl_hir_ty` (of type `hir::Ty`) represents the type written in the signature.
+ //
+ // `trait_sem_ty` (of type `ty::Ty`) is the semantic type for the signature in the
+ // trait declaration. This is used to check if `Self` was used in the trait
+ // declaration.
+ //
+ // If `any`where in the `trait_sem_ty` the `self_ty` was used verbatim (as opposed
+ // to `Self`), we want to skip linting that type and all subtypes of it. This
+ // avoids suggestions to e.g. replace `Vec<u8>` with `Vec<Self>`, in an `impl Trait
+ // for u8`, when the trait always uses `Vec<u8>`.
+ //
+ // See also https://github.com/rust-lang/rust-clippy/issues/2894.
+ for (impl_hir_ty, trait_sem_ty) in impl_inputs_outputs.zip(trait_method_sig.inputs_and_output) {
+ if trait_sem_ty.walk().any(|inner| inner == self_ty.into()) {
+ let mut visitor = SkipTyCollector::default();
+ visitor.visit_ty(impl_hir_ty);
+ types_to_skip.extend(visitor.types_to_skip);
}
}
}
@@ -203,41 +208,38 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
}
fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) {
- if_chain! {
- if !hir_ty.span.from_expansion();
- if self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS);
- if let Some(&StackItem::Check {
+ if !hir_ty.span.from_expansion()
+ && self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS)
+ && let Some(&StackItem::Check {
impl_id,
in_body,
ref types_to_skip,
- }) = self.stack.last();
- if let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind;
- if !matches!(
+ }) = self.stack.last()
+ && let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind
+ && !matches!(
path.res,
- Res::SelfTyParam { .. }
- | Res::SelfTyAlias { .. }
- | Res::Def(DefKind::TyParam, _)
- );
- if !types_to_skip.contains(&hir_ty.hir_id);
- let ty = if in_body > 0 {
+ Res::SelfTyParam { .. } | Res::SelfTyAlias { .. } | Res::Def(DefKind::TyParam, _)
+ )
+ && !types_to_skip.contains(&hir_ty.hir_id)
+ && let ty = if in_body > 0 {
cx.typeck_results().node_type(hir_ty.hir_id)
} else {
hir_ty_to_ty(cx.tcx, hir_ty)
- };
- if same_type_and_consts(ty, cx.tcx.type_of(impl_id).instantiate_identity());
- then {
- span_lint(cx, hir_ty.span);
}
+ && same_type_and_consts(ty, cx.tcx.type_of(impl_id).instantiate_identity())
+ {
+ span_lint(cx, hir_ty.span);
}
}
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- if_chain! {
- if !expr.span.from_expansion();
- if self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS);
- if let Some(&StackItem::Check { impl_id, .. }) = self.stack.last();
- if cx.typeck_results().expr_ty(expr) == cx.tcx.type_of(impl_id).instantiate_identity();
- then {} else { return; }
+ if !expr.span.from_expansion()
+ && self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS)
+ && let Some(&StackItem::Check { impl_id, .. }) = self.stack.last()
+ && cx.typeck_results().expr_ty(expr) == cx.tcx.type_of(impl_id).instantiate_identity()
+ {
+ } else {
+ return;
}
match expr.kind {
ExprKind::Struct(QPath::Resolved(_, path), ..) => check_path(cx, path),
@@ -252,18 +254,16 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
}
fn check_pat(&mut self, cx: &LateContext<'_>, pat: &Pat<'_>) {
- if_chain! {
- if !pat.span.from_expansion();
- if self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS);
- if let Some(&StackItem::Check { impl_id, .. }) = self.stack.last();
+ if !pat.span.from_expansion()
+ && self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS)
+ && let Some(&StackItem::Check { impl_id, .. }) = self.stack.last()
// get the path from the pattern
- if let PatKind::Path(QPath::Resolved(_, path))
+ && let PatKind::Path(QPath::Resolved(_, path))
| PatKind::TupleStruct(QPath::Resolved(_, path), _, _)
- | PatKind::Struct(QPath::Resolved(_, path), _, _) = pat.kind;
- if cx.typeck_results().pat_ty(pat) == cx.tcx.type_of(impl_id).instantiate_identity();
- then {
- check_path(cx, path);
- }
+ | PatKind::Struct(QPath::Resolved(_, path), _, _) = pat.kind
+ && cx.typeck_results().pat_ty(pat) == cx.tcx.type_of(impl_id).instantiate_identity()
+ {
+ check_path(cx, path);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
index 28f1d487e..2e0a0f6cb 100644
--- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs
+++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
@@ -3,9 +3,7 @@ use clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_con
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_copy, is_type_diagnostic_item, same_type_and_consts};
use clippy_utils::{get_parent_expr, is_trait_method, is_ty_alias, path_to_local};
-use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, MatchSource, Node, PatKind};
use rustc_infer::infer::TyCtxtInferExt;
@@ -13,7 +11,7 @@ use rustc_infer::traits::Obligation;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{self, EarlyBinder, GenericArg, GenericArgsRef, Ty, TypeVisitableExt};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{sym, Span};
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
@@ -209,7 +207,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
&& let Some(did) = cx.qpath_res(qpath, recv.hir_id).opt_def_id()
// make sure that the path indeed points to a fn-like item, so that
// `fn_sig` does not ICE. (see #11065)
- && cx.tcx.opt_def_kind(did).is_some_and(DefKind::is_fn_like) =>
+ && cx.tcx.def_kind(did).is_fn_like() =>
{
Some((
did,
@@ -283,7 +281,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
}
if let Some(id) = path_to_local(recv)
- && let Node::Pat(pat) = cx.tcx.hir().get(id)
+ && let Node::Pat(pat) = cx.tcx.hir_node(id)
&& let PatKind::Binding(ann, ..) = pat.kind
&& ann != BindingAnnotation::MUT
{
@@ -311,76 +309,63 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
);
}
}
- if_chain! {
- if is_trait_method(cx, e, sym::TryInto) && name.ident.name == sym::try_into;
- let a = cx.typeck_results().expr_ty(e);
- let b = cx.typeck_results().expr_ty(recv);
- if is_type_diagnostic_item(cx, a, sym::Result);
- if let ty::Adt(_, args) = a.kind();
- if let Some(a_type) = args.types().next();
- if same_type_and_consts(a_type, b);
+ if is_trait_method(cx, e, sym::TryInto)
+ && name.ident.name == sym::try_into
+ && let a = cx.typeck_results().expr_ty(e)
+ && let b = cx.typeck_results().expr_ty(recv)
+ && is_type_diagnostic_item(cx, a, sym::Result)
+ && let ty::Adt(_, args) = a.kind()
+ && let Some(a_type) = args.types().next()
+ && same_type_and_consts(a_type, b)
+ {
+ span_lint_and_help(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{b}`"),
+ None,
+ "consider removing `.try_into()`",
+ );
+ }
+ },
- then {
+ ExprKind::Call(path, [arg]) => {
+ if let ExprKind::Path(ref qpath) = path.kind
+ && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
+ && !is_ty_alias(qpath)
+ {
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(arg);
+ if cx.tcx.is_diagnostic_item(sym::try_from_fn, def_id)
+ && is_type_diagnostic_item(cx, a, sym::Result)
+ && let ty::Adt(_, args) = a.kind()
+ && let Some(a_type) = args.types().next()
+ && same_type_and_consts(a_type, b)
+ {
+ let hint = format!("consider removing `{}()`", snippet(cx, path.span, "TryFrom::try_from"));
span_lint_and_help(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{b}`"),
None,
- "consider removing `.try_into()`",
+ &hint,
);
}
- }
- },
-
- ExprKind::Call(path, [arg]) => {
- if_chain! {
- if let ExprKind::Path(ref qpath) = path.kind;
- if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
- if !is_ty_alias(qpath);
- then {
- let a = cx.typeck_results().expr_ty(e);
- let b = cx.typeck_results().expr_ty(arg);
- if_chain! {
- if cx.tcx.is_diagnostic_item(sym::try_from_fn, def_id);
- if is_type_diagnostic_item(cx, a, sym::Result);
- if let ty::Adt(_, args) = a.kind();
- if let Some(a_type) = args.types().next();
- if same_type_and_consts(a_type, b);
- then {
- let hint = format!("consider removing `{}()`", snippet(cx, path.span, "TryFrom::try_from"));
- span_lint_and_help(
- cx,
- USELESS_CONVERSION,
- e.span,
- &format!("useless conversion to the same type: `{b}`"),
- None,
- &hint,
- );
- }
- }
-
- if_chain! {
- if cx.tcx.is_diagnostic_item(sym::from_fn, def_id);
- if same_type_and_consts(a, b);
-
- then {
- let mut app = Applicability::MachineApplicable;
- let sugg = Sugg::hir_with_context(cx, arg, e.span.ctxt(), "<expr>", &mut app).maybe_par();
- let sugg_msg =
- format!("consider removing `{}()`", snippet(cx, path.span, "From::from"));
- span_lint_and_sugg(
- cx,
- USELESS_CONVERSION,
- e.span,
- &format!("useless conversion to the same type: `{b}`"),
- &sugg_msg,
- sugg.to_string(),
- app,
- );
- }
- }
+ if cx.tcx.is_diagnostic_item(sym::from_fn, def_id) && same_type_and_consts(a, b) {
+ let mut app = Applicability::MachineApplicable;
+ let sugg = Sugg::hir_with_context(cx, arg, e.span.ctxt(), "<expr>", &mut app).maybe_par();
+ let sugg_msg = format!("consider removing `{}()`", snippet(cx, path.span, "From::from"));
+ span_lint_and_sugg(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{b}`"),
+ &sugg_msg,
+ sugg.to_string(),
+ app,
+ );
}
}
},
diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs
index 152248afc..e83c04eda 100644
--- a/src/tools/clippy/clippy_lints/src/utils/author.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/author.rs
@@ -7,7 +7,7 @@ use rustc_ast::LitIntType;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::{
- ArrayLen, BindingAnnotation, Closure, ExprKind, FnRetTy, HirId, Lit, PatKind, QPath, StmtKind, TyKind, CaptureBy
+ ArrayLen, BindingAnnotation, CaptureBy, Closure, ExprKind, FnRetTy, HirId, Lit, PatKind, QPath, StmtKind, TyKind,
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_session::declare_lint_pass;
@@ -629,6 +629,7 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> {
match pat.value.kind {
PatKind::Wild => kind!("Wild"),
+ PatKind::Never => kind!("Never"),
PatKind::Binding(ann, _, name, sub) => {
bind!(self, name);
opt_bind!(self, sub);
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs
index ddcb9f27c..877a77fd6 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs
@@ -1,7 +1,6 @@
pub mod almost_standard_lint_formulation;
pub mod collapsible_calls;
pub mod compiler_lint_functions;
-pub mod if_chain_style;
pub mod interning_defined_symbol;
pub mod invalid_paths;
pub mod lint_without_lint_pass;
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs
index d78f67c05..5ddedb24b 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs
@@ -4,7 +4,7 @@ use regex::Regex;
use rustc_ast as ast;
use rustc_hir::{Item, ItemKind, Mutability};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs
index d7666b77f..7c70d3f45 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::{is_expr_path_def_path, is_lint_allowed, peel_blocks_with_stmt, SpanlessEq};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::{Closure, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use std::borrow::{Borrow, Cow};
@@ -78,45 +77,43 @@ impl<'tcx> LateLintPass<'tcx> for CollapsibleCalls {
return;
}
- if_chain! {
- if let ExprKind::Call(func, and_then_args) = expr.kind;
- if is_expr_path_def_path(cx, func, &["clippy_utils", "diagnostics", "span_lint_and_then"]);
- if and_then_args.len() == 5;
- if let ExprKind::Closure(&Closure { body, .. }) = &and_then_args[4].kind;
- let body = cx.tcx.hir().body(body);
- let only_expr = peel_blocks_with_stmt(body.value);
- if let ExprKind::MethodCall(ps, recv, span_call_args, _) = &only_expr.kind;
- if let ExprKind::Path(..) = recv.kind;
- then {
- let and_then_snippets = get_and_then_snippets(cx, and_then_args);
- let mut sle = SpanlessEq::new(cx).deny_side_effects();
- match ps.ident.as_str() {
- "span_suggestion" if sle.eq_expr(&and_then_args[2], &span_call_args[0]) => {
- suggest_suggestion(
- cx,
- expr,
- &and_then_snippets,
- &span_suggestion_snippets(cx, span_call_args),
- );
- },
- "span_help" if sle.eq_expr(&and_then_args[2], &span_call_args[0]) => {
- let help_snippet = snippet(cx, span_call_args[1].span, r#""...""#);
- suggest_help(cx, expr, &and_then_snippets, help_snippet.borrow(), true);
- },
- "span_note" if sle.eq_expr(&and_then_args[2], &span_call_args[0]) => {
- let note_snippet = snippet(cx, span_call_args[1].span, r#""...""#);
- suggest_note(cx, expr, &and_then_snippets, note_snippet.borrow(), true);
- },
- "help" => {
- let help_snippet = snippet(cx, span_call_args[0].span, r#""...""#);
- suggest_help(cx, expr, &and_then_snippets, help_snippet.borrow(), false);
- },
- "note" => {
- let note_snippet = snippet(cx, span_call_args[0].span, r#""...""#);
- suggest_note(cx, expr, &and_then_snippets, note_snippet.borrow(), false);
- },
- _ => (),
- }
+ if let ExprKind::Call(func, and_then_args) = expr.kind
+ && is_expr_path_def_path(cx, func, &["clippy_utils", "diagnostics", "span_lint_and_then"])
+ && and_then_args.len() == 5
+ && let ExprKind::Closure(&Closure { body, .. }) = &and_then_args[4].kind
+ && let body = cx.tcx.hir().body(body)
+ && let only_expr = peel_blocks_with_stmt(body.value)
+ && let ExprKind::MethodCall(ps, recv, span_call_args, _) = &only_expr.kind
+ && let ExprKind::Path(..) = recv.kind
+ {
+ let and_then_snippets = get_and_then_snippets(cx, and_then_args);
+ let mut sle = SpanlessEq::new(cx).deny_side_effects();
+ match ps.ident.as_str() {
+ "span_suggestion" if sle.eq_expr(&and_then_args[2], &span_call_args[0]) => {
+ suggest_suggestion(
+ cx,
+ expr,
+ &and_then_snippets,
+ &span_suggestion_snippets(cx, span_call_args),
+ );
+ },
+ "span_help" if sle.eq_expr(&and_then_args[2], &span_call_args[0]) => {
+ let help_snippet = snippet(cx, span_call_args[1].span, r#""...""#);
+ suggest_help(cx, expr, &and_then_snippets, help_snippet.borrow(), true);
+ },
+ "span_note" if sle.eq_expr(&and_then_args[2], &span_call_args[0]) => {
+ let note_snippet = snippet(cx, span_call_args[1].span, r#""...""#);
+ suggest_note(cx, expr, &and_then_snippets, note_snippet.borrow(), true);
+ },
+ "help" => {
+ let help_snippet = snippet(cx, span_call_args[0].span, r#""...""#);
+ suggest_help(cx, expr, &and_then_snippets, help_snippet.borrow(), false);
+ },
+ "note" => {
+ let note_snippet = snippet(cx, span_call_args[0].span, r#""...""#);
+ suggest_note(cx, expr, &and_then_snippets, note_snippet.borrow(), false);
+ },
+ _ => (),
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/compiler_lint_functions.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/compiler_lint_functions.rs
index cacd05262..5059712d6 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/compiler_lint_functions.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/compiler_lint_functions.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::match_type;
use clippy_utils::{is_lint_allowed, paths};
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -56,22 +55,20 @@ impl<'tcx> LateLintPass<'tcx> for CompilerLintFunctions {
return;
}
- if_chain! {
- if let ExprKind::MethodCall(path, self_arg, _, _) = &expr.kind;
- let fn_name = path.ident;
- if let Some(sugg) = self.map.get(fn_name.as_str());
- let ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
- if match_type(cx, ty, &paths::EARLY_CONTEXT) || match_type(cx, ty, &paths::LATE_CONTEXT);
- then {
- span_lint_and_help(
- cx,
- COMPILER_LINT_FUNCTIONS,
- path.ident.span,
- "usage of a compiler lint function",
- None,
- &format!("please use the Clippy variant of this function: `{sugg}`"),
- );
- }
+ if let ExprKind::MethodCall(path, self_arg, _, _) = &expr.kind
+ && let fn_name = path.ident
+ && let Some(sugg) = self.map.get(fn_name.as_str())
+ && let ty = cx.typeck_results().expr_ty(self_arg).peel_refs()
+ && (match_type(cx, ty, &paths::EARLY_CONTEXT) || match_type(cx, ty, &paths::LATE_CONTEXT))
+ {
+ span_lint_and_help(
+ cx,
+ COMPILER_LINT_FUNCTIONS,
+ path.ident.span,
+ "usage of a compiler lint function",
+ None,
+ &format!("please use the Clippy variant of this function: `{sugg}`"),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs
deleted file mode 100644
index 8cdd5ea89..000000000
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs
+++ /dev/null
@@ -1,166 +0,0 @@
-use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
-use clippy_utils::{higher, is_else_clause, is_expn_of};
-use if_chain::if_chain;
-use rustc_hir as hir;
-use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, Local, Node, Stmt, StmtKind};
-use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{BytePos, Span};
-
-declare_clippy_lint! {
- /// Finds unidiomatic usage of `if_chain!`
- pub IF_CHAIN_STYLE,
- internal,
- "non-idiomatic `if_chain!` usage"
-}
-
-declare_lint_pass!(IfChainStyle => [IF_CHAIN_STYLE]);
-
-impl<'tcx> LateLintPass<'tcx> for IfChainStyle {
- fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
- let (local, after, if_chain_span) = if_chain! {
- if let [Stmt { kind: StmtKind::Local(local), .. }, after @ ..] = block.stmts;
- if let Some(if_chain_span) = is_expn_of(block.span, "if_chain");
- then { (local, after, if_chain_span) } else { return }
- };
- if is_first_if_chain_expr(cx, block.hir_id, if_chain_span) {
- span_lint(
- cx,
- IF_CHAIN_STYLE,
- if_chain_local_span(cx, local, if_chain_span),
- "`let` expression should be above the `if_chain!`",
- );
- } else if local.span.eq_ctxt(block.span) && is_if_chain_then(after, block.expr, if_chain_span) {
- span_lint(
- cx,
- IF_CHAIN_STYLE,
- if_chain_local_span(cx, local, if_chain_span),
- "`let` expression should be inside `then { .. }`",
- );
- }
- }
-
- fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
- let (cond, then, els) = if let Some(higher::IfOrIfLet { cond, r#else, then }) = higher::IfOrIfLet::hir(expr) {
- (cond, then, r#else.is_some())
- } else {
- return;
- };
- let ExprKind::Block(then_block, _) = then.kind else {
- return;
- };
- let if_chain_span = is_expn_of(expr.span, "if_chain");
- if !els {
- check_nested_if_chains(cx, expr, then_block, if_chain_span);
- }
- let Some(if_chain_span) = if_chain_span else { return };
- // check for `if a && b;`
- if_chain! {
- if let ExprKind::Binary(op, _, _) = cond.kind;
- if op.node == BinOpKind::And;
- if cx.sess().source_map().is_multiline(cond.span);
- then {
- span_lint(cx, IF_CHAIN_STYLE, cond.span, "`if a && b;` should be `if a; if b;`");
- }
- }
- if is_first_if_chain_expr(cx, expr.hir_id, if_chain_span)
- && is_if_chain_then(then_block.stmts, then_block.expr, if_chain_span)
- {
- span_lint(cx, IF_CHAIN_STYLE, expr.span, "`if_chain!` only has one `if`");
- }
- }
-}
-
-fn check_nested_if_chains(
- cx: &LateContext<'_>,
- if_expr: &Expr<'_>,
- then_block: &Block<'_>,
- if_chain_span: Option<Span>,
-) {
- #[rustfmt::skip]
- let (head, tail) = match *then_block {
- Block { stmts, expr: Some(tail), .. } => (stmts, tail),
- Block {
- stmts: &[
- ref head @ ..,
- Stmt { kind: StmtKind::Expr(tail) | StmtKind::Semi(tail), .. }
- ],
- ..
- } => (head, tail),
- _ => return,
- };
- if_chain! {
- if let Some(higher::IfOrIfLet { r#else: None, .. }) = higher::IfOrIfLet::hir(tail);
- let sm = cx.sess().source_map();
- if head
- .iter()
- .all(|stmt| matches!(stmt.kind, StmtKind::Local(..)) && !sm.is_multiline(stmt.span));
- if if_chain_span.is_some() || !is_else_clause(cx.tcx, if_expr);
- then {
- } else {
- return;
- }
- }
- let (span, msg) = match (if_chain_span, is_expn_of(tail.span, "if_chain")) {
- (None, Some(_)) => (if_expr.span, "this `if` can be part of the inner `if_chain!`"),
- (Some(_), None) => (tail.span, "this `if` can be part of the outer `if_chain!`"),
- (Some(a), Some(b)) if a != b => (b, "this `if_chain!` can be merged with the outer `if_chain!`"),
- _ => return,
- };
- span_lint_and_then(cx, IF_CHAIN_STYLE, span, msg, |diag| {
- let (span, msg) = match head {
- [] => return,
- [stmt] => (stmt.span, "this `let` statement can also be in the `if_chain!`"),
- [a, .., b] => (
- a.span.to(b.span),
- "these `let` statements can also be in the `if_chain!`",
- ),
- };
- diag.span_help(span, msg);
- });
-}
-
-fn is_first_if_chain_expr(cx: &LateContext<'_>, hir_id: HirId, if_chain_span: Span) -> bool {
- cx.tcx
- .hir()
- .parent_iter(hir_id)
- .find(|(_, node)| {
- #[rustfmt::skip]
- !matches!(node, Node::Expr(Expr { kind: ExprKind::Block(..), .. }) | Node::Stmt(_))
- })
- .map_or(false, |(id, _)| {
- is_expn_of(cx.tcx.hir().span(id), "if_chain") != Some(if_chain_span)
- })
-}
-
-/// Checks a trailing slice of statements and expression of a `Block` to see if they are part
-/// of the `then {..}` portion of an `if_chain!`
-fn is_if_chain_then(stmts: &[Stmt<'_>], expr: Option<&Expr<'_>>, if_chain_span: Span) -> bool {
- let span = if let [stmt, ..] = stmts {
- stmt.span
- } else if let Some(expr) = expr {
- expr.span
- } else {
- // empty `then {}`
- return true;
- };
- is_expn_of(span, "if_chain").map_or(true, |span| span != if_chain_span)
-}
-
-/// Creates a `Span` for `let x = ..;` in an `if_chain!` call.
-fn if_chain_local_span(cx: &LateContext<'_>, local: &Local<'_>, if_chain_span: Span) -> Span {
- let mut span = local.pat.span;
- if let Some(init) = local.init {
- span = span.to(init.span);
- }
- span.adjust(if_chain_span.ctxt().outer_expn());
- let sm = cx.sess().source_map();
- let span = sm.span_extend_to_prev_str(span, "let", false, true).unwrap_or(span);
- let span = sm.span_extend_to_next_char(span, ';', false);
- Span::new(
- span.lo() - BytePos(3),
- span.hi() + BytePos(1),
- span.ctxt(),
- span.parent(),
- )
-}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
index fc9afe5ca..07879e81f 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
@@ -3,7 +3,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::match_type;
use clippy_utils::{def_path_def_ids, is_expn_of, match_def_path, paths};
-use if_chain::if_chain;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
@@ -12,7 +11,7 @@ use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::ConstValue;
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::sym;
use rustc_span::symbol::Symbol;
@@ -77,15 +76,13 @@ impl<'tcx> LateLintPass<'tcx> for InterningDefinedSymbol {
for &module in &[&paths::KW_MODULE, &paths::SYM_MODULE] {
for def_id in def_path_def_ids(cx, module) {
for item in cx.tcx.module_children(def_id) {
- if_chain! {
- if let Res::Def(DefKind::Const, item_def_id) = item.res;
- let ty = cx.tcx.type_of(item_def_id).instantiate_identity();
- if match_type(cx, ty, &paths::SYMBOL);
- if let Ok(ConstValue::Scalar(value)) = cx.tcx.const_eval_poly(item_def_id);
- if let Ok(value) = value.to_u32();
- then {
- self.symbol_map.insert(value, item_def_id);
- }
+ if let Res::Def(DefKind::Const, item_def_id) = item.res
+ && let ty = cx.tcx.type_of(item_def_id).instantiate_identity()
+ && match_type(cx, ty, &paths::SYMBOL)
+ && let Ok(ConstValue::Scalar(value)) = cx.tcx.const_eval_poly(item_def_id)
+ && let Ok(value) = value.to_u32()
+ {
+ self.symbol_map.insert(value, item_def_id);
}
}
}
@@ -93,24 +90,22 @@ impl<'tcx> LateLintPass<'tcx> for InterningDefinedSymbol {
}
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(func, [arg]) = &expr.kind;
- if let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(func).kind();
- if match_def_path(cx, *def_id, &paths::SYMBOL_INTERN);
- if let Some(Constant::Str(arg)) = constant_simple(cx, cx.typeck_results(), arg);
- let value = Symbol::intern(&arg).as_u32();
- if let Some(&def_id) = self.symbol_map.get(&value);
- then {
- span_lint_and_sugg(
- cx,
- INTERNING_DEFINED_SYMBOL,
- is_expn_of(expr.span, "sym").unwrap_or(expr.span),
- "interning a defined symbol",
- "try",
- cx.tcx.def_path_str(def_id),
- Applicability::MachineApplicable,
- );
- }
+ if let ExprKind::Call(func, [arg]) = &expr.kind
+ && let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(func).kind()
+ && match_def_path(cx, *def_id, &paths::SYMBOL_INTERN)
+ && let Some(Constant::Str(arg)) = constant_simple(cx, cx.typeck_results(), arg)
+ && let value = Symbol::intern(&arg).as_u32()
+ && let Some(&def_id) = self.symbol_map.get(&value)
+ {
+ span_lint_and_sugg(
+ cx,
+ INTERNING_DEFINED_SYMBOL,
+ is_expn_of(expr.span, "sym").unwrap_or(expr.span),
+ "interning a defined symbol",
+ "try",
+ cx.tcx.def_path_str(def_id),
+ Applicability::MachineApplicable,
+ );
}
if let ExprKind::Binary(op, left, right) = expr.kind {
if matches!(op.node, BinOpKind::Eq | BinOpKind::Ne) {
@@ -163,27 +158,28 @@ impl InterningDefinedSymbol {
fn symbol_str_expr<'tcx>(&self, expr: &'tcx Expr<'tcx>, cx: &LateContext<'tcx>) -> Option<SymbolStrExpr<'tcx>> {
static IDENT_STR_PATHS: &[&[&str]] = &[&paths::IDENT_AS_STR];
static SYMBOL_STR_PATHS: &[&[&str]] = &[&paths::SYMBOL_AS_STR, &paths::SYMBOL_TO_IDENT_STRING];
- let call = if_chain! {
- if let ExprKind::AddrOf(_, _, e) = expr.kind;
- if let ExprKind::Unary(UnOp::Deref, e) = e.kind;
- then { e } else { expr }
+ let call = if let ExprKind::AddrOf(_, _, e) = expr.kind
+ && let ExprKind::Unary(UnOp::Deref, e) = e.kind
+ {
+ e
+ } else {
+ expr
};
- if_chain! {
+ if let ExprKind::MethodCall(_, item, [], _) = call.kind
// is a method call
- if let ExprKind::MethodCall(_, item, [], _) = call.kind;
- if let Some(did) = cx.typeck_results().type_dependent_def_id(call.hir_id);
- let ty = cx.typeck_results().expr_ty(item);
+ && let Some(did) = cx.typeck_results().type_dependent_def_id(call.hir_id)
+ && let ty = cx.typeck_results().expr_ty(item)
// ...on either an Ident or a Symbol
- if let Some(is_ident) = if match_type(cx, ty, &paths::SYMBOL) {
+ && let Some(is_ident) = if match_type(cx, ty, &paths::SYMBOL) {
Some(false)
} else if match_type(cx, ty, &paths::IDENT) {
Some(true)
} else {
None
- };
+ }
// ...which converts it to a string
- let paths = if is_ident { IDENT_STR_PATHS } else { SYMBOL_STR_PATHS };
- if let Some(is_to_owned) = paths
+ && let paths = if is_ident { IDENT_STR_PATHS } else { SYMBOL_STR_PATHS }
+ && let Some(is_to_owned) = paths
.iter()
.find_map(|path| if match_def_path(cx, did, path) {
Some(path == &paths::SYMBOL_TO_IDENT_STRING)
@@ -194,14 +190,13 @@ impl InterningDefinedSymbol {
Some(true)
} else {
None
- });
- then {
- return Some(SymbolStrExpr::Expr {
- item,
- is_ident,
- is_to_owned,
- });
- }
+ })
+ {
+ return Some(SymbolStrExpr::Expr {
+ item,
+ is_ident,
+ is_to_owned,
+ });
}
// is a string constant
if let Some(Constant::Str(s)) = constant_simple(cx, cx.typeck_results(), expr) {
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs
index 250772238..4fb615e1d 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs
@@ -1,14 +1,13 @@
use clippy_utils::consts::{constant_simple, Constant};
use clippy_utils::def_path_res;
use clippy_utils::diagnostics::span_lint;
-use if_chain::if_chain;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::Item;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::fast_reject::SimplifiedType;
use rustc_middle::ty::FloatTy;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Symbol;
declare_clippy_lint! {
@@ -31,13 +30,12 @@ impl<'tcx> LateLintPass<'tcx> for InvalidPaths {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
let local_def_id = &cx.tcx.parent_module(item.hir_id());
let mod_name = &cx.tcx.item_name(local_def_id.to_def_id());
- if_chain! {
- if mod_name.as_str() == "paths";
- if let hir::ItemKind::Const(.., body_id) = item.kind;
- let body = cx.tcx.hir().body(body_id);
- let typeck_results = cx.tcx.typeck_body(body_id);
- if let Some(Constant::Vec(path)) = constant_simple(cx, typeck_results, body.value);
- if let Some(path) = path
+ if mod_name.as_str() == "paths"
+ && let hir::ItemKind::Const(.., body_id) = item.kind
+ && let body = cx.tcx.hir().body(body_id)
+ && let typeck_results = cx.tcx.typeck_body(body_id)
+ && let Some(Constant::Vec(path)) = constant_simple(cx, typeck_results, body.value)
+ && let Some(path) = path
.iter()
.map(|x| {
if let Constant::Str(s) = x {
@@ -46,11 +44,10 @@ impl<'tcx> LateLintPass<'tcx> for InvalidPaths {
None
}
})
- .collect::<Option<Vec<&str>>>();
- if !check_path(cx, &path[..]);
- then {
- span_lint(cx, INVALID_PATHS, item.span, "invalid path");
- }
+ .collect::<Option<Vec<&str>>>()
+ && !check_path(cx, &path[..])
+ {
+ span_lint(cx, INVALID_PATHS, item.span, "invalid path");
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs
index 00e352961..370ed430b 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs
@@ -2,7 +2,6 @@ use crate::utils::internal_lints::metadata_collector::is_deprecated_lint;
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::macros::root_macro_call_first_node;
use clippy_utils::{is_lint_allowed, match_def_path, paths};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::def::{DefKind, Res};
@@ -12,7 +11,7 @@ use rustc_hir::{ExprKind, HirId, Item, MutTy, Mutability, Path, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_semver::RustcVersion;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::Symbol;
use rustc_span::{sym, Span};
@@ -309,14 +308,16 @@ fn check_invalid_clippy_version_attribute(cx: &LateContext<'_>, item: &'_ Item<'
pub(super) fn extract_clippy_version_value(cx: &LateContext<'_>, item: &'_ Item<'_>) -> Option<Symbol> {
let attrs = cx.tcx.hir().attrs(item.hir_id());
attrs.iter().find_map(|attr| {
- if_chain! {
+ if let ast::AttrKind::Normal(ref attr_kind) = &attr.kind
// Identify attribute
- if let ast::AttrKind::Normal(ref attr_kind) = &attr.kind;
- if let [tool_name, attr_name] = &attr_kind.item.path.segments[..];
- if tool_name.ident.name == sym::clippy;
- if attr_name.ident.name == sym::version;
- if let Some(version) = attr.value_str();
- then { Some(version) } else { None }
+ && let [tool_name, attr_name] = &attr_kind.item.path.segments[..]
+ && tool_name.ident.name == sym::clippy
+ && attr_name.ident.name == sym::version
+ && let Some(version) = attr.value_str()
+ {
+ Some(version)
+ } else {
+ None
}
})
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
index 51abe0c1d..fae1b90ac 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
@@ -14,27 +14,26 @@ use clippy_config::{get_configuration_metadata, ClippyConfiguration};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::{match_type, walk_ptrs_ty_depth};
use clippy_utils::{last_path_segment, match_def_path, match_function_call, match_path, paths};
-use if_chain::if_chain;
use itertools::Itertools;
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::DefKind;
use rustc_hir::intravisit::Visitor;
use rustc_hir::{self as hir, intravisit, Closure, ExprKind, Item, ItemKind, Mutability, QPath};
-use rustc_lint::{CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId};
+use rustc_lint::{unerased_lint_store, CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId};
use rustc_middle::hir::nested_filter;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::Ident;
use rustc_span::{sym, Loc, Span, Symbol};
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
use std::collections::{BTreeSet, BinaryHeap};
-use std::fmt;
use std::fmt::Write as _;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
+use std::{env, fmt};
/// This is the json output file of the lint collector.
const JSON_OUTPUT_FILE: &str = "../util/gh-pages/lints.json";
@@ -416,7 +415,7 @@ fn get_lint_output(lint_name: &str, example: &[&mut String], clippy_project_root
let prefixed_name = format!("{CLIPPY_LINT_GROUP_PREFIX}{lint_name}");
- let mut cmd = Command::new("cargo");
+ let mut cmd = Command::new(env::var("CARGO").unwrap_or("cargo".into()));
cmd.current_dir(clippy_project_root)
.env("CARGO_INCREMENTAL", "0")
@@ -543,49 +542,45 @@ impl<'hir> LateLintPass<'hir> for MetadataCollector {
fn check_item(&mut self, cx: &LateContext<'hir>, item: &'hir Item<'_>) {
if let ItemKind::Static(ty, Mutability::Not, _) = item.kind {
// Normal lint
- if_chain! {
+ if is_lint_ref_type(cx, ty)
// item validation
- if is_lint_ref_type(cx, ty);
// disallow check
- let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
+ && let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase()
// metadata extraction
- if let Some((group, level)) = get_lint_group_and_level_or_lint(cx, &lint_name, item);
- if let Some(mut raw_docs) = extract_attr_docs_or_lint(cx, item);
- then {
- if let Some(configuration_section) = self.get_lint_configs(&lint_name) {
- raw_docs.push_str(&configuration_section);
- }
- let version = get_lint_version(cx, item);
-
- self.lints.push(LintMetadata::new(
- lint_name,
- SerializableSpan::from_item(cx, item),
- group,
- level,
- version,
- raw_docs,
- ));
+ && let Some((group, level)) = get_lint_group_and_level_or_lint(cx, &lint_name, item)
+ && let Some(mut raw_docs) = extract_attr_docs_or_lint(cx, item)
+ {
+ if let Some(configuration_section) = self.get_lint_configs(&lint_name) {
+ raw_docs.push_str(&configuration_section);
}
+ let version = get_lint_version(cx, item);
+
+ self.lints.push(LintMetadata::new(
+ lint_name,
+ SerializableSpan::from_item(cx, item),
+ group,
+ level,
+ version,
+ raw_docs,
+ ));
}
- if_chain! {
- if is_deprecated_lint(cx, ty);
+ if is_deprecated_lint(cx, ty)
// disallow check
- let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
+ && let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase()
// Metadata the little we can get from a deprecated lint
- if let Some(raw_docs) = extract_attr_docs_or_lint(cx, item);
- then {
- let version = get_lint_version(cx, item);
-
- self.lints.push(LintMetadata::new(
- lint_name,
- SerializableSpan::from_item(cx, item),
- DEPRECATED_LINT_GROUP_STR.to_string(),
- DEPRECATED_LINT_LEVEL,
- version,
- raw_docs,
- ));
- }
+ && let Some(raw_docs) = extract_attr_docs_or_lint(cx, item)
+ {
+ let version = get_lint_version(cx, item);
+
+ self.lints.push(LintMetadata::new(
+ lint_name,
+ SerializableSpan::from_item(cx, item),
+ DEPRECATED_LINT_GROUP_STR.to_string(),
+ DEPRECATED_LINT_LEVEL,
+ version,
+ raw_docs,
+ ));
}
}
}
@@ -719,7 +714,7 @@ fn get_lint_group_and_level_or_lint(
lint_name: &str,
item: &Item<'_>,
) -> Option<(String, &'static str)> {
- let result = cx.lint_store.check_lint_name(
+ let result = unerased_lint_store(cx.tcx.sess).check_lint_name(
lint_name,
Some(sym::clippy),
&std::iter::once(Ident::with_dummy_span(sym::clippy)).collect(),
@@ -751,7 +746,7 @@ fn get_lint_group_and_level_or_lint(
}
fn get_lint_group(cx: &LateContext<'_>, lint_id: LintId) -> Option<String> {
- for (group_name, lints, _) in cx.lint_store.get_lint_groups() {
+ for (group_name, lints, _) in unerased_lint_store(cx.tcx.sess).get_lint_groups() {
if IGNORED_LINT_GROUPS.contains(&group_name) {
continue;
}
@@ -789,15 +784,13 @@ fn collect_renames(lints: &mut Vec<LintMetadata>) {
loop {
if let Some(lint_name) = names.pop() {
for (k, v) in RENAMED_LINTS {
- if_chain! {
- if let Some(name) = v.strip_prefix(CLIPPY_LINT_GROUP_PREFIX);
- if name == lint_name;
- if let Some(past_name) = k.strip_prefix(CLIPPY_LINT_GROUP_PREFIX);
- then {
- lint.former_ids.insert(past_name.to_owned());
- writeln!(collected, "* `{past_name}`").unwrap();
- names.push(past_name.to_string());
- }
+ if let Some(name) = v.strip_prefix(CLIPPY_LINT_GROUP_PREFIX)
+ && name == lint_name
+ && let Some(past_name) = k.strip_prefix(CLIPPY_LINT_GROUP_PREFIX)
+ {
+ lint.former_ids.insert(past_name.to_owned());
+ writeln!(collected, "* `{past_name}`").unwrap();
+ names.push(past_name.to_string());
}
}
@@ -927,20 +920,17 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for LintResolver<'a, 'hir> {
}
fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
- if_chain! {
- if let ExprKind::Path(qpath) = &expr.kind;
- if let QPath::Resolved(_, path) = qpath;
-
- let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr));
- if match_type(self.cx, expr_ty, &paths::LINT);
- then {
- if let hir::def::Res::Def(DefKind::Static(..), _) = path.res {
- let lint_name = last_path_segment(qpath).ident.name;
- self.lints.push(sym_to_string(lint_name).to_ascii_lowercase());
- } else if let Some(local) = get_parent_local(self.cx, expr) {
- if let Some(local_init) = local.init {
- intravisit::walk_expr(self, local_init);
- }
+ if let ExprKind::Path(qpath) = &expr.kind
+ && let QPath::Resolved(_, path) = qpath
+ && let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr))
+ && match_type(self.cx, expr_ty, &paths::LINT)
+ {
+ if let hir::def::Res::Def(DefKind::Static(..), _) = path.res {
+ let lint_name = last_path_segment(qpath).ident.name;
+ self.lints.push(sym_to_string(lint_name).to_ascii_lowercase());
+ } else if let Some(local) = get_parent_local(self.cx, expr) {
+ if let Some(local_init) = local.init {
+ intravisit::walk_expr(self, local_init);
}
}
}
@@ -992,13 +982,11 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for ApplicabilityResolver<'a, 'hir> {
fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr));
- if_chain! {
- if match_type(self.cx, expr_ty, &paths::APPLICABILITY);
- if let Some(local) = get_parent_local(self.cx, expr);
- if let Some(local_init) = local.init;
- then {
- intravisit::walk_expr(self, local_init);
- }
+ if match_type(self.cx, expr_ty, &paths::APPLICABILITY)
+ && let Some(local) = get_parent_local(self.cx, expr)
+ && let Some(local_init) = local.init
+ {
+ intravisit::walk_expr(self, local_init);
};
intravisit::walk_expr(self, expr);
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs
index 86b77a77f..6d5240db8 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs
@@ -2,12 +2,11 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::match_type;
use clippy_utils::{match_def_path, paths};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::{self, EarlyBinder, GenericArgKind};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -22,40 +21,41 @@ declare_lint_pass!(MsrvAttrImpl => [MISSING_MSRV_ATTR_IMPL]);
impl LateLintPass<'_> for MsrvAttrImpl {
fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
- if_chain! {
- if let hir::ItemKind::Impl(hir::Impl {
- of_trait: Some(_),
- items,
- ..
- }) = &item.kind;
- if let Some(trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(EarlyBinder::instantiate_identity);
- let is_late_pass = match_def_path(cx, trait_ref.def_id, &paths::LATE_LINT_PASS);
- if is_late_pass || match_def_path(cx, trait_ref.def_id, &paths::EARLY_LINT_PASS);
- if let ty::Adt(self_ty_def, _) = trait_ref.self_ty().kind();
- if self_ty_def.is_struct();
- if self_ty_def.all_fields().any(|f| {
+ if let hir::ItemKind::Impl(hir::Impl {
+ of_trait: Some(_),
+ items,
+ ..
+ }) = &item.kind
+ && let Some(trait_ref) = cx
+ .tcx
+ .impl_trait_ref(item.owner_id)
+ .map(EarlyBinder::instantiate_identity)
+ && let is_late_pass = match_def_path(cx, trait_ref.def_id, &paths::LATE_LINT_PASS)
+ && (is_late_pass || match_def_path(cx, trait_ref.def_id, &paths::EARLY_LINT_PASS))
+ && let ty::Adt(self_ty_def, _) = trait_ref.self_ty().kind()
+ && self_ty_def.is_struct()
+ && self_ty_def.all_fields().any(|f| {
cx.tcx
.type_of(f.did)
.instantiate_identity()
.walk()
.filter(|t| matches!(t.unpack(), GenericArgKind::Type(_)))
.any(|t| match_type(cx, t.expect_ty(), &paths::MSRV))
- });
- if !items.iter().any(|item| item.ident.name == sym!(enter_lint_attrs));
- then {
- let context = if is_late_pass { "LateContext" } else { "EarlyContext" };
- let lint_pass = if is_late_pass { "LateLintPass" } else { "EarlyLintPass" };
- let span = cx.sess().source_map().span_through_char(item.span, '{');
- span_lint_and_sugg(
- cx,
- MISSING_MSRV_ATTR_IMPL,
- span,
- &format!("`extract_msrv_attr!` macro missing from `{lint_pass}` implementation"),
- &format!("add `extract_msrv_attr!({context})` to the `{lint_pass}` implementation"),
- format!("{}\n extract_msrv_attr!({context});", snippet(cx, span, "..")),
- Applicability::MachineApplicable,
- );
- }
+ })
+ && !items.iter().any(|item| item.ident.name == sym!(enter_lint_attrs))
+ {
+ let context = if is_late_pass { "LateContext" } else { "EarlyContext" };
+ let lint_pass = if is_late_pass { "LateLintPass" } else { "EarlyLintPass" };
+ let span = cx.sess().source_map().span_through_char(item.span, '{');
+ span_lint_and_sugg(
+ cx,
+ MISSING_MSRV_ATTR_IMPL,
+ span,
+ &format!("`extract_msrv_attr!` macro missing from `{lint_pass}` implementation"),
+ &format!("add `extract_msrv_attr!({context})` to the `{lint_pass}` implementation"),
+ format!("{}\n extract_msrv_attr!({context});", snippet(cx, span, "..")),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs
index 2b13fad80..326e17214 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::ty::match_type;
use clippy_utils::{is_lint_allowed, method_calls, paths};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::Symbol;
declare_clippy_lint! {
@@ -40,23 +39,21 @@ impl<'tcx> LateLintPass<'tcx> for OuterExpnDataPass {
let (method_names, arg_lists, spans) = method_calls(expr, 2);
let method_names: Vec<&str> = method_names.iter().map(Symbol::as_str).collect();
- if_chain! {
- if let ["expn_data", "outer_expn"] = method_names.as_slice();
- let (self_arg, args) = arg_lists[1];
- if args.is_empty();
- let self_ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
- if match_type(cx, self_ty, &paths::SYNTAX_CONTEXT);
- then {
- span_lint_and_sugg(
- cx,
- OUTER_EXPN_EXPN_DATA,
- spans[1].with_hi(expr.span.hi()),
- "usage of `outer_expn().expn_data()`",
- "try",
- "outer_expn_data()".to_string(),
- Applicability::MachineApplicable,
- );
- }
+ if let ["expn_data", "outer_expn"] = method_names.as_slice()
+ && let (self_arg, args) = arg_lists[1]
+ && args.is_empty()
+ && let self_ty = cx.typeck_results().expr_ty(self_arg).peel_refs()
+ && match_type(cx, self_ty, &paths::SYNTAX_CONTEXT)
+ {
+ span_lint_and_sugg(
+ cx,
+ OUTER_EXPN_EXPN_DATA,
+ spans[1].with_hi(expr.span.hi()),
+ "usage of `outer_expn().expn_data()`",
+ "try",
+ "outer_expn_data()".to_string(),
+ Applicability::MachineApplicable,
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs
index 5899b94e1..9169e2968 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs
@@ -1,7 +1,7 @@
use rustc_ast::ast::NodeId;
use rustc_ast::visit::FnKind;
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
index 81be04659..6e449dc98 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then};
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{def_path_def_ids, is_lint_allowed, match_any_def_paths, peel_hir_expr_refs};
-use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_errors::Applicability;
@@ -13,7 +12,7 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::interpret::{Allocation, GlobalAlloc};
use rustc_middle::mir::ConstValue;
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::Symbol;
use rustc_span::Span;
@@ -102,108 +101,106 @@ impl UnnecessaryDefPath {
&["clippy_utils", "is_expr_path_def_path"],
];
- if_chain! {
- if let [cx_arg, def_arg, args @ ..] = args;
- if let ExprKind::Path(path) = &func.kind;
- if let Some(id) = cx.qpath_res(path, func.hir_id).opt_def_id();
- if let Some(which_path) = match_any_def_paths(cx, id, PATHS);
- let item_arg = if which_path == 4 { &args[1] } else { &args[0] };
+ if let [cx_arg, def_arg, args @ ..] = args
+ && let ExprKind::Path(path) = &func.kind
+ && let Some(id) = cx.qpath_res(path, func.hir_id).opt_def_id()
+ && let Some(which_path) = match_any_def_paths(cx, id, PATHS)
+ && let item_arg = if which_path == 4 { &args[1] } else { &args[0] }
// Extract the path to the matched type
- if let Some(segments) = path_to_matched_type(cx, item_arg);
- let segments: Vec<&str> = segments.iter().map(|sym| &**sym).collect();
- if let Some(def_id) = def_path_def_ids(cx, &segments[..]).next();
- then {
- // Check if the target item is a diagnostic item or LangItem.
- #[rustfmt::skip]
- let (msg, item) = if let Some(item_name)
- = cx.tcx.diagnostic_items(def_id.krate).id_to_name.get(&def_id)
- {
- (
- "use of a def path to a diagnostic item",
- Item::DiagnosticItem(*item_name),
- )
- } else if let Some(item_name) = get_lang_item_name(cx, def_id) {
- (
- "use of a def path to a `LangItem`",
- Item::LangItem(item_name),
- )
- } else {
- return;
- };
+ && let Some(segments) = path_to_matched_type(cx, item_arg)
+ && let segments = segments.iter().map(|sym| &**sym).collect::<Vec<_>>()
+ && let Some(def_id) = def_path_def_ids(cx, &segments[..]).next()
+ {
+ // Check if the target item is a diagnostic item or LangItem.
+ #[rustfmt::skip]
+ let (msg, item) = if let Some(item_name)
+ = cx.tcx.diagnostic_items(def_id.krate).id_to_name.get(&def_id)
+ {
+ (
+ "use of a def path to a diagnostic item",
+ Item::DiagnosticItem(*item_name),
+ )
+ } else if let Some(item_name) = get_lang_item_name(cx, def_id) {
+ (
+ "use of a def path to a `LangItem`",
+ Item::LangItem(item_name),
+ )
+ } else {
+ return;
+ };
- let has_ctor = match cx.tcx.def_kind(def_id) {
- DefKind::Struct => {
- let variant = cx.tcx.adt_def(def_id).non_enum_variant();
- variant.ctor.is_some() && variant.fields.iter().all(|f| f.vis.is_public())
- },
- DefKind::Variant => {
- let variant = cx.tcx.adt_def(cx.tcx.parent(def_id)).variant_with_id(def_id);
- variant.ctor.is_some() && variant.fields.iter().all(|f| f.vis.is_public())
- },
- _ => false,
- };
+ let has_ctor = match cx.tcx.def_kind(def_id) {
+ DefKind::Struct => {
+ let variant = cx.tcx.adt_def(def_id).non_enum_variant();
+ variant.ctor.is_some() && variant.fields.iter().all(|f| f.vis.is_public())
+ },
+ DefKind::Variant => {
+ let variant = cx.tcx.adt_def(cx.tcx.parent(def_id)).variant_with_id(def_id);
+ variant.ctor.is_some() && variant.fields.iter().all(|f| f.vis.is_public())
+ },
+ _ => false,
+ };
- let mut app = Applicability::MachineApplicable;
- let cx_snip = snippet_with_applicability(cx, cx_arg.span, "..", &mut app);
- let def_snip = snippet_with_applicability(cx, def_arg.span, "..", &mut app);
- let (sugg, with_note) = match (which_path, item) {
- // match_def_path
- (0, Item::DiagnosticItem(item)) => (
- format!("{cx_snip}.tcx.is_diagnostic_item(sym::{item}, {def_snip})"),
- has_ctor,
- ),
- (0, Item::LangItem(item)) => (
- format!("{cx_snip}.tcx.lang_items().get(LangItem::{item}) == Some({def_snip})"),
- has_ctor,
- ),
- // match_trait_method
- (1, Item::DiagnosticItem(item)) => {
- (format!("is_trait_method({cx_snip}, {def_snip}, sym::{item})"), false)
- },
- // match_type
- (2, Item::DiagnosticItem(item)) => (
- format!("is_type_diagnostic_item({cx_snip}, {def_snip}, sym::{item})"),
- false,
- ),
- (2, Item::LangItem(item)) => (
- format!("is_type_lang_item({cx_snip}, {def_snip}, LangItem::{item})"),
- false,
- ),
- // is_expr_path_def_path
- (3, Item::DiagnosticItem(item)) if has_ctor => (
- format!("is_res_diag_ctor({cx_snip}, path_res({cx_snip}, {def_snip}), sym::{item})",),
- false,
- ),
- (3, Item::LangItem(item)) if has_ctor => (
- format!("is_res_lang_ctor({cx_snip}, path_res({cx_snip}, {def_snip}), LangItem::{item})",),
- false,
- ),
- (3, Item::DiagnosticItem(item)) => (
- format!("is_path_diagnostic_item({cx_snip}, {def_snip}, sym::{item})"),
- false,
- ),
- (3, Item::LangItem(item)) => (
- format!(
- "path_res({cx_snip}, {def_snip}).opt_def_id()\
- .map_or(false, |id| {cx_snip}.tcx.lang_items().get(LangItem::{item}) == Some(id))",
- ),
- false,
+ let mut app = Applicability::MachineApplicable;
+ let cx_snip = snippet_with_applicability(cx, cx_arg.span, "..", &mut app);
+ let def_snip = snippet_with_applicability(cx, def_arg.span, "..", &mut app);
+ let (sugg, with_note) = match (which_path, item) {
+ // match_def_path
+ (0, Item::DiagnosticItem(item)) => (
+ format!("{cx_snip}.tcx.is_diagnostic_item(sym::{item}, {def_snip})"),
+ has_ctor,
+ ),
+ (0, Item::LangItem(item)) => (
+ format!("{cx_snip}.tcx.lang_items().get(LangItem::{item}) == Some({def_snip})"),
+ has_ctor,
+ ),
+ // match_trait_method
+ (1, Item::DiagnosticItem(item)) => {
+ (format!("is_trait_method({cx_snip}, {def_snip}, sym::{item})"), false)
+ },
+ // match_type
+ (2, Item::DiagnosticItem(item)) => (
+ format!("is_type_diagnostic_item({cx_snip}, {def_snip}, sym::{item})"),
+ false,
+ ),
+ (2, Item::LangItem(item)) => (
+ format!("is_type_lang_item({cx_snip}, {def_snip}, LangItem::{item})"),
+ false,
+ ),
+ // is_expr_path_def_path
+ (3, Item::DiagnosticItem(item)) if has_ctor => (
+ format!("is_res_diag_ctor({cx_snip}, path_res({cx_snip}, {def_snip}), sym::{item})",),
+ false,
+ ),
+ (3, Item::LangItem(item)) if has_ctor => (
+ format!("is_res_lang_ctor({cx_snip}, path_res({cx_snip}, {def_snip}), LangItem::{item})",),
+ false,
+ ),
+ (3, Item::DiagnosticItem(item)) => (
+ format!("is_path_diagnostic_item({cx_snip}, {def_snip}, sym::{item})"),
+ false,
+ ),
+ (3, Item::LangItem(item)) => (
+ format!(
+ "path_res({cx_snip}, {def_snip}).opt_def_id()\
+ .map_or(false, |id| {cx_snip}.tcx.lang_items().get(LangItem::{item}) == Some(id))",
),
- _ => return,
- };
+ false,
+ ),
+ _ => return,
+ };
- span_lint_and_then(cx, UNNECESSARY_DEF_PATH, span, msg, |diag| {
- diag.span_suggestion(span, "try", sugg, app);
- if with_note {
- diag.help(
- "if this `DefId` came from a constructor expression or pattern then the \
- parent `DefId` should be used instead",
- );
- }
- });
+ span_lint_and_then(cx, UNNECESSARY_DEF_PATH, span, msg, |diag| {
+ diag.span_suggestion(span, "try", sugg, app);
+ if with_note {
+ diag.help(
+ "if this `DefId` came from a constructor expression or pattern then the \
+ parent `DefId` should be used instead",
+ );
+ }
+ });
- self.linted_def_ids.insert(def_id);
- }
+ self.linted_def_ids.insert(def_id);
}
}
@@ -221,7 +218,7 @@ fn path_to_matched_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Ve
ExprKind::Path(ref qpath) => match cx.qpath_res(qpath, expr.hir_id) {
Res::Local(hir_id) => {
let parent_id = cx.tcx.hir().parent_id(hir_id);
- if let Some(Node::Local(Local { init: Some(init), .. })) = cx.tcx.hir().find(parent_id) {
+ if let Node::Local(Local { init: Some(init), .. }) = cx.tcx.hir_node(parent_id) {
path_to_matched_type(cx, init)
} else {
None
@@ -249,7 +246,7 @@ fn path_to_matched_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Ve
fn read_mir_alloc_def_path<'tcx>(cx: &LateContext<'tcx>, alloc: &'tcx Allocation, ty: Ty<'_>) -> Option<Vec<String>> {
let (alloc, ty) = if let ty::Ref(_, ty, Mutability::Not) = *ty.kind() {
let &alloc = alloc.provenance().ptrs().values().next()?;
- if let GlobalAlloc::Memory(alloc) = cx.tcx.global_alloc(alloc) {
+ if let GlobalAlloc::Memory(alloc) = cx.tcx.global_alloc(alloc.alloc_id()) {
(alloc.inner(), ty)
} else {
return None;
@@ -267,7 +264,7 @@ fn read_mir_alloc_def_path<'tcx>(cx: &LateContext<'tcx>, alloc: &'tcx Allocation
.ptrs()
.values()
.map(|&alloc| {
- if let GlobalAlloc::Memory(alloc) = cx.tcx.global_alloc(alloc) {
+ if let GlobalAlloc::Memory(alloc) = cx.tcx.global_alloc(alloc.alloc_id()) {
let alloc = alloc.inner();
str::from_utf8(alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()))
.ok()
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs
index fd51bca9e..a5c4bf474 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::{Crate, ItemKind, ModKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/vec.rs b/src/tools/clippy/clippy_lints/src/vec.rs
index a9a3aaad3..5e13c73f0 100644
--- a/src/tools/clippy/clippy_lints/src/vec.rs
+++ b/src/tools/clippy/clippy_lints/src/vec.rs
@@ -1,26 +1,27 @@
+use std::collections::BTreeMap;
use std::ops::ControlFlow;
use clippy_config::msrvs::{self, Msrv};
use clippy_utils::consts::{constant, Constant};
-use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_copy;
use clippy_utils::visitors::for_each_local_use_after_expr;
use clippy_utils::{get_parent_expr, higher, is_trait_method};
-use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Node, PatKind};
+use rustc_hir::{BorrowKind, Expr, ExprKind, HirId, Mutability, Node, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, Ty};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{sym, Span};
+use rustc_session::impl_lint_pass;
+use rustc_span::{sym, DesugaringKind, Span};
#[expect(clippy::module_name_repetitions)]
#[derive(Clone)]
pub struct UselessVec {
pub too_large_for_stack: u64,
pub msrv: Msrv,
+ pub span_to_lint_map: BTreeMap<Span, Option<(HirId, SuggestedType, String, Applicability)>>,
}
declare_clippy_lint! {
@@ -58,7 +59,7 @@ fn adjusts_to_slice(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
/// Checks if the given expression is a method call to a `Vec` method
/// that also exists on slices. If this returns true, it means that
/// this expression does not actually require a `Vec` and could just work with an array.
-fn is_allowed_vec_method(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+pub fn is_allowed_vec_method(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
const ALLOWED_METHOD_NAMES: &[&str] = &["len", "as_ptr", "is_empty"];
if let ExprKind::MethodCall(path, ..) = e.kind {
@@ -70,11 +71,56 @@ fn is_allowed_vec_method(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
impl<'tcx> LateLintPass<'tcx> for UselessVec {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- // search for `&vec![_]` or `vec![_]` expressions where the adjusted type is `&[_]`
- if_chain! {
- if adjusts_to_slice(cx, expr);
- if let Some(vec_args) = higher::VecArgs::hir(cx, expr.peel_borrows());
- then {
+ if let Some(vec_args) = higher::VecArgs::hir(cx, expr.peel_borrows()) {
+ // search for `let foo = vec![_]` expressions where all uses of `foo`
+ // adjust to slices or call a method that exist on slices (e.g. len)
+ if let Node::Local(local) = cx.tcx.hir().get_parent(expr.hir_id)
+ // for now ignore locals with type annotations.
+ // this is to avoid compile errors when doing the suggestion here: let _: Vec<_> = vec![..];
+ && local.ty.is_none()
+ && let PatKind::Binding(_, id, ..) = local.pat.kind
+ && is_copy(cx, vec_type(cx.typeck_results().expr_ty_adjusted(expr.peel_borrows())))
+ {
+ let only_slice_uses = for_each_local_use_after_expr(cx, id, expr.hir_id, |expr| {
+ // allow indexing into a vec and some set of allowed method calls that exist on slices, too
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && (adjusts_to_slice(cx, expr)
+ || matches!(parent.kind, ExprKind::Index(..))
+ || is_allowed_vec_method(cx, parent))
+ {
+ ControlFlow::Continue(())
+ } else {
+ ControlFlow::Break(())
+ }
+ })
+ .is_continue();
+
+ let span = expr.span.ctxt().outer_expn_data().call_site;
+ if only_slice_uses {
+ self.check_vec_macro(cx, &vec_args, span, expr.hir_id, SuggestedType::Array);
+ } else {
+ self.span_to_lint_map.insert(span, None);
+ }
+ }
+ // if the local pattern has a specified type, do not lint.
+ else if let Some(_) = higher::VecArgs::hir(cx, expr)
+ && let Node::Local(local) = cx.tcx.hir().get_parent(expr.hir_id)
+ && local.ty.is_some()
+ {
+ let span = expr.span.ctxt().outer_expn_data().call_site;
+ self.span_to_lint_map.insert(span, None);
+ }
+ // search for `for _ in vec![...]`
+ else if let Some(parent) = get_parent_expr(cx, expr)
+ && parent.span.is_desugaring(DesugaringKind::ForLoop)
+ && self.msrv.meets(msrvs::ARRAY_INTO_ITERATOR)
+ {
+ // report the error around the `vec!` not inside `<std macros>:`
+ let span = expr.span.ctxt().outer_expn_data().call_site;
+ self.check_vec_macro(cx, &vec_args, span, expr.hir_id, SuggestedType::Array);
+ }
+ // search for `&vec![_]` or `vec![_]` expressions where the adjusted type is `&[_]`
+ else {
let (suggest_slice, span) = if let ExprKind::AddrOf(BorrowKind::Ref, mutability, _) = expr.kind {
// `expr` is `&vec![_]`, so suggest `&[_]` (or `&mut[_]` resp.)
(SuggestedType::SliceRef(mutability), expr.span)
@@ -84,53 +130,28 @@ impl<'tcx> LateLintPass<'tcx> for UselessVec {
(SuggestedType::Array, expr.span.ctxt().outer_expn_data().call_site)
};
- self.check_vec_macro(cx, &vec_args, span, suggest_slice);
- }
- }
-
- // search for `let foo = vec![_]` expressions where all uses of `foo`
- // adjust to slices or call a method that exist on slices (e.g. len)
- if let Some(vec_args) = higher::VecArgs::hir(cx, expr)
- && let Node::Local(local) = cx.tcx.hir().get_parent(expr.hir_id)
- // for now ignore locals with type annotations.
- // this is to avoid compile errors when doing the suggestion here: let _: Vec<_> = vec![..];
- && local.ty.is_none()
- && let PatKind::Binding(_, id, ..) = local.pat.kind
- && is_copy(cx, vec_type(cx.typeck_results().expr_ty_adjusted(expr)))
- {
- let only_slice_uses = for_each_local_use_after_expr(cx, id, expr.hir_id, |expr| {
- // allow indexing into a vec and some set of allowed method calls that exist on slices, too
- if let Some(parent) = get_parent_expr(cx, expr)
- && (adjusts_to_slice(cx, expr)
- || matches!(parent.kind, ExprKind::Index(..))
- || is_allowed_vec_method(cx, parent))
- {
- ControlFlow::Continue(())
+ if adjusts_to_slice(cx, expr) {
+ self.check_vec_macro(cx, &vec_args, span, expr.hir_id, suggest_slice);
} else {
- ControlFlow::Break(())
+ self.span_to_lint_map.insert(span, None);
}
- })
- .is_continue();
-
- if only_slice_uses {
- self.check_vec_macro(
- cx,
- &vec_args,
- expr.span.ctxt().outer_expn_data().call_site,
- SuggestedType::Array,
- );
}
}
+ }
- // search for `for _ in vec![…]`
- if_chain! {
- if let Some(higher::ForLoop { arg, .. }) = higher::ForLoop::hir(expr);
- if let Some(vec_args) = higher::VecArgs::hir(cx, arg);
- if self.msrv.meets(msrvs::ARRAY_INTO_ITERATOR);
- then {
- // report the error around the `vec!` not inside `<std macros>:`
- let span = arg.span.ctxt().outer_expn_data().call_site;
- self.check_vec_macro(cx, &vec_args, span, SuggestedType::Array);
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ for (span, lint_opt) in &self.span_to_lint_map {
+ if let Some((hir_id, suggest_slice, snippet, applicability)) = lint_opt {
+ let help_msg = format!(
+ "you can use {} directly",
+ match suggest_slice {
+ SuggestedType::SliceRef(_) => "a slice",
+ SuggestedType::Array => "an array",
+ }
+ );
+ span_lint_hir_and_then(cx, USELESS_VEC, *hir_id, *span, "useless use of `vec!`", |diag| {
+ diag.span_suggestion(*span, help_msg, snippet, *applicability);
+ });
}
}
}
@@ -139,7 +160,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessVec {
}
#[derive(Copy, Clone)]
-enum SuggestedType {
+pub(crate) enum SuggestedType {
/// Suggest using a slice `&[..]` / `&mut [..]`
SliceRef(Mutability),
/// Suggest using an array: `[..]`
@@ -152,6 +173,7 @@ impl UselessVec {
cx: &LateContext<'tcx>,
vec_args: &higher::VecArgs<'tcx>,
span: Span,
+ hir_id: HirId,
suggest_slice: SuggestedType,
) {
if span.from_expansion() {
@@ -209,21 +231,9 @@ impl UselessVec {
},
};
- span_lint_and_sugg(
- cx,
- USELESS_VEC,
- span,
- "useless use of `vec!`",
- &format!(
- "you can use {} directly",
- match suggest_slice {
- SuggestedType::SliceRef(_) => "a slice",
- SuggestedType::Array => "an array",
- }
- ),
- snippet,
- applicability,
- );
+ self.span_to_lint_map
+ .entry(span)
+ .or_insert(Some((hir_id, suggest_slice, snippet, applicability)));
}
}
diff --git a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
index c8b9402f1..ac3b2bdaf 100644
--- a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
+++ b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
@@ -11,7 +11,7 @@ use rustc_hir::{
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{Span, Symbol};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/visibility.rs b/src/tools/clippy/clippy_lints/src/visibility.rs
index 8abcc964b..83369c663 100644
--- a/src/tools/clippy/clippy_lints/src/visibility.rs
+++ b/src/tools/clippy/clippy_lints/src/visibility.rs
@@ -4,7 +4,7 @@ use rustc_ast::ast::{Item, VisibilityKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::symbol::kw;
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
index d88ede763..9b0dac6af 100644
--- a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
@@ -1,13 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_test_module_or_function;
use clippy_utils::source::{snippet, snippet_with_applicability};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Item, ItemKind, PathSegment, UseKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::symbol::kw;
use rustc_span::{sym, BytePos};
@@ -127,70 +126,55 @@ impl LateLintPass<'_> for WildcardImports {
if cx.tcx.visibility(item.owner_id.def_id) != ty::Visibility::Restricted(module.to_def_id()) {
return;
}
- if_chain! {
- if let ItemKind::Use(use_path, UseKind::Glob) = &item.kind;
- if self.warn_on_all || !self.check_exceptions(item, use_path.segments);
- let used_imports = cx.tcx.names_imported_by_glob_use(item.owner_id.def_id);
- if !used_imports.is_empty(); // Already handled by `unused_imports`
- if !used_imports.contains(&kw::Underscore);
- then {
- let mut applicability = Applicability::MachineApplicable;
- let import_source_snippet = snippet_with_applicability(cx, use_path.span, "..", &mut applicability);
- let (span, braced_glob) = if import_source_snippet.is_empty() {
- // This is a `_::{_, *}` import
- // In this case `use_path.span` is empty and ends directly in front of the `*`,
- // so we need to extend it by one byte.
- (
- use_path.span.with_hi(use_path.span.hi() + BytePos(1)),
- true,
- )
- } else {
- // In this case, the `use_path.span` ends right before the `::*`, so we need to
- // extend it up to the `*`. Since it is hard to find the `*` in weird
- // formattings like `use _ :: *;`, we extend it up to, but not including the
- // `;`. In nested imports, like `use _::{inner::*, _}` there is no `;` and we
- // can just use the end of the item span
- let mut span = use_path.span.with_hi(item.span.hi());
- if snippet(cx, span, "").ends_with(';') {
- span = use_path.span.with_hi(item.span.hi() - BytePos(1));
- }
- (
- span, false,
- )
- };
+ if let ItemKind::Use(use_path, UseKind::Glob) = &item.kind
+ && (self.warn_on_all || !self.check_exceptions(item, use_path.segments))
+ && let used_imports = cx.tcx.names_imported_by_glob_use(item.owner_id.def_id)
+ && !used_imports.is_empty() // Already handled by `unused_imports`
+ && !used_imports.contains(&kw::Underscore)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let import_source_snippet = snippet_with_applicability(cx, use_path.span, "..", &mut applicability);
+ let (span, braced_glob) = if import_source_snippet.is_empty() {
+ // This is a `_::{_, *}` import
+ // In this case `use_path.span` is empty and ends directly in front of the `*`,
+ // so we need to extend it by one byte.
+ (use_path.span.with_hi(use_path.span.hi() + BytePos(1)), true)
+ } else {
+ // In this case, the `use_path.span` ends right before the `::*`, so we need to
+ // extend it up to the `*`. Since it is hard to find the `*` in weird
+ // formattings like `use _ :: *;`, we extend it up to, but not including the
+ // `;`. In nested imports, like `use _::{inner::*, _}` there is no `;` and we
+ // can just use the end of the item span
+ let mut span = use_path.span.with_hi(item.span.hi());
+ if snippet(cx, span, "").ends_with(';') {
+ span = use_path.span.with_hi(item.span.hi() - BytePos(1));
+ }
+ (span, false)
+ };
- let mut imports = used_imports.items().map(ToString::to_string).into_sorted_stable_ord();
- let imports_string = if imports.len() == 1 {
- imports.pop().unwrap()
- } else if braced_glob {
- imports.join(", ")
- } else {
- format!("{{{}}}", imports.join(", "))
- };
+ let mut imports = used_imports.items().map(ToString::to_string).into_sorted_stable_ord();
+ let imports_string = if imports.len() == 1 {
+ imports.pop().unwrap()
+ } else if braced_glob {
+ imports.join(", ")
+ } else {
+ format!("{{{}}}", imports.join(", "))
+ };
- let sugg = if braced_glob {
- imports_string
- } else {
- format!("{import_source_snippet}::{imports_string}")
- };
+ let sugg = if braced_glob {
+ imports_string
+ } else {
+ format!("{import_source_snippet}::{imports_string}")
+ };
- // Glob imports always have a single resolution.
- let (lint, message) = if let Res::Def(DefKind::Enum, _) = use_path.res[0] {
- (ENUM_GLOB_USE, "usage of wildcard import for enum variants")
- } else {
- (WILDCARD_IMPORTS, "usage of wildcard import")
- };
+ // Glob imports always have a single resolution.
+ let (lint, message) = if let Res::Def(DefKind::Enum, _) = use_path.res[0] {
+ (ENUM_GLOB_USE, "usage of wildcard import for enum variants")
+ } else {
+ (WILDCARD_IMPORTS, "usage of wildcard import")
+ };
- span_lint_and_sugg(
- cx,
- lint,
- span,
- message,
- "try",
- sugg,
- applicability,
- );
- }
+ span_lint_and_sugg(cx, lint, span, message, "try", sugg, applicability);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/write.rs b/src/tools/clippy/clippy_lints/src/write.rs
index b6f942a90..be16d2e5c 100644
--- a/src/tools/clippy/clippy_lints/src/write.rs
+++ b/src/tools/clippy/clippy_lints/src/write.rs
@@ -10,7 +10,7 @@ use rustc_ast::{
use rustc_errors::Applicability;
use rustc_hir::{Expr, Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::impl_lint_pass;
use rustc_span::{sym, BytePos, Span};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/zero_div_zero.rs b/src/tools/clippy/clippy_lints/src/zero_div_zero.rs
index f2f0699ef..d3623d6fd 100644
--- a/src/tools/clippy/clippy_lints/src/zero_div_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/zero_div_zero.rs
@@ -1,9 +1,8 @@
use clippy_utils::consts::{constant_simple, Constant};
use clippy_utils::diagnostics::span_lint_and_help;
-use if_chain::if_chain;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
declare_clippy_lint! {
/// ### What it does
@@ -32,35 +31,30 @@ declare_lint_pass!(ZeroDiv => [ZERO_DIVIDED_BY_ZERO]);
impl<'tcx> LateLintPass<'tcx> for ZeroDiv {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// check for instances of 0.0/0.0
- if_chain! {
- if let ExprKind::Binary(ref op, left, right) = expr.kind;
- if op.node == BinOpKind::Div;
+ if let ExprKind::Binary(ref op, left, right) = expr.kind
+ && op.node == BinOpKind::Div
// TODO - constant_simple does not fold many operations involving floats.
// That's probably fine for this lint - it's pretty unlikely that someone would
// do something like 0.0/(2.0 - 2.0), but it would be nice to warn on that case too.
- if let Some(lhs_value) = constant_simple(cx, cx.typeck_results(), left);
- if let Some(rhs_value) = constant_simple(cx, cx.typeck_results(), right);
- if Constant::F32(0.0) == lhs_value || Constant::F64(0.0) == lhs_value;
- if Constant::F32(0.0) == rhs_value || Constant::F64(0.0) == rhs_value;
- then {
- // since we're about to suggest a use of f32::NAN or f64::NAN,
- // match the precision of the literals that are given.
- let float_type = match (lhs_value, rhs_value) {
- (Constant::F64(_), _)
- | (_, Constant::F64(_)) => "f64",
- _ => "f32"
- };
- span_lint_and_help(
- cx,
- ZERO_DIVIDED_BY_ZERO,
- expr.span,
- "constant division of `0.0` with `0.0` will always result in NaN",
- None,
- &format!(
- "consider using `{float_type}::NAN` if you would like a constant representing NaN",
- ),
- );
- }
+ && let Some(lhs_value) = constant_simple(cx, cx.typeck_results(), left)
+ && let Some(rhs_value) = constant_simple(cx, cx.typeck_results(), right)
+ && (Constant::F32(0.0) == lhs_value || Constant::F64(0.0) == lhs_value)
+ && (Constant::F32(0.0) == rhs_value || Constant::F64(0.0) == rhs_value)
+ {
+ // since we're about to suggest a use of f32::NAN or f64::NAN,
+ // match the precision of the literals that are given.
+ let float_type = match (lhs_value, rhs_value) {
+ (Constant::F64(_), _) | (_, Constant::F64(_)) => "f64",
+ _ => "f32",
+ };
+ span_lint_and_help(
+ cx,
+ ZERO_DIVIDED_BY_ZERO,
+ expr.span,
+ "constant division of `0.0` with `0.0` will always result in NaN",
+ None,
+ &format!("consider using `{float_type}::NAN` if you would like a constant representing NaN",),
+ );
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
index fee100fe1..b36c4ef91 100644
--- a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
+++ b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::{is_normalizable, is_type_diagnostic_item};
-use if_chain::if_chain;
use rustc_hir::{self as hir, HirId, ItemKind, Node};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::layout::LayoutOf as _;
use rustc_middle::ty::{Adt, Ty, TypeVisitableExt};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::declare_lint_pass;
use rustc_span::sym;
declare_clippy_lint! {
@@ -46,23 +45,28 @@ declare_lint_pass!(ZeroSizedMapValues => [ZERO_SIZED_MAP_VALUES]);
impl LateLintPass<'_> for ZeroSizedMapValues {
fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) {
- if_chain! {
- if !hir_ty.span.from_expansion();
- if !in_trait_impl(cx, hir_ty.hir_id);
- let ty = ty_from_hir_ty(cx, hir_ty);
- if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap);
- if let Adt(_, args) = ty.kind();
- let ty = args.type_at(1);
+ if !hir_ty.span.from_expansion()
+ && !in_trait_impl(cx, hir_ty.hir_id)
+ && let ty = ty_from_hir_ty(cx, hir_ty)
+ && (is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap))
+ && let Adt(_, args) = ty.kind()
+ && let ty = args.type_at(1)
// Fixes https://github.com/rust-lang/rust-clippy/issues/7447 because of
// https://github.com/rust-lang/rust/blob/master/compiler/rustc_middle/src/ty/sty.rs#L968
- if !ty.has_escaping_bound_vars();
+ && !ty.has_escaping_bound_vars()
// Do this to prevent `layout_of` crashing, being unable to fully normalize `ty`.
- if is_normalizable(cx, cx.param_env, ty);
- if let Ok(layout) = cx.layout_of(ty);
- if layout.is_zst();
- then {
- span_lint_and_help(cx, ZERO_SIZED_MAP_VALUES, hir_ty.span, "map with zero-sized value type", None, "consider using a set instead");
- }
+ && is_normalizable(cx, cx.param_env, ty)
+ && let Ok(layout) = cx.layout_of(ty)
+ && layout.is_zst()
+ {
+ span_lint_and_help(
+ cx,
+ ZERO_SIZED_MAP_VALUES,
+ hir_ty.span,
+ "map with zero-sized value type",
+ None,
+ "consider using a set instead",
+ );
}
}
}
@@ -70,7 +74,7 @@ impl LateLintPass<'_> for ZeroSizedMapValues {
fn in_trait_impl(cx: &LateContext<'_>, hir_id: HirId) -> bool {
let parent_id = cx.tcx.hir().get_parent_item(hir_id);
let second_parent_id = cx.tcx.hir().get_parent_item(parent_id.into()).def_id;
- if let Some(Node::Item(item)) = cx.tcx.hir().find_by_def_id(second_parent_id) {
+ if let Some(Node::Item(item)) = cx.tcx.opt_hir_node_by_def_id(second_parent_id) {
if let ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }) = item.kind {
return true;
}
diff --git a/src/tools/clippy/clippy_utils/Cargo.toml b/src/tools/clippy/clippy_utils/Cargo.toml
index c9b01a68f..5d23326ce 100644
--- a/src/tools/clippy/clippy_utils/Cargo.toml
+++ b/src/tools/clippy/clippy_utils/Cargo.toml
@@ -1,14 +1,13 @@
[package]
name = "clippy_utils"
-version = "0.1.75"
+version = "0.1.76"
edition = "2021"
publish = false
[dependencies]
clippy_config = { path = "../clippy_config" }
arrayvec = { version = "0.7", default-features = false }
-if_chain = "1.0"
-itertools = "0.10.1"
+itertools = "0.11"
rustc-semver = "1.1"
[features]
diff --git a/src/tools/clippy/clippy_utils/src/ast_utils.rs b/src/tools/clippy/clippy_utils/src/ast_utils.rs
index a2c61e07b..c271e4986 100644
--- a/src/tools/clippy/clippy_utils/src/ast_utils.rs
+++ b/src/tools/clippy/clippy_utils/src/ast_utils.rs
@@ -188,7 +188,7 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
Closure(box ast::Closure {
binder: lb,
capture_clause: lc,
- asyncness: la,
+ coroutine_kind: la,
movability: lm,
fn_decl: lf,
body: le,
@@ -197,7 +197,7 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
Closure(box ast::Closure {
binder: rb,
capture_clause: rc,
- asyncness: ra,
+ coroutine_kind: ra,
movability: rm,
fn_decl: rf,
body: re,
@@ -206,7 +206,7 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
) => {
eq_closure_binder(lb, rb)
&& lc == rc
- && la.is_async() == ra.is_async()
+ && la.map_or(false, CoroutineKind::is_async) == ra.map_or(false, CoroutineKind::is_async)
&& lm == rm
&& eq_fn_decl(lf, rf)
&& eq_expr(le, re)
@@ -236,7 +236,7 @@ pub fn eq_field(l: &ExprField, r: &ExprField) -> bool {
pub fn eq_arm(l: &Arm, r: &Arm) -> bool {
l.is_placeholder == r.is_placeholder
&& eq_pat(&l.pat, &r.pat)
- && eq_expr(&l.body, &r.body)
+ && eq_expr_opt(&l.body, &r.body)
&& eq_expr_opt(&l.guard, &r.guard)
&& over(&l.attrs, &r.attrs, eq_attr)
}
@@ -546,7 +546,9 @@ pub fn eq_variant_data(l: &VariantData, r: &VariantData) -> bool {
use VariantData::*;
match (l, r) {
(Unit(_), Unit(_)) => true,
- (Struct(l, _), Struct(r, _)) | (Tuple(l, _), Tuple(r, _)) => over(l, r, eq_struct_field),
+ (Struct { fields: l, .. }, Struct { fields: r, .. }) | (Tuple(l, _), Tuple(r, _)) => {
+ over(l, r, eq_struct_field)
+ },
_ => false,
}
}
@@ -563,9 +565,22 @@ pub fn eq_fn_sig(l: &FnSig, r: &FnSig) -> bool {
eq_fn_decl(&l.decl, &r.decl) && eq_fn_header(&l.header, &r.header)
}
+fn eq_opt_coroutine_kind(l: Option<CoroutineKind>, r: Option<CoroutineKind>) -> bool {
+ matches!(
+ (l, r),
+ (Some(CoroutineKind::Async { .. }), Some(CoroutineKind::Async { .. }))
+ | (Some(CoroutineKind::Gen { .. }), Some(CoroutineKind::Gen { .. }))
+ | (
+ Some(CoroutineKind::AsyncGen { .. }),
+ Some(CoroutineKind::AsyncGen { .. })
+ )
+ | (None, None)
+ )
+}
+
pub fn eq_fn_header(l: &FnHeader, r: &FnHeader) -> bool {
matches!(l.unsafety, Unsafe::No) == matches!(r.unsafety, Unsafe::No)
- && l.asyncness.is_async() == r.asyncness.is_async()
+ && eq_opt_coroutine_kind(l.coroutine_kind, r.coroutine_kind)
&& matches!(l.constness, Const::No) == matches!(r.constness, Const::No)
&& eq_ext(&l.ext, &r.ext)
}
diff --git a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
index 2f619a306..d751aeaf9 100644
--- a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
+++ b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
@@ -200,7 +200,7 @@ fn item_search_pat(item: &Item<'_>) -> (Pat, Pat) {
ItemKind::ForeignMod { .. } => (Pat::Str("extern"), Pat::Str("}")),
ItemKind::TyAlias(..) | ItemKind::OpaqueTy(_) => (Pat::Str("type"), Pat::Str(";")),
ItemKind::Enum(..) => (Pat::Str("enum"), Pat::Str("}")),
- ItemKind::Struct(VariantData::Struct(..), _) => (Pat::Str("struct"), Pat::Str("}")),
+ ItemKind::Struct(VariantData::Struct { .. }, _) => (Pat::Str("struct"), Pat::Str("}")),
ItemKind::Struct(..) => (Pat::Str("struct"), Pat::Str(";")),
ItemKind::Union(..) => (Pat::Str("union"), Pat::Str("}")),
ItemKind::Trait(_, Unsafety::Unsafe, ..)
@@ -255,7 +255,7 @@ fn field_def_search_pat(def: &FieldDef<'_>) -> (Pat, Pat) {
fn variant_search_pat(v: &Variant<'_>) -> (Pat, Pat) {
match v.data {
- VariantData::Struct(..) => (Pat::Sym(v.ident.name), Pat::Str("}")),
+ VariantData::Struct { .. } => (Pat::Sym(v.ident.name), Pat::Str("}")),
VariantData::Tuple(..) => (Pat::Sym(v.ident.name), Pat::Str("")),
VariantData::Unit(..) => (Pat::Sym(v.ident.name), Pat::Sym(v.ident.name)),
}
@@ -267,7 +267,7 @@ fn fn_kind_pat(tcx: TyCtxt<'_>, kind: &FnKind<'_>, body: &Body<'_>, hir_id: HirI
FnKind::Method(.., sig) => (fn_header_search_pat(sig.header), Pat::Str("")),
FnKind::Closure => return (Pat::Str(""), expr_search_pat(tcx, body.value).1),
};
- let start_pat = match tcx.hir().get(hir_id) {
+ let start_pat = match tcx.hir_node(hir_id) {
Node::Item(Item { vis_span, .. }) | Node::ImplItem(ImplItem { vis_span, .. }) => {
if vis_span.is_empty() {
start_pat
diff --git a/src/tools/clippy/clippy_utils/src/consts.rs b/src/tools/clippy/clippy_utils/src/consts.rs
index b581a60de..727f93c83 100644
--- a/src/tools/clippy/clippy_utils/src/consts.rs
+++ b/src/tools/clippy/clippy_utils/src/consts.rs
@@ -2,7 +2,7 @@
use crate::source::{get_source_text, walk_span_to_context};
use crate::{clip, is_direct_expn_of, sext, unsext};
-use if_chain::if_chain;
+
use rustc_ast::ast::{self, LitFloatType, LitKind};
use rustc_data_structures::sync::Lrc;
use rustc_hir::def::{DefKind, Res};
@@ -10,7 +10,7 @@ use rustc_hir::{BinOp, BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item
use rustc_lexer::tokenize;
use rustc_lint::LateContext;
use rustc_middle::mir::interpret::{alloc_range, Scalar};
-use rustc_middle::ty::{self, EarlyBinder, FloatTy, GenericArgsRef, List, ScalarInt, Ty, TyCtxt};
+use rustc_middle::ty::{self, EarlyBinder, FloatTy, GenericArgsRef, IntTy, List, ScalarInt, Ty, TyCtxt, UintTy};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::SyntaxContext;
@@ -51,6 +51,63 @@ pub enum Constant<'tcx> {
Err,
}
+trait IntTypeBounds: Sized {
+ type Output: PartialOrd;
+
+ fn min_max(self) -> Option<(Self::Output, Self::Output)>;
+ fn bits(self) -> Self::Output;
+ fn ensure_fits(self, val: Self::Output) -> Option<Self::Output> {
+ let (min, max) = self.min_max()?;
+ (min <= val && val <= max).then_some(val)
+ }
+}
+impl IntTypeBounds for UintTy {
+ type Output = u128;
+ fn min_max(self) -> Option<(Self::Output, Self::Output)> {
+ Some(match self {
+ UintTy::U8 => (u8::MIN.into(), u8::MAX.into()),
+ UintTy::U16 => (u16::MIN.into(), u16::MAX.into()),
+ UintTy::U32 => (u32::MIN.into(), u32::MAX.into()),
+ UintTy::U64 => (u64::MIN.into(), u64::MAX.into()),
+ UintTy::U128 => (u128::MIN, u128::MAX),
+ UintTy::Usize => (usize::MIN.try_into().ok()?, usize::MAX.try_into().ok()?),
+ })
+ }
+ fn bits(self) -> Self::Output {
+ match self {
+ UintTy::U8 => 8,
+ UintTy::U16 => 16,
+ UintTy::U32 => 32,
+ UintTy::U64 => 64,
+ UintTy::U128 => 128,
+ UintTy::Usize => usize::BITS.into(),
+ }
+ }
+}
+impl IntTypeBounds for IntTy {
+ type Output = i128;
+ fn min_max(self) -> Option<(Self::Output, Self::Output)> {
+ Some(match self {
+ IntTy::I8 => (i8::MIN.into(), i8::MAX.into()),
+ IntTy::I16 => (i16::MIN.into(), i16::MAX.into()),
+ IntTy::I32 => (i32::MIN.into(), i32::MAX.into()),
+ IntTy::I64 => (i64::MIN.into(), i64::MAX.into()),
+ IntTy::I128 => (i128::MIN, i128::MAX),
+ IntTy::Isize => (isize::MIN.try_into().ok()?, isize::MAX.try_into().ok()?),
+ })
+ }
+ fn bits(self) -> Self::Output {
+ match self {
+ IntTy::I8 => 8,
+ IntTy::I16 => 16,
+ IntTy::I32 => 32,
+ IntTy::I64 => 64,
+ IntTy::I128 => 128,
+ IntTy::Isize => isize::BITS.into(),
+ }
+ }
+}
+
impl<'tcx> PartialEq for Constant<'tcx> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
@@ -372,27 +429,25 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
ExprKind::Binary(op, left, right) => self.binop(op, left, right),
ExprKind::Call(callee, args) => {
// We only handle a few const functions for now.
- if_chain! {
- if args.is_empty();
- if let ExprKind::Path(qpath) = &callee.kind;
- let res = self.typeck_results.qpath_res(qpath, callee.hir_id);
- if let Some(def_id) = res.opt_def_id();
- let def_path = self.lcx.get_def_path(def_id);
- let def_path: Vec<&str> = def_path.iter().take(4).map(Symbol::as_str).collect();
- if let ["core", "num", int_impl, "max_value"] = *def_path;
- then {
- let value = match int_impl {
- "<impl i8>" => i8::MAX as u128,
- "<impl i16>" => i16::MAX as u128,
- "<impl i32>" => i32::MAX as u128,
- "<impl i64>" => i64::MAX as u128,
- "<impl i128>" => i128::MAX as u128,
- _ => return None,
- };
- Some(Constant::Int(value))
- } else {
- None
- }
+ if args.is_empty()
+ && let ExprKind::Path(qpath) = &callee.kind
+ && let res = self.typeck_results.qpath_res(qpath, callee.hir_id)
+ && let Some(def_id) = res.opt_def_id()
+ && let def_path = self.lcx.get_def_path(def_id)
+ && let def_path = def_path.iter().take(4).map(Symbol::as_str).collect::<Vec<_>>()
+ && let ["core", "num", int_impl, "max_value"] = *def_path
+ {
+ let value = match int_impl {
+ "<impl i8>" => i8::MAX as u128,
+ "<impl i16>" => i16::MAX as u128,
+ "<impl i32>" => i32::MAX as u128,
+ "<impl i64>" => i64::MAX as u128,
+ "<impl i128>" => i128::MAX as u128,
+ _ => return None,
+ };
+ Some(Constant::Int(value))
+ } else {
+ None
}
},
ExprKind::Index(arr, index, _) => self.index(arr, index),
@@ -435,8 +490,15 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
match *o {
Int(value) => {
let ty::Int(ity) = *ty.kind() else { return None };
+ let (min, _) = ity.min_max()?;
// sign extend
let value = sext(self.lcx.tcx, value, ity);
+
+ // Applying unary - to the most negative value of any signed integer type panics.
+ if value == min {
+ return None;
+ }
+
let value = value.checked_neg()?;
// clear unused bits
Some(Int(unsext(self.lcx.tcx, value, ity)))
@@ -469,7 +531,7 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
kind: ExprKind::Lit(_),
span,
..
- }) = self.lcx.tcx.hir().get(body_id.hir_id)
+ }) = self.lcx.tcx.hir_node(body_id.hir_id)
&& is_direct_expn_of(*span, "cfg").is_some()
{
return None;
@@ -572,17 +634,33 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
match (l, r) {
(Constant::Int(l), Some(Constant::Int(r))) => match *self.typeck_results.expr_ty_opt(left)?.kind() {
ty::Int(ity) => {
+ let (ty_min_value, _) = ity.min_max()?;
+ let bits = ity.bits();
let l = sext(self.lcx.tcx, l, ity);
let r = sext(self.lcx.tcx, r, ity);
+
+ // Using / or %, where the left-hand argument is the smallest integer of a signed integer type and
+ // the right-hand argument is -1 always panics, even with overflow-checks disabled
+ if let BinOpKind::Div | BinOpKind::Rem = op.node
+ && l == ty_min_value
+ && r == -1
+ {
+ return None;
+ }
+
let zext = |n: i128| Constant::Int(unsext(self.lcx.tcx, n, ity));
match op.node {
- BinOpKind::Add => l.checked_add(r).map(zext),
- BinOpKind::Sub => l.checked_sub(r).map(zext),
- BinOpKind::Mul => l.checked_mul(r).map(zext),
+ // When +, * or binary - create a value greater than the maximum value, or less than
+ // the minimum value that can be stored, it panics.
+ BinOpKind::Add => l.checked_add(r).and_then(|n| ity.ensure_fits(n)).map(zext),
+ BinOpKind::Sub => l.checked_sub(r).and_then(|n| ity.ensure_fits(n)).map(zext),
+ BinOpKind::Mul => l.checked_mul(r).and_then(|n| ity.ensure_fits(n)).map(zext),
BinOpKind::Div if r != 0 => l.checked_div(r).map(zext),
BinOpKind::Rem if r != 0 => l.checked_rem(r).map(zext),
- BinOpKind::Shr => l.checked_shr(r.try_into().ok()?).map(zext),
- BinOpKind::Shl => l.checked_shl(r.try_into().ok()?).map(zext),
+ // Using << or >> where the right-hand argument is greater than or equal to the number of bits
+ // in the type of the left-hand argument, or is negative panics.
+ BinOpKind::Shr if r < bits && !r.is_negative() => l.checked_shr(r.try_into().ok()?).map(zext),
+ BinOpKind::Shl if r < bits && !r.is_negative() => l.checked_shl(r.try_into().ok()?).map(zext),
BinOpKind::BitXor => Some(zext(l ^ r)),
BinOpKind::BitOr => Some(zext(l | r)),
BinOpKind::BitAnd => Some(zext(l & r)),
@@ -595,24 +673,28 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
_ => None,
}
},
- ty::Uint(_) => match op.node {
- BinOpKind::Add => l.checked_add(r).map(Constant::Int),
- BinOpKind::Sub => l.checked_sub(r).map(Constant::Int),
- BinOpKind::Mul => l.checked_mul(r).map(Constant::Int),
- BinOpKind::Div => l.checked_div(r).map(Constant::Int),
- BinOpKind::Rem => l.checked_rem(r).map(Constant::Int),
- BinOpKind::Shr => l.checked_shr(r.try_into().ok()?).map(Constant::Int),
- BinOpKind::Shl => l.checked_shl(r.try_into().ok()?).map(Constant::Int),
- BinOpKind::BitXor => Some(Constant::Int(l ^ r)),
- BinOpKind::BitOr => Some(Constant::Int(l | r)),
- BinOpKind::BitAnd => Some(Constant::Int(l & r)),
- BinOpKind::Eq => Some(Constant::Bool(l == r)),
- BinOpKind::Ne => Some(Constant::Bool(l != r)),
- BinOpKind::Lt => Some(Constant::Bool(l < r)),
- BinOpKind::Le => Some(Constant::Bool(l <= r)),
- BinOpKind::Ge => Some(Constant::Bool(l >= r)),
- BinOpKind::Gt => Some(Constant::Bool(l > r)),
- _ => None,
+ ty::Uint(ity) => {
+ let bits = ity.bits();
+
+ match op.node {
+ BinOpKind::Add => l.checked_add(r).and_then(|n| ity.ensure_fits(n)).map(Constant::Int),
+ BinOpKind::Sub => l.checked_sub(r).and_then(|n| ity.ensure_fits(n)).map(Constant::Int),
+ BinOpKind::Mul => l.checked_mul(r).and_then(|n| ity.ensure_fits(n)).map(Constant::Int),
+ BinOpKind::Div => l.checked_div(r).map(Constant::Int),
+ BinOpKind::Rem => l.checked_rem(r).map(Constant::Int),
+ BinOpKind::Shr if r < bits => l.checked_shr(r.try_into().ok()?).map(Constant::Int),
+ BinOpKind::Shl if r < bits => l.checked_shl(r.try_into().ok()?).map(Constant::Int),
+ BinOpKind::BitXor => Some(Constant::Int(l ^ r)),
+ BinOpKind::BitOr => Some(Constant::Int(l | r)),
+ BinOpKind::BitAnd => Some(Constant::Int(l & r)),
+ BinOpKind::Eq => Some(Constant::Bool(l == r)),
+ BinOpKind::Ne => Some(Constant::Bool(l != r)),
+ BinOpKind::Lt => Some(Constant::Bool(l < r)),
+ BinOpKind::Le => Some(Constant::Bool(l <= r)),
+ BinOpKind::Ge => Some(Constant::Bool(l >= r)),
+ BinOpKind::Gt => Some(Constant::Bool(l > r)),
+ _ => None,
+ }
},
_ => None,
},
diff --git a/src/tools/clippy/clippy_utils/src/diagnostics.rs b/src/tools/clippy/clippy_utils/src/diagnostics.rs
index 45c7e3a6e..756296153 100644
--- a/src/tools/clippy/clippy_utils/src/diagnostics.rs
+++ b/src/tools/clippy/clippy_utils/src/diagnostics.rs
@@ -46,9 +46,9 @@ fn docs_link(diag: &mut Diagnostic, lint: &'static Lint) {
/// | ^^^^^^^^^^^^^^^^^^^^^^^
/// ```
pub fn span_lint<T: LintContext>(cx: &T, lint: &'static Lint, sp: impl Into<MultiSpan>, msg: &str) {
+ #[expect(clippy::disallowed_methods)]
cx.struct_span_lint(lint, sp, msg.to_string(), |diag| {
docs_link(diag, lint);
- diag
});
}
@@ -80,6 +80,7 @@ pub fn span_lint_and_help<T: LintContext>(
help_span: Option<Span>,
help: &str,
) {
+ #[expect(clippy::disallowed_methods)]
cx.struct_span_lint(lint, span, msg.to_string(), |diag| {
let help = help.to_string();
if let Some(help_span) = help_span {
@@ -88,7 +89,6 @@ pub fn span_lint_and_help<T: LintContext>(
diag.help(help.to_string());
}
docs_link(diag, lint);
- diag
});
}
@@ -123,6 +123,7 @@ pub fn span_lint_and_note<T: LintContext>(
note_span: Option<Span>,
note: &str,
) {
+ #[expect(clippy::disallowed_methods)]
cx.struct_span_lint(lint, span, msg.to_string(), |diag| {
let note = note.to_string();
if let Some(note_span) = note_span {
@@ -131,7 +132,6 @@ pub fn span_lint_and_note<T: LintContext>(
diag.note(note);
}
docs_link(diag, lint);
- diag
});
}
@@ -145,17 +145,17 @@ where
S: Into<MultiSpan>,
F: FnOnce(&mut Diagnostic),
{
+ #[expect(clippy::disallowed_methods)]
cx.struct_span_lint(lint, sp, msg.to_string(), |diag| {
f(diag);
docs_link(diag, lint);
- diag
});
}
pub fn span_lint_hir(cx: &LateContext<'_>, lint: &'static Lint, hir_id: HirId, sp: Span, msg: &str) {
+ #[expect(clippy::disallowed_methods)]
cx.tcx.struct_span_lint_hir(lint, hir_id, sp, msg.to_string(), |diag| {
docs_link(diag, lint);
- diag
});
}
@@ -167,10 +167,10 @@ pub fn span_lint_hir_and_then(
msg: &str,
f: impl FnOnce(&mut Diagnostic),
) {
+ #[expect(clippy::disallowed_methods)]
cx.tcx.struct_span_lint_hir(lint, hir_id, sp, msg.to_string(), |diag| {
f(diag);
docs_link(diag, lint);
- diag
});
}
diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
index 0bcefba75..4e71c6483 100644
--- a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
+++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
@@ -9,12 +9,13 @@
//! - or-fun-call
//! - option-if-let-else
+use crate::consts::{constant, FullInt};
use crate::ty::{all_predicates_of, is_copy};
use crate::visitors::is_const_evaluatable;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::{Block, Expr, ExprKind, QPath, UnOp};
+use rustc_hir::{BinOpKind, Block, Expr, ExprKind, QPath, UnOp};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_middle::ty::adjustment::Adjust;
@@ -193,6 +194,12 @@ fn expr_eagerness<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> EagernessS
self.eagerness = Lazy;
}
},
+
+ // `-i32::MIN` panics with overflow checks
+ ExprKind::Unary(UnOp::Neg, right) if constant(self.cx, self.cx.typeck_results(), right).is_none() => {
+ self.eagerness |= NoChange;
+ },
+
// Custom `Deref` impl might have side effects
ExprKind::Unary(UnOp::Deref, e)
if self.cx.typeck_results().expr_ty(e).builtin_deref(true).is_none() =>
@@ -207,6 +214,49 @@ fn expr_eagerness<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> EagernessS
self.cx.typeck_results().expr_ty(e).kind(),
ty::Bool | ty::Int(_) | ty::Uint(_),
) => {},
+
+ // `>>` and `<<` panic when the right-hand side is greater than or equal to the number of bits in the
+ // type of the left-hand side, or is negative.
+ // We intentionally only check if the right-hand isn't a constant, because even if the suggestion would
+ // overflow with constants, the compiler emits an error for it and the programmer will have to fix it.
+ // Thus, we would realistically only delay the lint.
+ ExprKind::Binary(op, _, right)
+ if matches!(op.node, BinOpKind::Shl | BinOpKind::Shr)
+ && constant(self.cx, self.cx.typeck_results(), right).is_none() =>
+ {
+ self.eagerness |= NoChange;
+ },
+
+ ExprKind::Binary(op, left, right)
+ if matches!(op.node, BinOpKind::Div | BinOpKind::Rem)
+ && let right_ty = self.cx.typeck_results().expr_ty(right)
+ && let left = constant(self.cx, self.cx.typeck_results(), left)
+ && let right = constant(self.cx, self.cx.typeck_results(), right)
+ .and_then(|c| c.int_value(self.cx, right_ty))
+ && matches!(
+ (left, right),
+ // `1 / x`: x might be zero
+ (_, None)
+ // `x / -1`: x might be T::MIN
+ | (None, Some(FullInt::S(-1)))
+ ) =>
+ {
+ self.eagerness |= NoChange;
+ },
+
+ // Similar to `>>` and `<<`, we only want to avoid linting entirely if either side is unknown and the
+ // compiler can't emit an error for an overflowing expression.
+ // Suggesting eagerness for `true.then(|| i32::MAX + 1)` is okay because the compiler will emit an
+ // error and it's good to have the eagerness warning up front when the user fixes the logic error.
+ ExprKind::Binary(op, left, right)
+ if matches!(op.node, BinOpKind::Add | BinOpKind::Sub | BinOpKind::Mul)
+ && !self.cx.typeck_results().expr_ty(e).is_floating_point()
+ && (constant(self.cx, self.cx.typeck_results(), left).is_none()
+ || constant(self.cx, self.cx.typeck_results(), right).is_none()) =>
+ {
+ self.eagerness |= NoChange;
+ },
+
ExprKind::Binary(_, lhs, rhs)
if self.cx.typeck_results().expr_ty(lhs).is_primitive()
&& self.cx.typeck_results().expr_ty(rhs).is_primitive() => {},
diff --git a/src/tools/clippy/clippy_utils/src/higher.rs b/src/tools/clippy/clippy_utils/src/higher.rs
index edea4b366..3135a0336 100644
--- a/src/tools/clippy/clippy_utils/src/higher.rs
+++ b/src/tools/clippy/clippy_utils/src/higher.rs
@@ -5,7 +5,7 @@
use crate::consts::{constant_simple, Constant};
use crate::ty::is_type_diagnostic_item;
use crate::{is_expn_of, match_def_path, paths};
-use if_chain::if_chain;
+
use rustc_ast::ast;
use rustc_hir as hir;
use rustc_hir::{Arm, Block, Expr, ExprKind, HirId, LoopSource, MatchSource, Node, Pat, QPath};
@@ -30,24 +30,22 @@ pub struct ForLoop<'tcx> {
impl<'tcx> ForLoop<'tcx> {
/// Parses a desugared `for` loop
pub fn hir(expr: &Expr<'tcx>) -> Option<Self> {
- if_chain! {
- if let hir::ExprKind::DropTemps(e) = expr.kind;
- if let hir::ExprKind::Match(iterexpr, [arm], hir::MatchSource::ForLoopDesugar) = e.kind;
- if let hir::ExprKind::Call(_, [arg]) = iterexpr.kind;
- if let hir::ExprKind::Loop(block, ..) = arm.body.kind;
- if let [stmt] = block.stmts;
- if let hir::StmtKind::Expr(e) = stmt.kind;
- if let hir::ExprKind::Match(_, [_, some_arm], _) = e.kind;
- if let hir::PatKind::Struct(_, [field], _) = some_arm.pat.kind;
- then {
- return Some(Self {
- pat: field.pat,
- arg,
- body: some_arm.body,
- loop_id: arm.body.hir_id,
- span: expr.span.ctxt().outer_expn_data().call_site,
- });
- }
+ if let hir::ExprKind::DropTemps(e) = expr.kind
+ && let hir::ExprKind::Match(iterexpr, [arm], hir::MatchSource::ForLoopDesugar) = e.kind
+ && let hir::ExprKind::Call(_, [arg]) = iterexpr.kind
+ && let hir::ExprKind::Loop(block, ..) = arm.body.kind
+ && let [stmt] = block.stmts
+ && let hir::StmtKind::Expr(e) = stmt.kind
+ && let hir::ExprKind::Match(_, [_, some_arm], _) = e.kind
+ && let hir::PatKind::Struct(_, [field], _) = some_arm.pat.kind
+ {
+ return Some(Self {
+ pat: field.pat,
+ arg,
+ body: some_arm.body,
+ loop_id: arm.body.hir_id,
+ span: expr.span.ctxt().outer_expn_data().call_site,
+ });
}
None
}
@@ -277,29 +275,28 @@ impl<'a> VecArgs<'a> {
/// Returns the arguments of the `vec!` macro if this expression was expanded
/// from `vec!`.
pub fn hir(cx: &LateContext<'_>, expr: &'a hir::Expr<'_>) -> Option<VecArgs<'a>> {
- if_chain! {
- if let hir::ExprKind::Call(fun, args) = expr.kind;
- if let hir::ExprKind::Path(ref qpath) = fun.kind;
- if is_expn_of(fun.span, "vec").is_some();
- if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
- then {
- return if match_def_path(cx, fun_def_id, &paths::VEC_FROM_ELEM) && args.len() == 2 {
- // `vec![elem; size]` case
- Some(VecArgs::Repeat(&args[0], &args[1]))
- } else if match_def_path(cx, fun_def_id, &paths::SLICE_INTO_VEC) && args.len() == 1 {
- // `vec![a, b, c]` case
- if let hir::ExprKind::Call(_, [arg]) = &args[0].kind
- && let hir::ExprKind::Array(args) = arg.kind {
- Some(VecArgs::Vec(args))
- } else {
- None
- }
- } else if match_def_path(cx, fun_def_id, &paths::VEC_NEW) && args.is_empty() {
- Some(VecArgs::Vec(&[]))
+ if let hir::ExprKind::Call(fun, args) = expr.kind
+ && let hir::ExprKind::Path(ref qpath) = fun.kind
+ && is_expn_of(fun.span, "vec").is_some()
+ && let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id()
+ {
+ return if match_def_path(cx, fun_def_id, &paths::VEC_FROM_ELEM) && args.len() == 2 {
+ // `vec![elem; size]` case
+ Some(VecArgs::Repeat(&args[0], &args[1]))
+ } else if match_def_path(cx, fun_def_id, &paths::SLICE_INTO_VEC) && args.len() == 1 {
+ // `vec![a, b, c]` case
+ if let hir::ExprKind::Call(_, [arg]) = &args[0].kind
+ && let hir::ExprKind::Array(args) = arg.kind
+ {
+ Some(VecArgs::Vec(args))
} else {
None
- };
- }
+ }
+ } else if match_def_path(cx, fun_def_id, &paths::VEC_NEW) && args.is_empty() {
+ Some(VecArgs::Vec(&[]))
+ } else {
+ None
+ };
}
None
diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs
index 2a8b2ebd5..e610ed930 100644
--- a/src/tools/clippy/clippy_utils/src/hir_utils.rs
+++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs
@@ -247,7 +247,7 @@ impl HirEqInterExpr<'_, '_, '_> {
res
}
- #[expect(clippy::similar_names)]
+ #[expect(clippy::similar_names, clippy::too_many_lines)]
pub fn eq_expr(&mut self, left: &Expr<'_>, right: &Expr<'_>) -> bool {
if !self.check_ctxt(left.span.ctxt(), right.span.ctxt()) {
return false;
@@ -271,9 +271,7 @@ impl HirEqInterExpr<'_, '_, '_> {
(&ExprKind::AddrOf(lb, l_mut, le), &ExprKind::AddrOf(rb, r_mut, re)) => {
lb == rb && l_mut == r_mut && self.eq_expr(le, re)
},
- (&ExprKind::Continue(li), &ExprKind::Continue(ri)) => {
- both(&li.label, &ri.label, |l, r| l.ident.name == r.ident.name)
- },
+ (&ExprKind::Array(l), &ExprKind::Array(r)) => self.eq_exprs(l, r),
(&ExprKind::Assign(ll, lr, _), &ExprKind::Assign(rl, rr, _)) => {
self.inner.allow_side_effects && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
},
@@ -294,9 +292,15 @@ impl HirEqInterExpr<'_, '_, '_> {
(&ExprKind::Call(l_fun, l_args), &ExprKind::Call(r_fun, r_args)) => {
self.inner.allow_side_effects && self.eq_expr(l_fun, r_fun) && self.eq_exprs(l_args, r_args)
},
- (&ExprKind::Cast(lx, lt), &ExprKind::Cast(rx, rt)) | (&ExprKind::Type(lx, lt), &ExprKind::Type(rx, rt)) => {
+ (&ExprKind::Cast(lx, lt), &ExprKind::Cast(rx, rt)) => {
self.eq_expr(lx, rx) && self.eq_ty(lt, rt)
},
+ (&ExprKind::Closure(_l), &ExprKind::Closure(_r)) => false,
+ (&ExprKind::ConstBlock(lb), &ExprKind::ConstBlock(rb)) => self.eq_body(lb.body, rb.body),
+ (&ExprKind::Continue(li), &ExprKind::Continue(ri)) => {
+ both(&li.label, &ri.label, |l, r| l.ident.name == r.ident.name)
+ },
+ (&ExprKind::DropTemps(le), &ExprKind::DropTemps(re)) => self.eq_expr(le, re),
(&ExprKind::Field(l_f_exp, ref l_f_ident), &ExprKind::Field(r_f_exp, ref r_f_ident)) => {
l_f_ident.name == r_f_ident.name && self.eq_expr(l_f_exp, r_f_exp)
},
@@ -329,24 +333,70 @@ impl HirEqInterExpr<'_, '_, '_> {
&& self.eq_expr(l_receiver, r_receiver)
&& self.eq_exprs(l_args, r_args)
},
+ (&ExprKind::OffsetOf(l_container, l_fields), &ExprKind::OffsetOf(r_container, r_fields)) => {
+ self.eq_ty(l_container, r_container) && over(l_fields, r_fields, |l, r| l.name == r.name)
+ },
+ (ExprKind::Path(l), ExprKind::Path(r)) => self.eq_qpath(l, r),
(&ExprKind::Repeat(le, ll), &ExprKind::Repeat(re, rl)) => {
self.eq_expr(le, re) && self.eq_array_length(ll, rl)
},
(ExprKind::Ret(l), ExprKind::Ret(r)) => both(l, r, |l, r| self.eq_expr(l, r)),
- (ExprKind::Path(l), ExprKind::Path(r)) => self.eq_qpath(l, r),
(&ExprKind::Struct(l_path, lf, ref lo), &ExprKind::Struct(r_path, rf, ref ro)) => {
self.eq_qpath(l_path, r_path)
&& both(lo, ro, |l, r| self.eq_expr(l, r))
&& over(lf, rf, |l, r| self.eq_expr_field(l, r))
},
(&ExprKind::Tup(l_tup), &ExprKind::Tup(r_tup)) => self.eq_exprs(l_tup, r_tup),
+ (&ExprKind::Type(le, lt), &ExprKind::Type(re, rt)) => self.eq_expr(le, re) && self.eq_ty(lt, rt),
(&ExprKind::Unary(l_op, le), &ExprKind::Unary(r_op, re)) => l_op == r_op && self.eq_expr(le, re),
- (&ExprKind::Array(l), &ExprKind::Array(r)) => self.eq_exprs(l, r),
- (&ExprKind::DropTemps(le), &ExprKind::DropTemps(re)) => self.eq_expr(le, re),
- (&ExprKind::OffsetOf(l_container, l_fields), &ExprKind::OffsetOf(r_container, r_fields)) => {
- self.eq_ty(l_container, r_container) && over(l_fields, r_fields, |l, r| l.name == r.name)
- },
- _ => false,
+ (&ExprKind::Yield(le, _), &ExprKind::Yield(re, _)) => return self.eq_expr(le, re),
+ (
+ // Else branches for branches above, grouped as per `match_same_arms`.
+ | &ExprKind::AddrOf(..)
+ | &ExprKind::Array(..)
+ | &ExprKind::Assign(..)
+ | &ExprKind::AssignOp(..)
+ | &ExprKind::Binary(..)
+ | &ExprKind::Become(..)
+ | &ExprKind::Block(..)
+ | &ExprKind::Break(..)
+ | &ExprKind::Call(..)
+ | &ExprKind::Cast(..)
+ | &ExprKind::ConstBlock(..)
+ | &ExprKind::Continue(..)
+ | &ExprKind::DropTemps(..)
+ | &ExprKind::Field(..)
+ | &ExprKind::Index(..)
+ | &ExprKind::If(..)
+ | &ExprKind::Let(..)
+ | &ExprKind::Lit(..)
+ | &ExprKind::Loop(..)
+ | &ExprKind::Match(..)
+ | &ExprKind::MethodCall(..)
+ | &ExprKind::OffsetOf(..)
+ | &ExprKind::Path(..)
+ | &ExprKind::Repeat(..)
+ | &ExprKind::Ret(..)
+ | &ExprKind::Struct(..)
+ | &ExprKind::Tup(..)
+ | &ExprKind::Type(..)
+ | &ExprKind::Unary(..)
+ | &ExprKind::Yield(..)
+
+ // --- Special cases that do not have a positive branch.
+
+ // `Err` represents an invalid expression, so let's never assume that
+ // an invalid expressions is equal to anything.
+ | &ExprKind::Err(..)
+
+ // For the time being, we always consider that two closures are unequal.
+ // This behavior may change in the future.
+ | &ExprKind::Closure(..)
+ // For the time being, we always consider that two instances of InlineAsm are different.
+ // This behavior may change in the future.
+ | &ExprKind::InlineAsm(_)
+ , _
+ ) => false,
};
(is_eq && (!self.should_ignore(left) || !self.should_ignore(right)))
|| self.inner.expr_fallback.as_mut().map_or(false, |f| f(left, right))
@@ -684,6 +734,9 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_name(i.ident.name);
}
},
+ ExprKind::Array(v) => {
+ self.hash_exprs(v);
+ },
ExprKind::Assign(l, r, _) => {
self.hash_expr(l);
self.hash_expr(r);
@@ -693,6 +746,9 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_expr(l);
self.hash_expr(r);
},
+ ExprKind::Become(f) => {
+ self.hash_expr(f);
+ },
ExprKind::Block(b, _) => {
self.hash_block(b);
},
@@ -709,9 +765,6 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_expr(j);
}
},
- ExprKind::DropTemps(e) | ExprKind::Yield(e, _) => {
- self.hash_expr(e);
- },
ExprKind::Call(fun, args) => {
self.hash_expr(fun);
self.hash_exprs(args);
@@ -727,6 +780,12 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
// closures inherit TypeckResults
self.hash_expr(self.cx.tcx.hir().body(body).value);
},
+ ExprKind::ConstBlock(ref l_id) => {
+ self.hash_body(l_id.body);
+ },
+ ExprKind::DropTemps(e) | ExprKind::Yield(e, _) => {
+ self.hash_expr(e);
+ },
ExprKind::Field(e, ref f) => {
self.hash_expr(e);
self.hash_name(f.name);
@@ -788,12 +847,6 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
}
}
},
- ExprKind::OffsetOf(container, fields) => {
- self.hash_ty(container);
- for field in fields {
- self.hash_name(field.name);
- }
- },
ExprKind::Let(Let { pat, init, ty, .. }) => {
self.hash_expr(init);
if let Some(ty) = ty {
@@ -801,7 +854,6 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
}
self.hash_pat(pat);
},
- ExprKind::Err(_) => {},
ExprKind::Lit(l) => {
l.node.hash(&mut self.s);
},
@@ -836,8 +888,14 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_expr(receiver);
self.hash_exprs(args);
},
- ExprKind::ConstBlock(ref l_id) => {
- self.hash_body(l_id.body);
+ ExprKind::OffsetOf(container, fields) => {
+ self.hash_ty(container);
+ for field in fields {
+ self.hash_name(field.name);
+ }
+ },
+ ExprKind::Path(ref qpath) => {
+ self.hash_qpath(qpath);
},
ExprKind::Repeat(e, len) => {
self.hash_expr(e);
@@ -848,12 +906,6 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_expr(e);
}
},
- ExprKind::Become(f) => {
- self.hash_expr(f);
- },
- ExprKind::Path(ref qpath) => {
- self.hash_qpath(qpath);
- },
ExprKind::Struct(path, fields, ref expr) => {
self.hash_qpath(path);
@@ -869,13 +921,11 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
ExprKind::Tup(tup) => {
self.hash_exprs(tup);
},
- ExprKind::Array(v) => {
- self.hash_exprs(v);
- },
ExprKind::Unary(lop, le) => {
std::mem::discriminant(&lop).hash(&mut self.s);
self.hash_expr(le);
},
+ ExprKind::Err(_) => {},
}
}
@@ -967,7 +1017,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
}
e.hash(&mut self.s);
},
- PatKind::Wild => {},
+ PatKind::Never | PatKind::Wild => {},
}
}
diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs
index 1181dfc0e..70a3c6f82 100644
--- a/src/tools/clippy/clippy_utils/src/lib.rs
+++ b/src/tools/clippy/clippy_utils/src/lib.rs
@@ -71,12 +71,12 @@ pub use self::hir_utils::{
both, count_eq, eq_expr_value, hash_expr, hash_stmt, is_bool, over, HirEqInterExpr, SpanlessEq, SpanlessHash,
};
+use core::mem;
use core::ops::ControlFlow;
use std::collections::hash_map::Entry;
use std::hash::BuildHasherDefault;
use std::sync::{Mutex, MutexGuard, OnceLock};
-use if_chain::if_chain;
use itertools::Itertools;
use rustc_ast::ast::{self, LitKind, RangeLimits};
use rustc_data_structures::fx::FxHashMap;
@@ -176,14 +176,12 @@ pub fn expr_or_init<'a, 'b, 'tcx: 'b>(cx: &LateContext<'tcx>, mut expr: &'a Expr
/// canonical binding `HirId`.
pub fn find_binding_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Expr<'tcx>> {
let hir = cx.tcx.hir();
- if_chain! {
- if let Some(Node::Pat(pat)) = hir.find(hir_id);
- if matches!(pat.kind, PatKind::Binding(BindingAnnotation::NONE, ..));
- let parent = hir.parent_id(hir_id);
- if let Some(Node::Local(local)) = hir.find(parent);
- then {
- return local.init;
- }
+ if let Some(Node::Pat(pat)) = cx.tcx.opt_hir_node(hir_id)
+ && matches!(pat.kind, PatKind::Binding(BindingAnnotation::NONE, ..))
+ && let parent = hir.parent_id(hir_id)
+ && let Some(Node::Local(local)) = cx.tcx.opt_hir_node(parent)
+ {
+ return local.init;
}
None
}
@@ -565,7 +563,7 @@ fn local_item_children_by_name(tcx: TyCtxt<'_>, local_id: LocalDefId, name: Symb
let hir = tcx.hir();
let root_mod;
- let item_kind = match hir.find_by_def_id(local_id) {
+ let item_kind = match tcx.opt_hir_node_by_def_id(local_id) {
Some(Node::Crate(r#mod)) => {
root_mod = ItemKind::Mod(r#mod);
&root_mod
@@ -711,15 +709,13 @@ pub fn get_trait_def_id(cx: &LateContext<'_>, path: &[&str]) -> Option<DefId> {
/// ```
pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'tcx>, def_id: LocalDefId) -> Option<&'tcx TraitRef<'tcx>> {
// Get the implemented trait for the current function
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
let parent_impl = cx.tcx.hir().get_parent_item(hir_id);
- if_chain! {
- if parent_impl != hir::CRATE_OWNER_ID;
- if let hir::Node::Item(item) = cx.tcx.hir().get_by_def_id(parent_impl.def_id);
- if let hir::ItemKind::Impl(impl_) = &item.kind;
- then {
- return impl_.of_trait.as_ref();
- }
+ if parent_impl != hir::CRATE_OWNER_ID
+ && let hir::Node::Item(item) = cx.tcx.hir_node_by_def_id(parent_impl.def_id)
+ && let hir::ItemKind::Impl(impl_) = &item.kind
+ {
+ return impl_.of_trait.as_ref();
}
None
}
@@ -823,12 +819,14 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath<
/// Returns true if the expr is equal to `Default::default` when evaluated.
pub fn is_default_equivalent_call(cx: &LateContext<'_>, repl_func: &Expr<'_>) -> bool {
- if_chain! {
- if let hir::ExprKind::Path(ref repl_func_qpath) = repl_func.kind;
- if let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id();
- if is_diag_trait_item(cx, repl_def_id, sym::Default)
- || is_default_equivalent_ctor(cx, repl_def_id, repl_func_qpath);
- then { true } else { false }
+ if let hir::ExprKind::Path(ref repl_func_qpath) = repl_func.kind
+ && let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id()
+ && (is_diag_trait_item(cx, repl_def_id, sym::Default)
+ || is_default_equivalent_ctor(cx, repl_def_id, repl_func_qpath))
+ {
+ true
+ } else {
+ false
}
}
@@ -843,14 +841,14 @@ pub fn is_default_equivalent(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
_ => false,
},
ExprKind::Tup(items) | ExprKind::Array(items) => items.iter().all(|x| is_default_equivalent(cx, x)),
- ExprKind::Repeat(x, ArrayLen::Body(len)) => if_chain! {
- if let ExprKind::Lit(const_lit) = cx.tcx.hir().body(len.body).value.kind;
- if let LitKind::Int(v, _) = const_lit.node;
- if v <= 32 && is_default_equivalent(cx, x);
- then {
+ ExprKind::Repeat(x, ArrayLen::Body(len)) => {
+ if let ExprKind::Lit(const_lit) = cx.tcx.hir().body(len.body).value.kind
+ && let LitKind::Int(v, _) = const_lit.node
+ && v <= 32
+ && is_default_equivalent(cx, x)
+ {
true
- }
- else {
+ } else {
false
}
},
@@ -1244,7 +1242,7 @@ pub fn is_in_panic_handler(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
/// Gets the name of the item the expression is in, if available.
pub fn get_item_name(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<Symbol> {
let parent_id = cx.tcx.hir().get_parent_item(expr.hir_id).def_id;
- match cx.tcx.hir().find_by_def_id(parent_id) {
+ match cx.tcx.opt_hir_node_by_def_id(parent_id) {
Some(
Node::Item(Item { ident, .. })
| Node::TraitItem(TraitItem { ident, .. })
@@ -1321,7 +1319,7 @@ pub fn get_enclosing_block<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Optio
let map = &cx.tcx.hir();
let enclosing_node = map
.get_enclosing_scope(hir_id)
- .and_then(|enclosing_id| map.find(enclosing_id));
+ .and_then(|enclosing_id| cx.tcx.opt_hir_node(enclosing_id));
enclosing_node.and_then(|node| match node {
Node::Block(block) => Some(block),
Node::Item(&Item {
@@ -1489,6 +1487,43 @@ pub fn is_else_clause(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
}
}
+/// Checks if the given expression is a part of `let else`
+/// returns `true` for both the `init` and the `else` part
+pub fn is_inside_let_else(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
+ let mut child_id = expr.hir_id;
+ for (parent_id, node) in tcx.hir().parent_iter(child_id) {
+ if let Node::Local(Local {
+ init: Some(init),
+ els: Some(els),
+ ..
+ }) = node
+ && (init.hir_id == child_id || els.hir_id == child_id)
+ {
+ return true;
+ }
+
+ child_id = parent_id;
+ }
+
+ false
+}
+
+/// Checks if the given expression is the else clause of a `let else` expression
+pub fn is_else_clause_in_let_else(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
+ let mut child_id = expr.hir_id;
+ for (parent_id, node) in tcx.hir().parent_iter(child_id) {
+ if let Node::Local(Local { els: Some(els), .. }) = node
+ && els.hir_id == child_id
+ {
+ return true;
+ }
+
+ child_id = parent_id;
+ }
+
+ false
+}
+
/// Checks whether the given `Expr` is a range equivalent to a `RangeFull`.
/// For the lower bound, this means that:
/// - either there is none
@@ -1632,13 +1667,13 @@ pub fn is_direct_expn_of(span: Span, name: &str) -> Option<Span> {
/// Convenience function to get the return type of a function.
pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId) -> Ty<'tcx> {
let ret_ty = cx.tcx.fn_sig(fn_def_id).instantiate_identity().output();
- cx.tcx.erase_late_bound_regions(ret_ty)
+ cx.tcx.instantiate_bound_regions_with_erased(ret_ty)
}
/// Convenience function to get the nth argument type of a function.
pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId, nth: usize) -> Ty<'tcx> {
let arg = cx.tcx.fn_sig(fn_def_id).instantiate_identity().input(nth);
- cx.tcx.erase_late_bound_regions(arg)
+ cx.tcx.instantiate_bound_regions_with_erased(arg)
}
/// Checks if an expression is constructing a tuple-like enum variant or struct
@@ -1671,7 +1706,7 @@ pub fn is_refutable(cx: &LateContext<'_>, pat: &Pat<'_>) -> bool {
}
match pat.kind {
- PatKind::Wild => false,
+ PatKind::Wild | PatKind::Never => false, // If `!` typechecked then the type is empty, so not refutable.
PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)),
PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
PatKind::Lit(..) | PatKind::Range(..) => true,
@@ -1736,15 +1771,13 @@ pub fn iter_input_pats<'tcx>(decl: &FnDecl<'_>, body: &'tcx Body<'_>) -> impl It
/// operator or the `try` macro.
pub fn is_try<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
fn is_ok(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
- if_chain! {
- if let PatKind::TupleStruct(ref path, pat, ddpos) = arm.pat.kind;
- if ddpos.as_opt_usize().is_none();
- if is_res_lang_ctor(cx, cx.qpath_res(path, arm.pat.hir_id), ResultOk);
- if let PatKind::Binding(_, hir_id, _, None) = pat[0].kind;
- if path_to_local_id(arm.body, hir_id);
- then {
- return true;
- }
+ if let PatKind::TupleStruct(ref path, pat, ddpos) = arm.pat.kind
+ && ddpos.as_opt_usize().is_none()
+ && is_res_lang_ctor(cx, cx.qpath_res(path, arm.pat.hir_id), ResultOk)
+ && let PatKind::Binding(_, hir_id, _, None) = pat[0].kind
+ && path_to_local_id(arm.body, hir_id)
+ {
+ return true;
}
false
}
@@ -1763,14 +1796,12 @@ pub fn is_try<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<&'tc
return Some(expr);
}
- if_chain! {
- if arms.len() == 2;
- if arms[0].guard.is_none();
- if arms[1].guard.is_none();
- if (is_ok(cx, &arms[0]) && is_err(cx, &arms[1])) || (is_ok(cx, &arms[1]) && is_err(cx, &arms[0]));
- then {
- return Some(expr);
- }
+ if arms.len() == 2
+ && arms[0].guard.is_none()
+ && arms[1].guard.is_none()
+ && ((is_ok(cx, &arms[0]) && is_err(cx, &arms[1])) || (is_ok(cx, &arms[1]) && is_err(cx, &arms[0])))
+ {
+ return Some(expr);
}
}
@@ -1887,14 +1918,12 @@ pub fn match_function_call<'tcx>(
expr: &'tcx Expr<'_>,
path: &[&str],
) -> Option<&'tcx [Expr<'tcx>]> {
- if_chain! {
- if let ExprKind::Call(fun, args) = expr.kind;
- if let ExprKind::Path(ref qpath) = fun.kind;
- if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
- if match_def_path(cx, fun_def_id, path);
- then {
- return Some(args);
- }
+ if let ExprKind::Call(fun, args) = expr.kind
+ && let ExprKind::Path(ref qpath) = fun.kind
+ && let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id()
+ && match_def_path(cx, fun_def_id, path)
+ {
+ return Some(args);
};
None
}
@@ -1904,13 +1933,11 @@ pub fn match_function_call_with_def_id<'tcx>(
expr: &'tcx Expr<'_>,
fun_def_id: DefId,
) -> Option<&'tcx [Expr<'tcx>]> {
- if_chain! {
- if let ExprKind::Call(fun, args) = expr.kind;
- if let ExprKind::Path(ref qpath) = fun.kind;
- if cx.qpath_res(qpath, fun.hir_id).opt_def_id() == Some(fun_def_id);
- then {
- return Some(args);
- }
+ if let ExprKind::Call(fun, args) = expr.kind
+ && let ExprKind::Path(ref qpath) = fun.kind
+ && cx.qpath_res(qpath, fun.hir_id).opt_def_id() == Some(fun_def_id)
+ {
+ return Some(args);
};
None
}
@@ -2008,10 +2035,10 @@ pub fn get_async_fn_body<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'t
// check if expr is calling method or function with #[must_use] attribute
pub fn is_must_use_func_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
let did = match expr.kind {
- ExprKind::Call(path, _) => if_chain! {
- if let ExprKind::Path(ref qpath) = path.kind;
- if let def::Res::Def(_, did) = cx.qpath_res(qpath, path.hir_id);
- then {
+ ExprKind::Call(path, _) => {
+ if let ExprKind::Path(ref qpath) = path.kind
+ && let def::Res::Def(_, did) = cx.qpath_res(qpath, path.hir_id)
+ {
Some(did)
} else {
None
@@ -2034,6 +2061,18 @@ pub fn is_must_use_func_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
/// Consider calling [`is_expr_untyped_identity_function`] or [`is_expr_identity_function`] instead.
fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool {
fn check_pat(cx: &LateContext<'_>, pat: &Pat<'_>, expr: &Expr<'_>) -> bool {
+ if cx
+ .typeck_results()
+ .pat_binding_modes()
+ .get(pat.hir_id)
+ .is_some_and(|mode| matches!(mode, BindingMode::BindByReference(_)))
+ {
+ // If a tuple `(x, y)` is of type `&(i32, i32)`, then due to match ergonomics,
+ // the inner patterns become references. Don't consider this the identity function
+ // as that changes types.
+ return false;
+ }
+
match (pat.kind, expr.kind) {
(PatKind::Binding(_, id, _, _), _) => {
path_to_local_id(expr, id) && cx.typeck_results().expr_adjustments(expr).is_empty()
@@ -2071,14 +2110,12 @@ fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool {
},
_,
) => {
- if_chain! {
- if let StmtKind::Semi(e) | StmtKind::Expr(e) = stmt.kind;
- if let ExprKind::Ret(Some(ret_val)) = e.kind;
- then {
- expr = ret_val;
- } else {
- return false;
- }
+ if let StmtKind::Semi(e) | StmtKind::Expr(e) = stmt.kind
+ && let ExprKind::Ret(Some(ret_val)) = e.kind
+ {
+ expr = ret_val;
+ } else {
+ return false;
}
},
_ => return check_pat(cx, param.pat, expr),
@@ -2530,7 +2567,7 @@ pub fn inherits_cfg(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
tcx.has_attr(def_id, sym::cfg)
|| hir
- .parent_iter(hir.local_def_id_to_hir_id(def_id))
+ .parent_iter(tcx.local_def_id_to_hir_id(def_id))
.flat_map(|(parent_id, _)| hir.attrs(parent_id))
.any(|attr| attr.has_name(sym::cfg))
}
@@ -2650,11 +2687,11 @@ impl<'tcx> ExprUseNode<'tcx> {
.and(Binder::dummy(cx.tcx.type_of(id).instantiate_identity())),
)),
Self::Return(id) => {
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(id.def_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(id.def_id);
if let Some(Node::Expr(Expr {
kind: ExprKind::Closure(c),
..
- })) = cx.tcx.hir().find(hir_id)
+ })) = cx.tcx.opt_hir_node(hir_id)
{
match c.fn_decl.output {
FnRetTy::DefaultReturn(_) => None,
@@ -2720,7 +2757,7 @@ pub fn expr_use_ctxt<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> Optio
walk_to_expr_usage(cx, e, &mut |parent, child_id| {
// LocalTableInContext returns the wrong lifetime, so go use `expr_adjustments` instead.
if adjustments.is_empty()
- && let Node::Expr(e) = cx.tcx.hir().get(child_id)
+ && let Node::Expr(e) = cx.tcx.hir_node(child_id)
{
adjustments = cx.typeck_results().expr_adjustments(e);
}
@@ -2974,3 +3011,248 @@ pub fn pat_is_wild<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx PatKind<'_>, body: i
_ => false,
}
}
+
+#[derive(Clone, Copy)]
+pub enum RequiresSemi {
+ Yes,
+ No,
+}
+impl RequiresSemi {
+ pub fn requires_semi(self) -> bool {
+ matches!(self, Self::Yes)
+ }
+}
+
+/// Check if the expression returns `!`, a type coerced from `!`, or could return `!` if the final
+/// expression were turned into a statement.
+#[expect(clippy::too_many_lines)]
+pub fn is_never_expr<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> Option<RequiresSemi> {
+ struct BreakTarget {
+ id: HirId,
+ unused: bool,
+ }
+
+ struct V<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ break_targets: Vec<BreakTarget>,
+ break_targets_for_result_ty: u32,
+ in_final_expr: bool,
+ requires_semi: bool,
+ is_never: bool,
+ }
+
+ impl<'tcx> V<'_, 'tcx> {
+ fn push_break_target(&mut self, id: HirId) {
+ self.break_targets.push(BreakTarget { id, unused: true });
+ self.break_targets_for_result_ty += u32::from(self.in_final_expr);
+ }
+ }
+
+ impl<'tcx> Visitor<'tcx> for V<'_, 'tcx> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ // Note: Part of the complexity here comes from the fact that
+ // coercions are applied to the innermost expression.
+ // e.g. In `let x: u32 = { break () };` the never-to-any coercion
+ // is applied to the break expression. This means we can't just
+ // check the block's type as it will be `u32` despite the fact
+ // that the block always diverges.
+
+ // The rest of the complexity comes from checking blocks which
+ // syntactically return a value, but will always diverge before
+ // reaching that point.
+ // e.g. In `let x = { foo(panic!()) };` the block's type will be the
+ // return type of `foo` even though it will never actually run. This
+ // can be trivially fixed by adding a semicolon after the call, but
+ // we must first detect that a semicolon is needed to make that
+ // suggestion.
+
+ if self.is_never && self.break_targets.is_empty() {
+ if self.in_final_expr && !self.requires_semi {
+ // This expression won't ever run, but we still need to check
+ // if it can affect the type of the final expression.
+ match e.kind {
+ ExprKind::DropTemps(e) => self.visit_expr(e),
+ ExprKind::If(_, then, Some(else_)) => {
+ self.visit_expr(then);
+ self.visit_expr(else_);
+ },
+ ExprKind::Match(_, arms, _) => {
+ for arm in arms {
+ self.visit_expr(arm.body);
+ }
+ },
+ ExprKind::Loop(b, ..) => {
+ self.push_break_target(e.hir_id);
+ self.in_final_expr = false;
+ self.visit_block(b);
+ self.break_targets.pop();
+ },
+ ExprKind::Block(b, _) => {
+ if b.targeted_by_break {
+ self.push_break_target(b.hir_id);
+ self.visit_block(b);
+ self.break_targets.pop();
+ } else {
+ self.visit_block(b);
+ }
+ },
+ _ => {
+ self.requires_semi = !self.cx.typeck_results().expr_ty(e).is_never();
+ },
+ }
+ }
+ return;
+ }
+ match e.kind {
+ ExprKind::DropTemps(e) => self.visit_expr(e),
+ ExprKind::Ret(None) | ExprKind::Continue(_) => self.is_never = true,
+ ExprKind::Ret(Some(e)) | ExprKind::Become(e) => {
+ self.in_final_expr = false;
+ self.visit_expr(e);
+ self.is_never = true;
+ },
+ ExprKind::Break(dest, e) => {
+ if let Some(e) = e {
+ self.in_final_expr = false;
+ self.visit_expr(e);
+ }
+ if let Ok(id) = dest.target_id
+ && let Some((i, target)) = self
+ .break_targets
+ .iter_mut()
+ .enumerate()
+ .find(|(_, target)| target.id == id)
+ {
+ target.unused &= self.is_never;
+ if i < self.break_targets_for_result_ty as usize {
+ self.requires_semi = true;
+ }
+ }
+ self.is_never = true;
+ },
+ ExprKind::If(cond, then, else_) => {
+ let in_final_expr = mem::replace(&mut self.in_final_expr, false);
+ self.visit_expr(cond);
+ self.in_final_expr = in_final_expr;
+
+ if self.is_never {
+ self.visit_expr(then);
+ if let Some(else_) = else_ {
+ self.visit_expr(else_);
+ }
+ } else {
+ self.visit_expr(then);
+ let is_never = mem::replace(&mut self.is_never, false);
+ if let Some(else_) = else_ {
+ self.visit_expr(else_);
+ self.is_never &= is_never;
+ }
+ }
+ },
+ ExprKind::Match(scrutinee, arms, _) => {
+ let in_final_expr = mem::replace(&mut self.in_final_expr, false);
+ self.visit_expr(scrutinee);
+ self.in_final_expr = in_final_expr;
+
+ if self.is_never {
+ for arm in arms {
+ self.visit_arm(arm);
+ }
+ } else {
+ let mut is_never = true;
+ for arm in arms {
+ self.is_never = false;
+ if let Some(guard) = arm.guard {
+ let in_final_expr = mem::replace(&mut self.in_final_expr, false);
+ self.visit_expr(guard.body());
+ self.in_final_expr = in_final_expr;
+ // The compiler doesn't consider diverging guards as causing the arm to diverge.
+ self.is_never = false;
+ }
+ self.visit_expr(arm.body);
+ is_never &= self.is_never;
+ }
+ self.is_never = is_never;
+ }
+ },
+ ExprKind::Loop(b, _, _, _) => {
+ self.push_break_target(e.hir_id);
+ self.in_final_expr = false;
+ self.visit_block(b);
+ self.is_never = self.break_targets.pop().unwrap().unused;
+ },
+ ExprKind::Block(b, _) => {
+ if b.targeted_by_break {
+ self.push_break_target(b.hir_id);
+ self.visit_block(b);
+ self.is_never &= self.break_targets.pop().unwrap().unused;
+ } else {
+ self.visit_block(b);
+ }
+ },
+ _ => {
+ self.in_final_expr = false;
+ walk_expr(self, e);
+ self.is_never |= self.cx.typeck_results().expr_ty(e).is_never();
+ },
+ }
+ }
+
+ fn visit_block(&mut self, b: &'tcx Block<'_>) {
+ let in_final_expr = mem::replace(&mut self.in_final_expr, false);
+ for s in b.stmts {
+ self.visit_stmt(s);
+ }
+ self.in_final_expr = in_final_expr;
+ if let Some(e) = b.expr {
+ self.visit_expr(e);
+ }
+ }
+
+ fn visit_local(&mut self, l: &'tcx Local<'_>) {
+ if let Some(e) = l.init {
+ self.visit_expr(e);
+ }
+ if let Some(else_) = l.els {
+ let is_never = self.is_never;
+ self.visit_block(else_);
+ self.is_never = is_never;
+ }
+ }
+
+ fn visit_arm(&mut self, arm: &Arm<'tcx>) {
+ if let Some(guard) = arm.guard {
+ let in_final_expr = mem::replace(&mut self.in_final_expr, false);
+ self.visit_expr(guard.body());
+ self.in_final_expr = in_final_expr;
+ }
+ self.visit_expr(arm.body);
+ }
+ }
+
+ if cx.typeck_results().expr_ty(e).is_never() {
+ Some(RequiresSemi::No)
+ } else if let ExprKind::Block(b, _) = e.kind
+ && !b.targeted_by_break
+ && b.expr.is_none()
+ {
+ // If a block diverges without a final expression then its type is `!`.
+ None
+ } else {
+ let mut v = V {
+ cx,
+ break_targets: Vec::new(),
+ break_targets_for_result_ty: 0,
+ in_final_expr: true,
+ requires_semi: false,
+ is_never: false,
+ };
+ v.visit_expr(e);
+ v.is_never
+ .then_some(if v.requires_semi && matches!(e.kind, ExprKind::Block(..)) {
+ RequiresSemi::Yes
+ } else {
+ RequiresSemi::No
+ })
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs
index 5bca55437..0a820a175 100644
--- a/src/tools/clippy/clippy_utils/src/paths.rs
+++ b/src/tools/clippy/clippy_utils/src/paths.rs
@@ -32,7 +32,15 @@ pub const FUTURES_IO_ASYNCREADEXT: [&str; 3] = ["futures_util", "io", "AsyncRead
pub const FUTURES_IO_ASYNCWRITEEXT: [&str; 3] = ["futures_util", "io", "AsyncWriteExt"];
pub const HASHMAP_CONTAINS_KEY: [&str; 6] = ["std", "collections", "hash", "map", "HashMap", "contains_key"];
pub const HASHMAP_INSERT: [&str; 6] = ["std", "collections", "hash", "map", "HashMap", "insert"];
+pub const HASHMAP_ITER: [&str; 5] = ["std", "collections", "hash", "map", "Iter"];
+pub const HASHMAP_ITER_MUT: [&str; 5] = ["std", "collections", "hash", "map", "IterMut"];
+pub const HASHMAP_KEYS: [&str; 5] = ["std", "collections", "hash", "map", "Keys"];
+pub const HASHMAP_VALUES: [&str; 5] = ["std", "collections", "hash", "map", "Values"];
+pub const HASHMAP_DRAIN: [&str; 5] = ["std", "collections", "hash", "map", "Drain"];
+pub const HASHMAP_VALUES_MUT: [&str; 5] = ["std", "collections", "hash", "map", "ValuesMut"];
+pub const HASHSET_ITER_TY: [&str; 5] = ["std", "collections", "hash", "set", "Iter"];
pub const HASHSET_ITER: [&str; 6] = ["std", "collections", "hash", "set", "HashSet", "iter"];
+pub const HASHSET_DRAIN: [&str; 5] = ["std", "collections", "hash", "set", "Drain"];
pub const IDENT: [&str; 3] = ["rustc_span", "symbol", "Ident"];
pub const IDENT_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Ident", "as_str"];
pub const INSERT_STR: [&str; 4] = ["alloc", "string", "String", "insert_str"];
@@ -99,3 +107,4 @@ pub const OPTION_UNWRAP: [&str; 4] = ["core", "option", "Option", "unwrap"];
pub const OPTION_EXPECT: [&str; 4] = ["core", "option", "Option", "expect"];
#[expect(clippy::invalid_paths)] // not sure why it thinks this, it works so
pub const BOOL_THEN: [&str; 4] = ["core", "bool", "<impl bool>", "then"];
+pub const ALLOCATOR_GLOBAL: [&str; 3] = ["alloc", "alloc", "Global"];
diff --git a/src/tools/clippy/clippy_utils/src/sugg.rs b/src/tools/clippy/clippy_utils/src/sugg.rs
index db79dd788..9b2bc8df1 100644
--- a/src/tools/clippy/clippy_utils/src/sugg.rs
+++ b/src/tools/clippy/clippy_utils/src/sugg.rs
@@ -159,7 +159,7 @@ impl<'a> Sugg<'a> {
Sugg::BinOp(hirbinop2assignop(op), get_snippet(lhs.span), get_snippet(rhs.span))
},
hir::ExprKind::Binary(op, lhs, rhs) => Sugg::BinOp(
- AssocOp::from_ast_binop(op.node.into()),
+ AssocOp::from_ast_binop(op.node),
get_snippet(lhs.span),
get_snippet(rhs.span),
),
@@ -380,10 +380,7 @@ fn binop_to_string(op: AssocOp, lhs: &str, rhs: &str) -> String {
| AssocOp::NotEqual
| AssocOp::Greater
| AssocOp::GreaterEqual => {
- format!(
- "{lhs} {} {rhs}",
- op.to_ast_binop().expect("Those are AST ops").to_string()
- )
+ format!("{lhs} {} {rhs}", op.to_ast_binop().expect("Those are AST ops").as_str())
},
AssocOp::Assign => format!("{lhs} = {rhs}"),
AssocOp::AssignOp(op) => {
diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs
index 842a206f9..61d0663aa 100644
--- a/src/tools/clippy/clippy_utils/src/ty.rs
+++ b/src/tools/clippy/clippy_utils/src/ty.rs
@@ -214,7 +214,17 @@ pub fn implements_trait<'tcx>(
trait_id: DefId,
args: &[GenericArg<'tcx>],
) -> bool {
- implements_trait_with_env_from_iter(cx.tcx, cx.param_env, ty, trait_id, args.iter().map(|&x| Some(x)))
+ let callee_id = cx
+ .enclosing_body
+ .map(|body| cx.tcx.hir().body_owner(body).owner.to_def_id());
+ implements_trait_with_env_from_iter(
+ cx.tcx,
+ cx.param_env,
+ ty,
+ trait_id,
+ callee_id,
+ args.iter().map(|&x| Some(x)),
+ )
}
/// Same as `implements_trait` but allows using a `ParamEnv` different from the lint context.
@@ -223,9 +233,17 @@ pub fn implements_trait_with_env<'tcx>(
param_env: ParamEnv<'tcx>,
ty: Ty<'tcx>,
trait_id: DefId,
+ callee_id: DefId,
args: &[GenericArg<'tcx>],
) -> bool {
- implements_trait_with_env_from_iter(tcx, param_env, ty, trait_id, args.iter().map(|&x| Some(x)))
+ implements_trait_with_env_from_iter(
+ tcx,
+ param_env,
+ ty,
+ trait_id,
+ Some(callee_id),
+ args.iter().map(|&x| Some(x)),
+ )
}
/// Same as `implements_trait_from_env` but takes the arguments as an iterator.
@@ -234,6 +252,7 @@ pub fn implements_trait_with_env_from_iter<'tcx>(
param_env: ParamEnv<'tcx>,
ty: Ty<'tcx>,
trait_id: DefId,
+ callee_id: Option<DefId>,
args: impl IntoIterator<Item = impl Into<Option<GenericArg<'tcx>>>>,
) -> bool {
// Clippy shouldn't have infer types
@@ -245,20 +264,36 @@ pub fn implements_trait_with_env_from_iter<'tcx>(
}
let infcx = tcx.infer_ctxt().build();
+ let args = args
+ .into_iter()
+ .map(|arg| {
+ arg.into().unwrap_or_else(|| {
+ let orig = TypeVariableOrigin {
+ kind: TypeVariableOriginKind::MiscVariable,
+ span: DUMMY_SP,
+ };
+ infcx.next_ty_var(orig).into()
+ })
+ })
+ .collect::<Vec<_>>();
+
+ // If an effect arg was not specified, we need to specify it.
+ let effect_arg = if tcx
+ .generics_of(trait_id)
+ .host_effect_index
+ .is_some_and(|x| args.get(x - 1).is_none())
+ {
+ Some(GenericArg::from(callee_id.map_or(tcx.consts.true_, |def_id| {
+ tcx.expected_host_effect_param_for_body(def_id)
+ })))
+ } else {
+ None
+ };
+
let trait_ref = TraitRef::new(
tcx,
trait_id,
- Some(GenericArg::from(ty))
- .into_iter()
- .chain(args.into_iter().map(|arg| {
- arg.into().unwrap_or_else(|| {
- let orig = TypeVariableOrigin {
- kind: TypeVariableOriginKind::MiscVariable,
- span: DUMMY_SP,
- };
- infcx.next_ty_var(orig).into()
- })
- })),
+ Some(GenericArg::from(ty)).into_iter().chain(args).chain(effect_arg),
);
debug_assert_matches!(
@@ -694,7 +729,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t
ty::Closure(id, subs) => {
let decl = id
.as_local()
- .and_then(|id| cx.tcx.hir().fn_decl_by_hir_id(cx.tcx.hir().local_def_id_to_hir_id(id)));
+ .and_then(|id| cx.tcx.hir().fn_decl_by_hir_id(cx.tcx.local_def_id_to_hir_id(id)));
Some(ExprFnSig::Closure(decl, subs.as_closure().sig()))
},
ty::FnDef(id, subs) => Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).instantiate(cx.tcx, subs), Some(id))),
@@ -890,7 +925,7 @@ pub fn for_each_top_level_late_bound_region<B>(
impl<'tcx, B, F: FnMut(BoundRegion) -> ControlFlow<B>> TypeVisitor<TyCtxt<'tcx>> for V<F> {
type BreakTy = B;
fn visit_region(&mut self, r: Region<'tcx>) -> ControlFlow<Self::BreakTy> {
- if let RegionKind::ReLateBound(idx, bound) = r.kind()
+ if let RegionKind::ReBound(idx, bound) = r.kind()
&& idx.as_u32() == self.index
{
(self.f)(bound)
@@ -1169,7 +1204,7 @@ pub fn make_normalized_projection<'tcx>(
debug_assert!(
false,
"args contain late-bound region at index `{i}` which can't be normalized.\n\
- use `TyCtxt::erase_late_bound_regions`\n\
+ use `TyCtxt::instantiate_bound_regions_with_erased`\n\
note: arg is `{arg:#?}`",
);
return None;
@@ -1247,7 +1282,7 @@ pub fn make_normalized_projection_with_regions<'tcx>(
debug_assert!(
false,
"args contain late-bound region at index `{i}` which can't be normalized.\n\
- use `TyCtxt::erase_late_bound_regions`\n\
+ use `TyCtxt::instantiate_bound_regions_with_erased`\n\
note: arg is `{arg:#?}`",
);
return None;
@@ -1276,3 +1311,8 @@ pub fn normalize_with_regions<'tcx>(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>
Err(_) => ty,
}
}
+
+/// Checks if the type is `core::mem::ManuallyDrop<_>`
+pub fn is_manually_drop(ty: Ty<'_>) -> bool {
+ ty.ty_adt_def().map_or(false, AdtDef::is_manually_drop)
+}
diff --git a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs
index 76fa15e15..da71fc3aa 100644
--- a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs
+++ b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs
@@ -170,19 +170,18 @@ fn qpath_certainty(cx: &LateContext<'_>, qpath: &QPath<'_>, resolves_to_type: bo
path_segment_certainty(cx, type_certainty(cx, ty), path_segment, resolves_to_type)
},
- QPath::LangItem(lang_item, _, _) => {
- cx.tcx
- .lang_items()
- .get(*lang_item)
- .map_or(Certainty::Uncertain, |def_id| {
- let generics = cx.tcx.generics_of(def_id);
- if generics.parent_count == 0 && generics.params.is_empty() {
- Certainty::Certain(if resolves_to_type { Some(def_id) } else { None })
- } else {
- Certainty::Uncertain
- }
- })
- },
+ QPath::LangItem(lang_item, ..) => cx
+ .tcx
+ .lang_items()
+ .get(*lang_item)
+ .map_or(Certainty::Uncertain, |def_id| {
+ let generics = cx.tcx.generics_of(def_id);
+ if generics.parent_count == 0 && generics.params.is_empty() {
+ Certainty::Certain(if resolves_to_type { Some(def_id) } else { None })
+ } else {
+ Certainty::Uncertain
+ }
+ }),
};
debug_assert!(resolves_to_type || certainty.to_def_id().is_none());
certainty
diff --git a/src/tools/clippy/declare_clippy_lint/Cargo.toml b/src/tools/clippy/declare_clippy_lint/Cargo.toml
index beea9fd00..af123e107 100644
--- a/src/tools/clippy/declare_clippy_lint/Cargo.toml
+++ b/src/tools/clippy/declare_clippy_lint/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "declare_clippy_lint"
-version = "0.1.75"
+version = "0.1.76"
edition = "2021"
publish = false
@@ -8,7 +8,7 @@ publish = false
proc-macro = true
[dependencies]
-itertools = "0.10.1"
+itertools = "0.11"
quote = "1.0.21"
syn = "2.0"
diff --git a/src/tools/clippy/declare_clippy_lint/src/lib.rs b/src/tools/clippy/declare_clippy_lint/src/lib.rs
index dc3037f66..25b2fc939 100644
--- a/src/tools/clippy/declare_clippy_lint/src/lib.rs
+++ b/src/tools/clippy/declare_clippy_lint/src/lib.rs
@@ -148,7 +148,7 @@ pub fn declare_clippy_lint(input: TokenStream) -> TokenStream {
let category_variant = format_ident!("{category}");
let output = quote! {
- declare_tool_lint! {
+ rustc_session::declare_tool_lint! {
#(#attrs)*
pub clippy::#name,
#level,
diff --git a/src/tools/clippy/lintcheck/src/main.rs b/src/tools/clippy/lintcheck/src/main.rs
index 58cb42316..88966b41f 100644
--- a/src/tools/clippy/lintcheck/src/main.rs
+++ b/src/tools/clippy/lintcheck/src/main.rs
@@ -309,7 +309,7 @@ impl Crate {
target_dir_index: &AtomicUsize,
total_crates_to_lint: usize,
config: &LintcheckConfig,
- lint_filter: &Vec<String>,
+ lint_filter: &[String],
server: &Option<LintcheckServer>,
) -> Vec<ClippyWarning> {
// advance the atomic index by one
@@ -367,7 +367,7 @@ impl Crate {
//
// The wrapper is set to the `lintcheck` so we can force enable linting and ignore certain crates
// (see `crate::driver`)
- let status = Command::new("cargo")
+ let status = Command::new(env::var("CARGO").unwrap_or("cargo".into()))
.arg("check")
.arg("--quiet")
.current_dir(&self.path)
@@ -441,7 +441,7 @@ impl Crate {
/// Builds clippy inside the repo to make sure we have a clippy executable we can use.
fn build_clippy() {
- let status = Command::new("cargo")
+ let status = Command::new(env::var("CARGO").unwrap_or("cargo".into()))
.arg("build")
.status()
.expect("Failed to build clippy!");
@@ -728,7 +728,7 @@ fn read_stats_from_file(file_path: &Path) -> HashMap<String, usize> {
}
/// print how lint counts changed between runs
-fn print_stats(old_stats: HashMap<String, usize>, new_stats: HashMap<&String, usize>, lint_filter: &Vec<String>) {
+fn print_stats(old_stats: HashMap<String, usize>, new_stats: HashMap<&String, usize>, lint_filter: &[String]) {
let same_in_both_hashmaps = old_stats
.iter()
.filter(|(old_key, old_val)| new_stats.get::<&String>(old_key) == Some(old_val))
@@ -816,7 +816,7 @@ fn lintcheck_test() {
"--crates-toml",
"lintcheck/test_sources.toml",
];
- let status = std::process::Command::new("cargo")
+ let status = std::process::Command::new(env::var("CARGO").unwrap_or("cargo".into()))
.args(args)
.current_dir("..") // repo root
.status();
diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain
index 293fcbf39..d575da6de 100644
--- a/src/tools/clippy/rust-toolchain
+++ b/src/tools/clippy/rust-toolchain
@@ -1,3 +1,3 @@
[toolchain]
-channel = "nightly-2023-11-02"
+channel = "nightly-2023-12-16"
components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"]
diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs
index 7bb49d08d..b944a2992 100644
--- a/src/tools/clippy/src/driver.rs
+++ b/src/tools/clippy/src/driver.rs
@@ -18,7 +18,7 @@ extern crate rustc_span;
use rustc_interface::interface;
use rustc_session::config::ErrorOutputType;
use rustc_session::parse::ParseSess;
-use rustc_session::EarlyErrorHandler;
+use rustc_session::EarlyDiagCtxt;
use rustc_span::symbol::Symbol;
use std::env;
@@ -148,7 +148,7 @@ impl rustc_driver::Callbacks for ClippyCallbacks {
}
let conf = clippy_config::Conf::read(sess, &conf_path);
- clippy_lints::register_plugins(lint_store, sess, conf);
+ clippy_lints::register_lints(lint_store, conf);
clippy_lints::register_pre_expansion_lints(lint_store, conf);
clippy_lints::register_renamed(lint_store);
}));
@@ -174,16 +174,16 @@ const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust-clippy/issues/ne
#[allow(clippy::too_many_lines)]
#[allow(clippy::ignored_unit_patterns)]
pub fn main() {
- let handler = EarlyErrorHandler::new(ErrorOutputType::default());
+ let early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
- rustc_driver::init_rustc_env_logger(&handler);
+ rustc_driver::init_rustc_env_logger(&early_dcx);
let using_internal_features = rustc_driver::install_ice_hook(BUG_REPORT_URL, |handler| {
// FIXME: this macro calls unwrap internally but is called in a panicking context! It's not
// as simple as moving the call from the hook to main, because `install_ice_hook` doesn't
// accept a generic closure.
let version_info = rustc_tools_util::get_version_info!();
- handler.note_without_error(format!("Clippy version: {version_info}"));
+ handler.note(format!("Clippy version: {version_info}"));
});
exit(rustc_driver::catch_with_exit_code(move || {
diff --git a/src/tools/clippy/src/main.rs b/src/tools/clippy/src/main.rs
index bbf7d22c8..dffa85417 100644
--- a/src/tools/clippy/src/main.rs
+++ b/src/tools/clippy/src/main.rs
@@ -105,7 +105,7 @@ impl ClippyCmd {
}
fn into_std_cmd(self) -> Command {
- let mut cmd = Command::new("cargo");
+ let mut cmd = Command::new(env::var("CARGO").unwrap_or("cargo".into()));
let clippy_args: String = self
.clippy_args
.iter()
diff --git a/src/tools/clippy/tests/headers.rs b/src/tools/clippy/tests/headers.rs
index 7eec9a9cd..d1f986ef5 100644
--- a/src/tools/clippy/tests/headers.rs
+++ b/src/tools/clippy/tests/headers.rs
@@ -12,7 +12,12 @@ fn old_test_headers() {
for entry in WalkDir::new("tests") {
let entry = entry.unwrap();
- if !entry.file_type().is_file() {
+ let is_hidden_file = entry
+ .file_name()
+ .to_str()
+ .expect("non-UTF-8 file name")
+ .starts_with('.');
+ if is_hidden_file || !entry.file_type().is_file() {
continue;
}
diff --git a/src/tools/clippy/tests/integration.rs b/src/tools/clippy/tests/integration.rs
index 031982edb..267f095f9 100644
--- a/src/tools/clippy/tests/integration.rs
+++ b/src/tools/clippy/tests/integration.rs
@@ -69,15 +69,15 @@ fn integration_test() {
// debug:
eprintln!("{stderr}");
- // this is an internal test to make sure we would correctly panic on a delay_span_bug
+ // this is an internal test to make sure we would correctly panic on a span_delayed_bug
if repo_name == "matthiaskrgr/clippy_ci_panic_test" {
// we need to kind of switch around our logic here:
// if we find a panic, everything is fine, if we don't panic, SOMETHING is broken about our testing
- // the repo basically just contains a delay_span_bug that forces rustc/clippy to panic:
+ // the repo basically just contains a span_delayed_bug that forces rustc/clippy to panic:
/*
#![feature(rustc_attrs)]
- #[rustc_error(delay_span_bug_from_inside_query)]
+ #[rustc_error(span_delayed_bug_from_inside_query)]
fn main() {}
*/
@@ -86,7 +86,7 @@ fn integration_test() {
return;
}
- panic!("panic caused by delay_span_bug was NOT detected! Something is broken!");
+ panic!("panic caused by span_delayed_bug was NOT detected! Something is broken!");
}
if let Some(backtrace_start) = stderr.find("error: internal compiler error") {
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/Cargo.stderr b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/Cargo.stderr
index d776feb7f..bdceb7526 100644
--- a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/Cargo.stderr
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/Cargo.stderr
@@ -8,4 +8,4 @@ error: `mod.rs` files are required, found `src/bad.rs`
= note: `-D clippy::self-named-module-files` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::self_named_module_files)]`
-error: could not compile `fail-mod-remap` (bin "fail-mod-remap") due to previous error
+error: could not compile `fail-mod-remap` (bin "fail-mod-remap") due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.stderr b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.stderr
index 22558bc4c..06eaa071e 100644
--- a/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.stderr
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.stderr
@@ -8,4 +8,4 @@ error: `mod.rs` files are not allowed, found `src/bad/mod.rs`
= note: `-D clippy::mod-module-files` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::mod_module_files)]`
-error: could not compile `fail-no-mod` (bin "fail-no-mod") due to previous error
+error: could not compile `fail-no-mod` (bin "fail-no-mod") due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.stderr b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.stderr
index 4beedc108..39f7176ad 100644
--- a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.stderr
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.stderr
@@ -3,4 +3,4 @@ error: multiple versions for dependency `winapi`: 0.2.8, 0.3.9
= note: `-D clippy::multiple-crate-versions` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::multiple_crate_versions)]`
-error: could not compile `multiple_crate_versions` (bin "multiple_crate_versions") due to previous error
+error: could not compile `multiple_crate_versions` (bin "multiple_crate_versions") due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.stderr b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.stderr
index 65a19bb07..a3539051b 100644
--- a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.stderr
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.stderr
@@ -3,4 +3,4 @@ error: wildcard dependency for `regex`
= note: `-D clippy::wildcard-dependencies` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::wildcard_dependencies)]`
-error: could not compile `wildcard_dependencies` (bin "wildcard_dependencies") due to previous error
+error: could not compile `wildcard_dependencies` (bin "wildcard_dependencies") due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr b/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr
index ca26b649f..595e4c138 100644
--- a/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr
+++ b/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr
@@ -18,5 +18,5 @@ LL | #![deny(clippy::internal)]
= note: `#[deny(clippy::default_deprecation_reason)]` implied by `#[deny(clippy::internal)]`
= note: this error originates in the macro `declare_deprecated_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-internal/default_lint.stderr b/src/tools/clippy/tests/ui-internal/default_lint.stderr
index 8961bd462..ab2470210 100644
--- a/src/tools/clippy/tests/ui-internal/default_lint.stderr
+++ b/src/tools/clippy/tests/ui-internal/default_lint.stderr
@@ -17,5 +17,5 @@ LL | #![deny(clippy::internal)]
= note: `#[deny(clippy::default_lint)]` implied by `#[deny(clippy::internal)]`
= note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.rs b/src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.rs
new file mode 100644
index 000000000..3155c0235
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.rs
@@ -0,0 +1,27 @@
+#![feature(rustc_private)]
+
+extern crate rustc_errors;
+extern crate rustc_hir;
+extern crate rustc_lint;
+extern crate rustc_middle;
+
+use rustc_errors::{DiagnosticMessage, MultiSpan};
+use rustc_hir::hir_id::HirId;
+use rustc_lint::{Lint, LintContext};
+use rustc_middle::ty::TyCtxt;
+
+pub fn a(cx: impl LintContext, lint: &'static Lint, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) {
+ cx.struct_span_lint(lint, span, msg, |b| b);
+}
+
+pub fn b(
+ tcx: TyCtxt<'_>,
+ lint: &'static Lint,
+ hir_id: HirId,
+ span: impl Into<MultiSpan>,
+ msg: impl Into<DiagnosticMessage>,
+) {
+ tcx.struct_span_lint_hir(lint, hir_id, span, msg, |b| b);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.stderr b/src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.stderr
new file mode 100644
index 000000000..76c487fb1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/disallow_struct_span_lint.stderr
@@ -0,0 +1,17 @@
+error: use of a disallowed method `rustc_lint::context::LintContext::struct_span_lint`
+ --> $DIR/disallow_struct_span_lint.rs:14:5
+ |
+LL | cx.struct_span_lint(lint, span, msg, |b| b);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::disallowed-methods` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::disallowed_methods)]`
+
+error: use of a disallowed method `rustc_middle::ty::context::TyCtxt::struct_span_lint_hir`
+ --> $DIR/disallow_struct_span_lint.rs:24:5
+ |
+LL | tcx.struct_span_lint_hir(lint, hir_id, span, msg, |b| b);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/if_chain_style.rs b/src/tools/clippy/tests/ui-internal/if_chain_style.rs
deleted file mode 100644
index b462b20e0..000000000
--- a/src/tools/clippy/tests/ui-internal/if_chain_style.rs
+++ /dev/null
@@ -1,97 +0,0 @@
-#![warn(clippy::if_chain_style)]
-#![allow(
- clippy::needless_if,
- clippy::no_effect,
- clippy::nonminimal_bool,
- clippy::missing_clippy_version_attribute
-)]
-
-extern crate if_chain;
-
-use if_chain::if_chain;
-
-fn main() {
- if true {
- let x = "";
- // `if_chain!` inside `if`
- if_chain! {
- if true;
- if true;
- then {}
- }
- }
- if_chain! {
- if true
- // multi-line AND'ed conditions
- && false;
- if let Some(1) = Some(1);
- // `let` before `then`
- let x = "";
- then {
- ();
- }
- }
- if_chain! {
- // single `if` condition
- if true;
- then {
- let x = "";
- // nested if
- if true {}
- }
- }
- if_chain! {
- // starts with `let ..`
- let x = "";
- if let Some(1) = Some(1);
- then {
- let x = "";
- let x = "";
- // nested if_chain!
- if_chain! {
- if true;
- if true;
- then {}
- }
- }
- }
-}
-
-fn negative() {
- if true {
- ();
- if_chain! {
- if true;
- if true;
- then { (); }
- }
- }
- if_chain! {
- if true;
- let x = "";
- if true;
- then { (); }
- }
- if_chain! {
- if true;
- if true;
- then {
- if true { 1 } else { 2 }
- } else {
- 3
- }
- };
- if true {
- if_chain! {
- if true;
- if true;
- then {}
- }
- } else if false {
- if_chain! {
- if true;
- if false;
- then {}
- }
- }
-}
diff --git a/src/tools/clippy/tests/ui-internal/if_chain_style.stderr b/src/tools/clippy/tests/ui-internal/if_chain_style.stderr
deleted file mode 100644
index ea0495532..000000000
--- a/src/tools/clippy/tests/ui-internal/if_chain_style.stderr
+++ /dev/null
@@ -1,86 +0,0 @@
-error: this `if` can be part of the inner `if_chain!`
- --> $DIR/if_chain_style.rs:14:5
- |
-LL | / if true {
-LL | | let x = "";
-LL | | // `if_chain!` inside `if`
-LL | | if_chain! {
-... |
-LL | | }
-LL | | }
- | |_____^
- |
-help: this `let` statement can also be in the `if_chain!`
- --> $DIR/if_chain_style.rs:15:9
- |
-LL | let x = "";
- | ^^^^^^^^^^^
- = note: `-D clippy::if-chain-style` implied by `-D warnings`
- = help: to override `-D warnings` add `#[allow(clippy::if_chain_style)]`
-
-error: `if a && b;` should be `if a; if b;`
- --> $DIR/if_chain_style.rs:24:12
- |
-LL | if true
- | ____________^
-LL | | // multi-line AND'ed conditions
-LL | | && false;
- | |____________________^
-
-error: `let` expression should be inside `then { .. }`
- --> $DIR/if_chain_style.rs:29:9
- |
-LL | let x = "";
- | ^^^^^^^^^^^
-
-error: this `if` can be part of the outer `if_chain!`
- --> $DIR/if_chain_style.rs:40:13
- |
-LL | if true {}
- | ^^^^^^^^^^
- |
-help: this `let` statement can also be in the `if_chain!`
- --> $DIR/if_chain_style.rs:38:13
- |
-LL | let x = "";
- | ^^^^^^^^^^^
-
-error: `if_chain!` only has one `if`
- --> $DIR/if_chain_style.rs:34:5
- |
-LL | / if_chain! {
-LL | | // single `if` condition
-LL | | if true;
-LL | | then {
-... |
-LL | | }
-LL | | }
- | |_____^
- |
- = note: this error originates in the macro `__if_chain` which comes from the expansion of the macro `if_chain` (in Nightly builds, run with -Z macro-backtrace for more info)
-
-error: `let` expression should be above the `if_chain!`
- --> $DIR/if_chain_style.rs:45:9
- |
-LL | let x = "";
- | ^^^^^^^^^^^
-
-error: this `if_chain!` can be merged with the outer `if_chain!`
- --> $DIR/if_chain_style.rs:51:13
- |
-LL | / if_chain! {
-LL | | if true;
-LL | | if true;
-LL | | then {}
-LL | | }
- | |_____________^
- |
-help: these `let` statements can also be in the `if_chain!`
- --> $DIR/if_chain_style.rs:48:13
- |
-LL | / let x = "";
-LL | | let x = "";
- | |_______________________^
-
-error: aborting due to 7 previous errors
-
diff --git a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr
index de04920b8..de55876b1 100644
--- a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr
+++ b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr
@@ -17,5 +17,5 @@ LL | #![deny(clippy::internal)]
= note: `#[deny(clippy::lint_without_lint_pass)]` implied by `#[deny(clippy::internal)]`
= note: this error originates in the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr b/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr
index e41ace472..0d5b01325 100644
--- a/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr
+++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr
@@ -11,5 +11,5 @@ LL | #![deny(clippy::internal)]
| ^^^^^^^^^^^^^^^^
= note: `#[deny(clippy::outer_expn_expn_data)]` implied by `#[deny(clippy::internal)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr b/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr
index f7d53763a..c308b7aa0 100644
--- a/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr
+++ b/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr
@@ -4,5 +4,5 @@ error: error reading Clippy's configuration file: expected `.`, `=`
LL | fn this_is_obviously(not: a, toml: file) {
| ^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr b/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr
index fb0a14081..1bcde2f30 100644
--- a/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr
+++ b/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr
@@ -4,5 +4,5 @@ error: error reading Clippy's configuration file: invalid type: integer `42`, ex
LL | disallowed-names = 42
| ^^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr
index a21952c0e..08fdb2d2d 100644
--- a/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr
+++ b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr
@@ -20,5 +20,5 @@ LL | fn cognitive_complexity() {
= note: `-D clippy::cognitive-complexity` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::cognitive_complexity)]`
-error: aborting due to previous error; 2 warnings emitted
+error: aborting due to 1 previous error; 2 warnings emitted
diff --git a/src/tools/clippy/tests/ui-toml/decimal_literal_representation/decimal_literal_representation.stderr b/src/tools/clippy/tests/ui-toml/decimal_literal_representation/decimal_literal_representation.stderr
index 6f817a3fd..4510275c9 100644
--- a/src/tools/clippy/tests/ui-toml/decimal_literal_representation/decimal_literal_representation.stderr
+++ b/src/tools/clippy/tests/ui-toml/decimal_literal_representation/decimal_literal_representation.stderr
@@ -7,5 +7,5 @@ LL | let _ = 16777215;
= note: `-D clippy::decimal-literal-representation` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::decimal_literal_representation)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/disallowed_names_replace/disallowed_names.stderr b/src/tools/clippy/tests/ui-toml/disallowed_names_replace/disallowed_names.stderr
index d9f25a3ee..a5fece575 100644
--- a/src/tools/clippy/tests/ui-toml/disallowed_names_replace/disallowed_names.stderr
+++ b/src/tools/clippy/tests/ui-toml/disallowed_names_replace/disallowed_names.stderr
@@ -7,5 +7,5 @@ LL | let ducks = ["quack", "quack"];
= note: `-D clippy::disallowed-names` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::disallowed_names)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/disallowed_script_idents/disallowed_script_idents.stderr b/src/tools/clippy/tests/ui-toml/disallowed_script_idents/disallowed_script_idents.stderr
index 31bb5ee35..e83027e4e 100644
--- a/src/tools/clippy/tests/ui-toml/disallowed_script_idents/disallowed_script_idents.stderr
+++ b/src/tools/clippy/tests/ui-toml/disallowed_script_idents/disallowed_script_idents.stderr
@@ -7,5 +7,5 @@ LL | let カウンタ = 10;
= note: `-D clippy::disallowed-script-idents` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::disallowed_script_idents)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr
index 92b035058..877ca726f 100644
--- a/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr
@@ -11,5 +11,5 @@ help: try
LL | /// `TestItemThingyOfCoolness` might sound cool but is not on the list and should be linted.
| ~~~~~~~~~~~~~~~~~~~~~~~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/duplicated_keys/duplicated_keys.stderr b/src/tools/clippy/tests/ui-toml/duplicated_keys/duplicated_keys.stderr
index 7c56dfdb9..3f2086b5e 100644
--- a/src/tools/clippy/tests/ui-toml/duplicated_keys/duplicated_keys.stderr
+++ b/src/tools/clippy/tests/ui-toml/duplicated_keys/duplicated_keys.stderr
@@ -4,5 +4,5 @@ error: error reading Clippy's configuration file: duplicate key `cognitive-compl
LL | cognitive-complexity-threshold = 4
| ^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated/duplicated_keys.stderr b/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated/duplicated_keys.stderr
index 0af8c0add..3c3839633 100644
--- a/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated/duplicated_keys.stderr
+++ b/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated/duplicated_keys.stderr
@@ -10,5 +10,5 @@ warning: error reading Clippy's configuration file: deprecated field `cyclomatic
LL | cyclomatic-complexity-threshold = 3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error; 1 warning emitted
+error: aborting due to 1 previous error; 1 warning emitted
diff --git a/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated_2/duplicated_keys.stderr b/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated_2/duplicated_keys.stderr
index a4b1e9c33..3d37e4daa 100644
--- a/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated_2/duplicated_keys.stderr
+++ b/src/tools/clippy/tests/ui-toml/duplicated_keys_deprecated_2/duplicated_keys.stderr
@@ -10,5 +10,5 @@ warning: error reading Clippy's configuration file: deprecated field `cyclomatic
LL | cyclomatic-complexity-threshold = 3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error; 1 warning emitted
+error: aborting due to 1 previous error; 1 warning emitted
diff --git a/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr b/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr
index 4d9bc9d48..ca96c47b9 100644
--- a/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr
+++ b/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr
@@ -17,5 +17,5 @@ help: consider boxing the large fields to reduce the total size of the enum
LL | B(Box<[u8; 501]>),
| ~~~~~~~~~~~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/excessive_nesting/excessive_nesting.rs b/src/tools/clippy/tests/ui-toml/excessive_nesting/excessive_nesting.rs
index d737a832d..4375f324a 100644
--- a/src/tools/clippy/tests/ui-toml/excessive_nesting/excessive_nesting.rs
+++ b/src/tools/clippy/tests/ui-toml/excessive_nesting/excessive_nesting.rs
@@ -9,7 +9,7 @@
#![allow(clippy::never_loop)]
#![allow(clippy::needless_if)]
#![warn(clippy::excessive_nesting)]
-#![allow(clippy::collapsible_if)]
+#![allow(clippy::collapsible_if, clippy::blocks_in_conditions)]
#[macro_use]
extern crate proc_macros;
diff --git a/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr
index 717a4bbfb..ceec4ea67 100644
--- a/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr
+++ b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr
@@ -8,5 +8,5 @@ LL | fn g(_: bool, _: bool) {}
= note: `-D clippy::fn-params-excessive-bools` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::fn_params_excessive_bools)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr b/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr
index 305e00af2..e0e77bf23 100644
--- a/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr
+++ b/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr
@@ -12,5 +12,5 @@ LL | if x.get() {
= note: `-D clippy::ifs-same-cond` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::ifs_same_cond)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/impl_trait_in_params/impl_trait_in_params.stderr b/src/tools/clippy/tests/ui-toml/impl_trait_in_params/impl_trait_in_params.stderr
index 80c4f5ed4..bb1244ada 100644
--- a/src/tools/clippy/tests/ui-toml/impl_trait_in_params/impl_trait_in_params.stderr
+++ b/src/tools/clippy/tests/ui-toml/impl_trait_in_params/impl_trait_in_params.stderr
@@ -11,5 +11,5 @@ help: add a type parameter
LL | fn t<{ /* Generic name */ }: Trait>(_: impl Trait);
| +++++++++++++++++++++++++++++++
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr
index f127c2408..a76484066 100644
--- a/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr
+++ b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr
@@ -4,5 +4,5 @@ error: error reading Clippy's configuration file: not a valid Rust version
LL | msrv = "invalid.version"
| ^^^^^^^^^^^^^^^^^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr b/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr
index 7a02fcdbd..23c6215f9 100644
--- a/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr
+++ b/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr
@@ -7,5 +7,5 @@ LL | should_warn().await;
= note: `-D clippy::large-futures` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::large_futures)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/large_stack_frames/large_stack_frames.stderr b/src/tools/clippy/tests/ui-toml/large_stack_frames/large_stack_frames.stderr
index 67ee57ab6..5adf66627 100644
--- a/src/tools/clippy/tests/ui-toml/large_stack_frames/large_stack_frames.stderr
+++ b/src/tools/clippy/tests/ui-toml/large_stack_frames/large_stack_frames.stderr
@@ -11,5 +11,5 @@ LL | | }
= note: `-D clippy::large-stack-frames` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::large_stack_frames)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/large_types_passed_by_value/large_types_passed_by_value.stderr b/src/tools/clippy/tests/ui-toml/large_types_passed_by_value/large_types_passed_by_value.stderr
index 6678a2b47..20026d358 100644
--- a/src/tools/clippy/tests/ui-toml/large_types_passed_by_value/large_types_passed_by_value.stderr
+++ b/src/tools/clippy/tests/ui-toml/large_types_passed_by_value/large_types_passed_by_value.stderr
@@ -7,5 +7,5 @@ LL | fn f2(_v: [u8; 513]) {}
= note: `-D clippy::large-types-passed-by-value` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::large_types_passed_by_value)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/manual_let_else/manual_let_else.stderr b/src/tools/clippy/tests/ui-toml/manual_let_else/manual_let_else.stderr
index 5c2c86c37..67647cc5e 100644
--- a/src/tools/clippy/tests/ui-toml/manual_let_else/manual_let_else.stderr
+++ b/src/tools/clippy/tests/ui-toml/manual_let_else/manual_let_else.stderr
@@ -11,5 +11,5 @@ LL | | };
= note: `-D clippy::manual-let-else` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::manual_let_else)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr
index d319e65d0..20ffacd09 100644
--- a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr
+++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr
@@ -18,5 +18,5 @@ help: and replace the index expressions here
LL | println!("{}", slice_7);
| ~~~~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr b/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr
index 5b1f8dbd3..5bf2bcd3b 100644
--- a/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr
+++ b/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr
@@ -7,5 +7,5 @@ LL | let _: Option<u64> = Some(&16).map(|b| *b);
= note: `-D clippy::map-clone` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::map_clone)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/private-doc-errors/clippy.toml b/src/tools/clippy/tests/ui-toml/private-doc-errors/clippy.toml
new file mode 100644
index 000000000..8483b87c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/private-doc-errors/clippy.toml
@@ -0,0 +1 @@
+check-private-items = true
diff --git a/src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.rs b/src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.rs
new file mode 100644
index 000000000..ae4c3f84c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.rs
@@ -0,0 +1,54 @@
+#![deny(
+ clippy::unnecessary_safety_doc,
+ clippy::missing_errors_doc,
+ clippy::missing_panics_doc
+)]
+
+/// This is a private function, skip to match behavior with `missing_safety_doc`.
+///
+/// # Safety
+///
+/// Boo!
+fn you_dont_see_me() {
+ //~^ ERROR: safe function's docs have unnecessary `# Safety` section
+ unimplemented!();
+}
+
+mod private_mod {
+ /// This is public but unexported function.
+ ///
+ /// # Safety
+ ///
+ /// Very safe!
+ pub fn only_crate_wide_accessible() -> Result<(), ()> {
+ //~^ ERROR: safe function's docs have unnecessary `# Safety` section
+ //~| ERROR: docs for function returning `Result` missing `# Errors` section
+ unimplemented!();
+ }
+}
+
+pub struct S;
+
+impl S {
+ /// Private, fine again to stay consistent with `missing_safety_doc`.
+ ///
+ /// # Safety
+ ///
+ /// Unnecessary!
+ fn private(&self) {
+ //~^ ERROR: safe function's docs have unnecessary `# Safety` section
+ //~| ERROR: docs for function which may panic missing `# Panics` section
+ panic!();
+ }
+}
+
+#[doc(hidden)]
+pub mod __macro {
+ pub struct T;
+ impl T {
+ pub unsafe fn f() {}
+ //~^ ERROR: unsafe function's docs miss `# Safety` section
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.stderr b/src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.stderr
new file mode 100644
index 000000000..853367480
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/private-doc-errors/doc_lints.stderr
@@ -0,0 +1,64 @@
+error: safe function's docs have unnecessary `# Safety` section
+ --> $DIR/doc_lints.rs:12:1
+ |
+LL | fn you_dont_see_me() {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/doc_lints.rs:2:5
+ |
+LL | clippy::unnecessary_safety_doc,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: safe function's docs have unnecessary `# Safety` section
+ --> $DIR/doc_lints.rs:23:5
+ |
+LL | pub fn only_crate_wide_accessible() -> Result<(), ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_lints.rs:23:5
+ |
+LL | pub fn only_crate_wide_accessible() -> Result<(), ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/doc_lints.rs:3:5
+ |
+LL | clippy::missing_errors_doc,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: safe function's docs have unnecessary `# Safety` section
+ --> $DIR/doc_lints.rs:38:5
+ |
+LL | fn private(&self) {
+ | ^^^^^^^^^^^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/doc_lints.rs:38:5
+ |
+LL | fn private(&self) {
+ | ^^^^^^^^^^^^^^^^^
+ |
+note: first possible panic found here
+ --> $DIR/doc_lints.rs:41:9
+ |
+LL | panic!();
+ | ^^^^^^^^
+note: the lint level is defined here
+ --> $DIR/doc_lints.rs:4:5
+ |
+LL | clippy::missing_panics_doc
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: unsafe function's docs miss `# Safety` section
+ --> $DIR/doc_lints.rs:49:9
+ |
+LL | pub unsafe fn f() {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::missing-safety-doc` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::missing_safety_doc)]`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/result_large_err/result_large_err.stderr b/src/tools/clippy/tests/ui-toml/result_large_err/result_large_err.stderr
index b0936319d..cc603fc0c 100644
--- a/src/tools/clippy/tests/ui-toml/result_large_err/result_large_err.stderr
+++ b/src/tools/clippy/tests/ui-toml/result_large_err/result_large_err.stderr
@@ -8,5 +8,5 @@ LL | fn f2() -> Result<(), [u8; 512]> {
= note: `-D clippy::result-large-err` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::result_large_err)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/semicolon_block/semicolon_inside_block.stderr b/src/tools/clippy/tests/ui-toml/semicolon_block/semicolon_inside_block.stderr
index ce03d7d75..0542e139b 100644
--- a/src/tools/clippy/tests/ui-toml/semicolon_block/semicolon_inside_block.stderr
+++ b/src/tools/clippy/tests/ui-toml/semicolon_block/semicolon_inside_block.stderr
@@ -15,5 +15,5 @@ LL ~ unit_fn_block();
LL ~ }
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr
index 9237c9c9d..31e0e33a3 100644
--- a/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr
+++ b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr
@@ -10,5 +10,5 @@ LL | | }
= note: `-D clippy::struct-excessive-bools` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::struct_excessive_bools)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
index 2f9eaa517..12828cf9d 100644
--- a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
+++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
@@ -21,6 +21,7 @@ error: error reading Clippy's configuration file: unknown field `foobar`, expect
await-holding-invalid-types
blacklisted-names
cargo-ignore-publish
+ check-private-items
cognitive-complexity-threshold
cyclomatic-complexity-threshold
disallowed-macros
@@ -95,6 +96,7 @@ error: error reading Clippy's configuration file: unknown field `barfoo`, expect
await-holding-invalid-types
blacklisted-names
cargo-ignore-publish
+ check-private-items
cognitive-complexity-threshold
cyclomatic-complexity-threshold
disallowed-macros
diff --git a/src/tools/clippy/tests/ui-toml/too_large_for_stack/boxed_local.stderr b/src/tools/clippy/tests/ui-toml/too_large_for_stack/boxed_local.stderr
index 2859a29f1..54990c352 100644
--- a/src/tools/clippy/tests/ui-toml/too_large_for_stack/boxed_local.stderr
+++ b/src/tools/clippy/tests/ui-toml/too_large_for_stack/boxed_local.stderr
@@ -7,5 +7,5 @@ LL | fn f(x: Box<[u8; 500]>) {}
= note: `-D clippy::boxed-local` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::boxed_local)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/too_large_for_stack/useless_vec.stderr b/src/tools/clippy/tests/ui-toml/too_large_for_stack/useless_vec.stderr
index 923cded5e..5d289db85 100644
--- a/src/tools/clippy/tests/ui-toml/too_large_for_stack/useless_vec.stderr
+++ b/src/tools/clippy/tests/ui-toml/too_large_for_stack/useless_vec.stderr
@@ -7,5 +7,5 @@ LL | let x = vec![0u8; 500];
= note: `-D clippy::useless-vec` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::useless_vec)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/too_many_arguments/too_many_arguments.stderr b/src/tools/clippy/tests/ui-toml/too_many_arguments/too_many_arguments.stderr
index 8b9d159b5..81d9bee73 100644
--- a/src/tools/clippy/tests/ui-toml/too_many_arguments/too_many_arguments.stderr
+++ b/src/tools/clippy/tests/ui-toml/too_many_arguments/too_many_arguments.stderr
@@ -7,5 +7,5 @@ LL | fn too_many(p1: u8, p2: u8, p3: u8, p4: u8, p5: u8, p6: u8, p7: u8, p8: u8,
= note: `-D clippy::too-many-arguments` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::too_many_arguments)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/type_complexity/type_complexity.stderr b/src/tools/clippy/tests/ui-toml/type_complexity/type_complexity.stderr
index 8ca637f72..df824400d 100644
--- a/src/tools/clippy/tests/ui-toml/type_complexity/type_complexity.stderr
+++ b/src/tools/clippy/tests/ui-toml/type_complexity/type_complexity.stderr
@@ -7,5 +7,5 @@ LL | fn f2(_: (u8, (u8, (u8, (u8, (u8, (u8, u8))))))) {}
= note: `-D clippy::type-complexity` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::type_complexity)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/type_repetition_in_bounds/main.stderr b/src/tools/clippy/tests/ui-toml/type_repetition_in_bounds/main.stderr
index 2ae298497..444fbd128 100644
--- a/src/tools/clippy/tests/ui-toml/type_repetition_in_bounds/main.stderr
+++ b/src/tools/clippy/tests/ui-toml/type_repetition_in_bounds/main.stderr
@@ -8,5 +8,5 @@ LL | T: Unpin + PartialEq,
= note: `-D clippy::type-repetition-in-bounds` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::type_repetition_in_bounds)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/unnecessary_box_returns/unnecessary_box_returns.stderr b/src/tools/clippy/tests/ui-toml/unnecessary_box_returns/unnecessary_box_returns.stderr
index df9aa37ac..9a747a19f 100644
--- a/src/tools/clippy/tests/ui-toml/unnecessary_box_returns/unnecessary_box_returns.stderr
+++ b/src/tools/clippy/tests/ui-toml/unnecessary_box_returns/unnecessary_box_returns.stderr
@@ -8,5 +8,5 @@ LL | fn f() -> Box<[u8; 64]> {
= note: `-D clippy::unnecessary-box-returns` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::unnecessary_box_returns)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/verbose_bit_mask/verbose_bit_mask.stderr b/src/tools/clippy/tests/ui-toml/verbose_bit_mask/verbose_bit_mask.stderr
index 7377921b4..5fcc63131 100644
--- a/src/tools/clippy/tests/ui-toml/verbose_bit_mask/verbose_bit_mask.stderr
+++ b/src/tools/clippy/tests/ui-toml/verbose_bit_mask/verbose_bit_mask.stderr
@@ -7,5 +7,5 @@ LL | let _ = v & 0b111111 == 0;
= note: `-D clippy::verbose-bit-mask` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::verbose_bit_mask)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr
index 13ec3a229..f11fda6a0 100644
--- a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr
+++ b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr
@@ -7,5 +7,5 @@ LL | use prelude::*;
= note: `-D clippy::wildcard-imports` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::wildcard_imports)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs b/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs
index d03a577c4..349e81912 100644
--- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs
+++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs
@@ -33,16 +33,16 @@ fn main() {
let _ = Arc::new(42);
let _ = Arc::new(RefCell::new(42));
- //~^ ERROR: usage of an `Arc` that is not `Send` or `Sync`
+ //~^ ERROR: usage of an `Arc` that is not `Send` and `Sync`
//~| NOTE: the trait `Sync` is not implemented for `RefCell<i32>`
let mutex = Mutex::new(1);
let _ = Arc::new(mutex.lock().unwrap());
- //~^ ERROR: usage of an `Arc` that is not `Send` or `Sync`
+ //~^ ERROR: usage of an `Arc` that is not `Send` and `Sync`
//~| NOTE: the trait `Send` is not implemented for `MutexGuard<'_, i32>`
let _ = Arc::new(&42 as *const i32);
- //~^ ERROR: usage of an `Arc` that is not `Send` or `Sync`
+ //~^ ERROR: usage of an `Arc` that is not `Send` and `Sync`
//~| NOTE: the trait `Send` is not implemented for `*const i32`
//~| NOTE: the trait `Sync` is not implemented for `*const i32`
}
diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr
index fd239580d..a7f91abda 100644
--- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr
+++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr
@@ -1,35 +1,41 @@
-error: usage of an `Arc` that is not `Send` or `Sync`
+error: usage of an `Arc` that is not `Send` and `Sync`
--> $DIR/arc_with_non_send_sync.rs:35:13
|
LL | let _ = Arc::new(RefCell::new(42));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = note: the trait `Sync` is not implemented for `RefCell<i32>`
- = note: required for `Arc<RefCell<i32>>` to implement `Send` and `Sync`
- = help: consider using an `Rc` instead or wrapping the inner type with a `Mutex`
+ = note: `Arc<RefCell<i32>>` is not `Send` and `Sync` as:
+ = note: - the trait `Sync` is not implemented for `RefCell<i32>`
+ = help: consider using an `Rc` instead. `Arc` does not provide benefits for non `Send` and `Sync` types
+ = note: if you intend to use `Arc` with `Send` and `Sync` traits
+ = note: wrap the inner type with a `Mutex` or implement `Send` and `Sync` for `RefCell<i32>`
= note: `-D clippy::arc-with-non-send-sync` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::arc_with_non_send_sync)]`
-error: usage of an `Arc` that is not `Send` or `Sync`
+error: usage of an `Arc` that is not `Send` and `Sync`
--> $DIR/arc_with_non_send_sync.rs:40:13
|
LL | let _ = Arc::new(mutex.lock().unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = note: the trait `Send` is not implemented for `MutexGuard<'_, i32>`
- = note: required for `Arc<MutexGuard<'_, i32>>` to implement `Send` and `Sync`
- = help: consider using an `Rc` instead or wrapping the inner type with a `Mutex`
+ = note: `Arc<MutexGuard<'_, i32>>` is not `Send` and `Sync` as:
+ = note: - the trait `Send` is not implemented for `MutexGuard<'_, i32>`
+ = help: consider using an `Rc` instead. `Arc` does not provide benefits for non `Send` and `Sync` types
+ = note: if you intend to use `Arc` with `Send` and `Sync` traits
+ = note: wrap the inner type with a `Mutex` or implement `Send` and `Sync` for `MutexGuard<'_, i32>`
-error: usage of an `Arc` that is not `Send` or `Sync`
+error: usage of an `Arc` that is not `Send` and `Sync`
--> $DIR/arc_with_non_send_sync.rs:44:13
|
LL | let _ = Arc::new(&42 as *const i32);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = note: the trait `Send` is not implemented for `*const i32`
- = note: the trait `Sync` is not implemented for `*const i32`
- = note: required for `Arc<*const i32>` to implement `Send` and `Sync`
- = help: consider using an `Rc` instead or wrapping the inner type with a `Mutex`
+ = note: `Arc<*const i32>` is not `Send` and `Sync` as:
+ = note: - the trait `Send` is not implemented for `*const i32`
+ = note: - the trait `Sync` is not implemented for `*const i32`
+ = help: consider using an `Rc` instead. `Arc` does not provide benefits for non `Send` and `Sync` types
+ = note: if you intend to use `Arc` with `Send` and `Sync` traits
+ = note: wrap the inner type with a `Mutex` or implement `Send` and `Sync` for `*const i32`
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/attrs.rs b/src/tools/clippy/tests/ui/attrs.rs
index 05ee48d17..da96eabed 100644
--- a/src/tools/clippy/tests/ui/attrs.rs
+++ b/src/tools/clippy/tests/ui/attrs.rs
@@ -36,6 +36,9 @@ pub const ANOTHER_CONST: u8 = 23;
#[deprecated(since = "0.1.1")]
pub const YET_ANOTHER_CONST: u8 = 0;
+#[deprecated(since = "TBD")]
+pub const GONNA_DEPRECATE_THIS_LATER: u8 = 0;
+
fn main() {
test_attr_lint();
if false {
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed
index 8cbadc67d..8c77039b3 100644
--- a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed
@@ -1,5 +1,5 @@
#![deny(clippy::bind_instead_of_map)]
-#![allow(clippy::blocks_in_if_conditions)]
+#![allow(clippy::blocks_in_conditions)]
pub fn main() {
let _ = Some("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs
index 91d9d11e3..44257f3a4 100644
--- a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs
@@ -1,5 +1,5 @@
#![deny(clippy::bind_instead_of_map)]
-#![allow(clippy::blocks_in_if_conditions)]
+#![allow(clippy::blocks_in_conditions)]
pub fn main() {
let _ = Some("42").and_then(|s| if s.len() < 42 { Some(0) } else { Some(s.len()) });
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed b/src/tools/clippy/tests/ui/blocks_in_conditions.fixed
index f89c46504..2ab441bbd 100644
--- a/src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed
+++ b/src/tools/clippy/tests/ui/blocks_in_conditions.fixed
@@ -1,4 +1,4 @@
-#![warn(clippy::blocks_in_if_conditions)]
+#![warn(clippy::blocks_in_conditions)]
#![allow(unused, clippy::let_and_return, clippy::needless_if)]
#![warn(clippy::nonminimal_bool)]
@@ -21,6 +21,7 @@ fn macro_if() {
fn condition_has_block() -> i32 {
let res = {
+ //~^ ERROR: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
let x = 3;
x == 3
}; if res {
@@ -32,6 +33,7 @@ fn condition_has_block() -> i32 {
fn condition_has_block_with_single_expression() -> i32 {
if true { 6 } else { 10 }
+ //~^ ERROR: omit braces around single expression condition
}
fn condition_is_normal() -> i32 {
@@ -61,4 +63,26 @@ fn block_in_assert() {
);
}
+// issue #11814
+fn block_in_match_expr(num: i32) -> i32 {
+ let res = {
+ //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ let opt = Some(2);
+ opt
+ }; match res {
+ Some(0) => 1,
+ Some(n) => num * 2,
+ None => 0,
+ };
+
+ match unsafe {
+ let hearty_hearty_hearty = vec![240, 159, 146, 150];
+ String::from_utf8_unchecked(hearty_hearty_hearty).as_str()
+ } {
+ "💖" => 1,
+ "what" => 2,
+ _ => 3,
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions.rs b/src/tools/clippy/tests/ui/blocks_in_conditions.rs
index 34febc5fa..dd5ae4fb4 100644
--- a/src/tools/clippy/tests/ui/blocks_in_if_conditions.rs
+++ b/src/tools/clippy/tests/ui/blocks_in_conditions.rs
@@ -1,4 +1,4 @@
-#![warn(clippy::blocks_in_if_conditions)]
+#![warn(clippy::blocks_in_conditions)]
#![allow(unused, clippy::let_and_return, clippy::needless_if)]
#![warn(clippy::nonminimal_bool)]
@@ -21,6 +21,7 @@ fn macro_if() {
fn condition_has_block() -> i32 {
if {
+ //~^ ERROR: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
let x = 3;
x == 3
} {
@@ -32,6 +33,7 @@ fn condition_has_block() -> i32 {
fn condition_has_block_with_single_expression() -> i32 {
if { true } { 6 } else { 10 }
+ //~^ ERROR: omit braces around single expression condition
}
fn condition_is_normal() -> i32 {
@@ -61,4 +63,26 @@ fn block_in_assert() {
);
}
+// issue #11814
+fn block_in_match_expr(num: i32) -> i32 {
+ match {
+ //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ let opt = Some(2);
+ opt
+ } {
+ Some(0) => 1,
+ Some(n) => num * 2,
+ None => 0,
+ };
+
+ match unsafe {
+ let hearty_hearty_hearty = vec![240, 159, 146, 150];
+ String::from_utf8_unchecked(hearty_hearty_hearty).as_str()
+ } {
+ "💖" => 1,
+ "what" => 2,
+ _ => 3,
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr b/src/tools/clippy/tests/ui/blocks_in_conditions.stderr
index d80ef9c0f..b00fe2f63 100644
--- a/src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr
+++ b/src/tools/clippy/tests/ui/blocks_in_conditions.stderr
@@ -1,30 +1,32 @@
error: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
- --> $DIR/blocks_in_if_conditions.rs:23:5
+ --> $DIR/blocks_in_conditions.rs:23:5
|
LL | / if {
+LL | |
LL | | let x = 3;
LL | | x == 3
LL | | } {
| |_____^
|
- = note: `-D clippy::blocks-in-if-conditions` implied by `-D warnings`
- = help: to override `-D warnings` add `#[allow(clippy::blocks_in_if_conditions)]`
+ = note: `-D clippy::blocks-in-conditions` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::blocks_in_conditions)]`
help: try
|
LL ~ let res = {
+LL +
LL + let x = 3;
LL + x == 3
LL ~ }; if res {
|
error: omit braces around single expression condition
- --> $DIR/blocks_in_if_conditions.rs:34:8
+ --> $DIR/blocks_in_conditions.rs:35:8
|
LL | if { true } { 6 } else { 10 }
| ^^^^^^^^ help: try: `true`
error: this boolean expression can be simplified
- --> $DIR/blocks_in_if_conditions.rs:39:8
+ --> $DIR/blocks_in_conditions.rs:41:8
|
LL | if true && x == 3 { 6 } else { 10 }
| ^^^^^^^^^^^^^^ help: try: `x == 3`
@@ -32,5 +34,24 @@ LL | if true && x == 3 { 6 } else { 10 }
= note: `-D clippy::nonminimal-bool` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::nonminimal_bool)]`
-error: aborting due to 3 previous errors
+error: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ --> $DIR/blocks_in_conditions.rs:68:5
+ |
+LL | / match {
+LL | |
+LL | | let opt = Some(2);
+LL | | opt
+LL | | } {
+ | |_____^
+ |
+help: try
+ |
+LL ~ let res = {
+LL +
+LL + let opt = Some(2);
+LL + opt
+LL ~ }; match res {
+ |
+
+error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs b/src/tools/clippy/tests/ui/blocks_in_conditions_closure.rs
index 539f2df15..db31e4ae1 100644
--- a/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs
+++ b/src/tools/clippy/tests/ui/blocks_in_conditions_closure.rs
@@ -1,4 +1,4 @@
-#![warn(clippy::blocks_in_if_conditions)]
+#![warn(clippy::blocks_in_conditions)]
#![allow(
unused,
clippy::let_and_return,
@@ -22,7 +22,7 @@ fn pred_test() {
&& predicate(
|x| {
//~^ ERROR: in an `if` condition, avoid complex blocks or closures with blocks
- //~| NOTE: `-D clippy::blocks-in-if-conditions` implied by `-D warnings`
+ //~| NOTE: `-D clippy::blocks-in-conditions` implied by `-D warnings`
let target = 3;
x == target
},
@@ -60,6 +60,23 @@ fn function_with_empty_closure() {
if closure(|| {}) {}
}
+// issue #11814
+fn match_with_pred() {
+ let v = 3;
+ match Some(predicate(
+ |x| {
+ //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks
+ let target = 3;
+ x == target
+ },
+ v,
+ )) {
+ Some(true) => 1,
+ Some(false) => 2,
+ None => 3,
+ };
+}
+
#[rustfmt::skip]
fn main() {
let mut range = 0..10;
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr b/src/tools/clippy/tests/ui/blocks_in_conditions_closure.stderr
index ab68997d4..08b98f1b4 100644
--- a/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr
+++ b/src/tools/clippy/tests/ui/blocks_in_conditions_closure.stderr
@@ -1,5 +1,5 @@
error: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
- --> $DIR/blocks_in_if_conditions_closure.rs:23:17
+ --> $DIR/blocks_in_conditions_closure.rs:23:17
|
LL | |x| {
| _________________^
@@ -10,11 +10,11 @@ LL | | x == target
LL | | },
| |_____________^
|
- = note: `-D clippy::blocks-in-if-conditions` implied by `-D warnings`
- = help: to override `-D warnings` add `#[allow(clippy::blocks_in_if_conditions)]`
+ = note: `-D clippy::blocks-in-conditions` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::blocks_in_conditions)]`
error: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
- --> $DIR/blocks_in_if_conditions_closure.rs:34:13
+ --> $DIR/blocks_in_conditions_closure.rs:34:13
|
LL | |x| {
| _____________^
@@ -24,5 +24,16 @@ LL | | x == target
LL | | },
| |_________^
-error: aborting due to 2 previous errors
+error: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ --> $DIR/blocks_in_conditions_closure.rs:67:13
+ |
+LL | |x| {
+ | _____________^
+LL | |
+LL | | let target = 3;
+LL | | x == target
+LL | | },
+ | |_________^
+
+error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr b/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr
index 2a21f5ca2..296af6436 100644
--- a/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr
@@ -15,5 +15,5 @@ help: if you would like to deref, try using `&**`
LL | let x: &str = &**s;
| ~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/box_default.fixed b/src/tools/clippy/tests/ui/box_default.fixed
index 69cabcb32..48408e191 100644
--- a/src/tools/clippy/tests/ui/box_default.fixed
+++ b/src/tools/clippy/tests/ui/box_default.fixed
@@ -90,3 +90,17 @@ fn issue_10381() {
assert!(maybe_get_bar(2).is_some());
}
+
+#[allow(unused)]
+fn issue_11868() {
+ fn foo(_: &mut Vec<usize>) {}
+
+ macro_rules! bar {
+ ($baz:expr) => {
+ Box::leak(Box::new($baz))
+ };
+ }
+
+ foo(bar!(vec![]));
+ foo(bar!(vec![1]));
+}
diff --git a/src/tools/clippy/tests/ui/box_default.rs b/src/tools/clippy/tests/ui/box_default.rs
index 48fa8bc33..58b912707 100644
--- a/src/tools/clippy/tests/ui/box_default.rs
+++ b/src/tools/clippy/tests/ui/box_default.rs
@@ -90,3 +90,17 @@ fn issue_10381() {
assert!(maybe_get_bar(2).is_some());
}
+
+#[allow(unused)]
+fn issue_11868() {
+ fn foo(_: &mut Vec<usize>) {}
+
+ macro_rules! bar {
+ ($baz:expr) => {
+ Box::leak(Box::new($baz))
+ };
+ }
+
+ foo(bar!(vec![]));
+ foo(bar!(vec![1]));
+}
diff --git a/src/tools/clippy/tests/ui/cfg_features.fixed b/src/tools/clippy/tests/ui/cfg_features.fixed
index 3d52f2382..0fe38f169 100644
--- a/src/tools/clippy/tests/ui/cfg_features.fixed
+++ b/src/tools/clippy/tests/ui/cfg_features.fixed
@@ -2,16 +2,28 @@
fn main() {
#[cfg(feature = "not-really-a-feature")]
- //~^ ERROR: feature may misspelled as features
+ //~^ ERROR: 'feature' may be misspelled as 'features'
//~| NOTE: `-D clippy::maybe-misused-cfg` implied by `-D warnings`
let _ = 1 + 2;
#[cfg(all(feature = "right", feature = "wrong"))]
- //~^ ERROR: feature may misspelled as features
+ //~^ ERROR: 'feature' may be misspelled as 'features'
let _ = 1 + 2;
#[cfg(all(feature = "wrong1", any(feature = "right", feature = "wrong2", feature, features)))]
- //~^ ERROR: feature may misspelled as features
- //~| ERROR: feature may misspelled as features
+ //~^ ERROR: 'feature' may be misspelled as 'features'
+ //~| ERROR: 'feature' may be misspelled as 'features'
let _ = 1 + 2;
+
+ #[cfg(test)]
+ //~^ ERROR: 'test' may be misspelled as 'tests'
+ let _ = 2;
+ #[cfg(test)]
+ //~^ ERROR: 'test' may be misspelled as 'Test'
+ let _ = 2;
+
+ #[cfg(all(test, test))]
+ //~^ ERROR: 'test' may be misspelled as 'tests'
+ //~| ERROR: 'test' may be misspelled as 'Test'
+ let _ = 2;
}
diff --git a/src/tools/clippy/tests/ui/cfg_features.rs b/src/tools/clippy/tests/ui/cfg_features.rs
index a0344a004..9c0db035e 100644
--- a/src/tools/clippy/tests/ui/cfg_features.rs
+++ b/src/tools/clippy/tests/ui/cfg_features.rs
@@ -2,16 +2,28 @@
fn main() {
#[cfg(features = "not-really-a-feature")]
- //~^ ERROR: feature may misspelled as features
+ //~^ ERROR: 'feature' may be misspelled as 'features'
//~| NOTE: `-D clippy::maybe-misused-cfg` implied by `-D warnings`
let _ = 1 + 2;
#[cfg(all(feature = "right", features = "wrong"))]
- //~^ ERROR: feature may misspelled as features
+ //~^ ERROR: 'feature' may be misspelled as 'features'
let _ = 1 + 2;
#[cfg(all(features = "wrong1", any(feature = "right", features = "wrong2", feature, features)))]
- //~^ ERROR: feature may misspelled as features
- //~| ERROR: feature may misspelled as features
+ //~^ ERROR: 'feature' may be misspelled as 'features'
+ //~| ERROR: 'feature' may be misspelled as 'features'
let _ = 1 + 2;
+
+ #[cfg(tests)]
+ //~^ ERROR: 'test' may be misspelled as 'tests'
+ let _ = 2;
+ #[cfg(Test)]
+ //~^ ERROR: 'test' may be misspelled as 'Test'
+ let _ = 2;
+
+ #[cfg(all(tests, Test))]
+ //~^ ERROR: 'test' may be misspelled as 'tests'
+ //~| ERROR: 'test' may be misspelled as 'Test'
+ let _ = 2;
}
diff --git a/src/tools/clippy/tests/ui/cfg_features.stderr b/src/tools/clippy/tests/ui/cfg_features.stderr
index 401c3e92e..e1593e207 100644
--- a/src/tools/clippy/tests/ui/cfg_features.stderr
+++ b/src/tools/clippy/tests/ui/cfg_features.stderr
@@ -1,29 +1,53 @@
-error: feature may misspelled as features
+error: 'feature' may be misspelled as 'features'
--> $DIR/cfg_features.rs:4:11
|
LL | #[cfg(features = "not-really-a-feature")]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `feature = "not-really-a-feature"`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean: `feature = "not-really-a-feature"`
|
= note: `-D clippy::maybe-misused-cfg` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::maybe_misused_cfg)]`
-error: feature may misspelled as features
+error: 'feature' may be misspelled as 'features'
--> $DIR/cfg_features.rs:9:34
|
LL | #[cfg(all(feature = "right", features = "wrong"))]
- | ^^^^^^^^^^^^^^^^^^ help: use: `feature = "wrong"`
+ | ^^^^^^^^^^^^^^^^^^ help: did you mean: `feature = "wrong"`
-error: feature may misspelled as features
+error: 'feature' may be misspelled as 'features'
--> $DIR/cfg_features.rs:13:15
|
LL | #[cfg(all(features = "wrong1", any(feature = "right", features = "wrong2", feature, features)))]
- | ^^^^^^^^^^^^^^^^^^^ help: use: `feature = "wrong1"`
+ | ^^^^^^^^^^^^^^^^^^^ help: did you mean: `feature = "wrong1"`
-error: feature may misspelled as features
+error: 'feature' may be misspelled as 'features'
--> $DIR/cfg_features.rs:13:59
|
LL | #[cfg(all(features = "wrong1", any(feature = "right", features = "wrong2", feature, features)))]
- | ^^^^^^^^^^^^^^^^^^^ help: use: `feature = "wrong2"`
+ | ^^^^^^^^^^^^^^^^^^^ help: did you mean: `feature = "wrong2"`
-error: aborting due to 4 previous errors
+error: 'test' may be misspelled as 'tests'
+ --> $DIR/cfg_features.rs:18:11
+ |
+LL | #[cfg(tests)]
+ | ^^^^^ help: did you mean: `test`
+
+error: 'test' may be misspelled as 'Test'
+ --> $DIR/cfg_features.rs:21:11
+ |
+LL | #[cfg(Test)]
+ | ^^^^ help: did you mean: `test`
+
+error: 'test' may be misspelled as 'tests'
+ --> $DIR/cfg_features.rs:25:15
+ |
+LL | #[cfg(all(tests, Test))]
+ | ^^^^^ help: did you mean: `test`
+
+error: 'test' may be misspelled as 'Test'
+ --> $DIR/cfg_features.rs:25:22
+ |
+LL | #[cfg(all(tests, Test))]
+ | ^^^^ help: did you mean: `test`
+
+error: aborting due to 8 previous errors
diff --git a/src/tools/clippy/tests/ui/char_lit_as_u8.stderr b/src/tools/clippy/tests/ui/char_lit_as_u8.stderr
index ce1f2f829..22774d2f9 100644
--- a/src/tools/clippy/tests/ui/char_lit_as_u8.stderr
+++ b/src/tools/clippy/tests/ui/char_lit_as_u8.stderr
@@ -8,5 +8,5 @@ LL | let _ = '❤' as u8;
= note: `-D clippy::char-lit-as-u8` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::char_lit_as_u8)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr b/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr
index 9cd25f6fd..b9af72371 100644
--- a/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr
+++ b/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr
@@ -8,5 +8,5 @@ LL | fn kaboom() {
= note: `-D clippy::cognitive-complexity` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::cognitive_complexity)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/copy_iterator.stderr b/src/tools/clippy/tests/ui/copy_iterator.stderr
index 48c3385b6..30535db50 100644
--- a/src/tools/clippy/tests/ui/copy_iterator.stderr
+++ b/src/tools/clippy/tests/ui/copy_iterator.stderr
@@ -14,5 +14,5 @@ LL | | }
= note: `-D clippy::copy-iterator` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::copy_iterator)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-10148.stderr b/src/tools/clippy/tests/ui/crashes/ice-10148.stderr
index 4d436e3aa..ece3e1c39 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-10148.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-10148.stderr
@@ -9,5 +9,5 @@ LL | println!(with_span!(""something ""));
= note: `-D clippy::println-empty-string` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::println_empty_string)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-11422.stderr b/src/tools/clippy/tests/ui/crashes/ice-11422.stderr
index fb80b5b14..b3dcc00f3 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-11422.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-11422.stderr
@@ -12,5 +12,5 @@ LL - fn gen() -> impl PartialOrd + PartialEq + Debug {}
LL + fn gen() -> impl PartialOrd + Debug {}
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-11803.rs b/src/tools/clippy/tests/ui/crashes/ice-11803.rs
new file mode 100644
index 000000000..1bb8bf0c7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-11803.rs
@@ -0,0 +1,9 @@
+//@no-rustfix
+
+#![warn(clippy::impl_trait_in_params)]
+
+pub fn g<T: IntoIterator<Item = impl Iterator<Item = impl Clone>>>() {
+ extern "C" fn implementation_detail() {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-11803.stderr b/src/tools/clippy/tests/ui/crashes/ice-11803.stderr
new file mode 100644
index 000000000..b8289048a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-11803.stderr
@@ -0,0 +1,26 @@
+error: `impl Trait` used as a function parameter
+ --> $DIR/ice-11803.rs:5:54
+ |
+LL | pub fn g<T: IntoIterator<Item = impl Iterator<Item = impl Clone>>>() {
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::impl-trait-in-params` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::impl_trait_in_params)]`
+help: add a type parameter
+ |
+LL | pub fn g<T: IntoIterator<Item = impl Iterator<Item = impl Clone>>, { /* Generic name */ }: Clone>() {
+ | +++++++++++++++++++++++++++++++
+
+error: `impl Trait` used as a function parameter
+ --> $DIR/ice-11803.rs:5:33
+ |
+LL | pub fn g<T: IntoIterator<Item = impl Iterator<Item = impl Clone>>>() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: add a type parameter
+ |
+LL | pub fn g<T: IntoIterator<Item = impl Iterator<Item = impl Clone>>, { /* Generic name */ }: Iterator<Item = impl Clone>>() {
+ | +++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2774.stderr b/src/tools/clippy/tests/ui/crashes/ice-2774.stderr
index ae9610c9a..188a59850 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-2774.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-2774.stderr
@@ -12,5 +12,5 @@ LL - pub fn add_barfoos_to_foos<'a>(bars: &HashSet<&'a Bar>) {
LL + pub fn add_barfoos_to_foos(bars: &HashSet<&Bar>) {
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3717.stderr b/src/tools/clippy/tests/ui/crashes/ice-3717.stderr
index 4d3d617b6..863608fca 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-3717.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-3717.stderr
@@ -18,5 +18,5 @@ help: ...and use generic constructor
LL | let _: HashSet<usize> = HashSet::default();
| ~~~~~~~~~~~~~~~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3891.stderr b/src/tools/clippy/tests/ui/crashes/ice-3891.stderr
index 59469ec58..5358734fe 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-3891.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-3891.stderr
@@ -6,5 +6,5 @@ LL | 1x;
|
= help: the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5497.stderr b/src/tools/clippy/tests/ui/crashes/ice-5497.stderr
index e75e7dc91..ee69f3379 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-5497.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-5497.stderr
@@ -6,5 +6,5 @@ LL | const OOB: i32 = [1][1] + T::OOB;
|
= note: `#[deny(unconditional_panic)]` on by default
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5835.stderr b/src/tools/clippy/tests/ui/crashes/ice-5835.stderr
index 74d99a348..1f930e1f6 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-5835.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-5835.stderr
@@ -7,5 +7,5 @@ LL | /// 位
= note: `-D clippy::tabs-in-doc-comments` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::tabs_in_doc_comments)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5872.stderr b/src/tools/clippy/tests/ui/crashes/ice-5872.stderr
index 75a26ee31..d0067a223 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-5872.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-5872.stderr
@@ -7,5 +7,5 @@ LL | let _ = vec![1, 2, 3].into_iter().collect::<Vec<_>>().is_empty();
= note: `-D clippy::needless-collect` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_collect)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6254.stderr b/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
index 6ace7dae8..7a34e6cce 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
@@ -11,5 +11,5 @@ LL | FOO_REF_REF => {},
= note: `-D indirect-structural-match` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(indirect_structural_match)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6255.stderr b/src/tools/clippy/tests/ui/crashes/ice-6255.stderr
index db0cb25e3..bc13319be 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6255.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-6255.stderr
@@ -9,5 +9,5 @@ LL | define_other_core!();
|
= note: this error originates in the macro `define_other_core` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6256.stderr b/src/tools/clippy/tests/ui/crashes/ice-6256.stderr
index 671933157..cba6df194 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6256.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-6256.stderr
@@ -9,6 +9,6 @@ LL | let f = |x: &dyn TT| x.func();
| | let's call the lifetime of this reference `'1`
| `x` is a reference that is only valid in the closure body
-error: aborting due to previous error
+error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0521`.
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
index 47947f89b..3126de93d 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
@@ -7,5 +7,5 @@ LL | if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {}
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::redundant_pattern_matching)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7868.stderr b/src/tools/clippy/tests/ui/crashes/ice-7868.stderr
index e5f14f221..3315a8d90 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-7868.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-7868.stderr
@@ -8,5 +8,5 @@ LL | unsafe { 0 };
= note: `-D clippy::undocumented-unsafe-blocks` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::undocumented_unsafe_blocks)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7869.stderr b/src/tools/clippy/tests/ui/crashes/ice-7869.stderr
index 7acace78a..22f2c7e46 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-7869.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-7869.stderr
@@ -13,5 +13,5 @@ LL | | }
= note: `-D clippy::enum-variant-names` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::enum_variant_names)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
index 9c57f3345..397e978af 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
@@ -7,5 +7,5 @@ LL | let _ = s[1..].splitn(2, '.').next()?;
= note: `-D clippy::needless-splitn` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_splitn)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8821.stderr b/src/tools/clippy/tests/ui/crashes/ice-8821.stderr
index c8bd01fb1..94ebb2091 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-8821.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-8821.stderr
@@ -7,5 +7,5 @@ LL | let _: () = FN();
= note: `-D clippy::let-unit-value` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::let_unit_value)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-9041.stderr b/src/tools/clippy/tests/ui/crashes/ice-9041.stderr
index 49c9bdc30..00b65f00d 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-9041.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-9041.stderr
@@ -7,5 +7,5 @@ LL | things.iter().find(|p| is_thing_ready(p)).is_some()
= note: `-D clippy::search-is-some` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::search_is_some)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-9445.stderr b/src/tools/clippy/tests/ui/crashes/ice-9445.stderr
index 9307409ba..f97b4536e 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-9445.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-9445.stderr
@@ -9,5 +9,5 @@ LL | const UNINIT: core::mem::MaybeUninit<core::cell::Cell<&'static ()>> = core:
= note: `-D clippy::declare-interior-mutable-const` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::declare_interior_mutable_const)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/ice-96721.stderr b/src/tools/clippy/tests/ui/crashes/ice-96721.stderr
index 712bd14c6..1741c7c6a 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-96721.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-96721.stderr
@@ -4,5 +4,5 @@ error: malformed `path` attribute input
LL | #[path = foo!()]
| ^^^^^^^^^^^^^^^^ help: must be of the form: `#[path = "file"]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr b/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr
index 37484f5eb..2ebb9d5cd 100644
--- a/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr
+++ b/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr
@@ -15,5 +15,5 @@ LL - fn baz<'a>(&'a self) -> impl Foo + 'a {
LL + fn baz(&self) -> impl Foo + '_ {
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crashes/needless_pass_by_value-w-late-bound.stderr b/src/tools/clippy/tests/ui/crashes/needless_pass_by_value-w-late-bound.stderr
index 6d4539399..b318f8d3f 100644
--- a/src/tools/clippy/tests/ui/crashes/needless_pass_by_value-w-late-bound.stderr
+++ b/src/tools/clippy/tests/ui/crashes/needless_pass_by_value-w-late-bound.stderr
@@ -12,5 +12,5 @@ LL | struct Foo<'a>(&'a [(); 100]);
= note: `-D clippy::needless-pass-by-value` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_pass_by_value)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crate_in_macro_def.stderr b/src/tools/clippy/tests/ui/crate_in_macro_def.stderr
index 3e6246182..1a21d4e92 100644
--- a/src/tools/clippy/tests/ui/crate_in_macro_def.stderr
+++ b/src/tools/clippy/tests/ui/crate_in_macro_def.stderr
@@ -7,5 +7,5 @@ LL | println!("{}", crate::unhygienic::MESSAGE);
= note: `-D clippy::crate-in-macro-def` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::crate_in_macro_def)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr
index 01033246d..7ef8d08d5 100644
--- a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr
+++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr
@@ -11,5 +11,5 @@ LL | | b = a;
= note: `-D clippy::almost-swapped` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::almost_swapped)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr b/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr
index f3ffd6a10..3bc406206 100644
--- a/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr
+++ b/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr
@@ -8,5 +8,5 @@ LL | main();
= note: `-D clippy::main-recursion` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::main_recursion)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/dbg_macro/auxiliary/submodule.rs b/src/tools/clippy/tests/ui/dbg_macro/auxiliary/submodule.rs
new file mode 100644
index 000000000..b1df24737
--- /dev/null
+++ b/src/tools/clippy/tests/ui/dbg_macro/auxiliary/submodule.rs
@@ -0,0 +1,3 @@
+fn f() {
+ dbg!();
+}
diff --git a/src/tools/clippy/tests/ui/dbg_macro.rs b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs
index 149b08476..3f4770c63 100644
--- a/src/tools/clippy/tests/ui/dbg_macro.rs
+++ b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs
@@ -2,10 +2,12 @@
#![warn(clippy::dbg_macro)]
+#[path = "auxiliary/submodule.rs"]
+mod submodule;
+
fn foo(n: u32) -> u32 {
if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
//~^ ERROR: the `dbg!` macro is intended as a debugging tool
- //~| NOTE: `-D clippy::dbg-macro` implied by `-D warnings`
}
fn bar(_: ()) {}
diff --git a/src/tools/clippy/tests/ui/dbg_macro.stderr b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr
index f45a7ba1f..4d00421c7 100644
--- a/src/tools/clippy/tests/ui/dbg_macro.stderr
+++ b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr
@@ -1,18 +1,30 @@
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:6:22
+ --> $DIR/auxiliary/submodule.rs:2:5
|
-LL | if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
- | ^^^^^^^^^^^^^^^^^^^^^^
+LL | dbg!();
+ | ^^^^^^^
|
= note: `-D clippy::dbg-macro` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::dbg_macro)]`
help: remove the invocation before committing it to a version control system
|
+LL - dbg!();
+LL +
+ |
+
+error: the `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:9:22
+ |
+LL | if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the invocation before committing it to a version control system
+ |
LL | if let Some(n) = n.checked_sub(4) { n } else { n }
| ~~~~~~~~~~~~~~~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:13:8
+ --> $DIR/dbg_macro.rs:15:8
|
LL | if dbg!(n <= 1) {
| ^^^^^^^^^^^^
@@ -23,7 +35,7 @@ LL | if n <= 1 {
| ~~~~~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:15:9
+ --> $DIR/dbg_macro.rs:17:9
|
LL | dbg!(1)
| ^^^^^^^
@@ -34,7 +46,7 @@ LL | 1
|
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:18:9
+ --> $DIR/dbg_macro.rs:20:9
|
LL | dbg!(n * factorial(n - 1))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -45,7 +57,7 @@ LL | n * factorial(n - 1)
|
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:24:5
+ --> $DIR/dbg_macro.rs:26:5
|
LL | dbg!(42);
| ^^^^^^^^
@@ -56,7 +68,7 @@ LL | 42;
| ~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:26:5
+ --> $DIR/dbg_macro.rs:28:5
|
LL | dbg!(dbg!(dbg!(42)));
| ^^^^^^^^^^^^^^^^^^^^
@@ -67,7 +79,7 @@ LL | dbg!(dbg!(42));
| ~~~~~~~~~~~~~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:28:14
+ --> $DIR/dbg_macro.rs:30:14
|
LL | foo(3) + dbg!(factorial(4));
| ^^^^^^^^^^^^^^^^^^
@@ -78,7 +90,7 @@ LL | foo(3) + factorial(4);
| ~~~~~~~~~~~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:30:5
+ --> $DIR/dbg_macro.rs:32:5
|
LL | dbg!(1, 2, dbg!(3, 4));
| ^^^^^^^^^^^^^^^^^^^^^^
@@ -89,7 +101,7 @@ LL | (1, 2, dbg!(3, 4));
| ~~~~~~~~~~~~~~~~~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:32:5
+ --> $DIR/dbg_macro.rs:34:5
|
LL | dbg!(1, 2, 3, 4, 5);
| ^^^^^^^^^^^^^^^^^^^
@@ -100,7 +112,7 @@ LL | (1, 2, 3, 4, 5);
| ~~~~~~~~~~~~~~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:53:5
+ --> $DIR/dbg_macro.rs:55:5
|
LL | dbg!();
| ^^^^^^^
@@ -112,7 +124,7 @@ LL +
|
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:56:13
+ --> $DIR/dbg_macro.rs:58:13
|
LL | let _ = dbg!();
| ^^^^^^
@@ -123,7 +135,7 @@ LL | let _ = ();
| ~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:58:9
+ --> $DIR/dbg_macro.rs:60:9
|
LL | bar(dbg!());
| ^^^^^^
@@ -134,7 +146,7 @@ LL | bar(());
| ~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:60:10
+ --> $DIR/dbg_macro.rs:62:10
|
LL | foo!(dbg!());
| ^^^^^^
@@ -145,7 +157,7 @@ LL | foo!(());
| ~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:62:16
+ --> $DIR/dbg_macro.rs:64:16
|
LL | foo2!(foo!(dbg!()));
| ^^^^^^
@@ -156,7 +168,7 @@ LL | foo2!(foo!(()));
| ~~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:84:9
+ --> $DIR/dbg_macro.rs:86:9
|
LL | dbg!(2);
| ^^^^^^^
@@ -167,7 +179,7 @@ LL | 2;
| ~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:91:5
+ --> $DIR/dbg_macro.rs:93:5
|
LL | dbg!(1);
| ^^^^^^^
@@ -178,7 +190,7 @@ LL | 1;
| ~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:97:5
+ --> $DIR/dbg_macro.rs:99:5
|
LL | dbg!(1);
| ^^^^^^^
@@ -189,7 +201,7 @@ LL | 1;
| ~
error: the `dbg!` macro is intended as a debugging tool
- --> $DIR/dbg_macro.rs:104:9
+ --> $DIR/dbg_macro.rs:106:9
|
LL | dbg!(1);
| ^^^^^^^
@@ -199,5 +211,5 @@ help: remove the invocation before committing it to a version control system
LL | 1;
| ~
-error: aborting due to 18 previous errors
+error: aborting due to 19 previous errors
diff --git a/src/tools/clippy/tests/ui/def_id_nocore.stderr b/src/tools/clippy/tests/ui/def_id_nocore.stderr
index bfd0de4e1..6a00331ec 100644
--- a/src/tools/clippy/tests/ui/def_id_nocore.stderr
+++ b/src/tools/clippy/tests/ui/def_id_nocore.stderr
@@ -8,5 +8,5 @@ LL | pub fn as_ref(self) -> &'static str {
= note: `-D clippy::wrong-self-convention` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::wrong_self_convention)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed
index aee897197..708ac6666 100644
--- a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed
+++ b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed
@@ -227,3 +227,6 @@ where [(); N.checked_next_power_of_two().unwrap()]: {
/// this checks if the lowerCamelCase issue is fixed
fn issue_11568() {}
+
+/// There is no try (`do()` or `do_not()`).
+fn parenthesized_word() {}
diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.rs b/src/tools/clippy/tests/ui/doc/doc-fixable.rs
index b6346b881..040d6352c 100644
--- a/src/tools/clippy/tests/ui/doc/doc-fixable.rs
+++ b/src/tools/clippy/tests/ui/doc/doc-fixable.rs
@@ -227,3 +227,6 @@ where [(); N.checked_next_power_of_two().unwrap()]: {
/// this checks if the lowerCamelCase issue is fixed
fn issue_11568() {}
+
+/// There is no try (do() or do_not()).
+fn parenthesized_word() {}
diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.stderr b/src/tools/clippy/tests/ui/doc/doc-fixable.stderr
index 4c9ff41d9..033604e03 100644
--- a/src/tools/clippy/tests/ui/doc/doc-fixable.stderr
+++ b/src/tools/clippy/tests/ui/doc/doc-fixable.stderr
@@ -319,5 +319,27 @@ help: try
LL | /// Foo \[bar\] \[baz\] \[qux\]. `DocMarkdownLint`
| ~~~~~~~~~~~~~~~~~
-error: aborting due to 29 previous errors
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:231:22
+ |
+LL | /// There is no try (do() or do_not()).
+ | ^^^^
+ |
+help: try
+ |
+LL | /// There is no try (`do()` or do_not()).
+ | ~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:231:30
+ |
+LL | /// There is no try (do() or do_not()).
+ | ^^^^^^^^
+ |
+help: try
+ |
+LL | /// There is no try (do() or `do_not()`).
+ | ~~~~~~~~~~
+
+error: aborting due to 31 previous errors
diff --git a/src/tools/clippy/tests/ui/doc_link_with_quotes.rs b/src/tools/clippy/tests/ui/doc_link_with_quotes.rs
index 37d0d1359..48e1b1819 100644
--- a/src/tools/clippy/tests/ui/doc_link_with_quotes.rs
+++ b/src/tools/clippy/tests/ui/doc_link_with_quotes.rs
@@ -11,6 +11,12 @@ pub fn foo() {
bar()
}
+/// Calls ["bar"] uselessly
+//~^ ERROR: possible intra-doc link using quotes instead of backticks
+pub fn foo2() {
+ bar()
+}
+
/// # Examples
/// This demonstrates issue \#8961
/// ```
diff --git a/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr b/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr
index 2db1bc092..cd4f87c56 100644
--- a/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr
+++ b/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr
@@ -7,5 +7,11 @@ LL | /// Calls ['bar'] uselessly
= note: `-D clippy::doc-link-with-quotes` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::doc_link_with_quotes)]`
-error: aborting due to previous error
+error: possible intra-doc link using quotes instead of backticks
+ --> $DIR/doc_link_with_quotes.rs:14:12
+ |
+LL | /// Calls ["bar"] uselessly
+ | ^^^^^
+
+error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/double_neg.stderr b/src/tools/clippy/tests/ui/double_neg.stderr
index a6241c786..a4fa1688d 100644
--- a/src/tools/clippy/tests/ui/double_neg.stderr
+++ b/src/tools/clippy/tests/ui/double_neg.stderr
@@ -7,5 +7,5 @@ LL | --x;
= note: `-D clippy::double-neg` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::double_neg)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr b/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr
index f47f6c896..53ee0c4e8 100644
--- a/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr
+++ b/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr
@@ -7,5 +7,5 @@ LL | fn join_the_dark_side(darth: i32, _darth: i32) {}
= note: `-D clippy::duplicate-underscore-argument` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::duplicate_underscore_argument)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/empty_enum.stderr b/src/tools/clippy/tests/ui/empty_enum.stderr
index 92d81c726..c9bd88764 100644
--- a/src/tools/clippy/tests/ui/empty_enum.stderr
+++ b/src/tools/clippy/tests/ui/empty_enum.stderr
@@ -8,5 +8,5 @@ LL | enum Empty {}
= note: `-D clippy::empty-enum` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::empty_enum)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/entry_btree.stderr b/src/tools/clippy/tests/ui/entry_btree.stderr
index cc0e951d9..63e9a0af8 100644
--- a/src/tools/clippy/tests/ui/entry_btree.stderr
+++ b/src/tools/clippy/tests/ui/entry_btree.stderr
@@ -17,5 +17,5 @@ LL + foo();
LL + }
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/exit1.stderr b/src/tools/clippy/tests/ui/exit1.stderr
index 94d8f1e32..bbe0762c8 100644
--- a/src/tools/clippy/tests/ui/exit1.stderr
+++ b/src/tools/clippy/tests/ui/exit1.stderr
@@ -7,5 +7,5 @@ LL | std::process::exit(4);
= note: `-D clippy::exit` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::exit)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/exit2.stderr b/src/tools/clippy/tests/ui/exit2.stderr
index cd324f182..19abbc606 100644
--- a/src/tools/clippy/tests/ui/exit2.stderr
+++ b/src/tools/clippy/tests/ui/exit2.stderr
@@ -7,5 +7,5 @@ LL | std::process::exit(3);
= note: `-D clippy::exit` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::exit)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.fixed b/src/tools/clippy/tests/ui/explicit_auto_deref.fixed
index 12158d0d1..e6ca4bb66 100644
--- a/src/tools/clippy/tests/ui/explicit_auto_deref.fixed
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.fixed
@@ -301,24 +301,47 @@ fn main() {
};
// Issue #11474
- pub struct Variant {
- pub anonymous: Variant0,
+ #[derive(Clone, Copy)]
+ struct Wrap<T>(T);
+ impl<T> core::ops::Deref for Wrap<T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ &self.0
+ }
}
-
- pub union Variant0 {
- pub anonymous: std::mem::ManuallyDrop<Variant00>,
+ impl<T> core::ops::DerefMut for Wrap<T> {
+ fn deref_mut(&mut self) -> &mut T {
+ &mut self.0
+ }
}
- pub struct Variant00 {
- pub anonymous: Variant000,
+ union U<T: Copy> {
+ u: T,
}
- pub union Variant000 {
- pub val: i32,
+ #[derive(Clone, Copy)]
+ struct S8 {
+ x: &'static str,
}
unsafe {
- let mut p = core::mem::zeroed::<Variant>();
- (*p.anonymous.anonymous).anonymous.val = 1;
+ let mut x = U {
+ u: core::mem::ManuallyDrop::new(S8 { x: "" }),
+ };
+ let _ = &mut (*x.u).x;
+ let _ = &mut { x.u }.x;
+ let _ = &mut ({ *x.u }).x;
+
+ let mut x = U {
+ u: Wrap(core::mem::ManuallyDrop::new(S8 { x: "" })),
+ };
+ let _ = &mut (*x.u).x;
+ let _ = &mut { x.u }.x;
+ let _ = &mut ({ **x.u }).x;
+
+ let mut x = U { u: Wrap(S8 { x: "" }) };
+ let _ = &mut x.u.x;
+ let _ = &mut { x.u }.x;
+ let _ = &mut ({ *x.u }).x;
}
}
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.rs b/src/tools/clippy/tests/ui/explicit_auto_deref.rs
index dec021c18..7531e1f87 100644
--- a/src/tools/clippy/tests/ui/explicit_auto_deref.rs
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.rs
@@ -301,24 +301,47 @@ fn main() {
};
// Issue #11474
- pub struct Variant {
- pub anonymous: Variant0,
+ #[derive(Clone, Copy)]
+ struct Wrap<T>(T);
+ impl<T> core::ops::Deref for Wrap<T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ &self.0
+ }
}
-
- pub union Variant0 {
- pub anonymous: std::mem::ManuallyDrop<Variant00>,
+ impl<T> core::ops::DerefMut for Wrap<T> {
+ fn deref_mut(&mut self) -> &mut T {
+ &mut self.0
+ }
}
- pub struct Variant00 {
- pub anonymous: Variant000,
+ union U<T: Copy> {
+ u: T,
}
- pub union Variant000 {
- pub val: i32,
+ #[derive(Clone, Copy)]
+ struct S8 {
+ x: &'static str,
}
unsafe {
- let mut p = core::mem::zeroed::<Variant>();
- (*p.anonymous.anonymous).anonymous.val = 1;
+ let mut x = U {
+ u: core::mem::ManuallyDrop::new(S8 { x: "" }),
+ };
+ let _ = &mut (*x.u).x;
+ let _ = &mut (*{ x.u }).x;
+ let _ = &mut ({ *x.u }).x;
+
+ let mut x = U {
+ u: Wrap(core::mem::ManuallyDrop::new(S8 { x: "" })),
+ };
+ let _ = &mut (**x.u).x;
+ let _ = &mut (**{ x.u }).x;
+ let _ = &mut ({ **x.u }).x;
+
+ let mut x = U { u: Wrap(S8 { x: "" }) };
+ let _ = &mut (*x.u).x;
+ let _ = &mut (*{ x.u }).x;
+ let _ = &mut ({ *x.u }).x;
}
}
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
index 3d2a7b0d9..cc9eeeb50 100644
--- a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
@@ -241,5 +241,35 @@ error: deref which would be done by auto-deref
LL | Some(x) => &mut *x,
| ^^^^^^^ help: try: `x`
-error: aborting due to 40 previous errors
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:332:22
+ |
+LL | let _ = &mut (*{ x.u }).x;
+ | ^^^^^^^^^^ help: try: `{ x.u }`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:338:22
+ |
+LL | let _ = &mut (**x.u).x;
+ | ^^^^^^^ help: try: `(*x.u)`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:339:22
+ |
+LL | let _ = &mut (**{ x.u }).x;
+ | ^^^^^^^^^^^ help: try: `{ x.u }`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:343:22
+ |
+LL | let _ = &mut (*x.u).x;
+ | ^^^^^^ help: try: `x.u`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:344:22
+ |
+LL | let _ = &mut (*{ x.u }).x;
+ | ^^^^^^^^^^ help: try: `{ x.u }`
+
+error: aborting due to 45 previous errors
diff --git a/src/tools/clippy/tests/ui/filter_map_next.stderr b/src/tools/clippy/tests/ui/filter_map_next.stderr
index 184155391..07760d883 100644
--- a/src/tools/clippy/tests/ui/filter_map_next.stderr
+++ b/src/tools/clippy/tests/ui/filter_map_next.stderr
@@ -14,5 +14,5 @@ LL | | .next();
= note: `-D clippy::filter-map-next` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::filter_map_next)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/floating_point_mul_add.fixed b/src/tools/clippy/tests/ui/floating_point_mul_add.fixed
index a4d6d49e5..3ce2edf2c 100644
--- a/src/tools/clippy/tests/ui/floating_point_mul_add.fixed
+++ b/src/tools/clippy/tests/ui/floating_point_mul_add.fixed
@@ -39,3 +39,21 @@ fn main() {
// Cases where the lint shouldn't be applied
let _ = (a * a + b * b).sqrt();
}
+
+fn _issue11831() {
+ struct NotAFloat;
+
+ impl std::ops::Add<f64> for NotAFloat {
+ type Output = Self;
+
+ fn add(self, _: f64) -> Self {
+ NotAFloat
+ }
+ }
+
+ let a = NotAFloat;
+ let b = 1.0_f64;
+ let c = 1.0;
+
+ let _ = a + b * c;
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_mul_add.rs b/src/tools/clippy/tests/ui/floating_point_mul_add.rs
index 262a20f0f..b5e4a8db4 100644
--- a/src/tools/clippy/tests/ui/floating_point_mul_add.rs
+++ b/src/tools/clippy/tests/ui/floating_point_mul_add.rs
@@ -39,3 +39,21 @@ fn main() {
// Cases where the lint shouldn't be applied
let _ = (a * a + b * b).sqrt();
}
+
+fn _issue11831() {
+ struct NotAFloat;
+
+ impl std::ops::Add<f64> for NotAFloat {
+ type Output = Self;
+
+ fn add(self, _: f64) -> Self {
+ NotAFloat
+ }
+ }
+
+ let a = NotAFloat;
+ let b = 1.0_f64;
+ let c = 1.0;
+
+ let _ = a + b * c;
+}
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr
index afb7c6b4d..f49b7a097 100644
--- a/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr
+++ b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr
@@ -12,5 +12,5 @@ help: make this a doc comment by removing one `/`
LL + /// borked doc comment on the first line. doesn't combust!
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/functions_maxlines.stderr b/src/tools/clippy/tests/ui/functions_maxlines.stderr
index 1d6ddad79..497acc0a6 100644
--- a/src/tools/clippy/tests/ui/functions_maxlines.stderr
+++ b/src/tools/clippy/tests/ui/functions_maxlines.stderr
@@ -13,5 +13,5 @@ LL | | }
= note: `-D clippy::too-many-lines` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::too_many_lines)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.rs b/src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.rs
new file mode 100644
index 000000000..f6ce6153e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.rs
@@ -0,0 +1,136 @@
+#![warn(clippy::impl_hash_borrow_with_str_and_bytes)]
+
+use std::borrow::Borrow;
+use std::hash::{Hash, Hasher};
+
+struct ExampleType {
+ data: String,
+}
+
+impl Hash for ExampleType {
+ //~^ ERROR: can't
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.data.hash(state);
+ }
+}
+
+impl Borrow<str> for ExampleType {
+ fn borrow(&self) -> &str {
+ &self.data
+ }
+}
+
+impl Borrow<[u8]> for ExampleType {
+ fn borrow(&self) -> &[u8] {
+ self.data.as_bytes()
+ }
+}
+
+struct ShouldNotRaiseForHash {}
+impl Hash for ShouldNotRaiseForHash {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ todo!();
+ }
+}
+
+struct ShouldNotRaiseForBorrow {}
+impl Borrow<str> for ShouldNotRaiseForBorrow {
+ fn borrow(&self) -> &str {
+ todo!();
+ }
+}
+impl Borrow<[u8]> for ShouldNotRaiseForBorrow {
+ fn borrow(&self) -> &[u8] {
+ todo!();
+ }
+}
+
+struct ShouldNotRaiseForHashBorrowStr {}
+impl Hash for ShouldNotRaiseForHashBorrowStr {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ todo!();
+ }
+}
+impl Borrow<str> for ShouldNotRaiseForHashBorrowStr {
+ fn borrow(&self) -> &str {
+ todo!();
+ }
+}
+
+struct ShouldNotRaiseForHashBorrowSlice {}
+impl Hash for ShouldNotRaiseForHashBorrowSlice {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ todo!();
+ }
+}
+
+impl Borrow<[u8]> for ShouldNotRaiseForHashBorrowSlice {
+ fn borrow(&self) -> &[u8] {
+ todo!();
+ }
+}
+
+#[derive(Hash)]
+//~^ ERROR: can't
+struct Derived {
+ data: String,
+}
+
+impl Borrow<str> for Derived {
+ fn borrow(&self) -> &str {
+ self.data.as_str()
+ }
+}
+
+impl Borrow<[u8]> for Derived {
+ fn borrow(&self) -> &[u8] {
+ self.data.as_bytes()
+ }
+}
+
+struct GenericExampleType<T> {
+ data: T,
+}
+
+impl<T: Hash> Hash for GenericExampleType<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.data.hash(state);
+ }
+}
+
+impl Borrow<str> for GenericExampleType<String> {
+ fn borrow(&self) -> &str {
+ &self.data
+ }
+}
+
+impl Borrow<[u8]> for GenericExampleType<&'static [u8]> {
+ fn borrow(&self) -> &[u8] {
+ self.data
+ }
+}
+
+struct GenericExampleType2<T> {
+ data: T,
+}
+
+impl Hash for GenericExampleType2<String> {
+ //~^ ERROR: can't
+ // this is correctly throwing an error for generic with concrete impl
+ // for all 3 types
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.data.hash(state);
+ }
+}
+
+impl Borrow<str> for GenericExampleType2<String> {
+ fn borrow(&self) -> &str {
+ &self.data
+ }
+}
+
+impl Borrow<[u8]> for GenericExampleType2<String> {
+ fn borrow(&self) -> &[u8] {
+ self.data.as_bytes()
+ }
+}
diff --git a/src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.stderr b/src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.stderr
new file mode 100644
index 000000000..afc35ef98
--- /dev/null
+++ b/src/tools/clippy/tests/ui/impl_hash_with_borrow_str_and_bytes.stderr
@@ -0,0 +1,41 @@
+error: the semantics of `Borrow<T>` around `Hash` can't be satisfied when both `Borrow<str>` and `Borrow<[u8]>` are implemented
+ --> $DIR/impl_hash_with_borrow_str_and_bytes.rs:10:6
+ |
+LL | impl Hash for ExampleType {
+ | ^^^^
+ |
+ = note: the `Borrow` semantics require that `Hash` must behave the same for all implementations of Borrow<T>
+ = note: however, the hash implementations of a string (`str`) and the bytes of a string `[u8]` do not behave the same ...
+ = note: ... as (`hash("abc") != hash("abc".as_bytes())`
+ = help: consider either removing one of the `Borrow` implementations (`Borrow<str>` or `Borrow<[u8]>`) ...
+ = help: ... or not implementing `Hash` for this type
+ = note: `-D clippy::impl-hash-borrow-with-str-and-bytes` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::impl_hash_borrow_with_str_and_bytes)]`
+
+error: the semantics of `Borrow<T>` around `Hash` can't be satisfied when both `Borrow<str>` and `Borrow<[u8]>` are implemented
+ --> $DIR/impl_hash_with_borrow_str_and_bytes.rs:73:10
+ |
+LL | #[derive(Hash)]
+ | ^^^^
+ |
+ = note: the `Borrow` semantics require that `Hash` must behave the same for all implementations of Borrow<T>
+ = note: however, the hash implementations of a string (`str`) and the bytes of a string `[u8]` do not behave the same ...
+ = note: ... as (`hash("abc") != hash("abc".as_bytes())`
+ = help: consider either removing one of the `Borrow` implementations (`Borrow<str>` or `Borrow<[u8]>`) ...
+ = help: ... or not implementing `Hash` for this type
+ = note: this error originates in the derive macro `Hash` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: the semantics of `Borrow<T>` around `Hash` can't be satisfied when both `Borrow<str>` and `Borrow<[u8]>` are implemented
+ --> $DIR/impl_hash_with_borrow_str_and_bytes.rs:117:6
+ |
+LL | impl Hash for GenericExampleType2<String> {
+ | ^^^^
+ |
+ = note: the `Borrow` semantics require that `Hash` must behave the same for all implementations of Borrow<T>
+ = note: however, the hash implementations of a string (`str`) and the bytes of a string `[u8]` do not behave the same ...
+ = note: ... as (`hash("abc") != hash("abc".as_bytes())`
+ = help: consider either removing one of the `Borrow` implementations (`Borrow<str>` or `Borrow<[u8]>`) ...
+ = help: ... or not implementing `Hash` for this type
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr
index 11b19428b..429861e99 100644
--- a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr
+++ b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr
@@ -18,5 +18,5 @@ help: and replace the index expressions here
LL | println!("{}", slice_0);
| ~~~~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/ineffective_open_options.fixed b/src/tools/clippy/tests/ui/ineffective_open_options.fixed
new file mode 100644
index 000000000..3af8f3c5a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ineffective_open_options.fixed
@@ -0,0 +1,41 @@
+#![warn(clippy::ineffective_open_options)]
+
+use std::fs::OpenOptions;
+
+fn main() {
+ let file = OpenOptions::new()
+ .create(true)
+ //~ ERROR: unnecessary use of `.write(true)`
+ .append(true)
+ .open("dump.json")
+ .unwrap();
+
+ let file = OpenOptions::new()
+ .create(true)
+ .append(true)
+ //~ ERROR: unnecessary use of `.write(true)`
+ .open("dump.json")
+ .unwrap();
+
+ // All the next calls are ok.
+ let file = OpenOptions::new()
+ .create(true)
+ .write(false)
+ .append(true)
+ .open("dump.json")
+ .unwrap();
+ let file = OpenOptions::new()
+ .create(true)
+ .write(true)
+ .append(false)
+ .open("dump.json")
+ .unwrap();
+ let file = OpenOptions::new()
+ .create(true)
+ .write(false)
+ .append(false)
+ .open("dump.json")
+ .unwrap();
+ let file = OpenOptions::new().create(true).append(true).open("dump.json").unwrap();
+ let file = OpenOptions::new().create(true).write(true).open("dump.json").unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/ineffective_open_options.rs b/src/tools/clippy/tests/ui/ineffective_open_options.rs
new file mode 100644
index 000000000..4eaf6293c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ineffective_open_options.rs
@@ -0,0 +1,41 @@
+#![warn(clippy::ineffective_open_options)]
+
+use std::fs::OpenOptions;
+
+fn main() {
+ let file = OpenOptions::new()
+ .create(true)
+ .write(true) //~ ERROR: unnecessary use of `.write(true)`
+ .append(true)
+ .open("dump.json")
+ .unwrap();
+
+ let file = OpenOptions::new()
+ .create(true)
+ .append(true)
+ .write(true) //~ ERROR: unnecessary use of `.write(true)`
+ .open("dump.json")
+ .unwrap();
+
+ // All the next calls are ok.
+ let file = OpenOptions::new()
+ .create(true)
+ .write(false)
+ .append(true)
+ .open("dump.json")
+ .unwrap();
+ let file = OpenOptions::new()
+ .create(true)
+ .write(true)
+ .append(false)
+ .open("dump.json")
+ .unwrap();
+ let file = OpenOptions::new()
+ .create(true)
+ .write(false)
+ .append(false)
+ .open("dump.json")
+ .unwrap();
+ let file = OpenOptions::new().create(true).append(true).open("dump.json").unwrap();
+ let file = OpenOptions::new().create(true).write(true).open("dump.json").unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/ineffective_open_options.stderr b/src/tools/clippy/tests/ui/ineffective_open_options.stderr
new file mode 100644
index 000000000..7dc532223
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ineffective_open_options.stderr
@@ -0,0 +1,17 @@
+error: unnecessary use of `.write(true)` because there is `.append(true)`
+ --> $DIR/ineffective_open_options.rs:8:9
+ |
+LL | .write(true)
+ | ^^^^^^^^^^^^ help: remove `.write(true)`
+ |
+ = note: `-D clippy::ineffective-open-options` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::ineffective_open_options)]`
+
+error: unnecessary use of `.write(true)` because there is `.append(true)`
+ --> $DIR/ineffective_open_options.rs:16:9
+ |
+LL | .write(true)
+ | ^^^^^^^^^^^^ help: remove `.write(true)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed b/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed
index 60304177b..ecf1b14e5 100644
--- a/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed
+++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed
@@ -1,6 +1,6 @@
#![feature(exhaustive_patterns, never_type)]
#![allow(dead_code, unreachable_code, unused_variables)]
-#![allow(clippy::let_and_return)]
+#![allow(clippy::let_and_return, clippy::uninhabited_references)]
enum SingleVariantEnum {
Variant(i32),
diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.rs b/src/tools/clippy/tests/ui/infallible_destructuring_match.rs
index b77aac4a1..7cc7cb9d6 100644
--- a/src/tools/clippy/tests/ui/infallible_destructuring_match.rs
+++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.rs
@@ -1,6 +1,6 @@
#![feature(exhaustive_patterns, never_type)]
#![allow(dead_code, unreachable_code, unused_variables)]
-#![allow(clippy::let_and_return)]
+#![allow(clippy::let_and_return, clippy::uninhabited_references)]
enum SingleVariantEnum {
Variant(i32),
diff --git a/src/tools/clippy/tests/ui/infinite_loops.rs b/src/tools/clippy/tests/ui/infinite_loops.rs
new file mode 100644
index 000000000..646f1eca5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infinite_loops.rs
@@ -0,0 +1,366 @@
+//@no-rustfix
+#![allow(clippy::never_loop)]
+#![warn(clippy::infinite_loop)]
+
+fn do_something() {}
+
+fn no_break() {
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+}
+
+fn all_inf() {
+ loop {
+ //~^ ERROR: infinite loop detected
+ loop {
+ //~^ ERROR: infinite loop detected
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+ }
+ do_something();
+ }
+}
+
+fn no_break_return_some_ty() -> Option<u8> {
+ loop {
+ do_something();
+ return None;
+ }
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+}
+
+fn no_break_never_ret() -> ! {
+ loop {
+ do_something();
+ }
+}
+
+fn no_break_never_ret_noise() {
+ loop {
+ fn inner_fn() -> ! {
+ std::process::exit(0);
+ }
+ do_something();
+ }
+}
+
+fn has_direct_break_1() {
+ loop {
+ do_something();
+ break;
+ }
+}
+
+fn has_direct_break_2() {
+ 'outer: loop {
+ do_something();
+ break 'outer;
+ }
+}
+
+fn has_indirect_break_1(cond: bool) {
+ 'outer: loop {
+ loop {
+ if cond {
+ break 'outer;
+ }
+ }
+ }
+}
+
+fn has_indirect_break_2(stop_num: i32) {
+ 'outer: loop {
+ for x in 0..5 {
+ if x == stop_num {
+ break 'outer;
+ }
+ }
+ }
+}
+
+fn break_inner_but_not_outer_1(cond: bool) {
+ loop {
+ //~^ ERROR: infinite loop detected
+ loop {
+ if cond {
+ break;
+ }
+ }
+ }
+}
+
+fn break_inner_but_not_outer_2(cond: bool) {
+ loop {
+ //~^ ERROR: infinite loop detected
+ 'inner: loop {
+ loop {
+ if cond {
+ break 'inner;
+ }
+ }
+ }
+ }
+}
+
+fn break_outer_but_not_inner() {
+ loop {
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+ break;
+ }
+}
+
+fn can_break_both_inner_and_outer(cond: bool) {
+ 'outer: loop {
+ loop {
+ if cond {
+ break 'outer;
+ } else {
+ break;
+ }
+ }
+ }
+}
+
+fn break_wrong_loop(cond: bool) {
+    // 'inner has a statement to break the 'outer loop, but it was broken out of early by a labeled child loop
+ 'outer: loop {
+ loop {
+ //~^ ERROR: infinite loop detected
+ 'inner: loop {
+ loop {
+ loop {
+ break 'inner;
+ }
+ break 'outer;
+ }
+ }
+ }
+ }
+}
+
+fn has_direct_return(cond: bool) {
+ loop {
+ if cond {
+ return;
+ }
+ }
+}
+
+fn ret_in_inner(cond: bool) {
+ loop {
+ loop {
+ if cond {
+ return;
+ }
+ }
+ }
+}
+
+enum Foo {
+ A,
+ B,
+ C,
+}
+
+fn match_like() {
+ let opt: Option<u8> = Some(1);
+ loop {
+ //~^ ERROR: infinite loop detected
+ match opt {
+ Some(v) => {
+ println!("{v}");
+ },
+ None => {
+ do_something();
+ },
+ }
+ }
+
+ loop {
+ match opt {
+ Some(v) => {
+ println!("{v}");
+ },
+ None => {
+ do_something();
+ break;
+ },
+ }
+ }
+
+ let result: Result<u8, u16> = Ok(1);
+ loop {
+ let _val = match result {
+ Ok(1) => 1 + 1,
+ Ok(v) => v / 2,
+ Err(_) => return,
+ };
+ }
+
+ loop {
+ let Ok(_val) = result else { return };
+ }
+
+ loop {
+ let Ok(_val) = result.map(|v| 10) else { break };
+ }
+
+ loop {
+ //~^ ERROR: infinite loop detected
+ let _x = matches!(result, Ok(v) if v != 0).then_some(0);
+ }
+
+ loop {
+ //~^ ERROR: infinite loop detected
+        // This `return` does not return from the function, so it doesn't count
+ let _x = matches!(result, Ok(v) if v != 0).then(|| {
+ if true {
+ return;
+ }
+ do_something();
+ });
+ }
+
+ let mut val = 0;
+ let mut fooc = Foo::C;
+
+ loop {
+ val = match fooc {
+ Foo::A => 0,
+ Foo::B => {
+ fooc = Foo::C;
+ 1
+ },
+ Foo::C => break,
+ };
+ }
+
+ loop {
+ val = match fooc {
+ Foo::A => 0,
+ Foo::B => 1,
+ Foo::C => {
+ break;
+ },
+ };
+ }
+}
+
+macro_rules! set_or_ret {
+ ($opt:expr, $a:expr) => {{
+ match $opt {
+ Some(val) => $a = val,
+ None => return,
+ }
+ }};
+}
+
+fn ret_in_macro(opt: Option<u8>) {
+ let opt: Option<u8> = Some(1);
+ let mut a: u8 = 0;
+ loop {
+ set_or_ret!(opt, a);
+ }
+
+ let res: Result<bool, u8> = Ok(true);
+ loop {
+ match res {
+ Ok(true) => set_or_ret!(opt, a),
+ _ => do_something(),
+ }
+ }
+}
+
+fn panic_like_macros_1() {
+ loop {
+ do_something();
+ panic!();
+ }
+}
+
+fn panic_like_macros_2() {
+ let mut x = 0;
+
+ loop {
+ do_something();
+ if true {
+ todo!();
+ }
+ }
+ loop {
+ do_something();
+ x += 1;
+ assert_eq!(x, 0);
+ }
+ loop {
+ do_something();
+ assert!(x % 2 == 0);
+ }
+ loop {
+ do_something();
+ match Some(1) {
+ Some(n) => println!("{n}"),
+ None => unreachable!("It won't happen"),
+ }
+ }
+}
+
+fn exit_directly(cond: bool) {
+ loop {
+ if cond {
+ std::process::exit(0);
+ }
+ }
+}
+
+trait MyTrait {
+ fn problematic_trait_method() {
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+ }
+ fn could_be_problematic();
+}
+
+impl MyTrait for String {
+ fn could_be_problematic() {
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+ }
+}
+
+fn inf_loop_in_closure() {
+ let _loop_forever = || {
+ loop {
+ //~^ ERROR: infinite loop detected
+ do_something();
+ }
+ };
+
+ let _somehow_ok = || -> ! {
+ loop {
+ do_something();
+ }
+ };
+}
+
+fn inf_loop_in_res() -> Result<(), i32> {
+ Ok(loop {
+ do_something()
+ })
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/infinite_loops.stderr b/src/tools/clippy/tests/ui/infinite_loops.stderr
new file mode 100644
index 000000000..f58b3cebb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infinite_loops.stderr
@@ -0,0 +1,259 @@
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:8:5
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::infinite-loop` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::infinite_loop)]`
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn no_break() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:15:5
+ |
+LL | / loop {
+LL | |
+LL | | loop {
+LL | |
+... |
+LL | | do_something();
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn all_inf() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:17:9
+ |
+LL | / loop {
+LL | |
+LL | | loop {
+LL | |
+LL | | do_something();
+LL | | }
+LL | | }
+ | |_________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn all_inf() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:19:13
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_____________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn all_inf() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:33:5
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_____^
+ |
+ = help: if this is not intended, try adding a `break` or `return` condition in the loop
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:46:5
+ |
+LL | / loop {
+LL | | fn inner_fn() -> ! {
+LL | | std::process::exit(0);
+LL | | }
+LL | | do_something();
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn no_break_never_ret_noise() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:89:5
+ |
+LL | / loop {
+LL | |
+LL | | loop {
+LL | | if cond {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn break_inner_but_not_outer_1(cond: bool) -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:100:5
+ |
+LL | / loop {
+LL | |
+LL | | 'inner: loop {
+LL | | loop {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn break_inner_but_not_outer_2(cond: bool) -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:114:9
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn break_outer_but_not_inner() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:137:9
+ |
+LL | / loop {
+LL | |
+LL | | 'inner: loop {
+LL | | loop {
+... |
+LL | | }
+LL | | }
+ | |_________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn break_wrong_loop(cond: bool) -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:177:5
+ |
+LL | / loop {
+LL | |
+LL | | match opt {
+LL | | Some(v) => {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn match_like() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:218:5
+ |
+LL | / loop {
+LL | |
+LL | | let _x = matches!(result, Ok(v) if v != 0).then_some(0);
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn match_like() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:223:5
+ |
+LL | / loop {
+LL | |
+LL | | // This `return` does not return the function, so it doesn't count
+LL | | let _x = matches!(result, Ok(v) if v != 0).then(|| {
+... |
+LL | | });
+LL | | }
+ | |_____^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn match_like() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:328:9
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn problematic_trait_method() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:338:9
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | fn could_be_problematic() -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:347:9
+ |
+LL | / loop {
+LL | |
+LL | | do_something();
+LL | | }
+ | |_________^
+ |
+help: if this is intentional, consider specifing `!` as function return
+ |
+LL | let _loop_forever = || -> ! {
+ | ++++
+
+error: infinite loop detected
+ --> $DIR/infinite_loops.rs:361:8
+ |
+LL | Ok(loop {
+ | ________^
+LL | | do_something()
+LL | | })
+ | |_____^
+ |
+ = help: if this is not intended, try adding a `break` or `return` condition in the loop
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inspect_for_each.stderr b/src/tools/clippy/tests/ui/inspect_for_each.stderr
index 80df86ad6..8bd4fe398 100644
--- a/src/tools/clippy/tests/ui/inspect_for_each.stderr
+++ b/src/tools/clippy/tests/ui/inspect_for_each.stderr
@@ -14,5 +14,5 @@ LL | | });
= note: `-D clippy::inspect-for-each` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::inspect_for_each)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/issue-3145.stderr b/src/tools/clippy/tests/ui/issue-3145.stderr
index d7c2c88a2..51debc9b7 100644
--- a/src/tools/clippy/tests/ui/issue-3145.stderr
+++ b/src/tools/clippy/tests/ui/issue-3145.stderr
@@ -4,5 +4,5 @@ error: expected `,`, found `a`
LL | println!("{}" a);
| ^ expected `,`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/issue_2356.stderr b/src/tools/clippy/tests/ui/issue_2356.stderr
index d04b49e52..860c545c7 100644
--- a/src/tools/clippy/tests/ui/issue_2356.stderr
+++ b/src/tools/clippy/tests/ui/issue_2356.stderr
@@ -10,5 +10,5 @@ note: the lint level is defined here
LL | #![deny(clippy::while_let_on_iterator)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/items_after_test_module/in_submodule.stderr b/src/tools/clippy/tests/ui/items_after_test_module/in_submodule.stderr
index 4e9987636..30aa90d29 100644
--- a/src/tools/clippy/tests/ui/items_after_test_module/in_submodule.stderr
+++ b/src/tools/clippy/tests/ui/items_after_test_module/in_submodule.stderr
@@ -10,5 +10,5 @@ LL | fn in_submodule() {}
= note: `-D clippy::items-after-test-module` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::items_after_test_module)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr b/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr
index 67bc82ebf..17b07cc32 100644
--- a/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr
+++ b/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr
@@ -16,5 +16,5 @@ LL | macro_rules! should_lint {
= help: to override `-D warnings` add `#[allow(clippy::items_after_test_module)]`
= help: move the items to before the test module was defined
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/iter_kv_map.fixed b/src/tools/clippy/tests/ui/iter_kv_map.fixed
index 566a5b690..2cbf972fc 100644
--- a/src/tools/clippy/tests/ui/iter_kv_map.fixed
+++ b/src/tools/clippy/tests/ui/iter_kv_map.fixed
@@ -89,3 +89,46 @@ fn main() {
// Don't let a mut interfere.
let _ = map.clone().into_values().count();
}
+
+#[clippy::msrv = "1.53"]
+fn msrv_1_53() {
+ let map: HashMap<u32, u32> = HashMap::new();
+
+ // Don't lint because into_iter is not supported
+ let _ = map.clone().into_iter().map(|(key, _)| key).collect::<Vec<_>>();
+ let _ = map.clone().into_iter().map(|(key, _)| key + 2).collect::<Vec<_>>();
+
+ let _ = map.clone().into_iter().map(|(_, val)| val).collect::<Vec<_>>();
+ let _ = map.clone().into_iter().map(|(_, val)| val + 2).collect::<Vec<_>>();
+
+ // Lint
+ let _ = map.keys().collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+ let _ = map.values().collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+ let _ = map.values().map(|v| v + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+}
+
+#[clippy::msrv = "1.54"]
+fn msrv_1_54() {
+ // Lint all
+ let map: HashMap<u32, u32> = HashMap::new();
+
+ let _ = map.clone().into_keys().collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+ let _ = map.clone().into_keys().map(|key| key + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+
+ let _ = map.clone().into_values().collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+ let _ = map.clone().into_values().map(|val| val + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+
+ let _ = map.keys().collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+ let _ = map.values().collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+ let _ = map.values().map(|v| v + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+}
diff --git a/src/tools/clippy/tests/ui/iter_kv_map.rs b/src/tools/clippy/tests/ui/iter_kv_map.rs
index d85e501da..6a9a4267a 100644
--- a/src/tools/clippy/tests/ui/iter_kv_map.rs
+++ b/src/tools/clippy/tests/ui/iter_kv_map.rs
@@ -93,3 +93,46 @@ fn main() {
// Don't let a mut interfere.
let _ = map.clone().into_iter().map(|(_, mut val)| val).count();
}
+
+#[clippy::msrv = "1.53"]
+fn msrv_1_53() {
+ let map: HashMap<u32, u32> = HashMap::new();
+
+ // Don't lint because into_iter is not supported
+ let _ = map.clone().into_iter().map(|(key, _)| key).collect::<Vec<_>>();
+ let _ = map.clone().into_iter().map(|(key, _)| key + 2).collect::<Vec<_>>();
+
+ let _ = map.clone().into_iter().map(|(_, val)| val).collect::<Vec<_>>();
+ let _ = map.clone().into_iter().map(|(_, val)| val + 2).collect::<Vec<_>>();
+
+ // Lint
+ let _ = map.iter().map(|(key, _)| key).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+ let _ = map.iter().map(|(_, value)| value).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+ let _ = map.iter().map(|(_, v)| v + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+}
+
+#[clippy::msrv = "1.54"]
+fn msrv_1_54() {
+ // Lint all
+ let map: HashMap<u32, u32> = HashMap::new();
+
+ let _ = map.clone().into_iter().map(|(key, _)| key).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+ let _ = map.clone().into_iter().map(|(key, _)| key + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+
+ let _ = map.clone().into_iter().map(|(_, val)| val).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+ let _ = map.clone().into_iter().map(|(_, val)| val + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+
+ let _ = map.iter().map(|(key, _)| key).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's keys
+ let _ = map.iter().map(|(_, value)| value).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+ let _ = map.iter().map(|(_, v)| v + 2).collect::<Vec<_>>();
+ //~^ ERROR: iterating on a map's values
+}
diff --git a/src/tools/clippy/tests/ui/iter_kv_map.stderr b/src/tools/clippy/tests/ui/iter_kv_map.stderr
index 62155b7f8..471615978 100644
--- a/src/tools/clippy/tests/ui/iter_kv_map.stderr
+++ b/src/tools/clippy/tests/ui/iter_kv_map.stderr
@@ -201,5 +201,65 @@ error: iterating on a map's values
LL | let _ = map.clone().into_iter().map(|(_, mut val)| val).count();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.clone().into_values()`
-error: aborting due to 28 previous errors
+error: iterating on a map's keys
+ --> $DIR/iter_kv_map.rs:109:13
+ |
+LL | let _ = map.iter().map(|(key, _)| key).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.keys()`
+
+error: iterating on a map's values
+ --> $DIR/iter_kv_map.rs:111:13
+ |
+LL | let _ = map.iter().map(|(_, value)| value).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.values()`
+
+error: iterating on a map's values
+ --> $DIR/iter_kv_map.rs:113:13
+ |
+LL | let _ = map.iter().map(|(_, v)| v + 2).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.values().map(|v| v + 2)`
+
+error: iterating on a map's keys
+ --> $DIR/iter_kv_map.rs:122:13
+ |
+LL | let _ = map.clone().into_iter().map(|(key, _)| key).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.clone().into_keys()`
+
+error: iterating on a map's keys
+ --> $DIR/iter_kv_map.rs:124:13
+ |
+LL | let _ = map.clone().into_iter().map(|(key, _)| key + 2).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.clone().into_keys().map(|key| key + 2)`
+
+error: iterating on a map's values
+ --> $DIR/iter_kv_map.rs:127:13
+ |
+LL | let _ = map.clone().into_iter().map(|(_, val)| val).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.clone().into_values()`
+
+error: iterating on a map's values
+ --> $DIR/iter_kv_map.rs:129:13
+ |
+LL | let _ = map.clone().into_iter().map(|(_, val)| val + 2).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.clone().into_values().map(|val| val + 2)`
+
+error: iterating on a map's keys
+ --> $DIR/iter_kv_map.rs:132:13
+ |
+LL | let _ = map.iter().map(|(key, _)| key).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.keys()`
+
+error: iterating on a map's values
+ --> $DIR/iter_kv_map.rs:134:13
+ |
+LL | let _ = map.iter().map(|(_, value)| value).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.values()`
+
+error: iterating on a map's values
+ --> $DIR/iter_kv_map.rs:136:13
+ |
+LL | let _ = map.iter().map(|(_, v)| v + 2).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.values().map(|v| v + 2)`
+
+error: aborting due to 38 previous errors
diff --git a/src/tools/clippy/tests/ui/iter_next_loop.stderr b/src/tools/clippy/tests/ui/iter_next_loop.stderr
index 5bba0e635..5871d21e4 100644
--- a/src/tools/clippy/tests/ui/iter_next_loop.stderr
+++ b/src/tools/clippy/tests/ui/iter_next_loop.stderr
@@ -4,6 +4,6 @@ error[E0423]: expected value, found macro `vec`
LL | for _ in vec.iter().next() {}
| ^^^ not a value
-error: aborting due to previous error
+error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0423`.
diff --git a/src/tools/clippy/tests/ui/iter_over_hash_type.rs b/src/tools/clippy/tests/ui/iter_over_hash_type.rs
new file mode 100644
index 000000000..7000c8bf9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_over_hash_type.rs
@@ -0,0 +1,74 @@
+//@aux-build:proc_macros.rs
+#![feature(rustc_private)]
+#![warn(clippy::iter_over_hash_type)]
+use std::collections::{HashMap, HashSet};
+
+extern crate rustc_data_structures;
+
+extern crate proc_macros;
+
+fn main() {
+ let mut hash_set = HashSet::<i32>::new();
+ let mut hash_map = HashMap::<i32, i32>::new();
+ let mut fx_hash_map = rustc_data_structures::fx::FxHashMap::<i32, i32>::default();
+ let mut fx_hash_set = rustc_data_structures::fx::FxHashMap::<i32, i32>::default();
+ let vec = Vec::<i32>::new();
+
+ // test hashset
+ for x in &hash_set {
+ let _ = x;
+ }
+ for x in hash_set.iter() {
+ let _ = x;
+ }
+ for x in hash_set.clone() {
+ let _ = x;
+ }
+ for x in hash_set.drain() {
+ let _ = x;
+ }
+
+ // test hashmap
+ for (x, y) in &hash_map {
+ let _ = (x, y);
+ }
+ for x in hash_map.keys() {
+ let _ = x;
+ }
+ for x in hash_map.values() {
+ let _ = x;
+ }
+ for x in hash_map.values_mut() {
+ *x += 1;
+ }
+ for x in hash_map.iter() {
+ let _ = x;
+ }
+ for x in hash_map.clone() {
+ let _ = x;
+ }
+ for x in hash_map.drain() {
+ let _ = x;
+ }
+
+ // test type-aliased hashers
+ for x in fx_hash_set {
+ let _ = x;
+ }
+ for x in fx_hash_map {
+ let _ = x;
+ }
+
+    // shouldn't fire
+ for x in &vec {
+ let _ = x;
+ }
+ for x in vec {
+ let _ = x;
+ }
+
+ // should not lint, this comes from an external crate
+ proc_macros::external! {
+ for _ in HashMap::<i32, i32>::new() {}
+ }
+}
diff --git a/src/tools/clippy/tests/ui/iter_over_hash_type.stderr b/src/tools/clippy/tests/ui/iter_over_hash_type.stderr
new file mode 100644
index 000000000..cf420fb8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_over_hash_type.stderr
@@ -0,0 +1,109 @@
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:18:5
+ |
+LL | / for x in &hash_set {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::iter-over-hash-type` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::iter_over_hash_type)]`
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:21:5
+ |
+LL | / for x in hash_set.iter() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:24:5
+ |
+LL | / for x in hash_set.clone() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:27:5
+ |
+LL | / for x in hash_set.drain() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:32:5
+ |
+LL | / for (x, y) in &hash_map {
+LL | | let _ = (x, y);
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:35:5
+ |
+LL | / for x in hash_map.keys() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:38:5
+ |
+LL | / for x in hash_map.values() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:41:5
+ |
+LL | / for x in hash_map.values_mut() {
+LL | | *x += 1;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:44:5
+ |
+LL | / for x in hash_map.iter() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:47:5
+ |
+LL | / for x in hash_map.clone() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:50:5
+ |
+LL | / for x in hash_map.drain() {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:55:5
+ |
+LL | / for x in fx_hash_set {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: iteration over unordered hash-based type
+ --> $DIR/iter_over_hash_type.rs:58:5
+ |
+LL | / for x in fx_hash_map {
+LL | | let _ = x;
+LL | | }
+ | |_____^
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/join_absolute_paths.rs b/src/tools/clippy/tests/ui/join_absolute_paths.rs
new file mode 100644
index 000000000..efa77a049
--- /dev/null
+++ b/src/tools/clippy/tests/ui/join_absolute_paths.rs
@@ -0,0 +1,30 @@
+//@no-rustfix
+
+#![allow(clippy::needless_raw_string_hashes)]
+#![warn(clippy::join_absolute_paths)]
+
+use std::path::{Path, PathBuf};
+
+fn main() {
+ let path = Path::new("/bin");
+ path.join("/sh");
+ //~^ ERROR: argument to `Path::join` starts with a path separator
+
+ let path = Path::new("C:\\Users");
+ path.join("\\user");
+ //~^ ERROR: argument to `Path::join` starts with a path separator
+
+ let path = PathBuf::from("/bin");
+ path.join("/sh");
+ //~^ ERROR: argument to `Path::join` starts with a path separator
+
+ let path = PathBuf::from("/bin");
+ path.join(r#"/sh"#);
+ //~^ ERROR: argument to `Path::join` starts with a path separator
+
+ let path: &[&str] = &["/bin"];
+ path.join("/sh");
+
+ let path = Path::new("/bin");
+ path.join("sh");
+}
diff --git a/src/tools/clippy/tests/ui/join_absolute_paths.stderr b/src/tools/clippy/tests/ui/join_absolute_paths.stderr
new file mode 100644
index 000000000..0c2f89d99
--- /dev/null
+++ b/src/tools/clippy/tests/ui/join_absolute_paths.stderr
@@ -0,0 +1,68 @@
+error: argument to `Path::join` starts with a path separator
+ --> $DIR/join_absolute_paths.rs:10:15
+ |
+LL | path.join("/sh");
+ | ^^^^^
+ |
+ = note: joining a path starting with separator will replace the path instead
+ = note: `-D clippy::join-absolute-paths` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::join_absolute_paths)]`
+help: if this is unintentional, try removing the starting separator
+ |
+LL | path.join("sh");
+ | ~~~~
+help: if this is intentional, try using `Path::new` instead
+ |
+LL | PathBuf::from("/sh");
+ | ~~~~~~~~~~~~~~~~~~~~
+
+error: argument to `Path::join` starts with a path separator
+ --> $DIR/join_absolute_paths.rs:14:15
+ |
+LL | path.join("\\user");
+ | ^^^^^^^^
+ |
+ = note: joining a path starting with separator will replace the path instead
+help: if this is unintentional, try removing the starting separator
+ |
+LL | path.join("\user");
+ | ~~~~~~~
+help: if this is intentional, try using `Path::new` instead
+ |
+LL | PathBuf::from("\\user");
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: argument to `Path::join` starts with a path separator
+ --> $DIR/join_absolute_paths.rs:18:15
+ |
+LL | path.join("/sh");
+ | ^^^^^
+ |
+ = note: joining a path starting with separator will replace the path instead
+help: if this is unintentional, try removing the starting separator
+ |
+LL | path.join("sh");
+ | ~~~~
+help: if this is intentional, try using `Path::new` instead
+ |
+LL | PathBuf::from("/sh");
+ | ~~~~~~~~~~~~~~~~~~~~
+
+error: argument to `Path::join` starts with a path separator
+ --> $DIR/join_absolute_paths.rs:22:15
+ |
+LL | path.join(r#"/sh"#);
+ | ^^^^^^^^
+ |
+ = note: joining a path starting with separator will replace the path instead
+help: if this is unintentional, try removing the starting separator
+ |
+LL | path.join(r#"sh"#);
+ | ~~~~~~~
+help: if this is intentional, try using `Path::new` instead
+ |
+LL | PathBuf::from(r#"/sh"#);
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed b/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed
index 74ef6f729..621115cc1 100644
--- a/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed
+++ b/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed
@@ -10,11 +10,17 @@ fn main() -> io::Result<()> {
// Lint
let f = std::fs::File::open("/")?;
BufReader::new(f).lines().map_while(Result::ok).for_each(|_| ());
+ // Lint
+ let f = std::fs::File::open("/")?;
+ BufReader::new(f).lines().map_while(Result::ok).for_each(|_| ());
+
let s = "foo\nbar\nbaz\n";
// Lint
io::stdin().lines().map_while(Result::ok).for_each(|_| ());
// Lint
io::stdin().lines().map_while(Result::ok).for_each(|_| ());
+ // Lint
+ io::stdin().lines().map_while(Result::ok).for_each(|_| ());
// Do not lint (not a `Lines` iterator)
io::stdin()
.lines()
diff --git a/src/tools/clippy/tests/ui/lines_filter_map_ok.rs b/src/tools/clippy/tests/ui/lines_filter_map_ok.rs
index 345f4dc5f..a86efbd66 100644
--- a/src/tools/clippy/tests/ui/lines_filter_map_ok.rs
+++ b/src/tools/clippy/tests/ui/lines_filter_map_ok.rs
@@ -10,11 +10,17 @@ fn main() -> io::Result<()> {
// Lint
let f = std::fs::File::open("/")?;
BufReader::new(f).lines().flat_map(Result::ok).for_each(|_| ());
+ // Lint
+ let f = std::fs::File::open("/")?;
+ BufReader::new(f).lines().flatten().for_each(|_| ());
+
let s = "foo\nbar\nbaz\n";
// Lint
io::stdin().lines().filter_map(Result::ok).for_each(|_| ());
// Lint
io::stdin().lines().filter_map(|x| x.ok()).for_each(|_| ());
+ // Lint
+ io::stdin().lines().flatten().for_each(|_| ());
// Do not lint (not a `Lines` iterator)
io::stdin()
.lines()
diff --git a/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr b/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr
index fa2ba0a9a..9833ab164 100644
--- a/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr
+++ b/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr
@@ -24,29 +24,53 @@ note: this expression returning a `std::io::Lines` may produce an infinite numbe
LL | BufReader::new(f).lines().flat_map(Result::ok).for_each(|_| ());
| ^^^^^^^^^^^^^^^^^^^^^^^^^
+error: `flatten()` will run forever if the iterator repeatedly produces an `Err`
+ --> $DIR/lines_filter_map_ok.rs:15:31
+ |
+LL | BufReader::new(f).lines().flatten().for_each(|_| ());
+ | ^^^^^^^^^ help: replace with: `map_while(Result::ok)`
+ |
+note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
+ --> $DIR/lines_filter_map_ok.rs:15:5
+ |
+LL | BufReader::new(f).lines().flatten().for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
error: `filter_map()` will run forever if the iterator repeatedly produces an `Err`
- --> $DIR/lines_filter_map_ok.rs:15:25
+ --> $DIR/lines_filter_map_ok.rs:19:25
|
LL | io::stdin().lines().filter_map(Result::ok).for_each(|_| ());
| ^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `map_while(Result::ok)`
|
note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
- --> $DIR/lines_filter_map_ok.rs:15:5
+ --> $DIR/lines_filter_map_ok.rs:19:5
|
LL | io::stdin().lines().filter_map(Result::ok).for_each(|_| ());
| ^^^^^^^^^^^^^^^^^^^
error: `filter_map()` will run forever if the iterator repeatedly produces an `Err`
- --> $DIR/lines_filter_map_ok.rs:17:25
+ --> $DIR/lines_filter_map_ok.rs:21:25
|
LL | io::stdin().lines().filter_map(|x| x.ok()).for_each(|_| ());
| ^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `map_while(Result::ok)`
|
note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
- --> $DIR/lines_filter_map_ok.rs:17:5
+ --> $DIR/lines_filter_map_ok.rs:21:5
|
LL | io::stdin().lines().filter_map(|x| x.ok()).for_each(|_| ());
| ^^^^^^^^^^^^^^^^^^^
-error: aborting due to 4 previous errors
+error: `flatten()` will run forever if the iterator repeatedly produces an `Err`
+ --> $DIR/lines_filter_map_ok.rs:23:25
+ |
+LL | io::stdin().lines().flatten().for_each(|_| ());
+ | ^^^^^^^^^ help: replace with: `map_while(Result::ok)`
+ |
+note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
+ --> $DIR/lines_filter_map_ok.rs:23:5
+ |
+LL | io::stdin().lines().flatten().for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.stderr b/src/tools/clippy/tests/ui/macro_use_imports.stderr
index 6de869699..5524e7e56 100644
--- a/src/tools/clippy/tests/ui/macro_use_imports.stderr
+++ b/src/tools/clippy/tests/ui/macro_use_imports.stderr
@@ -1,8 +1,8 @@
error: `macro_use` attributes are no longer needed in the Rust 2018 edition
- --> $DIR/macro_use_imports.rs:25:5
+ --> $DIR/macro_use_imports.rs:19:5
|
LL | #[macro_use]
- | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::inner::nested::string_add;`
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{pub_macro, inner_mod_macro, function_macro, ty_macro, pub_in_private_macro};`
|
= note: `-D clippy::macro-use-imports` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::macro_use_imports)]`
@@ -14,16 +14,16 @@ LL | #[macro_use]
| ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{inner::mut_mut, inner::try_err};`
error: `macro_use` attributes are no longer needed in the Rust 2018 edition
- --> $DIR/macro_use_imports.rs:21:5
+ --> $DIR/macro_use_imports.rs:25:5
|
LL | #[macro_use]
- | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mini_mac::ClippyMiniMacroTest;`
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::inner::nested::string_add;`
error: `macro_use` attributes are no longer needed in the Rust 2018 edition
- --> $DIR/macro_use_imports.rs:19:5
+ --> $DIR/macro_use_imports.rs:21:5
|
LL | #[macro_use]
- | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{pub_macro, inner_mod_macro, function_macro, ty_macro, pub_in_private_macro};`
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mini_mac::ClippyMiniMacroTest;`
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_filter.fixed b/src/tools/clippy/tests/ui/manual_filter.fixed
index c1bc4aae9..a0fb0e32d 100644
--- a/src/tools/clippy/tests/ui/manual_filter.fixed
+++ b/src/tools/clippy/tests/ui/manual_filter.fixed
@@ -40,7 +40,7 @@ fn main() {
};
}
- #[allow(clippy::blocks_in_if_conditions)]
+ #[allow(clippy::blocks_in_conditions)]
Some(11).filter(|&x| {
println!("foo");
x > 10 && x < 100
diff --git a/src/tools/clippy/tests/ui/manual_filter.rs b/src/tools/clippy/tests/ui/manual_filter.rs
index ee44909f3..0ac6cbefc 100644
--- a/src/tools/clippy/tests/ui/manual_filter.rs
+++ b/src/tools/clippy/tests/ui/manual_filter.rs
@@ -135,7 +135,7 @@ fn main() {
};
}
- #[allow(clippy::blocks_in_if_conditions)]
+ #[allow(clippy::blocks_in_conditions)]
match Some(11) {
// Lint, statement is preserved by `.filter`
Some(x) => {
diff --git a/src/tools/clippy/tests/ui/manual_let_else.rs b/src/tools/clippy/tests/ui/manual_let_else.rs
index 27717ab3a..5d94660ec 100644
--- a/src/tools/clippy/tests/ui/manual_let_else.rs
+++ b/src/tools/clippy/tests/ui/manual_let_else.rs
@@ -5,7 +5,9 @@
clippy::let_unit_value,
clippy::match_single_binding,
clippy::never_loop,
- clippy::needless_if
+ clippy::needless_if,
+ clippy::diverging_sub_expression,
+ clippy::single_match
)]
#![warn(clippy::manual_let_else)]
//@no-rustfix
@@ -24,7 +26,7 @@ fn main() {}
fn fire() {
let v = if let Some(v_some) = g() { v_some } else { return };
//~^ ERROR: this could be rewritten as `let...else`
- //~| NOTE: `-D clippy::manual-let-else` implied by `-D warnings`
+
let v = if let Some(v_some) = g() {
//~^ ERROR: this could be rewritten as `let...else`
v_some
@@ -79,22 +81,76 @@ fn fire() {
panic!();
};
+ // The final expression will need to be turned into a statement.
+ let v = if let Some(v_some) = g() {
+ //~^ ERROR: this could be rewritten as `let...else`
+ v_some
+ } else {
+ panic!();
+ ()
+ };
+
+ // Even if the result is buried multiple expressions deep.
+ let v = if let Some(v_some) = g() {
+ //~^ ERROR: this could be rewritten as `let...else`
+ v_some
+ } else {
+ panic!();
+ if true {
+ match 0 {
+ 0 => (),
+ _ => (),
+ }
+ } else {
+ panic!()
+ }
+ };
+
+ // Or if a break gives the value.
+ let v = if let Some(v_some) = g() {
+ //~^ ERROR: this could be rewritten as `let...else`
+ v_some
+ } else {
+ loop {
+ panic!();
+ break ();
+ }
+ };
+
+ // Even if the break is in a weird position.
+ let v = if let Some(v_some) = g() {
+ //~^ ERROR: this could be rewritten as `let...else`
+ v_some
+ } else {
+ 'a: loop {
+ panic!();
+ loop {
+ match 0 {
+ 0 if (return break 'a ()) => {},
+ _ => {},
+ }
+ }
+ }
+ };
+
// A match diverges if all branches diverge:
- // Note: the corresponding let-else requires a ; at the end of the match
- // as otherwise the type checker does not turn it into a ! type.
let v = if let Some(v_some) = g() {
//~^ ERROR: this could be rewritten as `let...else`
v_some
} else {
- match () {
- _ if panic!() => {},
+ match 0 {
+ 0 if true => panic!(),
_ => panic!(),
- }
+ };
};
// An if's expression can cause divergence:
- let v = if let Some(v_some) = g() { v_some } else { if panic!() {} };
- //~^ ERROR: this could be rewritten as `let...else`
+ let v = if let Some(v_some) = g() {
+ //~^ ERROR: this could be rewritten as `let...else`
+ v_some
+ } else {
+ if panic!() {};
+ };
// An expression of a match can cause divergence:
let v = if let Some(v_some) = g() {
@@ -103,7 +159,7 @@ fn fire() {
} else {
match panic!() {
_ => {},
- }
+ };
};
// Top level else if
@@ -342,6 +398,43 @@ fn not_fire() {
} else {
return;
};
+
+ // A break that skips the divergent statement will cause the expression to be non-divergent.
+ let _x = if let Some(x) = Some(0) {
+ x
+ } else {
+ 'foo: loop {
+ break 'foo 0;
+ panic!();
+ }
+ };
+
+ // Even in inner loops.
+ let _x = if let Some(x) = Some(0) {
+ x
+ } else {
+ 'foo: {
+ loop {
+ break 'foo 0;
+ }
+ panic!();
+ }
+ };
+
+ // But a break that can't ever be reached still affects divergence checking.
+ let _x = if let Some(x) = g() {
+ x
+ } else {
+ 'foo: {
+ 'bar: loop {
+ loop {
+ break 'bar ();
+ }
+ break 'foo ();
+ }
+ panic!();
+ };
+ };
}
struct S<T> {
diff --git a/src/tools/clippy/tests/ui/manual_let_else.stderr b/src/tools/clippy/tests/ui/manual_let_else.stderr
index 2b6504a18..3beaf766e 100644
--- a/src/tools/clippy/tests/ui/manual_let_else.stderr
+++ b/src/tools/clippy/tests/ui/manual_let_else.stderr
@@ -1,5 +1,5 @@
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:25:5
+ --> $DIR/manual_let_else.rs:27:5
|
LL | let v = if let Some(v_some) = g() { v_some } else { return };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { return };`
@@ -8,7 +8,7 @@ LL | let v = if let Some(v_some) = g() { v_some } else { return };
= help: to override `-D warnings` add `#[allow(clippy::manual_let_else)]`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:28:5
+ --> $DIR/manual_let_else.rs:30:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -26,7 +26,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:35:5
+ --> $DIR/manual_let_else.rs:37:5
|
LL | / let v = if let Some(v) = g() {
LL | |
@@ -47,25 +47,25 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:47:9
+ --> $DIR/manual_let_else.rs:49:9
|
LL | let v = if let Some(v_some) = g() { v_some } else { continue };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { continue };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:49:9
+ --> $DIR/manual_let_else.rs:51:9
|
LL | let v = if let Some(v_some) = g() { v_some } else { break };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { break };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:54:5
+ --> $DIR/manual_let_else.rs:56:5
|
LL | let v = if let Some(v_some) = g() { v_some } else { panic!() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { panic!() };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:58:5
+ --> $DIR/manual_let_else.rs:60:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -83,7 +83,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:66:5
+ --> $DIR/manual_let_else.rs:68:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -101,7 +101,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:74:5
+ --> $DIR/manual_let_else.rs:76:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -127,6 +127,26 @@ LL | / let v = if let Some(v_some) = g() {
LL | |
LL | | v_some
LL | | } else {
+LL | | panic!();
+LL | | ()
+LL | | };
+ | |______^
+ |
+help: consider writing
+ |
+LL ~ let Some(v) = g() else {
+LL + panic!();
+LL + ()
+LL + };
+ |
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else.rs:94:5
+ |
+LL | / let v = if let Some(v_some) = g() {
+LL | |
+LL | | v_some
+LL | | } else {
... |
LL | | }
LL | | };
@@ -135,21 +155,42 @@ LL | | };
help: consider writing
|
LL ~ let Some(v) = g() else {
-LL + match () {
-LL + _ if panic!() => {},
-LL + _ => panic!(),
+LL + panic!();
+LL + if true {
+LL + match 0 {
+LL + 0 => (),
+LL + _ => (),
+LL + }
+LL + } else {
+LL + panic!()
LL + }
LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:96:5
+ --> $DIR/manual_let_else.rs:110:5
+ |
+LL | / let v = if let Some(v_some) = g() {
+LL | |
+LL | | v_some
+LL | | } else {
+... |
+LL | | }
+LL | | };
+ | |______^
+ |
+help: consider writing
+ |
+LL ~ let Some(v) = g() else {
+LL + loop {
+LL + panic!();
+LL + break ();
+LL + }
+LL + };
|
-LL | let v = if let Some(v_some) = g() { v_some } else { if panic!() {} };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { if panic!() {} };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:100:5
+ --> $DIR/manual_let_else.rs:121:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -163,14 +204,81 @@ LL | | };
help: consider writing
|
LL ~ let Some(v) = g() else {
+LL + 'a: loop {
+LL + panic!();
+LL + loop {
+LL + match 0 {
+LL + 0 if (return break 'a ()) => {},
+LL + _ => {},
+LL + }
+LL + }
+LL + }
+LL + };
+ |
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else.rs:137:5
+ |
+LL | / let v = if let Some(v_some) = g() {
+LL | |
+LL | | v_some
+LL | | } else {
+... |
+LL | | };
+LL | | };
+ | |______^
+ |
+help: consider writing
+ |
+LL ~ let Some(v) = g() else {
+LL + match 0 {
+LL + 0 if true => panic!(),
+LL + _ => panic!(),
+LL + };
+LL + };
+ |
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else.rs:148:5
+ |
+LL | / let v = if let Some(v_some) = g() {
+LL | |
+LL | | v_some
+LL | | } else {
+LL | | if panic!() {};
+LL | | };
+ | |______^
+ |
+help: consider writing
+ |
+LL ~ let Some(v) = g() else {
+LL + if panic!() {};
+LL + };
+ |
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else.rs:156:5
+ |
+LL | / let v = if let Some(v_some) = g() {
+LL | |
+LL | | v_some
+LL | | } else {
+... |
+LL | | };
+LL | | };
+ | |______^
+ |
+help: consider writing
+ |
+LL ~ let Some(v) = g() else {
LL + match panic!() {
LL + _ => {},
-LL + }
+LL + };
LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:110:5
+ --> $DIR/manual_let_else.rs:166:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -191,7 +299,7 @@ LL + } };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:120:5
+ --> $DIR/manual_let_else.rs:176:5
|
LL | / let v = if let Some(v_some) = g() {
LL | |
@@ -220,7 +328,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:138:5
+ --> $DIR/manual_let_else.rs:194:5
|
LL | / let (v, w) = if let Some(v_some) = g().map(|v| (v, 42)) {
LL | |
@@ -238,7 +346,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:146:5
+ --> $DIR/manual_let_else.rs:202:5
|
LL | / let (w, S { v }) = if let (Some(v_some), w_some) = (g().map(|_| S { v: 0 }), 0) {
LL | |
@@ -256,7 +364,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:156:13
+ --> $DIR/manual_let_else.rs:212:13
|
LL | let $n = if let Some(v) = $e { v } else { return };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some($n) = g() else { return };`
@@ -267,19 +375,19 @@ LL | create_binding_if_some!(w, g());
= note: this error originates in the macro `create_binding_if_some` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:165:5
+ --> $DIR/manual_let_else.rs:221:5
|
LL | let v = if let Variant::A(a, 0) = e() { a } else { return };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Variant::A(v, 0) = e() else { return };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:169:5
+ --> $DIR/manual_let_else.rs:225:5
|
LL | let mut v = if let Variant::B(b) = e() { b } else { return };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Variant::B(mut v) = e() else { return };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:174:5
+ --> $DIR/manual_let_else.rs:230:5
|
LL | / let v = if let Ok(Some(Variant::B(b))) | Err(Some(Variant::A(b, _))) = nested {
LL | |
@@ -297,19 +405,19 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:181:5
+ --> $DIR/manual_let_else.rs:237:5
|
LL | let v = if let Variant::A(.., a) = e() { a } else { return };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Variant::A(.., v) = e() else { return };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:185:5
+ --> $DIR/manual_let_else.rs:241:5
|
LL | let w = if let (Some(v), ()) = (g(), ()) { v } else { return };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let (Some(w), ()) = (g(), ()) else { return };`
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:189:5
+ --> $DIR/manual_let_else.rs:245:5
|
LL | / let w = if let Some(S { v: x }) = Some(S { v: 0 }) {
LL | |
@@ -327,7 +435,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:197:5
+ --> $DIR/manual_let_else.rs:253:5
|
LL | / let v = if let Some(S { v: x }) = Some(S { v: 0 }) {
LL | |
@@ -345,7 +453,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:205:5
+ --> $DIR/manual_let_else.rs:261:5
|
LL | / let (x, S { v }, w) = if let Some(U { v, w, x }) = None::<U<S<()>>> {
LL | |
@@ -363,7 +471,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:322:5
+ --> $DIR/manual_let_else.rs:378:5
|
LL | / let _ = match ff {
LL | |
@@ -372,5 +480,5 @@ LL | | _ => macro_call!(),
LL | | };
| |______^ help: consider writing: `let Some(_) = ff else { macro_call!() };`
-error: aborting due to 26 previous errors
+error: aborting due to 30 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs
index a224001a3..8146091a2 100644
--- a/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs
+++ b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs
@@ -138,6 +138,26 @@ pub fn manual_copy(src: &[i32], dst: &mut [i32], dst2: &mut [i32]) {
for i in 0..dst.len() {
dst[i] = src[i];
}
+
+ // Range is equal to array length
+ let src = [0, 1, 2, 3, 4];
+ let mut dst = [0; 4];
+ for i in 0..4 {
+ //~^ ERROR: it looks like you're manually copying between slices
+ dst[i] = src[i];
+ }
+
+ let mut dst = [0; 6];
+ for i in 0..5 {
+ //~^ ERROR: it looks like you're manually copying between slices
+ dst[i] = src[i];
+ }
+
+ let mut dst = [0; 5];
+ for i in 0..5 {
+ //~^ ERROR: it looks like you're manually copying between slices
+ dst[i] = src[i];
+ }
}
#[warn(clippy::needless_range_loop, clippy::manual_memcpy)]
diff --git a/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr
index b9dbda6ed..4b5cd274d 100644
--- a/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr
+++ b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr
@@ -106,7 +106,7 @@ LL | / for i in 0..5 {
LL | |
LL | | dst[i - 0] = src[i];
LL | | }
- | |_____^ help: try replacing the loop by: `dst[..5].copy_from_slice(&src[..5]);`
+ | |_____^ help: try replacing the loop by: `dst[..5].copy_from_slice(&src);`
error: it looks like you're manually copying between slices
--> $DIR/without_loop_counters.rs:121:5
@@ -120,11 +120,38 @@ LL | | }
error: it looks like you're manually copying between slices
--> $DIR/without_loop_counters.rs:145:5
|
+LL | / for i in 0..4 {
+LL | |
+LL | | dst[i] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst.copy_from_slice(&src[..4]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:151:5
+ |
+LL | / for i in 0..5 {
+LL | |
+LL | | dst[i] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..5].copy_from_slice(&src);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:157:5
+ |
+LL | / for i in 0..5 {
+LL | |
+LL | | dst[i] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst.copy_from_slice(&src);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:165:5
+ |
LL | / for i in 0..src.len() {
LL | |
LL | | dst[i] = src[i].clone();
LL | | }
| |_____^ help: try replacing the loop by: `dst[..src.len()].clone_from_slice(&src[..]);`
-error: aborting due to 13 previous errors
+error: aborting due to 16 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs
index e32ba8631..eb3875320 100644
--- a/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs
@@ -26,7 +26,7 @@ enum NoDocHidden {
_C,
}
-// name of variant with doc hidden does not start with underscore, should be ignored
+// name of variant with doc hidden does not start with underscore
enum NoUnderscore {
A,
B,
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr
index 7361a4a2c..c4b13a577 100644
--- a/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr
@@ -22,5 +22,26 @@ LL | _C,
= note: `-D clippy::manual-non-exhaustive` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::manual_non_exhaustive)]`
-error: aborting due to previous error
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_enum.rs:30:1
+ |
+LL | enum NoUnderscore {
+ | ^----------------
+ | |
+ | _help: add the attribute: `#[non_exhaustive] enum NoUnderscore`
+ | |
+LL | | A,
+LL | | B,
+LL | | #[doc(hidden)]
+LL | | C,
+LL | | }
+ | |_^
+ |
+help: remove this variant
+ --> $DIR/manual_non_exhaustive_enum.rs:34:5
+ |
+LL | C,
+ | ^
+
+error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr b/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr
index 028b8ff76..0b88b1969 100644
--- a/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr
@@ -39,6 +39,26 @@ LL | _c: (),
| ^^^^^^
error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_struct.rs:29:5
+ |
+LL | struct NoUnderscore {
+ | ^------------------
+ | |
+ | _____help: add the attribute: `#[non_exhaustive] struct NoUnderscore`
+ | |
+LL | | pub a: i32,
+LL | | pub b: i32,
+LL | | c: (),
+LL | | }
+ | |_____^
+ |
+help: remove this field
+ --> $DIR/manual_non_exhaustive_struct.rs:32:9
+ |
+LL | c: (),
+ | ^^^^^
+
+error: this seems like a manual implementation of the non-exhaustive pattern
--> $DIR/manual_non_exhaustive_struct.rs:56:5
|
LL | struct T(pub i32, pub i32, ());
@@ -64,5 +84,5 @@ help: remove this field
LL | struct Tp(pub i32, pub i32, ());
| ^^
-error: aborting due to 4 previous errors
+error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_ok_or.stderr b/src/tools/clippy/tests/ui/manual_ok_or.stderr
index ddb2cf261..b277d22e5 100644
--- a/src/tools/clippy/tests/ui/manual_ok_or.stderr
+++ b/src/tools/clippy/tests/ui/manual_ok_or.stderr
@@ -13,6 +13,15 @@ error: this pattern reimplements `Option::ok_or`
LL | foo.map_or(Err("error"), Ok);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `foo.ok_or("error")`
+error: called `map_or(Err(_), Ok)` on an `Option` value
+ --> $DIR/manual_ok_or.rs:14:5
+ |
+LL | foo.map_or(Err("error"), Ok);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `ok_or` instead: `foo.ok_or("error")`
+ |
+ = note: `-D clippy::option-map-or-err-ok` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::option_map_or_err_ok)]`
+
error: this pattern reimplements `Option::ok_or`
--> $DIR/manual_ok_or.rs:17:5
|
@@ -38,5 +47,5 @@ LL + "{}{}{}{}{}{}{}",
LL ~ "Alice", "Bob", "Sarah", "Marc", "Sandra", "Eric", "Jenifer"));
|
-error: aborting due to 4 previous errors
+error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_try_fold.rs b/src/tools/clippy/tests/ui/manual_try_fold.rs
index bddf03ac3..7299d7cf9 100644
--- a/src/tools/clippy/tests/ui/manual_try_fold.rs
+++ b/src/tools/clippy/tests/ui/manual_try_fold.rs
@@ -96,3 +96,33 @@ fn msrv_juust_right() {
.fold(Some(0i32), |sum, i| sum?.checked_add(*i))
.unwrap();
}
+
+mod issue11876 {
+ struct Foo;
+
+ impl Bar for Foo {
+ type Output = u32;
+ }
+
+ trait Bar: Sized {
+ type Output;
+ fn fold<A, F>(self, init: A, func: F) -> Fold<Self, A, F>
+ where
+ A: Clone,
+ F: Fn(A, Self::Output) -> A,
+ {
+ Fold { this: self, init, func }
+ }
+ }
+
+ #[allow(dead_code)]
+ struct Fold<S, A, F> {
+ this: S,
+ init: A,
+ func: F,
+ }
+
+ fn main() {
+ Foo.fold(Some(0), |acc, entry| Some(acc? + entry));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/map_err.stderr b/src/tools/clippy/tests/ui/map_err.stderr
index 6a845c84a..eb6742ff2 100644
--- a/src/tools/clippy/tests/ui/map_err.stderr
+++ b/src/tools/clippy/tests/ui/map_err.stderr
@@ -8,5 +8,5 @@ LL | println!("{:?}", x.map_err(|_| Errors::Ignored));
= note: `-D clippy::map-err-ignore` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::map_err_ignore)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/map_identity.fixed b/src/tools/clippy/tests/ui/map_identity.fixed
index 62b0ba018..53ebfb40b 100644
--- a/src/tools/clippy/tests/ui/map_identity.fixed
+++ b/src/tools/clippy/tests/ui/map_identity.fixed
@@ -24,28 +24,40 @@ fn main() {
fn issue7189() {
// should lint
- let x = [(1, 2), (3, 4)];
- let _ = x.iter();
- let _ = x.iter();
- let _ = x.iter();
+ let x = [(1, 2), (3, 4)].iter().copied();
+ let _ = x.clone();
+ let _ = x.clone();
+ let _ = x.clone();
- let y = [(1, 2, (3, (4,))), (5, 6, (7, (8,)))];
- let _ = y.iter();
+ let y = [(1, 2, (3, (4,))), (5, 6, (7, (8,)))].iter().copied();
+ let _ = y.clone();
// should not lint
- let _ = x.iter().map(|(x, y)| (x, y, y));
- let _ = x.iter().map(|(x, _y)| (x,));
- let _ = x.iter().map(|(x, _)| (x,));
- let _ = x.iter().map(|(x, ..)| (x,));
- let _ = y.iter().map(|(x, y, (z, _))| (x, y, (z, z)));
+ let _ = x.clone().map(|(x, y)| (x, y, y));
+ let _ = x.clone().map(|(x, _y)| (x,));
+ let _ = x.clone().map(|(x, _)| (x,));
+ let _ = x.clone().map(|(x, ..)| (x,));
+ let _ = y.clone().map(|(x, y, (z, _))| (x, y, (z, z)));
let _ = y
- .iter()
- .map(|(x, y, (z, _)): &(i32, i32, (i32, (i32,)))| (x, y, (z, z)));
+ .clone()
+ .map(|(x, y, (z, _)): (i32, i32, (i32, (i32,)))| (x, y, (z, z)));
let _ = y
- .iter()
- .map(|(x, y, (z, (w,))): &(i32, i32, (i32, (i32,)))| (x, y, (z, (w,))));
+ .clone()
+ .map(|(x, y, (z, (w,))): (i32, i32, (i32, (i32,)))| (x, y, (z, (w,))));
}
fn not_identity(x: &u16) -> u16 {
*x
}
+
+fn issue11764() {
+ let x = [(1, 2), (3, 4)];
+ // don't lint: this is an `Iterator<Item = &(i32, i32)>`
+ // match ergonomics makes the binding patterns into references
+ // so that its type changes to `Iterator<Item = (&i32, &i32)>`
+ let _ = x.iter().map(|(x, y)| (x, y));
+ let _ = x.iter().map(|x| (x.0,)).map(|(x,)| x);
+
+ // no match ergonomics for `(i32, i32)`
+ let _ = x.iter().copied();
+}
diff --git a/src/tools/clippy/tests/ui/map_identity.rs b/src/tools/clippy/tests/ui/map_identity.rs
index b7f4c99f2..c646c0568 100644
--- a/src/tools/clippy/tests/ui/map_identity.rs
+++ b/src/tools/clippy/tests/ui/map_identity.rs
@@ -26,30 +26,42 @@ fn main() {
fn issue7189() {
// should lint
- let x = [(1, 2), (3, 4)];
- let _ = x.iter().map(|(x, y)| (x, y));
- let _ = x.iter().map(|(x, y)| {
+ let x = [(1, 2), (3, 4)].iter().copied();
+ let _ = x.clone().map(|(x, y)| (x, y));
+ let _ = x.clone().map(|(x, y)| {
return (x, y);
});
- let _ = x.iter().map(|(x, y)| return (x, y));
+ let _ = x.clone().map(|(x, y)| return (x, y));
- let y = [(1, 2, (3, (4,))), (5, 6, (7, (8,)))];
- let _ = y.iter().map(|(x, y, (z, (w,)))| (x, y, (z, (w,))));
+ let y = [(1, 2, (3, (4,))), (5, 6, (7, (8,)))].iter().copied();
+ let _ = y.clone().map(|(x, y, (z, (w,)))| (x, y, (z, (w,))));
// should not lint
- let _ = x.iter().map(|(x, y)| (x, y, y));
- let _ = x.iter().map(|(x, _y)| (x,));
- let _ = x.iter().map(|(x, _)| (x,));
- let _ = x.iter().map(|(x, ..)| (x,));
- let _ = y.iter().map(|(x, y, (z, _))| (x, y, (z, z)));
+ let _ = x.clone().map(|(x, y)| (x, y, y));
+ let _ = x.clone().map(|(x, _y)| (x,));
+ let _ = x.clone().map(|(x, _)| (x,));
+ let _ = x.clone().map(|(x, ..)| (x,));
+ let _ = y.clone().map(|(x, y, (z, _))| (x, y, (z, z)));
let _ = y
- .iter()
- .map(|(x, y, (z, _)): &(i32, i32, (i32, (i32,)))| (x, y, (z, z)));
+ .clone()
+ .map(|(x, y, (z, _)): (i32, i32, (i32, (i32,)))| (x, y, (z, z)));
let _ = y
- .iter()
- .map(|(x, y, (z, (w,))): &(i32, i32, (i32, (i32,)))| (x, y, (z, (w,))));
+ .clone()
+ .map(|(x, y, (z, (w,))): (i32, i32, (i32, (i32,)))| (x, y, (z, (w,))));
}
fn not_identity(x: &u16) -> u16 {
*x
}
+
+fn issue11764() {
+ let x = [(1, 2), (3, 4)];
+ // don't lint: this is an `Iterator<Item = &(i32, i32)>`
+ // match ergonomics makes the binding patterns into references
+ // so that its type changes to `Iterator<Item = (&i32, &i32)>`
+ let _ = x.iter().map(|(x, y)| (x, y));
+ let _ = x.iter().map(|x| (x.0,)).map(|(x,)| x);
+
+ // no match ergonomics for `(i32, i32)`
+ let _ = x.iter().copied().map(|(x, y)| (x, y));
+}
diff --git a/src/tools/clippy/tests/ui/map_identity.stderr b/src/tools/clippy/tests/ui/map_identity.stderr
index 4ca24b0b0..ea077d66d 100644
--- a/src/tools/clippy/tests/ui/map_identity.stderr
+++ b/src/tools/clippy/tests/ui/map_identity.stderr
@@ -41,31 +41,37 @@ LL | let _: Result<u32, u32> = Ok(1).map_err(|a| a);
| ^^^^^^^^^^^^^^^ help: remove the call to `map_err`
error: unnecessary map of the identity function
- --> $DIR/map_identity.rs:30:21
+ --> $DIR/map_identity.rs:30:22
|
-LL | let _ = x.iter().map(|(x, y)| (x, y));
- | ^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
+LL | let _ = x.clone().map(|(x, y)| (x, y));
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
error: unnecessary map of the identity function
- --> $DIR/map_identity.rs:31:21
+ --> $DIR/map_identity.rs:31:22
|
-LL | let _ = x.iter().map(|(x, y)| {
- | _____________________^
+LL | let _ = x.clone().map(|(x, y)| {
+ | ______________________^
LL | | return (x, y);
LL | | });
| |______^ help: remove the call to `map`
error: unnecessary map of the identity function
- --> $DIR/map_identity.rs:34:21
+ --> $DIR/map_identity.rs:34:22
|
-LL | let _ = x.iter().map(|(x, y)| return (x, y));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
+LL | let _ = x.clone().map(|(x, y)| return (x, y));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
error: unnecessary map of the identity function
- --> $DIR/map_identity.rs:37:21
+ --> $DIR/map_identity.rs:37:22
|
-LL | let _ = y.iter().map(|(x, y, (z, (w,)))| (x, y, (z, (w,))));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
+LL | let _ = y.clone().map(|(x, y, (z, (w,)))| (x, y, (z, (w,))));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
-error: aborting due to 10 previous errors
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:66:30
+ |
+LL | let _ = x.iter().copied().map(|(x, y)| (x, y));
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
+
+error: aborting due to 11 previous errors
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or.stderr b/src/tools/clippy/tests/ui/map_unwrap_or.stderr
index 7b7eeb322..54ddd1402 100644
--- a/src/tools/clippy/tests/ui/map_unwrap_or.stderr
+++ b/src/tools/clippy/tests/ui/map_unwrap_or.stderr
@@ -1,4 +1,4 @@
-error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:17:13
|
LL | let _ = opt.map(|x| x + 1)
@@ -15,7 +15,7 @@ LL - let _ = opt.map(|x| x + 1)
LL + let _ = opt.map_or(0, |x| x + 1);
|
-error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:21:13
|
LL | let _ = opt.map(|x| {
@@ -33,7 +33,7 @@ LL | }
LL ~ );
|
-error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:25:13
|
LL | let _ = opt.map(|x| x + 1)
@@ -50,7 +50,7 @@ LL + 0
LL ~ }, |x| x + 1);
|
-error: called `map(<f>).unwrap_or(None)` on an `Option` value. This can be done more directly by calling `and_then(<f>)` instead
+error: called `map(<f>).unwrap_or(None)` on an `Option` value
--> $DIR/map_unwrap_or.rs:30:13
|
LL | let _ = opt.map(|x| Some(x + 1)).unwrap_or(None);
@@ -62,7 +62,7 @@ LL - let _ = opt.map(|x| Some(x + 1)).unwrap_or(None);
LL + let _ = opt.and_then(|x| Some(x + 1));
|
-error: called `map(<f>).unwrap_or(None)` on an `Option` value. This can be done more directly by calling `and_then(<f>)` instead
+error: called `map(<f>).unwrap_or(None)` on an `Option` value
--> $DIR/map_unwrap_or.rs:32:13
|
LL | let _ = opt.map(|x| {
@@ -80,7 +80,7 @@ LL | }
LL ~ );
|
-error: called `map(<f>).unwrap_or(None)` on an `Option` value. This can be done more directly by calling `and_then(<f>)` instead
+error: called `map(<f>).unwrap_or(None)` on an `Option` value
--> $DIR/map_unwrap_or.rs:36:13
|
LL | let _ = opt
@@ -95,7 +95,7 @@ LL - .map(|x| Some(x + 1))
LL + .and_then(|x| Some(x + 1));
|
-error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:47:13
|
LL | let _ = Some("prefix").map(|p| format!("{}.", p)).unwrap_or(id);
@@ -107,7 +107,7 @@ LL - let _ = Some("prefix").map(|p| format!("{}.", p)).unwrap_or(id);
LL + let _ = Some("prefix").map_or(id, |p| format!("{}.", p));
|
-error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling `map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:51:13
|
LL | let _ = opt.map(|x| {
@@ -117,7 +117,7 @@ LL | | }
LL | | ).unwrap_or_else(|| 0);
| |__________________________^
-error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling `map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:55:13
|
LL | let _ = opt.map(|x| x + 1)
@@ -127,7 +127,7 @@ LL | | 0
LL | | );
| |_________^
-error: called `map(<f>).unwrap_or(false)` on an `Option` value. This can be done more directly by calling `is_some_and(<f>)` instead
+error: called `map(<f>).unwrap_or(false)` on an `Option` value
--> $DIR/map_unwrap_or.rs:61:13
|
LL | let _ = opt.map(|x| x > 5).unwrap_or(false);
@@ -139,7 +139,7 @@ LL - let _ = opt.map(|x| x > 5).unwrap_or(false);
LL + let _ = opt.is_some_and(|x| x > 5);
|
-error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value
--> $DIR/map_unwrap_or.rs:71:13
|
LL | let _ = res.map(|x| {
@@ -149,7 +149,7 @@ LL | | }
LL | | ).unwrap_or_else(|_e| 0);
| |____________________________^
-error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value
--> $DIR/map_unwrap_or.rs:75:13
|
LL | let _ = res.map(|x| x + 1)
@@ -159,13 +159,13 @@ LL | | 0
LL | | });
| |__________^
-error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value
--> $DIR/map_unwrap_or.rs:99:13
|
LL | let _ = res.map(|x| x + 1).unwrap_or_else(|_e| 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `res.map_or_else(|_e| 0, |x| x + 1)`
-error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value
--> $DIR/map_unwrap_or.rs:106:13
|
LL | let _ = opt.map(|x| x > 5).unwrap_or(false);
@@ -177,7 +177,7 @@ LL - let _ = opt.map(|x| x > 5).unwrap_or(false);
LL + let _ = opt.map_or(false, |x| x > 5);
|
-error: called `map(<f>).unwrap_or(false)` on an `Option` value. This can be done more directly by calling `is_some_and(<f>)` instead
+error: called `map(<f>).unwrap_or(false)` on an `Option` value
--> $DIR/map_unwrap_or.rs:113:13
|
LL | let _ = opt.map(|x| x > 5).unwrap_or(false);
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
index ca611ac9d..d1a9fdd6e 100644
--- a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
+++ b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
@@ -1,4 +1,4 @@
-error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling `map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value
--> $DIR/map_unwrap_or_fixable.rs:16:13
|
LL | let _ = opt.map(|x| x + 1)
@@ -10,7 +10,7 @@ LL | | .unwrap_or_else(|| 0);
= note: `-D clippy::map-unwrap-or` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::map_unwrap_or)]`
-error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value
--> $DIR/map_unwrap_or_fixable.rs:46:13
|
LL | let _ = res.map(|x| x + 1)
diff --git a/src/tools/clippy/tests/ui/mem_replace_macro.stderr b/src/tools/clippy/tests/ui/mem_replace_macro.stderr
index 842ad3a85..c6435e94e 100644
--- a/src/tools/clippy/tests/ui/mem_replace_macro.stderr
+++ b/src/tools/clippy/tests/ui/mem_replace_macro.stderr
@@ -8,5 +8,5 @@ LL | inline!(std::mem::replace($s, Default::default()));
= help: to override `-D warnings` add `#[allow(clippy::mem_replace_with_default)]`
= note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/methods_fixable.stderr b/src/tools/clippy/tests/ui/methods_fixable.stderr
index 1bfe56d91..f290c20e5 100644
--- a/src/tools/clippy/tests/ui/methods_fixable.stderr
+++ b/src/tools/clippy/tests/ui/methods_fixable.stderr
@@ -7,5 +7,5 @@ LL | let _ = v.iter().filter(|&x| *x < 0).next();
= note: `-D clippy::filter-next` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::filter_next)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/methods_unfixable.stderr b/src/tools/clippy/tests/ui/methods_unfixable.stderr
index 581a985e0..771e10cbe 100644
--- a/src/tools/clippy/tests/ui/methods_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/methods_unfixable.stderr
@@ -12,5 +12,5 @@ LL | let iter = (0..10);
= note: `-D clippy::filter-next` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::filter_next)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed
index a96827259..ac44a6f3f 100644
--- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed
+++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed
@@ -118,4 +118,19 @@ fn index_different_slice_in_same_expr(v1: &[u8], v2: &[u8]) {
let _ = v1[0] + v2[1];
}
+fn issue11835(v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) {
+ assert!(v1.len() == 3);
+ assert!(v2.len() == 4);
+ assert!(v3.len() == 3);
+ assert!(4 == v4.len());
+
+ let _ = v1[0] + v1[1] + v1[2];
+ //~^ ERROR: indexing into a slice multiple times with an `assert` that does not cover the
+ let _ = v2[0] + v2[1] + v2[2];
+
+ let _ = v3[0] + v3[1] + v3[2];
+ //~^ ERROR: indexing into a slice multiple times with an `assert` that does not cover the
+ let _ = v4[0] + v4[1] + v4[2];
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs
index 0b4b883ac..f05d5fea5 100644
--- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs
+++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs
@@ -118,4 +118,19 @@ fn index_different_slice_in_same_expr(v1: &[u8], v2: &[u8]) {
let _ = v1[0] + v2[1];
}
+fn issue11835(v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) {
+ assert!(v1.len() == 2);
+ assert!(v2.len() == 4);
+ assert!(2 == v3.len());
+ assert!(4 == v4.len());
+
+ let _ = v1[0] + v1[1] + v1[2];
+ //~^ ERROR: indexing into a slice multiple times with an `assert` that does not cover the
+ let _ = v2[0] + v2[1] + v2[2];
+
+ let _ = v3[0] + v3[1] + v3[2];
+ //~^ ERROR: indexing into a slice multiple times with an `assert` that does not cover the
+ let _ = v4[0] + v4[1] + v4[2];
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr
index a3e66d795..61dce6ccc 100644
--- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr
+++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr
@@ -249,5 +249,57 @@ LL | let _ = v1[0] + v1[12];
| ^^^^^^
= note: asserting the length before indexing will elide bounds checks
-error: aborting due to 9 previous errors
+error: indexing into a slice multiple times with an `assert` that does not cover the highest index
+ --> $DIR/missing_asserts_for_indexing.rs:127:13
+ |
+LL | assert!(v1.len() == 2);
+ | ---------------------- help: provide the highest index that is indexed with: `assert!(v1.len() == 3)`
+...
+LL | let _ = v1[0] + v1[1] + v1[2];
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing.rs:127:13
+ |
+LL | let _ = v1[0] + v1[1] + v1[2];
+ | ^^^^^
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing.rs:127:21
+ |
+LL | let _ = v1[0] + v1[1] + v1[2];
+ | ^^^^^
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing.rs:127:29
+ |
+LL | let _ = v1[0] + v1[1] + v1[2];
+ | ^^^^^
+ = note: asserting the length before indexing will elide bounds checks
+
+error: indexing into a slice multiple times with an `assert` that does not cover the highest index
+ --> $DIR/missing_asserts_for_indexing.rs:131:13
+ |
+LL | assert!(2 == v3.len());
+ | ---------------------- help: provide the highest index that is indexed with: `assert!(v3.len() == 3)`
+...
+LL | let _ = v3[0] + v3[1] + v3[2];
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing.rs:131:13
+ |
+LL | let _ = v3[0] + v3[1] + v3[2];
+ | ^^^^^
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing.rs:131:21
+ |
+LL | let _ = v3[0] + v3[1] + v3[2];
+ | ^^^^^
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing.rs:131:29
+ |
+LL | let _ = v3[0] + v3[1] + v3[2];
+ | ^^^^^
+ = note: asserting the length before indexing will elide bounds checks
+
+error: aborting due to 11 previous errors
diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs
index 4346ed892..de53079a4 100644
--- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs
+++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs
@@ -46,4 +46,26 @@ fn index_struct_different_fields(f: &Foo<'_>) {
let _ = f.v[0] + f.v2[1];
}
+fn shadowing() {
+ let x: &[i32] = &[1];
+ assert!(x.len() > 1);
+
+ let x: &[i32] = &[1];
+ let _ = x[0] + x[1];
+ //~^ ERROR: indexing into a slice multiple times without an `assert`
+}
+
+pub fn issue11856(values: &[i32]) -> usize {
+ let mut ascending = Vec::new();
+ for w in values.windows(2) {
+ assert!(w.len() > 1);
+ if w[0] < w[1] {
+ ascending.push((w[0], w[1]));
+ } else {
+ ascending.push((w[1], w[0]));
+ }
+ }
+ ascending.len()
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr
index 12c9eed5d..12e054227 100644
--- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr
@@ -160,5 +160,24 @@ LL | let _ = f.v[0] + f.v[1];
| ^^^^^^
= note: asserting the length before indexing will elide bounds checks
-error: aborting due to 7 previous errors
+error: indexing into a slice multiple times without an `assert`
+ --> $DIR/missing_asserts_for_indexing_unfixable.rs:54:13
+ |
+LL | let _ = x[0] + x[1];
+ | ^^^^^^^^^^^
+ |
+ = help: consider asserting the length before indexing: `assert!(x.len() > 1);`
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing_unfixable.rs:54:13
+ |
+LL | let _ = x[0] + x[1];
+ | ^^^^
+note: slice indexed here
+ --> $DIR/missing_asserts_for_indexing_unfixable.rs:54:20
+ |
+LL | let _ = x[0] + x[1];
+ | ^^^^
+ = note: asserting the length before indexing will elide bounds checks
+
+error: aborting due to 8 previous errors
diff --git a/src/tools/clippy/tests/ui/missing_doc_crate_missing.stderr b/src/tools/clippy/tests/ui/missing_doc_crate_missing.stderr
index c684bc8e7..3aa9781c2 100644
--- a/src/tools/clippy/tests/ui/missing_doc_crate_missing.stderr
+++ b/src/tools/clippy/tests/ui/missing_doc_crate_missing.stderr
@@ -11,5 +11,5 @@ LL | | fn main() {}
= note: `-D clippy::missing-docs-in-private-items` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::missing_docs_in_private_items)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
index 0b7be4616..d84d06088 100644
--- a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
+++ b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
@@ -7,5 +7,5 @@ LL | while b.load(Ordering::Acquire) {}
= note: `-D clippy::missing-spin-loop` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::missing_spin_loop)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/mut_mutex_lock.stderr b/src/tools/clippy/tests/ui/mut_mutex_lock.stderr
index 9b20016be..819602882 100644
--- a/src/tools/clippy/tests/ui/mut_mutex_lock.stderr
+++ b/src/tools/clippy/tests/ui/mut_mutex_lock.stderr
@@ -7,5 +7,5 @@ LL | let mut value = value_mutex.lock().unwrap();
= note: `-D clippy::mut-mutex-lock` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::mut_mutex_lock)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr b/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr
index 183e2dbc8..e91359a3c 100644
--- a/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr
@@ -7,5 +7,5 @@ LL | fn call_with_mut_self(self: &mut Self) {}
= note: `-D clippy::needless-arbitrary-self-type` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_arbitrary_self_type)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr b/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr
index 2ed9208e6..b1fc1a7a9 100644
--- a/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr
+++ b/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr
@@ -7,5 +7,5 @@ LL | if y & !x {
= note: `-D clippy::needless-bitwise-bool` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_bitwise_bool)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/needless_bool_assign.stderr b/src/tools/clippy/tests/ui/needless_bool_assign.stderr
index 7866c89bd..244a88e66 100644
--- a/src/tools/clippy/tests/ui/needless_bool_assign.stderr
+++ b/src/tools/clippy/tests/ui/needless_bool_assign.stderr
@@ -48,7 +48,8 @@ LL | } else {
LL | | a.field = true;
LL | | }
| |_____^
- = note: `#[deny(clippy::if_same_then_else)]` on by default
+ = note: `-D clippy::if-same-then-else` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::if_same_then_else)]`
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/needless_borrow.fixed b/src/tools/clippy/tests/ui/needless_borrow.fixed
index c2c5f765a..ff1e2dc88 100644
--- a/src/tools/clippy/tests/ui/needless_borrow.fixed
+++ b/src/tools/clippy/tests/ui/needless_borrow.fixed
@@ -190,27 +190,48 @@ fn issue9383() {
// Should not lint because unions need explicit deref when accessing field
use std::mem::ManuallyDrop;
- union Coral {
- crab: ManuallyDrop<Vec<i32>>,
+ #[derive(Clone, Copy)]
+ struct Wrap<T>(T);
+ impl<T> core::ops::Deref for Wrap<T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ &self.0
+ }
+ }
+ impl<T> core::ops::DerefMut for Wrap<T> {
+ fn deref_mut(&mut self) -> &mut T {
+ &mut self.0
+ }
}
- union Ocean {
- coral: ManuallyDrop<Coral>,
+ union U<T: Copy> {
+ u: T,
}
- let mut ocean = Ocean {
- coral: ManuallyDrop::new(Coral {
- crab: ManuallyDrop::new(vec![1, 2, 3]),
- }),
- };
+ #[derive(Clone, Copy)]
+ struct Foo {
+ x: u32,
+ }
unsafe {
- ManuallyDrop::drop(&mut (&mut ocean.coral).crab);
-
- (*ocean.coral).crab = ManuallyDrop::new(vec![4, 5, 6]);
- ManuallyDrop::drop(&mut (*ocean.coral).crab);
-
- ManuallyDrop::drop(&mut ocean.coral);
+ let mut x = U {
+ u: ManuallyDrop::new(Foo { x: 0 }),
+ };
+ let _ = &mut (&mut x.u).x;
+ let _ = &mut { x.u }.x;
+ let _ = &mut ({ &mut x.u }).x;
+
+ let mut x = U {
+ u: Wrap(ManuallyDrop::new(Foo { x: 0 })),
+ };
+ let _ = &mut (&mut x.u).x;
+ let _ = &mut { x.u }.x;
+ let _ = &mut ({ &mut x.u }).x;
+
+ let mut x = U { u: Wrap(Foo { x: 0 }) };
+ let _ = &mut x.u.x;
+ let _ = &mut { x.u }.x;
+ let _ = &mut ({ &mut x.u }).x;
}
}
diff --git a/src/tools/clippy/tests/ui/needless_borrow.rs b/src/tools/clippy/tests/ui/needless_borrow.rs
index 0cd6e41b8..597021539 100644
--- a/src/tools/clippy/tests/ui/needless_borrow.rs
+++ b/src/tools/clippy/tests/ui/needless_borrow.rs
@@ -190,27 +190,48 @@ fn issue9383() {
// Should not lint because unions need explicit deref when accessing field
use std::mem::ManuallyDrop;
- union Coral {
- crab: ManuallyDrop<Vec<i32>>,
+ #[derive(Clone, Copy)]
+ struct Wrap<T>(T);
+ impl<T> core::ops::Deref for Wrap<T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ &self.0
+ }
+ }
+ impl<T> core::ops::DerefMut for Wrap<T> {
+ fn deref_mut(&mut self) -> &mut T {
+ &mut self.0
+ }
}
- union Ocean {
- coral: ManuallyDrop<Coral>,
+ union U<T: Copy> {
+ u: T,
}
- let mut ocean = Ocean {
- coral: ManuallyDrop::new(Coral {
- crab: ManuallyDrop::new(vec![1, 2, 3]),
- }),
- };
+ #[derive(Clone, Copy)]
+ struct Foo {
+ x: u32,
+ }
unsafe {
- ManuallyDrop::drop(&mut (&mut ocean.coral).crab);
-
- (*ocean.coral).crab = ManuallyDrop::new(vec![4, 5, 6]);
- ManuallyDrop::drop(&mut (*ocean.coral).crab);
-
- ManuallyDrop::drop(&mut ocean.coral);
+ let mut x = U {
+ u: ManuallyDrop::new(Foo { x: 0 }),
+ };
+ let _ = &mut (&mut x.u).x;
+ let _ = &mut (&mut { x.u }).x;
+ let _ = &mut ({ &mut x.u }).x;
+
+ let mut x = U {
+ u: Wrap(ManuallyDrop::new(Foo { x: 0 })),
+ };
+ let _ = &mut (&mut x.u).x;
+ let _ = &mut (&mut { x.u }).x;
+ let _ = &mut ({ &mut x.u }).x;
+
+ let mut x = U { u: Wrap(Foo { x: 0 }) };
+ let _ = &mut (&mut x.u).x;
+ let _ = &mut (&mut { x.u }).x;
+ let _ = &mut ({ &mut x.u }).x;
}
}
diff --git a/src/tools/clippy/tests/ui/needless_borrow.stderr b/src/tools/clippy/tests/ui/needless_borrow.stderr
index e91b78b0a..44552ee6a 100644
--- a/src/tools/clippy/tests/ui/needless_borrow.stderr
+++ b/src/tools/clippy/tests/ui/needless_borrow.stderr
@@ -133,5 +133,29 @@ error: this expression borrows a value the compiler would automatically borrow
LL | (&mut self.f)()
| ^^^^^^^^^^^^^ help: change this to: `(self.f)`
-error: aborting due to 22 previous errors
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:221:22
+ |
+LL | let _ = &mut (&mut { x.u }).x;
+ | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:228:22
+ |
+LL | let _ = &mut (&mut { x.u }).x;
+ | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:232:22
+ |
+LL | let _ = &mut (&mut x.u).x;
+ | ^^^^^^^^^^ help: change this to: `x.u`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:233:22
+ |
+LL | let _ = &mut (&mut { x.u }).x;
+ | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }`
+
+error: aborting due to 26 previous errors
diff --git a/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.fixed b/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.fixed
index 2a335516f..bd7a9a0b9 100644
--- a/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.fixed
+++ b/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.fixed
@@ -284,4 +284,19 @@ fn main() {
{
}
}
+ // address of field when operand impl Drop
+ {
+ struct CustomDrop(String);
+
+ impl Drop for CustomDrop {
+ fn drop(&mut self) {}
+ }
+
+ fn check_str<P: AsRef<str>>(_to: P) {}
+
+ fn test() {
+ let owner = CustomDrop(String::default());
+ check_str(&owner.0); // Don't lint. `owner` can't be partially moved because it impl Drop
+ }
+ }
}
diff --git a/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.rs b/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.rs
index f0567f486..5cfd4ce30 100644
--- a/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.rs
+++ b/src/tools/clippy/tests/ui/needless_borrows_for_generic_args.rs
@@ -284,4 +284,19 @@ fn main() {
{
}
}
+ // address of field when operand impl Drop
+ {
+ struct CustomDrop(String);
+
+ impl Drop for CustomDrop {
+ fn drop(&mut self) {}
+ }
+
+ fn check_str<P: AsRef<str>>(_to: P) {}
+
+ fn test() {
+ let owner = CustomDrop(String::default());
+ check_str(&owner.0); // Don't lint. `owner` can't be partially moved because it impl Drop
+ }
+ }
}
diff --git a/src/tools/clippy/tests/ui/needless_else.stderr b/src/tools/clippy/tests/ui/needless_else.stderr
index e6f7138e9..66552109c 100644
--- a/src/tools/clippy/tests/ui/needless_else.stderr
+++ b/src/tools/clippy/tests/ui/needless_else.stderr
@@ -9,5 +9,5 @@ LL | | }
= note: `-D clippy::needless-else` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_else)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr b/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr
index 73f249ae6..24a22e232 100644
--- a/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr
@@ -29,5 +29,5 @@ help: ...and replace `return` with `continue`
LL | continue;
| ~~~~~~~~
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/needless_if.fixed b/src/tools/clippy/tests/ui/needless_if.fixed
index be35dcddb..1086ae2c9 100644
--- a/src/tools/clippy/tests/ui/needless_if.fixed
+++ b/src/tools/clippy/tests/ui/needless_if.fixed
@@ -1,7 +1,7 @@
//@aux-build:proc_macros.rs
#![feature(let_chains)]
#![allow(
- clippy::blocks_in_if_conditions,
+ clippy::blocks_in_conditions,
clippy::if_same_then_else,
clippy::ifs_same_cond,
clippy::let_unit_value,
diff --git a/src/tools/clippy/tests/ui/needless_if.rs b/src/tools/clippy/tests/ui/needless_if.rs
index e2ad17e69..131cceaf7 100644
--- a/src/tools/clippy/tests/ui/needless_if.rs
+++ b/src/tools/clippy/tests/ui/needless_if.rs
@@ -1,7 +1,7 @@
//@aux-build:proc_macros.rs
#![feature(let_chains)]
#![allow(
- clippy::blocks_in_if_conditions,
+ clippy::blocks_in_conditions,
clippy::if_same_then_else,
clippy::ifs_same_cond,
clippy::let_unit_value,
diff --git a/src/tools/clippy/tests/ui/needless_late_init.fixed b/src/tools/clippy/tests/ui/needless_late_init.fixed
index 891b2b014..6db870490 100644
--- a/src/tools/clippy/tests/ui/needless_late_init.fixed
+++ b/src/tools/clippy/tests/ui/needless_late_init.fixed
@@ -3,7 +3,7 @@
#![allow(unused)]
#![allow(
clippy::assign_op_pattern,
- clippy::blocks_in_if_conditions,
+ clippy::blocks_in_conditions,
clippy::let_and_return,
clippy::let_unit_value,
clippy::nonminimal_bool,
diff --git a/src/tools/clippy/tests/ui/needless_late_init.rs b/src/tools/clippy/tests/ui/needless_late_init.rs
index 553995116..c1e86212a 100644
--- a/src/tools/clippy/tests/ui/needless_late_init.rs
+++ b/src/tools/clippy/tests/ui/needless_late_init.rs
@@ -3,7 +3,7 @@
#![allow(unused)]
#![allow(
clippy::assign_op_pattern,
- clippy::blocks_in_if_conditions,
+ clippy::blocks_in_conditions,
clippy::let_and_return,
clippy::let_unit_value,
clippy::nonminimal_bool,
diff --git a/src/tools/clippy/tests/ui/needless_option_take.stderr b/src/tools/clippy/tests/ui/needless_option_take.stderr
index d3c22441d..bf43a18e7 100644
--- a/src/tools/clippy/tests/ui/needless_option_take.stderr
+++ b/src/tools/clippy/tests/ui/needless_option_take.stderr
@@ -7,5 +7,5 @@ LL | x.as_ref().take();
= note: `-D clippy::needless-option-take` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_option_take)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs
index bdb6d40d9..a92197fb0 100644
--- a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs
+++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs
@@ -1,4 +1,9 @@
-#![allow(clippy::if_same_then_else, clippy::no_effect, clippy::redundant_closure_call)]
+#![allow(
+ clippy::if_same_then_else,
+ clippy::no_effect,
+ clippy::redundant_closure_call,
+ clippy::ptr_arg
+)]
#![warn(clippy::needless_pass_by_ref_mut)]
#![feature(lint_reasons)]
//@no-rustfix
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr
index 3e1415be0..5d1e9515d 100644
--- a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr
+++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr
@@ -1,5 +1,5 @@
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:7:11
+ --> $DIR/needless_pass_by_ref_mut.rs:12:11
|
LL | fn foo(s: &mut Vec<u32>, b: &u32, x: &mut u32) {
| ^^^^^^^^^^^^^ help: consider changing to: `&Vec<u32>`
@@ -8,79 +8,79 @@ LL | fn foo(s: &mut Vec<u32>, b: &u32, x: &mut u32) {
= help: to override `-D warnings` add `#[allow(clippy::needless_pass_by_ref_mut)]`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:32:12
+ --> $DIR/needless_pass_by_ref_mut.rs:37:12
|
LL | fn foo6(s: &mut Vec<u32>) {
| ^^^^^^^^^^^^^ help: consider changing to: `&Vec<u32>`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:45:29
+ --> $DIR/needless_pass_by_ref_mut.rs:50:29
|
LL | fn mushroom(&self, vec: &mut Vec<i32>) -> usize {
| ^^^^^^^^^^^^^ help: consider changing to: `&Vec<i32>`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:50:31
+ --> $DIR/needless_pass_by_ref_mut.rs:55:31
|
LL | fn badger(&mut self, vec: &mut Vec<i32>) -> usize {
| ^^^^^^^^^^^^^ help: consider changing to: `&Vec<i32>`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:127:16
+ --> $DIR/needless_pass_by_ref_mut.rs:132:16
|
LL | async fn a1(x: &mut i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:131:16
+ --> $DIR/needless_pass_by_ref_mut.rs:136:16
|
LL | async fn a2(x: &mut i32, y: String) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:135:16
+ --> $DIR/needless_pass_by_ref_mut.rs:140:16
|
LL | async fn a3(x: &mut i32, y: String, z: String) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:139:16
+ --> $DIR/needless_pass_by_ref_mut.rs:144:16
|
LL | async fn a4(x: &mut i32, y: i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:143:24
+ --> $DIR/needless_pass_by_ref_mut.rs:148:24
|
LL | async fn a5(x: i32, y: &mut i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:147:24
+ --> $DIR/needless_pass_by_ref_mut.rs:152:24
|
LL | async fn a6(x: i32, y: &mut i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:151:32
+ --> $DIR/needless_pass_by_ref_mut.rs:156:32
|
LL | async fn a7(x: i32, y: i32, z: &mut i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:155:24
+ --> $DIR/needless_pass_by_ref_mut.rs:160:24
|
LL | async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:155:45
+ --> $DIR/needless_pass_by_ref_mut.rs:160:45
|
LL | async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:189:16
+ --> $DIR/needless_pass_by_ref_mut.rs:194:16
|
LL | fn cfg_warn(s: &mut u32) {}
| ^^^^^^^^ help: consider changing to: `&u32`
@@ -88,7 +88,7 @@ LL | fn cfg_warn(s: &mut u32) {}
= note: this is cfg-gated and may require further changes
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:195:20
+ --> $DIR/needless_pass_by_ref_mut.rs:200:20
|
LL | fn cfg_warn(s: &mut u32) {}
| ^^^^^^^^ help: consider changing to: `&u32`
@@ -96,19 +96,19 @@ LL | fn cfg_warn(s: &mut u32) {}
= note: this is cfg-gated and may require further changes
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:209:39
+ --> $DIR/needless_pass_by_ref_mut.rs:214:39
|
LL | async fn inner_async2(x: &mut i32, y: &mut u32) {
| ^^^^^^^^ help: consider changing to: `&u32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:217:26
+ --> $DIR/needless_pass_by_ref_mut.rs:222:26
|
LL | async fn inner_async3(x: &mut i32, y: &mut u32) {
| ^^^^^^^^ help: consider changing to: `&i32`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:236:34
+ --> $DIR/needless_pass_by_ref_mut.rs:241:34
|
LL | pub async fn call_in_closure1(n: &mut str) {
| ^^^^^^^^ help: consider changing to: `&str`
@@ -116,7 +116,7 @@ LL | pub async fn call_in_closure1(n: &mut str) {
= warning: changing this function will impact semver compatibility
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:248:25
+ --> $DIR/needless_pass_by_ref_mut.rs:253:25
|
LL | pub async fn closure(n: &mut usize) -> impl '_ + FnMut() {
| ^^^^^^^^^^ help: consider changing to: `&usize`
@@ -124,7 +124,7 @@ LL | pub async fn closure(n: &mut usize) -> impl '_ + FnMut() {
= warning: changing this function will impact semver compatibility
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:255:20
+ --> $DIR/needless_pass_by_ref_mut.rs:260:20
|
LL | pub fn closure2(n: &mut usize) -> impl '_ + FnMut() -> usize {
| ^^^^^^^^^^ help: consider changing to: `&usize`
@@ -132,7 +132,7 @@ LL | pub fn closure2(n: &mut usize) -> impl '_ + FnMut() -> usize {
= warning: changing this function will impact semver compatibility
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:266:26
+ --> $DIR/needless_pass_by_ref_mut.rs:271:26
|
LL | pub async fn closure4(n: &mut usize) {
| ^^^^^^^^^^ help: consider changing to: `&usize`
@@ -140,61 +140,61 @@ LL | pub async fn closure4(n: &mut usize) {
= warning: changing this function will impact semver compatibility
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:306:18
+ --> $DIR/needless_pass_by_ref_mut.rs:311:18
|
LL | fn _empty_tup(x: &mut (())) {}
| ^^^^^^^^^ help: consider changing to: `&()`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:307:19
+ --> $DIR/needless_pass_by_ref_mut.rs:312:19
|
LL | fn _single_tup(x: &mut ((i32,))) {}
| ^^^^^^^^^^^^^ help: consider changing to: `&(i32,)`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:308:18
+ --> $DIR/needless_pass_by_ref_mut.rs:313:18
|
LL | fn _multi_tup(x: &mut ((i32, u32))) {}
| ^^^^^^^^^^^^^^^^^ help: consider changing to: `&(i32, u32)`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:309:11
+ --> $DIR/needless_pass_by_ref_mut.rs:314:11
|
LL | fn _fn(x: &mut (fn())) {}
| ^^^^^^^^^^^ help: consider changing to: `&fn()`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:311:23
+ --> $DIR/needless_pass_by_ref_mut.rs:316:23
|
LL | fn _extern_rust_fn(x: &mut extern "Rust" fn()) {}
| ^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&extern "Rust" fn()`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:312:20
+ --> $DIR/needless_pass_by_ref_mut.rs:317:20
|
LL | fn _extern_c_fn(x: &mut extern "C" fn()) {}
| ^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&extern "C" fn()`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:313:18
+ --> $DIR/needless_pass_by_ref_mut.rs:318:18
|
LL | fn _unsafe_fn(x: &mut unsafe fn()) {}
| ^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe fn()`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:314:25
+ --> $DIR/needless_pass_by_ref_mut.rs:319:25
|
LL | fn _unsafe_extern_fn(x: &mut unsafe extern "C" fn()) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe extern "C" fn()`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:315:20
+ --> $DIR/needless_pass_by_ref_mut.rs:320:20
|
LL | fn _fn_with_arg(x: &mut unsafe extern "C" fn(i32)) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe extern "C" fn(i32)`
error: this argument is a mutable reference, but not used mutably
- --> $DIR/needless_pass_by_ref_mut.rs:316:20
+ --> $DIR/needless_pass_by_ref_mut.rs:321:20
|
LL | fn _fn_with_ret(x: &mut unsafe extern "C" fn() -> (i32)) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe extern "C" fn() -> (i32)`
diff --git a/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed b/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed
index 52d541809..0147c73a9 100644
--- a/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed
+++ b/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed
@@ -4,6 +4,8 @@
clippy::no_effect,
clippy::unit_arg,
clippy::useless_conversion,
+ clippy::diverging_sub_expression,
+ clippy::let_unit_value,
unused
)]
@@ -35,5 +37,43 @@ fn main() -> Result<(), ()> {
with_span! {
return Err(())?;
}
+
+ // Issue #11765
+ // Should not lint
+ let Some(s) = Some("") else {
+ return Err(())?;
+ };
+
+ let Some(s) = Some("") else {
+ let Some(s) = Some("") else {
+ return Err(())?;
+ };
+
+ return Err(())?;
+ };
+
+ let Some(_): Option<()> = ({
+ return Err(())?;
+ }) else {
+ panic!();
+ };
+
Err(())
}
+
+fn issue11616() -> Result<(), ()> {
+ let _x: String = {
+ return Err(())?;
+ };
+ let _x: () = {
+ Err(())?;
+ //~^ ERROR: unneeded `return` statement with `?` operator
+ };
+ let _x = match 1 {
+ 1 => vec![1, 2],
+ _ => {
+ return Err(())?;
+ },
+ };
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs b/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs
index d253cae4d..66e1f438f 100644
--- a/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs
+++ b/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs
@@ -4,6 +4,8 @@
clippy::no_effect,
clippy::unit_arg,
clippy::useless_conversion,
+ clippy::diverging_sub_expression,
+ clippy::let_unit_value,
unused
)]
@@ -35,5 +37,43 @@ fn main() -> Result<(), ()> {
with_span! {
return Err(())?;
}
+
+ // Issue #11765
+ // Should not lint
+ let Some(s) = Some("") else {
+ return Err(())?;
+ };
+
+ let Some(s) = Some("") else {
+ let Some(s) = Some("") else {
+ return Err(())?;
+ };
+
+ return Err(())?;
+ };
+
+ let Some(_): Option<()> = ({
+ return Err(())?;
+ }) else {
+ panic!();
+ };
+
Err(())
}
+
+fn issue11616() -> Result<(), ()> {
+ let _x: String = {
+ return Err(())?;
+ };
+ let _x: () = {
+ return Err(())?;
+ //~^ ERROR: unneeded `return` statement with `?` operator
+ };
+ let _x = match 1 {
+ 1 => vec![1, 2],
+ _ => {
+ return Err(())?;
+ },
+ };
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr b/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr
index 0de063380..17aa212ae 100644
--- a/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr
+++ b/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr
@@ -1,5 +1,5 @@
error: unneeded `return` statement with `?` operator
- --> $DIR/needless_return_with_question_mark.rs:27:5
+ --> $DIR/needless_return_with_question_mark.rs:29:5
|
LL | return Err(())?;
| ^^^^^^^ help: remove it
@@ -7,5 +7,11 @@ LL | return Err(())?;
= note: `-D clippy::needless-return-with-question-mark` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_return_with_question_mark)]`
-error: aborting due to previous error
+error: unneeded `return` statement with `?` operator
+ --> $DIR/needless_return_with_question_mark.rs:69:9
+ |
+LL | return Err(())?;
+ | ^^^^^^^ help: remove it
+
+error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/needless_update.stderr b/src/tools/clippy/tests/ui/needless_update.stderr
index 3e9e2941a..60aeb0493 100644
--- a/src/tools/clippy/tests/ui/needless_update.stderr
+++ b/src/tools/clippy/tests/ui/needless_update.stderr
@@ -7,5 +7,5 @@ LL | S { a: 1, b: 1, ..base };
= note: `-D clippy::needless-update` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::needless_update)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/new_ret_no_self_overflow.stderr b/src/tools/clippy/tests/ui/new_ret_no_self_overflow.stderr
index babb634fd..c0d6a74a5 100644
--- a/src/tools/clippy/tests/ui/new_ret_no_self_overflow.stderr
+++ b/src/tools/clippy/tests/ui/new_ret_no_self_overflow.stderr
@@ -4,6 +4,6 @@ error[E0275]: overflow evaluating the requirement `<i32 as std::ops::Add>::Outpu
LL | pub fn new() -> X {
| ^
-error: aborting due to previous error
+error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0275`.
diff --git a/src/tools/clippy/tests/ui/no_effect.rs b/src/tools/clippy/tests/ui/no_effect.rs
index c52f43891..777b1e52c 100644
--- a/src/tools/clippy/tests/ui/no_effect.rs
+++ b/src/tools/clippy/tests/ui/no_effect.rs
@@ -9,6 +9,30 @@
clippy::useless_vec
)]
+use std::fmt::Display;
+use std::ops::{Neg, Shl};
+
+struct Cout;
+
+impl<T> Shl<T> for Cout
+where
+ T: Display,
+{
+ type Output = Self;
+ fn shl(self, rhs: T) -> Self::Output {
+ println!("{}", rhs);
+ self
+ }
+}
+
+impl Neg for Cout {
+ type Output = Self;
+ fn neg(self) -> Self::Output {
+ println!("hello world");
+ self
+ }
+}
+
struct Unit;
struct Tuple(i32);
struct Struct {
@@ -174,4 +198,11 @@ fn main() {
GreetStruct1("world");
GreetStruct2()("world");
GreetStruct3 {}("world");
+
+ fn n() -> i32 {
+ 42
+ }
+
+ Cout << 142;
+ -Cout;
}
diff --git a/src/tools/clippy/tests/ui/no_effect.stderr b/src/tools/clippy/tests/ui/no_effect.stderr
index feba35697..f5ba234b4 100644
--- a/src/tools/clippy/tests/ui/no_effect.stderr
+++ b/src/tools/clippy/tests/ui/no_effect.stderr
@@ -1,5 +1,5 @@
error: statement with no effect
- --> $DIR/no_effect.rs:98:5
+ --> $DIR/no_effect.rs:122:5
|
LL | 0;
| ^^
@@ -8,151 +8,151 @@ LL | 0;
= help: to override `-D warnings` add `#[allow(clippy::no_effect)]`
error: statement with no effect
- --> $DIR/no_effect.rs:101:5
+ --> $DIR/no_effect.rs:125:5
|
LL | s2;
| ^^^
error: statement with no effect
- --> $DIR/no_effect.rs:103:5
+ --> $DIR/no_effect.rs:127:5
|
LL | Unit;
| ^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:105:5
+ --> $DIR/no_effect.rs:129:5
|
LL | Tuple(0);
| ^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:107:5
+ --> $DIR/no_effect.rs:131:5
|
LL | Struct { field: 0 };
| ^^^^^^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:109:5
+ --> $DIR/no_effect.rs:133:5
|
LL | Struct { ..s };
| ^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:111:5
+ --> $DIR/no_effect.rs:135:5
|
LL | Union { a: 0 };
| ^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:113:5
+ --> $DIR/no_effect.rs:137:5
|
LL | Enum::Tuple(0);
| ^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:115:5
+ --> $DIR/no_effect.rs:139:5
|
LL | Enum::Struct { field: 0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:117:5
+ --> $DIR/no_effect.rs:141:5
|
LL | 5 + 6;
| ^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:119:5
+ --> $DIR/no_effect.rs:143:5
|
LL | *&42;
| ^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:121:5
+ --> $DIR/no_effect.rs:145:5
|
LL | &6;
| ^^^
error: statement with no effect
- --> $DIR/no_effect.rs:123:5
+ --> $DIR/no_effect.rs:147:5
|
LL | (5, 6, 7);
| ^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:125:5
+ --> $DIR/no_effect.rs:149:5
|
LL | ..;
| ^^^
error: statement with no effect
- --> $DIR/no_effect.rs:127:5
+ --> $DIR/no_effect.rs:151:5
|
LL | 5..;
| ^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:129:5
+ --> $DIR/no_effect.rs:153:5
|
LL | ..5;
| ^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:131:5
+ --> $DIR/no_effect.rs:155:5
|
LL | 5..6;
| ^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:133:5
+ --> $DIR/no_effect.rs:157:5
|
LL | 5..=6;
| ^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:135:5
+ --> $DIR/no_effect.rs:159:5
|
LL | [42, 55];
| ^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:137:5
+ --> $DIR/no_effect.rs:161:5
|
LL | [42, 55][1];
| ^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:139:5
+ --> $DIR/no_effect.rs:163:5
|
LL | (42, 55).1;
| ^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:141:5
+ --> $DIR/no_effect.rs:165:5
|
LL | [42; 55];
| ^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:143:5
+ --> $DIR/no_effect.rs:167:5
|
LL | [42; 55][13];
| ^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:146:5
+ --> $DIR/no_effect.rs:170:5
|
LL | || x += 5;
| ^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:149:5
+ --> $DIR/no_effect.rs:173:5
|
LL | FooString { s: s };
| ^^^^^^^^^^^^^^^^^^^
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:151:5
+ --> $DIR/no_effect.rs:175:5
|
LL | let _unused = 1;
| ^^^^^^^^^^^^^^^^
@@ -161,19 +161,19 @@ LL | let _unused = 1;
= help: to override `-D warnings` add `#[allow(clippy::no_effect_underscore_binding)]`
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:154:5
+ --> $DIR/no_effect.rs:178:5
|
LL | let _penguin = || println!("Some helpful closure");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:156:5
+ --> $DIR/no_effect.rs:180:5
|
LL | let _duck = Struct { field: 0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:158:5
+ --> $DIR/no_effect.rs:182:5
|
LL | let _cat = [2, 4, 6, 8][2];
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/non_minimal_cfg2.stderr b/src/tools/clippy/tests/ui/non_minimal_cfg2.stderr
index 001fcddd9..036d38c22 100644
--- a/src/tools/clippy/tests/ui/non_minimal_cfg2.stderr
+++ b/src/tools/clippy/tests/ui/non_minimal_cfg2.stderr
@@ -7,5 +7,5 @@ LL | #[cfg(all())]
= note: `-D clippy::non-minimal-cfg` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::non_minimal_cfg)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.stderr b/src/tools/clippy/tests/ui/obfuscated_if_else.stderr
index ca9f5e1e3..abf5adce4 100644
--- a/src/tools/clippy/tests/ui/obfuscated_if_else.stderr
+++ b/src/tools/clippy/tests/ui/obfuscated_if_else.stderr
@@ -7,5 +7,5 @@ LL | true.then_some("a").unwrap_or("b");
= note: `-D clippy::obfuscated-if-else` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::obfuscated_if_else)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/option_as_ref_deref.stderr b/src/tools/clippy/tests/ui/option_as_ref_deref.stderr
index eb0661c52..9d173e409 100644
--- a/src/tools/clippy/tests/ui/option_as_ref_deref.stderr
+++ b/src/tools/clippy/tests/ui/option_as_ref_deref.stderr
@@ -1,4 +1,4 @@
-error: called `.as_ref().map(Deref::deref)` on an Option value. This can be done more directly by calling `opt.clone().as_deref()` instead
+error: called `.as_ref().map(Deref::deref)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:11:13
|
LL | let _ = opt.clone().as_ref().map(Deref::deref).map(str::len);
@@ -7,7 +7,7 @@ LL | let _ = opt.clone().as_ref().map(Deref::deref).map(str::len);
= note: `-D clippy::option-as-ref-deref` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::option_as_ref_deref)]`
-error: called `.as_ref().map(Deref::deref)` on an Option value. This can be done more directly by calling `opt.clone().as_deref()` instead
+error: called `.as_ref().map(Deref::deref)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:14:13
|
LL | let _ = opt.clone()
@@ -17,97 +17,97 @@ LL | | Deref::deref
LL | | )
| |_________^ help: try using as_deref instead: `opt.clone().as_deref()`
-error: called `.as_mut().map(DerefMut::deref_mut)` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+error: called `.as_mut().map(DerefMut::deref_mut)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:20:13
|
LL | let _ = opt.as_mut().map(DerefMut::deref_mut);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
-error: called `.as_ref().map(String::as_str)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+error: called `.as_ref().map(String::as_str)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:22:13
|
LL | let _ = opt.as_ref().map(String::as_str);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
-error: called `.as_ref().map(|x| x.as_str())` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+error: called `.as_ref().map(|x| x.as_str())` on an `Option` value
--> $DIR/option_as_ref_deref.rs:23:13
|
LL | let _ = opt.as_ref().map(|x| x.as_str());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
-error: called `.as_mut().map(String::as_mut_str)` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+error: called `.as_mut().map(String::as_mut_str)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:24:13
|
LL | let _ = opt.as_mut().map(String::as_mut_str);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
-error: called `.as_mut().map(|x| x.as_mut_str())` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+error: called `.as_mut().map(|x| x.as_mut_str())` on an `Option` value
--> $DIR/option_as_ref_deref.rs:25:13
|
LL | let _ = opt.as_mut().map(|x| x.as_mut_str());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
-error: called `.as_ref().map(CString::as_c_str)` on an Option value. This can be done more directly by calling `Some(CString::new(vec![]).unwrap()).as_deref()` instead
+error: called `.as_ref().map(CString::as_c_str)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:26:13
|
LL | let _ = Some(CString::new(vec![]).unwrap()).as_ref().map(CString::as_c_str);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(CString::new(vec![]).unwrap()).as_deref()`
-error: called `.as_ref().map(OsString::as_os_str)` on an Option value. This can be done more directly by calling `Some(OsString::new()).as_deref()` instead
+error: called `.as_ref().map(OsString::as_os_str)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:27:13
|
LL | let _ = Some(OsString::new()).as_ref().map(OsString::as_os_str);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(OsString::new()).as_deref()`
-error: called `.as_ref().map(PathBuf::as_path)` on an Option value. This can be done more directly by calling `Some(PathBuf::new()).as_deref()` instead
+error: called `.as_ref().map(PathBuf::as_path)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:28:13
|
LL | let _ = Some(PathBuf::new()).as_ref().map(PathBuf::as_path);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(PathBuf::new()).as_deref()`
-error: called `.as_ref().map(Vec::as_slice)` on an Option value. This can be done more directly by calling `Some(Vec::<()>::new()).as_deref()` instead
+error: called `.as_ref().map(Vec::as_slice)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:29:13
|
LL | let _ = Some(Vec::<()>::new()).as_ref().map(Vec::as_slice);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(Vec::<()>::new()).as_deref()`
-error: called `.as_mut().map(Vec::as_mut_slice)` on an Option value. This can be done more directly by calling `Some(Vec::<()>::new()).as_deref_mut()` instead
+error: called `.as_mut().map(Vec::as_mut_slice)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:30:13
|
LL | let _ = Some(Vec::<()>::new()).as_mut().map(Vec::as_mut_slice);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `Some(Vec::<()>::new()).as_deref_mut()`
-error: called `.as_ref().map(|x| x.deref())` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+error: called `.as_ref().map(|x| x.deref())` on an `Option` value
--> $DIR/option_as_ref_deref.rs:32:13
|
LL | let _ = opt.as_ref().map(|x| x.deref());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
-error: called `.as_mut().map(|x| x.deref_mut())` on an Option value. This can be done more directly by calling `opt.clone().as_deref_mut()` instead
+error: called `.as_mut().map(|x| x.deref_mut())` on an `Option` value
--> $DIR/option_as_ref_deref.rs:33:13
|
LL | let _ = opt.clone().as_mut().map(|x| x.deref_mut()).map(|x| x.len());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.clone().as_deref_mut()`
-error: called `.as_ref().map(|x| &**x)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+error: called `.as_ref().map(|x| &**x)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:40:13
|
LL | let _ = opt.as_ref().map(|x| &**x);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
-error: called `.as_mut().map(|x| &mut **x)` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+error: called `.as_mut().map(|x| &mut **x)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:41:13
|
LL | let _ = opt.as_mut().map(|x| &mut **x);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
-error: called `.as_ref().map(std::ops::Deref::deref)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+error: called `.as_ref().map(std::ops::Deref::deref)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:44:13
|
LL | let _ = opt.as_ref().map(std::ops::Deref::deref);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
-error: called `.as_ref().map(String::as_str)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+error: called `.as_ref().map(String::as_str)` on an `Option` value
--> $DIR/option_as_ref_deref.rs:56:13
|
LL | let _ = opt.as_ref().map(String::as_str);
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.fixed b/src/tools/clippy/tests/ui/option_if_let_else.fixed
index f0113ca69..363520112 100644
--- a/src/tools/clippy/tests/ui/option_if_let_else.fixed
+++ b/src/tools/clippy/tests/ui/option_if_let_else.fixed
@@ -92,11 +92,13 @@ fn pattern_to_vec(pattern: &str) -> Vec<String> {
}
// #10335
-fn test_result_impure_else(variable: Result<u32, &str>) {
+fn test_result_impure_else(variable: Result<u32, &str>) -> bool {
variable.map_or_else(|_| {
println!("Err");
+ false
}, |binding| {
println!("Ok {binding}");
+ true
})
}
@@ -213,15 +215,19 @@ mod issue10729 {
pub fn reproduce(initial: &Option<String>) {
// 👇 needs `.as_ref()` because initial is an `&Option<_>`
- initial.as_ref().map_or({}, |value| do_something(value))
+ let _ = initial.as_ref().map_or(42, |value| do_something(value));
}
pub fn reproduce2(initial: &mut Option<String>) {
- initial.as_mut().map_or({}, |value| do_something2(value))
+ let _ = initial.as_mut().map_or(42, |value| do_something2(value));
}
- fn do_something(_value: &str) {}
- fn do_something2(_value: &mut str) {}
+ fn do_something(_value: &str) -> u32 {
+ todo!()
+ }
+ fn do_something2(_value: &mut str) -> u32 {
+ todo!()
+ }
}
fn issue11429() {
@@ -237,3 +243,13 @@ fn issue11429() {
let mut _hm = opt.as_ref().map_or_else(|| new_map!(), |hm| hm.clone());
}
+
+fn issue11893() {
+ use std::io::Write;
+ let mut output = std::io::stdout().lock();
+ if let Some(name) = Some("stuff") {
+ writeln!(output, "{name:?}").unwrap();
+ } else {
+ panic!("Haven't thought about this condition.");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.rs b/src/tools/clippy/tests/ui/option_if_let_else.rs
index 18b7af443..aaa87a0db 100644
--- a/src/tools/clippy/tests/ui/option_if_let_else.rs
+++ b/src/tools/clippy/tests/ui/option_if_let_else.rs
@@ -115,11 +115,13 @@ fn pattern_to_vec(pattern: &str) -> Vec<String> {
}
// #10335
-fn test_result_impure_else(variable: Result<u32, &str>) {
+fn test_result_impure_else(variable: Result<u32, &str>) -> bool {
if let Ok(binding) = variable {
println!("Ok {binding}");
+ true
} else {
println!("Err");
+ false
}
}
@@ -254,21 +256,25 @@ mod issue10729 {
pub fn reproduce(initial: &Option<String>) {
// 👇 needs `.as_ref()` because initial is an `&Option<_>`
- match initial {
+ let _ = match initial {
Some(value) => do_something(value),
- None => {},
- }
+ None => 42,
+ };
}
pub fn reproduce2(initial: &mut Option<String>) {
- match initial {
+ let _ = match initial {
Some(value) => do_something2(value),
- None => {},
- }
+ None => 42,
+ };
}
- fn do_something(_value: &str) {}
- fn do_something2(_value: &mut str) {}
+ fn do_something(_value: &str) -> u32 {
+ todo!()
+ }
+ fn do_something2(_value: &mut str) -> u32 {
+ todo!()
+ }
}
fn issue11429() {
@@ -288,3 +294,13 @@ fn issue11429() {
let mut _hm = if let Some(hm) = &opt { hm.clone() } else { new_map!() };
}
+
+fn issue11893() {
+ use std::io::Write;
+ let mut output = std::io::stdout().lock();
+ if let Some(name) = Some("stuff") {
+ writeln!(output, "{name:?}").unwrap();
+ } else {
+ panic!("Haven't thought about this condition.");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.stderr b/src/tools/clippy/tests/ui/option_if_let_else.stderr
index e36357bcb..55a8360ff 100644
--- a/src/tools/clippy/tests/ui/option_if_let_else.stderr
+++ b/src/tools/clippy/tests/ui/option_if_let_else.stderr
@@ -158,8 +158,10 @@ error: use Option::map_or_else instead of an if let/else
|
LL | / if let Ok(binding) = variable {
LL | | println!("Ok {binding}");
+LL | | true
LL | | } else {
LL | | println!("Err");
+LL | | false
LL | | }
| |_____^
|
@@ -167,19 +169,21 @@ help: try
|
LL ~ variable.map_or_else(|_| {
LL + println!("Err");
+LL + false
LL + }, |binding| {
LL + println!("Ok {binding}");
+LL + true
LL + })
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:141:13
+ --> $DIR/option_if_let_else.rs:143:13
|
LL | let _ = if let Some(x) = optional { x + 2 } else { 5 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `optional.map_or(5, |x| x + 2)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:151:13
+ --> $DIR/option_if_let_else.rs:153:13
|
LL | let _ = if let Some(x) = Some(0) {
| _____________^
@@ -201,13 +205,13 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:179:13
+ --> $DIR/option_if_let_else.rs:181:13
|
LL | let _ = if let Some(x) = Some(0) { s.len() + x } else { s.len() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(0).map_or(s.len(), |x| s.len() + x)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:183:13
+ --> $DIR/option_if_let_else.rs:185:13
|
LL | let _ = if let Some(x) = Some(0) {
| _____________^
@@ -227,7 +231,7 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:222:13
+ --> $DIR/option_if_let_else.rs:224:13
|
LL | let _ = match s {
| _____________^
@@ -237,7 +241,7 @@ LL | | };
| |_____^ help: try: `s.map_or(1, |string| string.len())`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:226:13
+ --> $DIR/option_if_let_else.rs:228:13
|
LL | let _ = match Some(10) {
| _____________^
@@ -247,7 +251,7 @@ LL | | };
| |_____^ help: try: `Some(10).map_or(5, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:232:13
+ --> $DIR/option_if_let_else.rs:234:13
|
LL | let _ = match res {
| _____________^
@@ -257,7 +261,7 @@ LL | | };
| |_____^ help: try: `res.map_or(1, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:236:13
+ --> $DIR/option_if_let_else.rs:238:13
|
LL | let _ = match res {
| _____________^
@@ -267,31 +271,33 @@ LL | | };
| |_____^ help: try: `res.map_or(1, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:240:13
+ --> $DIR/option_if_let_else.rs:242:13
|
LL | let _ = if let Ok(a) = res { a + 1 } else { 5 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `res.map_or(5, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:257:9
+ --> $DIR/option_if_let_else.rs:259:17
|
-LL | / match initial {
+LL | let _ = match initial {
+ | _________________^
LL | | Some(value) => do_something(value),
-LL | | None => {},
-LL | | }
- | |_________^ help: try: `initial.as_ref().map_or({}, |value| do_something(value))`
+LL | | None => 42,
+LL | | };
+ | |_________^ help: try: `initial.as_ref().map_or(42, |value| do_something(value))`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:264:9
+ --> $DIR/option_if_let_else.rs:266:17
|
-LL | / match initial {
+LL | let _ = match initial {
+ | _________________^
LL | | Some(value) => do_something2(value),
-LL | | None => {},
-LL | | }
- | |_________^ help: try: `initial.as_mut().map_or({}, |value| do_something2(value))`
+LL | | None => 42,
+LL | | };
+ | |_________^ help: try: `initial.as_mut().map_or(42, |value| do_something2(value))`
error: use Option::map_or_else instead of an if let/else
- --> $DIR/option_if_let_else.rs:283:24
+ --> $DIR/option_if_let_else.rs:289:24
|
LL | let mut _hashmap = if let Some(hm) = &opt {
| ________________________^
@@ -302,7 +308,7 @@ LL | | };
| |_____^ help: try: `opt.as_ref().map_or_else(HashMap::new, |hm| hm.clone())`
error: use Option::map_or_else instead of an if let/else
- --> $DIR/option_if_let_else.rs:289:19
+ --> $DIR/option_if_let_else.rs:295:19
|
LL | let mut _hm = if let Some(hm) = &opt { hm.clone() } else { new_map!() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `opt.as_ref().map_or_else(|| new_map!(), |hm| hm.clone())`
diff --git a/src/tools/clippy/tests/ui/option_map_or_err_ok.fixed b/src/tools/clippy/tests/ui/option_map_or_err_ok.fixed
new file mode 100644
index 000000000..131f4b209
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_or_err_ok.fixed
@@ -0,0 +1,7 @@
+#![warn(clippy::option_map_or_err_ok)]
+
+fn main() {
+ let x = Some("a");
+ let _ = x.ok_or("a");
+ //~^ ERROR: called `map_or(Err(_), Ok)` on an `Option` value
+}
diff --git a/src/tools/clippy/tests/ui/option_map_or_err_ok.rs b/src/tools/clippy/tests/ui/option_map_or_err_ok.rs
new file mode 100644
index 000000000..0f07a592a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_or_err_ok.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::option_map_or_err_ok)]
+
+fn main() {
+ let x = Some("a");
+ let _ = x.map_or(Err("a"), Ok);
+ //~^ ERROR: called `map_or(Err(_), Ok)` on an `Option` value
+}
diff --git a/src/tools/clippy/tests/ui/option_map_or_err_ok.stderr b/src/tools/clippy/tests/ui/option_map_or_err_ok.stderr
new file mode 100644
index 000000000..a193e3c4c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_or_err_ok.stderr
@@ -0,0 +1,11 @@
+error: called `map_or(Err(_), Ok)` on an `Option` value
+ --> $DIR/option_map_or_err_ok.rs:5:13
+ |
+LL | let _ = x.map_or(Err("a"), Ok);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try using `ok_or` instead: `x.ok_or("a")`
+ |
+ = note: `-D clippy::option-map-or-err-ok` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::option_map_or_err_ok)]`
+
+error: aborting due to 1 previous error
+
diff --git a/src/tools/clippy/tests/ui/option_map_or_none.stderr b/src/tools/clippy/tests/ui/option_map_or_none.stderr
index fa150718f..f2cfc3f9a 100644
--- a/src/tools/clippy/tests/ui/option_map_or_none.stderr
+++ b/src/tools/clippy/tests/ui/option_map_or_none.stderr
@@ -1,4 +1,4 @@
-error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `map(..)` instead
+error: called `map_or(None, ..)` on an `Option` value
--> $DIR/option_map_or_none.rs:10:26
|
LL | let _: Option<i32> = opt.map_or(None, |x| Some(x + 1));
@@ -7,7 +7,7 @@ LL | let _: Option<i32> = opt.map_or(None, |x| Some(x + 1));
= note: `-D clippy::option-map-or-none` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::option_map_or_none)]`
-error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `map(..)` instead
+error: called `map_or(None, ..)` on an `Option` value
--> $DIR/option_map_or_none.rs:13:26
|
LL | let _: Option<i32> = opt.map_or(None, |x| {
@@ -16,13 +16,13 @@ LL | | Some(x + 1)
LL | | });
| |_________________________^ help: try using `map` instead: `opt.map(|x| x + 1)`
-error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `and_then(..)` instead
+error: called `map_or(None, ..)` on an `Option` value
--> $DIR/option_map_or_none.rs:17:26
|
LL | let _: Option<i32> = opt.map_or(None, bar);
| ^^^^^^^^^^^^^^^^^^^^^ help: try using `and_then` instead: `opt.and_then(bar)`
-error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `and_then(..)` instead
+error: called `map_or(None, ..)` on an `Option` value
--> $DIR/option_map_or_none.rs:18:26
|
LL | let _: Option<i32> = opt.map_or(None, |x| {
@@ -42,7 +42,7 @@ LL + Some(offset + height)
LL ~ });
|
-error: called `map_or(None, Some)` on a `Result` value. This can be done more directly by calling `ok()` instead
+error: called `map_or(None, Some)` on a `Result` value
--> $DIR/option_map_or_none.rs:25:26
|
LL | let _: Option<i32> = r.map_or(None, Some);
diff --git a/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr b/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr
index 163d6b1dd..2210e706d 100644
--- a/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr
+++ b/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr
@@ -11,5 +11,5 @@ LL | | }
= note: `-D clippy::partialeq-ne-impl` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::partialeq_ne_impl)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr b/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr
index 1453d020c..f96ce0de7 100644
--- a/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr
+++ b/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr
@@ -7,5 +7,5 @@ LL | x.push("/bar");
= note: `-D clippy::path-buf-push-overwrite` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::path_buf_push_overwrite)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/permissions_set_readonly_false.stderr b/src/tools/clippy/tests/ui/permissions_set_readonly_false.stderr
index 58a7de84d..bd3446308 100644
--- a/src/tools/clippy/tests/ui/permissions_set_readonly_false.stderr
+++ b/src/tools/clippy/tests/ui/permissions_set_readonly_false.stderr
@@ -10,5 +10,5 @@ LL | permissions.set_readonly(false);
= note: `-D clippy::permissions-set-readonly-false` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::permissions_set_readonly_false)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/proc_macro.stderr b/src/tools/clippy/tests/ui/proc_macro.stderr
index d912b5027..122374ea8 100644
--- a/src/tools/clippy/tests/ui/proc_macro.stderr
+++ b/src/tools/clippy/tests/ui/proc_macro.stderr
@@ -7,5 +7,5 @@ LL | let _x = 3.14;
= help: consider using the constant directly
= note: `#[deny(clippy::approx_constant)]` on by default
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs b/src/tools/clippy/tests/ui/ptr_arg.rs
index 91e2e7fd6..fcd716f41 100644
--- a/src/tools/clippy/tests/ui/ptr_arg.rs
+++ b/src/tools/clippy/tests/ui/ptr_arg.rs
@@ -22,6 +22,12 @@ fn do_vec_mut(x: &mut Vec<i64>) {
//Nothing here
}
+fn do_vec_mut2(x: &mut Vec<i64>) {
+ //~^ ERROR: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice w
+ x.len();
+ x.is_empty();
+}
+
fn do_str(x: &String) {
//~^ ERROR: writing `&String` instead of `&str` involves a new object where a slice will d
//Nothing here either
diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr
index cccf2d62d..35bd85092 100644
--- a/src/tools/clippy/tests/ui/ptr_arg.stderr
+++ b/src/tools/clippy/tests/ui/ptr_arg.stderr
@@ -13,38 +13,44 @@ error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a sl
LL | fn do_vec_mut(x: &mut Vec<i64>) {
| ^^^^^^^^^^^^^ help: change this to: `&mut [i64]`
+error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:25:19
+ |
+LL | fn do_vec_mut2(x: &mut Vec<i64>) {
+ | ^^^^^^^^^^^^^ help: change this to: `&mut [i64]`
+
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:25:14
+ --> $DIR/ptr_arg.rs:31:14
|
LL | fn do_str(x: &String) {
| ^^^^^^^ help: change this to: `&str`
error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:30:18
+ --> $DIR/ptr_arg.rs:36:18
|
LL | fn do_str_mut(x: &mut String) {
| ^^^^^^^^^^^ help: change this to: `&mut str`
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:35:15
+ --> $DIR/ptr_arg.rs:41:15
|
LL | fn do_path(x: &PathBuf) {
| ^^^^^^^^ help: change this to: `&Path`
error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:40:19
+ --> $DIR/ptr_arg.rs:46:19
|
LL | fn do_path_mut(x: &mut PathBuf) {
| ^^^^^^^^^^^^ help: change this to: `&mut Path`
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:49:18
+ --> $DIR/ptr_arg.rs:55:18
|
LL | fn do_vec(x: &Vec<i64>);
| ^^^^^^^^^ help: change this to: `&[i64]`
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:63:14
+ --> $DIR/ptr_arg.rs:69:14
|
LL | fn cloned(x: &Vec<u8>) -> Vec<u8> {
| ^^^^^^^^
@@ -62,7 +68,7 @@ LL ~ x.to_owned()
|
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:73:18
+ --> $DIR/ptr_arg.rs:79:18
|
LL | fn str_cloned(x: &String) -> String {
| ^^^^^^^
@@ -79,7 +85,7 @@ LL ~ x.to_owned()
|
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:82:19
+ --> $DIR/ptr_arg.rs:88:19
|
LL | fn path_cloned(x: &PathBuf) -> PathBuf {
| ^^^^^^^^
@@ -96,7 +102,7 @@ LL ~ x.to_path_buf()
|
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:91:44
+ --> $DIR/ptr_arg.rs:97:44
|
LL | fn false_positive_capacity(x: &Vec<u8>, y: &String) {
| ^^^^^^^
@@ -111,19 +117,19 @@ LL ~ let c = y;
|
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:106:25
+ --> $DIR/ptr_arg.rs:112:25
|
LL | fn test_cow_with_ref(c: &Cow<[i32]>) {}
| ^^^^^^^^^^^ help: change this to: `&[i32]`
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:136:66
+ --> $DIR/ptr_arg.rs:142:66
|
LL | fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {}
| ^^^^^^^ help: change this to: `&str`
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:166:21
+ --> $DIR/ptr_arg.rs:172:21
|
LL | fn foo_vec(vec: &Vec<u8>) {
| ^^^^^^^^
@@ -137,7 +143,7 @@ LL ~ let _ = vec.to_owned().clone();
|
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:172:23
+ --> $DIR/ptr_arg.rs:178:23
|
LL | fn foo_path(path: &PathBuf) {
| ^^^^^^^^
@@ -151,7 +157,7 @@ LL ~ let _ = path.to_path_buf().clone();
|
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:178:21
+ --> $DIR/ptr_arg.rs:184:21
|
LL | fn foo_str(str: &PathBuf) {
| ^^^^^^^^
@@ -165,46 +171,46 @@ LL ~ let _ = str.to_path_buf().clone();
|
error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:185:29
+ --> $DIR/ptr_arg.rs:191:29
|
LL | fn mut_vec_slice_methods(v: &mut Vec<u32>) {
| ^^^^^^^^^^^^^ help: change this to: `&mut [u32]`
error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:248:17
+ --> $DIR/ptr_arg.rs:254:17
|
LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) {
| ^^^^^^^^^^^^^ help: change this to: `&mut [u32]`
error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:248:35
+ --> $DIR/ptr_arg.rs:254:35
|
LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) {
| ^^^^^^^^^^^ help: change this to: `&mut str`
error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:248:51
+ --> $DIR/ptr_arg.rs:254:51
|
LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) {
| ^^^^^^^^^^^^ help: change this to: `&mut Path`
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:274:39
+ --> $DIR/ptr_arg.rs:280:39
|
LL | fn cow_elided_lifetime<'a>(input: &'a Cow<str>) -> &'a str {
| ^^^^^^^^^^^^ help: change this to: `&str`
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:280:36
+ --> $DIR/ptr_arg.rs:286:36
|
LL | fn cow_bad_ret_ty_1<'a>(input: &'a Cow<'a, str>) -> &'static str {
| ^^^^^^^^^^^^^^^^ help: change this to: `&str`
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:284:40
+ --> $DIR/ptr_arg.rs:290:40
|
LL | fn cow_bad_ret_ty_2<'a, 'b>(input: &'a Cow<'a, str>) -> &'b str {
| ^^^^^^^^^^^^^^^^ help: change this to: `&str`
-error: aborting due to 23 previous errors
+error: aborting due to 24 previous errors
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
index ca13b52ae..fa15c3235 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
@@ -71,3 +71,118 @@ fn _msrv_1_38() {
let _ = ptr.cast::<i32>();
let _ = mut_ptr.cast::<i32>();
}
+
+#[allow(clippy::unnecessary_cast)]
+mod null {
+ fn use_path_mut() -> *mut u32 {
+ use std::ptr;
+ ptr::null_mut::<u32>()
+ }
+
+ fn full_path_mut() -> *mut u32 {
+ std::ptr::null_mut::<u32>()
+ }
+
+ fn core_path_mut() -> *mut u32 {
+ use core::ptr;
+ ptr::null_mut::<u32>()
+ }
+
+ fn full_core_path_mut() -> *mut u32 {
+ core::ptr::null_mut::<u32>()
+ }
+
+ fn use_path() -> *const u32 {
+ use std::ptr;
+ ptr::null::<u32>()
+ }
+
+ fn full_path() -> *const u32 {
+ std::ptr::null::<u32>()
+ }
+
+ fn core_path() -> *const u32 {
+ use core::ptr;
+ ptr::null::<u32>()
+ }
+
+ fn full_core_path() -> *const u32 {
+ core::ptr::null::<u32>()
+ }
+}
+
+mod null_ptr_infer {
+ fn use_path_mut() -> *mut u32 {
+ use std::ptr;
+ ptr::null_mut()
+ }
+
+ fn full_path_mut() -> *mut u32 {
+ std::ptr::null_mut()
+ }
+
+ fn core_path_mut() -> *mut u32 {
+ use core::ptr;
+ ptr::null_mut()
+ }
+
+ fn full_core_path_mut() -> *mut u32 {
+ core::ptr::null_mut()
+ }
+
+ fn use_path() -> *const u32 {
+ use std::ptr;
+ ptr::null()
+ }
+
+ fn full_path() -> *const u32 {
+ std::ptr::null()
+ }
+
+ fn core_path() -> *const u32 {
+ use core::ptr;
+ ptr::null()
+ }
+
+ fn full_core_path() -> *const u32 {
+ core::ptr::null()
+ }
+}
+
+mod null_entire_infer {
+ fn use_path_mut() -> *mut u32 {
+ use std::ptr;
+ ptr::null_mut()
+ }
+
+ fn full_path_mut() -> *mut u32 {
+ std::ptr::null_mut()
+ }
+
+ fn core_path_mut() -> *mut u32 {
+ use core::ptr;
+ ptr::null_mut()
+ }
+
+ fn full_core_path_mut() -> *mut u32 {
+ core::ptr::null_mut()
+ }
+
+ fn use_path() -> *const u32 {
+ use std::ptr;
+ ptr::null()
+ }
+
+ fn full_path() -> *const u32 {
+ std::ptr::null()
+ }
+
+ fn core_path() -> *const u32 {
+ use core::ptr;
+ ptr::null()
+ }
+
+ fn full_core_path() -> *const u32 {
+ core::ptr::null()
+ }
+}
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.rs b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
index 942c87344..7ab52e63d 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.rs
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
@@ -71,3 +71,118 @@ fn _msrv_1_38() {
let _ = ptr as *const i32;
let _ = mut_ptr as *mut i32;
}
+
+#[allow(clippy::unnecessary_cast)]
+mod null {
+ fn use_path_mut() -> *mut u32 {
+ use std::ptr;
+ ptr::null_mut() as *mut u32
+ }
+
+ fn full_path_mut() -> *mut u32 {
+ std::ptr::null_mut() as *mut u32
+ }
+
+ fn core_path_mut() -> *mut u32 {
+ use core::ptr;
+ ptr::null_mut() as *mut u32
+ }
+
+ fn full_core_path_mut() -> *mut u32 {
+ core::ptr::null_mut() as *mut u32
+ }
+
+ fn use_path() -> *const u32 {
+ use std::ptr;
+ ptr::null() as *const u32
+ }
+
+ fn full_path() -> *const u32 {
+ std::ptr::null() as *const u32
+ }
+
+ fn core_path() -> *const u32 {
+ use core::ptr;
+ ptr::null() as *const u32
+ }
+
+ fn full_core_path() -> *const u32 {
+ core::ptr::null() as *const u32
+ }
+}
+
+mod null_ptr_infer {
+ fn use_path_mut() -> *mut u32 {
+ use std::ptr;
+ ptr::null_mut() as *mut _
+ }
+
+ fn full_path_mut() -> *mut u32 {
+ std::ptr::null_mut() as *mut _
+ }
+
+ fn core_path_mut() -> *mut u32 {
+ use core::ptr;
+ ptr::null_mut() as *mut _
+ }
+
+ fn full_core_path_mut() -> *mut u32 {
+ core::ptr::null_mut() as *mut _
+ }
+
+ fn use_path() -> *const u32 {
+ use std::ptr;
+ ptr::null() as *const _
+ }
+
+ fn full_path() -> *const u32 {
+ std::ptr::null() as *const _
+ }
+
+ fn core_path() -> *const u32 {
+ use core::ptr;
+ ptr::null() as *const _
+ }
+
+ fn full_core_path() -> *const u32 {
+ core::ptr::null() as *const _
+ }
+}
+
+mod null_entire_infer {
+ fn use_path_mut() -> *mut u32 {
+ use std::ptr;
+ ptr::null_mut() as _
+ }
+
+ fn full_path_mut() -> *mut u32 {
+ std::ptr::null_mut() as _
+ }
+
+ fn core_path_mut() -> *mut u32 {
+ use core::ptr;
+ ptr::null_mut() as _
+ }
+
+ fn full_core_path_mut() -> *mut u32 {
+ core::ptr::null_mut() as _
+ }
+
+ fn use_path() -> *const u32 {
+ use std::ptr;
+ ptr::null() as _
+ }
+
+ fn full_path() -> *const u32 {
+ std::ptr::null() as _
+ }
+
+ fn core_path() -> *const u32 {
+ use core::ptr;
+ ptr::null() as _
+ }
+
+ fn full_core_path() -> *const u32 {
+ core::ptr::null() as _
+ }
+}
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
index c0ce69b43..ef64347e9 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
@@ -57,5 +57,149 @@ error: `as` casting between raw pointers without changing its mutability
LL | let _ = mut_ptr as *mut i32;
| ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
-error: aborting due to 9 previous errors
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:79:9
+ |
+LL | ptr::null_mut() as *mut u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null_mut::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:83:9
+ |
+LL | std::ptr::null_mut() as *mut u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null_mut::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:88:9
+ |
+LL | ptr::null_mut() as *mut u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null_mut::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:92:9
+ |
+LL | core::ptr::null_mut() as *mut u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null_mut::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:97:9
+ |
+LL | ptr::null() as *const u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:101:9
+ |
+LL | std::ptr::null() as *const u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:106:9
+ |
+LL | ptr::null() as *const u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:110:9
+ |
+LL | core::ptr::null() as *const u32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null::<u32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:117:9
+ |
+LL | ptr::null_mut() as *mut _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:121:9
+ |
+LL | std::ptr::null_mut() as *mut _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:126:9
+ |
+LL | ptr::null_mut() as *mut _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:130:9
+ |
+LL | core::ptr::null_mut() as *mut _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:135:9
+ |
+LL | ptr::null() as *const _
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:139:9
+ |
+LL | std::ptr::null() as *const _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:144:9
+ |
+LL | ptr::null() as *const _
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:148:9
+ |
+LL | core::ptr::null() as *const _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:155:9
+ |
+LL | ptr::null_mut() as _
+ | ^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:159:9
+ |
+LL | std::ptr::null_mut() as _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:164:9
+ |
+LL | ptr::null_mut() as _
+ | ^^^^^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:168:9
+ |
+LL | core::ptr::null_mut() as _
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null_mut()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:173:9
+ |
+LL | ptr::null() as _
+ | ^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:177:9
+ |
+LL | std::ptr::null() as _
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:182:9
+ |
+LL | ptr::null() as _
+ | ^^^^^^^^^^^^^^^^ help: try call directly: `ptr::null()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:186:9
+ |
+LL | core::ptr::null() as _
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null()`
+
+error: aborting due to 33 previous errors
diff --git a/src/tools/clippy/tests/ui/pub_use.stderr b/src/tools/clippy/tests/ui/pub_use.stderr
index 781572736..f6f5db9a1 100644
--- a/src/tools/clippy/tests/ui/pub_use.stderr
+++ b/src/tools/clippy/tests/ui/pub_use.stderr
@@ -8,5 +8,5 @@ LL | pub use inner::Test;
= note: `-D clippy::pub-use` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::pub_use)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/question_mark_used.stderr b/src/tools/clippy/tests/ui/question_mark_used.stderr
index a3f440de8..b4e256ddb 100644
--- a/src/tools/clippy/tests/ui/question_mark_used.stderr
+++ b/src/tools/clippy/tests/ui/question_mark_used.stderr
@@ -8,5 +8,5 @@ LL | other_function()?;
= note: `-D clippy::question-mark-used` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::question_mark_used)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/range.stderr b/src/tools/clippy/tests/ui/range.stderr
index 9f174307b..78ef17b5b 100644
--- a/src/tools/clippy/tests/ui/range.stderr
+++ b/src/tools/clippy/tests/ui/range.stderr
@@ -7,5 +7,5 @@ LL | let _x = v1.iter().zip(0..v1.len());
= note: `-D clippy::range-zip-with-len` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::range_zip_with_len)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
index bf268d0b5..f272d8359 100644
--- a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
@@ -84,3 +84,21 @@ fn issue9956() {
bar()(42, 5);
foo(42, 5);
}
+
+async fn issue11357() {
+ async {}.await;
+}
+
+mod issue11707 {
+ use core::future::Future;
+
+ fn spawn_on(fut: impl Future<Output = ()>) {}
+
+ fn demo() {
+ spawn_on(async move {});
+ }
+}
+
+fn avoid_double_parens() {
+ std::convert::identity(13_i32 + 36_i32).leading_zeros();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
index c8a91049d..f45db8c9c 100644
--- a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
@@ -84,3 +84,21 @@ fn issue9956() {
bar()((|| || 42)()(), 5);
foo((|| || 42)()(), 5);
}
+
+async fn issue11357() {
+ (|| async {})().await;
+}
+
+mod issue11707 {
+ use core::future::Future;
+
+ fn spawn_on(fut: impl Future<Output = ()>) {}
+
+ fn demo() {
+ spawn_on((|| async move {})());
+ }
+}
+
+fn avoid_double_parens() {
+ std::convert::identity((|| 13_i32 + 36_i32)()).leading_zeros();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
index a7cdb4369..028d383ad 100644
--- a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
@@ -123,5 +123,23 @@ error: try not to call a closure in the expression where it is declared
LL | foo((|| || 42)()(), 5);
| ^^^^^^^^^^^^^^ help: try doing something like: `42`
-error: aborting due to 14 previous errors
+error: try not to call a closure in the expression where it is declared
+ --> $DIR/redundant_closure_call_fixable.rs:89:5
+ |
+LL | (|| async {})().await;
+ | ^^^^^^^^^^^^^^^ help: try doing something like: `async {}`
+
+error: try not to call a closure in the expression where it is declared
+ --> $DIR/redundant_closure_call_fixable.rs:98:18
+ |
+LL | spawn_on((|| async move {})());
+ | ^^^^^^^^^^^^^^^^^^^^ help: try doing something like: `async move {}`
+
+error: try not to call a closure in the expression where it is declared
+ --> $DIR/redundant_closure_call_fixable.rs:103:28
+ |
+LL | std::convert::identity((|| 13_i32 + 36_i32)()).leading_zeros();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try doing something like: `13_i32 + 36_i32`
+
+error: aborting due to 17 previous errors
diff --git a/src/tools/clippy/tests/ui/redundant_guards.fixed b/src/tools/clippy/tests/ui/redundant_guards.fixed
index f8af90927..aef26ef22 100644
--- a/src/tools/clippy/tests/ui/redundant_guards.fixed
+++ b/src/tools/clippy/tests/ui/redundant_guards.fixed
@@ -193,3 +193,60 @@ mod issue11465 {
}
}
}
+
+fn issue11807() {
+ #![allow(clippy::single_match)]
+
+ match Some(Some("")) {
+ Some(Some("")) => {},
+ _ => {},
+ }
+
+ match Some(Some(String::new())) {
+ // Do not lint: String deref-coerces to &str
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some([])) => {},
+ _ => {},
+ }
+
+ match Some(Some([] as [i32; 0])) {
+ Some(Some([])) => {},
+ _ => {},
+ }
+
+ match Some(Some(Vec::<()>::new())) {
+ // Do not lint: Vec deref-coerces to &[T]
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some([..])) => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some([1, ..])) => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some([1, 2, ..])) => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some([.., 1, 2])) => {},
+ _ => {},
+ }
+
+ match Some(Some(Vec::<i32>::new())) {
+ // Do not lint: deref coercion
+ Some(Some(x)) if x.starts_with(&[1, 2]) => {},
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_guards.rs b/src/tools/clippy/tests/ui/redundant_guards.rs
index b46f8a620..5d476f5b0 100644
--- a/src/tools/clippy/tests/ui/redundant_guards.rs
+++ b/src/tools/clippy/tests/ui/redundant_guards.rs
@@ -193,3 +193,60 @@ mod issue11465 {
}
}
}
+
+fn issue11807() {
+ #![allow(clippy::single_match)]
+
+ match Some(Some("")) {
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some(String::new())) {
+ // Do not lint: String deref-coerces to &str
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some([] as [i32; 0])) {
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some(Vec::<()>::new())) {
+ // Do not lint: Vec deref-coerces to &[T]
+ Some(Some(x)) if x.is_empty() => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some(x)) if x.starts_with(&[]) => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some(x)) if x.starts_with(&[1]) => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some(x)) if x.starts_with(&[1, 2]) => {},
+ _ => {},
+ }
+
+ match Some(Some(&[] as &[i32])) {
+ Some(Some(x)) if x.ends_with(&[1, 2]) => {},
+ _ => {},
+ }
+
+ match Some(Some(Vec::<i32>::new())) {
+ // Do not lint: deref coercion
+ Some(Some(x)) if x.starts_with(&[1, 2]) => {},
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_guards.stderr b/src/tools/clippy/tests/ui/redundant_guards.stderr
index b8d7834e3..f78d2a814 100644
--- a/src/tools/clippy/tests/ui/redundant_guards.stderr
+++ b/src/tools/clippy/tests/ui/redundant_guards.stderr
@@ -203,5 +203,89 @@ LL - B { ref c, .. } if matches!(c, &1) => {},
LL + B { c: 1, .. } => {},
|
-error: aborting due to 17 previous errors
+error: redundant guard
+ --> $DIR/redundant_guards.rs:201:26
+ |
+LL | Some(Some(x)) if x.is_empty() => {},
+ | ^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.is_empty() => {},
+LL + Some(Some("")) => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:212:26
+ |
+LL | Some(Some(x)) if x.is_empty() => {},
+ | ^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.is_empty() => {},
+LL + Some(Some([])) => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:217:26
+ |
+LL | Some(Some(x)) if x.is_empty() => {},
+ | ^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.is_empty() => {},
+LL + Some(Some([])) => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:228:26
+ |
+LL | Some(Some(x)) if x.starts_with(&[]) => {},
+ | ^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.starts_with(&[]) => {},
+LL + Some(Some([..])) => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:233:26
+ |
+LL | Some(Some(x)) if x.starts_with(&[1]) => {},
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.starts_with(&[1]) => {},
+LL + Some(Some([1, ..])) => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:238:26
+ |
+LL | Some(Some(x)) if x.starts_with(&[1, 2]) => {},
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.starts_with(&[1, 2]) => {},
+LL + Some(Some([1, 2, ..])) => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:243:26
+ |
+LL | Some(Some(x)) if x.ends_with(&[1, 2]) => {},
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(Some(x)) if x.ends_with(&[1, 2]) => {},
+LL + Some(Some([.., 1, 2])) => {},
+ |
+
+error: aborting due to 24 previous errors
diff --git a/src/tools/clippy/tests/ui/regex.rs b/src/tools/clippy/tests/ui/regex.rs
index 094d9574a..1ea0d65bf 100644
--- a/src/tools/clippy/tests/ui/regex.rs
+++ b/src/tools/clippy/tests/ui/regex.rs
@@ -112,6 +112,10 @@ fn trivial_regex() {
// #6005: unicode classes in bytes::Regex
let a_byte_of_unicode = BRegex::new(r"\p{C}");
+
+ // start and end word boundry, introduced in regex 0.10
+ let _ = BRegex::new(r"\<word\>");
+ let _ = BRegex::new(r"\b{start}word\b{end}");
}
fn main() {
diff --git a/src/tools/clippy/tests/ui/rename.fixed b/src/tools/clippy/tests/ui/rename.fixed
index 4df9be2c2..f4ff0f0b8 100644
--- a/src/tools/clippy/tests/ui/rename.fixed
+++ b/src/tools/clippy/tests/ui/rename.fixed
@@ -4,7 +4,7 @@
#![allow(clippy::almost_complete_range)]
#![allow(clippy::disallowed_names)]
-#![allow(clippy::blocks_in_if_conditions)]
+#![allow(clippy::blocks_in_conditions)]
#![allow(clippy::box_collection)]
#![allow(clippy::redundant_static_lifetimes)]
#![allow(clippy::cognitive_complexity)]
@@ -51,10 +51,12 @@
#![allow(undropped_manually_drops)]
#![allow(unknown_lints)]
#![allow(unused_labels)]
+#![allow(ambiguous_wide_pointer_comparisons)]
#![warn(clippy::almost_complete_range)]
#![warn(clippy::disallowed_names)]
-#![warn(clippy::blocks_in_if_conditions)]
-#![warn(clippy::blocks_in_if_conditions)]
+#![warn(clippy::blocks_in_conditions)]
+#![warn(clippy::blocks_in_conditions)]
+#![warn(clippy::blocks_in_conditions)]
#![warn(clippy::box_collection)]
#![warn(clippy::redundant_static_lifetimes)]
#![warn(clippy::cognitive_complexity)]
@@ -107,5 +109,6 @@
#![warn(undropped_manually_drops)]
#![warn(unknown_lints)]
#![warn(unused_labels)]
+#![warn(ambiguous_wide_pointer_comparisons)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui/rename.rs b/src/tools/clippy/tests/ui/rename.rs
index 940e60068..0df1098f5 100644
--- a/src/tools/clippy/tests/ui/rename.rs
+++ b/src/tools/clippy/tests/ui/rename.rs
@@ -4,7 +4,7 @@
#![allow(clippy::almost_complete_range)]
#![allow(clippy::disallowed_names)]
-#![allow(clippy::blocks_in_if_conditions)]
+#![allow(clippy::blocks_in_conditions)]
#![allow(clippy::box_collection)]
#![allow(clippy::redundant_static_lifetimes)]
#![allow(clippy::cognitive_complexity)]
@@ -51,10 +51,12 @@
#![allow(undropped_manually_drops)]
#![allow(unknown_lints)]
#![allow(unused_labels)]
+#![allow(ambiguous_wide_pointer_comparisons)]
#![warn(clippy::almost_complete_letter_range)]
#![warn(clippy::blacklisted_name)]
#![warn(clippy::block_in_if_condition_expr)]
#![warn(clippy::block_in_if_condition_stmt)]
+#![warn(clippy::blocks_in_if_conditions)]
#![warn(clippy::box_vec)]
#![warn(clippy::const_static_lifetime)]
#![warn(clippy::cyclomatic_complexity)]
@@ -107,5 +109,6 @@
#![warn(clippy::undropped_manually_drops)]
#![warn(clippy::unknown_clippy_lints)]
#![warn(clippy::unused_label)]
+#![warn(clippy::vtable_address_comparisons)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui/rename.stderr b/src/tools/clippy/tests/ui/rename.stderr
index 30824e154..f63ad82a7 100644
--- a/src/tools/clippy/tests/ui/rename.stderr
+++ b/src/tools/clippy/tests/ui/rename.stderr
@@ -1,5 +1,5 @@
error: lint `clippy::almost_complete_letter_range` has been renamed to `clippy::almost_complete_range`
- --> $DIR/rename.rs:54:9
+ --> $DIR/rename.rs:55:9
|
LL | #![warn(clippy::almost_complete_letter_range)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::almost_complete_range`
@@ -8,334 +8,346 @@ LL | #![warn(clippy::almost_complete_letter_range)]
= help: to override `-D warnings` add `#[allow(renamed_and_removed_lints)]`
error: lint `clippy::blacklisted_name` has been renamed to `clippy::disallowed_names`
- --> $DIR/rename.rs:55:9
+ --> $DIR/rename.rs:56:9
|
LL | #![warn(clippy::blacklisted_name)]
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_names`
-error: lint `clippy::block_in_if_condition_expr` has been renamed to `clippy::blocks_in_if_conditions`
- --> $DIR/rename.rs:56:9
+error: lint `clippy::block_in_if_condition_expr` has been renamed to `clippy::blocks_in_conditions`
+ --> $DIR/rename.rs:57:9
|
LL | #![warn(clippy::block_in_if_condition_expr)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_if_conditions`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions`
-error: lint `clippy::block_in_if_condition_stmt` has been renamed to `clippy::blocks_in_if_conditions`
- --> $DIR/rename.rs:57:9
+error: lint `clippy::block_in_if_condition_stmt` has been renamed to `clippy::blocks_in_conditions`
+ --> $DIR/rename.rs:58:9
|
LL | #![warn(clippy::block_in_if_condition_stmt)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_if_conditions`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions`
+
+error: lint `clippy::blocks_in_if_conditions` has been renamed to `clippy::blocks_in_conditions`
+ --> $DIR/rename.rs:59:9
+ |
+LL | #![warn(clippy::blocks_in_if_conditions)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions`
error: lint `clippy::box_vec` has been renamed to `clippy::box_collection`
- --> $DIR/rename.rs:58:9
+ --> $DIR/rename.rs:60:9
|
LL | #![warn(clippy::box_vec)]
| ^^^^^^^^^^^^^^^ help: use the new name: `clippy::box_collection`
error: lint `clippy::const_static_lifetime` has been renamed to `clippy::redundant_static_lifetimes`
- --> $DIR/rename.rs:59:9
+ --> $DIR/rename.rs:61:9
|
LL | #![warn(clippy::const_static_lifetime)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_static_lifetimes`
error: lint `clippy::cyclomatic_complexity` has been renamed to `clippy::cognitive_complexity`
- --> $DIR/rename.rs:60:9
+ --> $DIR/rename.rs:62:9
|
LL | #![warn(clippy::cyclomatic_complexity)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::cognitive_complexity`
error: lint `clippy::derive_hash_xor_eq` has been renamed to `clippy::derived_hash_with_manual_eq`
- --> $DIR/rename.rs:61:9
+ --> $DIR/rename.rs:63:9
|
LL | #![warn(clippy::derive_hash_xor_eq)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::derived_hash_with_manual_eq`
error: lint `clippy::disallowed_method` has been renamed to `clippy::disallowed_methods`
- --> $DIR/rename.rs:62:9
+ --> $DIR/rename.rs:64:9
|
LL | #![warn(clippy::disallowed_method)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_methods`
error: lint `clippy::disallowed_type` has been renamed to `clippy::disallowed_types`
- --> $DIR/rename.rs:63:9
+ --> $DIR/rename.rs:65:9
|
LL | #![warn(clippy::disallowed_type)]
| ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_types`
error: lint `clippy::eval_order_dependence` has been renamed to `clippy::mixed_read_write_in_expression`
- --> $DIR/rename.rs:64:9
+ --> $DIR/rename.rs:66:9
|
LL | #![warn(clippy::eval_order_dependence)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::mixed_read_write_in_expression`
error: lint `clippy::identity_conversion` has been renamed to `clippy::useless_conversion`
- --> $DIR/rename.rs:65:9
+ --> $DIR/rename.rs:67:9
|
LL | #![warn(clippy::identity_conversion)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::useless_conversion`
error: lint `clippy::if_let_some_result` has been renamed to `clippy::match_result_ok`
- --> $DIR/rename.rs:66:9
+ --> $DIR/rename.rs:68:9
|
LL | #![warn(clippy::if_let_some_result)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::match_result_ok`
error: lint `clippy::incorrect_clone_impl_on_copy_type` has been renamed to `clippy::non_canonical_clone_impl`
- --> $DIR/rename.rs:67:9
+ --> $DIR/rename.rs:69:9
|
LL | #![warn(clippy::incorrect_clone_impl_on_copy_type)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::non_canonical_clone_impl`
error: lint `clippy::incorrect_partial_ord_impl_on_ord_type` has been renamed to `clippy::non_canonical_partial_ord_impl`
- --> $DIR/rename.rs:68:9
+ --> $DIR/rename.rs:70:9
|
LL | #![warn(clippy::incorrect_partial_ord_impl_on_ord_type)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::non_canonical_partial_ord_impl`
error: lint `clippy::integer_arithmetic` has been renamed to `clippy::arithmetic_side_effects`
- --> $DIR/rename.rs:69:9
+ --> $DIR/rename.rs:71:9
|
LL | #![warn(clippy::integer_arithmetic)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::arithmetic_side_effects`
error: lint `clippy::logic_bug` has been renamed to `clippy::overly_complex_bool_expr`
- --> $DIR/rename.rs:70:9
+ --> $DIR/rename.rs:72:9
|
LL | #![warn(clippy::logic_bug)]
| ^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::overly_complex_bool_expr`
error: lint `clippy::new_without_default_derive` has been renamed to `clippy::new_without_default`
- --> $DIR/rename.rs:71:9
+ --> $DIR/rename.rs:73:9
|
LL | #![warn(clippy::new_without_default_derive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::new_without_default`
error: lint `clippy::option_and_then_some` has been renamed to `clippy::bind_instead_of_map`
- --> $DIR/rename.rs:72:9
+ --> $DIR/rename.rs:74:9
|
LL | #![warn(clippy::option_and_then_some)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::bind_instead_of_map`
error: lint `clippy::option_expect_used` has been renamed to `clippy::expect_used`
- --> $DIR/rename.rs:73:9
+ --> $DIR/rename.rs:75:9
|
LL | #![warn(clippy::option_expect_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used`
error: lint `clippy::option_map_unwrap_or` has been renamed to `clippy::map_unwrap_or`
- --> $DIR/rename.rs:74:9
+ --> $DIR/rename.rs:76:9
|
LL | #![warn(clippy::option_map_unwrap_or)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
error: lint `clippy::option_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or`
- --> $DIR/rename.rs:75:9
+ --> $DIR/rename.rs:77:9
|
LL | #![warn(clippy::option_map_unwrap_or_else)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
error: lint `clippy::option_unwrap_used` has been renamed to `clippy::unwrap_used`
- --> $DIR/rename.rs:76:9
+ --> $DIR/rename.rs:78:9
|
LL | #![warn(clippy::option_unwrap_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used`
error: lint `clippy::ref_in_deref` has been renamed to `clippy::needless_borrow`
- --> $DIR/rename.rs:77:9
+ --> $DIR/rename.rs:79:9
|
LL | #![warn(clippy::ref_in_deref)]
| ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::needless_borrow`
error: lint `clippy::result_expect_used` has been renamed to `clippy::expect_used`
- --> $DIR/rename.rs:78:9
+ --> $DIR/rename.rs:80:9
|
LL | #![warn(clippy::result_expect_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used`
error: lint `clippy::result_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or`
- --> $DIR/rename.rs:79:9
+ --> $DIR/rename.rs:81:9
|
LL | #![warn(clippy::result_map_unwrap_or_else)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
error: lint `clippy::result_unwrap_used` has been renamed to `clippy::unwrap_used`
- --> $DIR/rename.rs:80:9
+ --> $DIR/rename.rs:82:9
|
LL | #![warn(clippy::result_unwrap_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used`
error: lint `clippy::single_char_push_str` has been renamed to `clippy::single_char_add_str`
- --> $DIR/rename.rs:81:9
+ --> $DIR/rename.rs:83:9
|
LL | #![warn(clippy::single_char_push_str)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::single_char_add_str`
error: lint `clippy::stutter` has been renamed to `clippy::module_name_repetitions`
- --> $DIR/rename.rs:82:9
+ --> $DIR/rename.rs:84:9
|
LL | #![warn(clippy::stutter)]
| ^^^^^^^^^^^^^^^ help: use the new name: `clippy::module_name_repetitions`
error: lint `clippy::to_string_in_display` has been renamed to `clippy::recursive_format_impl`
- --> $DIR/rename.rs:83:9
+ --> $DIR/rename.rs:85:9
|
LL | #![warn(clippy::to_string_in_display)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::recursive_format_impl`
error: lint `clippy::unwrap_or_else_default` has been renamed to `clippy::unwrap_or_default`
- --> $DIR/rename.rs:84:9
+ --> $DIR/rename.rs:86:9
|
LL | #![warn(clippy::unwrap_or_else_default)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_or_default`
error: lint `clippy::zero_width_space` has been renamed to `clippy::invisible_characters`
- --> $DIR/rename.rs:85:9
+ --> $DIR/rename.rs:87:9
|
LL | #![warn(clippy::zero_width_space)]
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::invisible_characters`
error: lint `clippy::cast_ref_to_mut` has been renamed to `invalid_reference_casting`
- --> $DIR/rename.rs:86:9
+ --> $DIR/rename.rs:88:9
|
LL | #![warn(clippy::cast_ref_to_mut)]
| ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_reference_casting`
error: lint `clippy::clone_double_ref` has been renamed to `suspicious_double_ref_op`
- --> $DIR/rename.rs:87:9
+ --> $DIR/rename.rs:89:9
|
LL | #![warn(clippy::clone_double_ref)]
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `suspicious_double_ref_op`
error: lint `clippy::cmp_nan` has been renamed to `invalid_nan_comparisons`
- --> $DIR/rename.rs:88:9
+ --> $DIR/rename.rs:90:9
|
LL | #![warn(clippy::cmp_nan)]
| ^^^^^^^^^^^^^^^ help: use the new name: `invalid_nan_comparisons`
error: lint `clippy::drop_bounds` has been renamed to `drop_bounds`
- --> $DIR/rename.rs:89:9
+ --> $DIR/rename.rs:91:9
|
LL | #![warn(clippy::drop_bounds)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `drop_bounds`
error: lint `clippy::drop_copy` has been renamed to `dropping_copy_types`
- --> $DIR/rename.rs:90:9
+ --> $DIR/rename.rs:92:9
|
LL | #![warn(clippy::drop_copy)]
| ^^^^^^^^^^^^^^^^^ help: use the new name: `dropping_copy_types`
error: lint `clippy::drop_ref` has been renamed to `dropping_references`
- --> $DIR/rename.rs:91:9
+ --> $DIR/rename.rs:93:9
|
LL | #![warn(clippy::drop_ref)]
| ^^^^^^^^^^^^^^^^ help: use the new name: `dropping_references`
error: lint `clippy::fn_null_check` has been renamed to `useless_ptr_null_checks`
- --> $DIR/rename.rs:92:9
+ --> $DIR/rename.rs:94:9
|
LL | #![warn(clippy::fn_null_check)]
| ^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `useless_ptr_null_checks`
error: lint `clippy::for_loop_over_option` has been renamed to `for_loops_over_fallibles`
- --> $DIR/rename.rs:93:9
+ --> $DIR/rename.rs:95:9
|
LL | #![warn(clippy::for_loop_over_option)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles`
error: lint `clippy::for_loop_over_result` has been renamed to `for_loops_over_fallibles`
- --> $DIR/rename.rs:94:9
+ --> $DIR/rename.rs:96:9
|
LL | #![warn(clippy::for_loop_over_result)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles`
error: lint `clippy::for_loops_over_fallibles` has been renamed to `for_loops_over_fallibles`
- --> $DIR/rename.rs:95:9
+ --> $DIR/rename.rs:97:9
|
LL | #![warn(clippy::for_loops_over_fallibles)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles`
error: lint `clippy::forget_copy` has been renamed to `forgetting_copy_types`
- --> $DIR/rename.rs:96:9
+ --> $DIR/rename.rs:98:9
|
LL | #![warn(clippy::forget_copy)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_copy_types`
error: lint `clippy::forget_ref` has been renamed to `forgetting_references`
- --> $DIR/rename.rs:97:9
+ --> $DIR/rename.rs:99:9
|
LL | #![warn(clippy::forget_ref)]
| ^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_references`
error: lint `clippy::into_iter_on_array` has been renamed to `array_into_iter`
- --> $DIR/rename.rs:98:9
+ --> $DIR/rename.rs:100:9
|
LL | #![warn(clippy::into_iter_on_array)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `array_into_iter`
error: lint `clippy::invalid_atomic_ordering` has been renamed to `invalid_atomic_ordering`
- --> $DIR/rename.rs:99:9
+ --> $DIR/rename.rs:101:9
|
LL | #![warn(clippy::invalid_atomic_ordering)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_atomic_ordering`
error: lint `clippy::invalid_ref` has been renamed to `invalid_value`
- --> $DIR/rename.rs:100:9
+ --> $DIR/rename.rs:102:9
|
LL | #![warn(clippy::invalid_ref)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_value`
error: lint `clippy::invalid_utf8_in_unchecked` has been renamed to `invalid_from_utf8_unchecked`
- --> $DIR/rename.rs:101:9
+ --> $DIR/rename.rs:103:9
|
LL | #![warn(clippy::invalid_utf8_in_unchecked)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_from_utf8_unchecked`
error: lint `clippy::let_underscore_drop` has been renamed to `let_underscore_drop`
- --> $DIR/rename.rs:102:9
+ --> $DIR/rename.rs:104:9
|
LL | #![warn(clippy::let_underscore_drop)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `let_underscore_drop`
error: lint `clippy::mem_discriminant_non_enum` has been renamed to `enum_intrinsics_non_enums`
- --> $DIR/rename.rs:103:9
+ --> $DIR/rename.rs:105:9
|
LL | #![warn(clippy::mem_discriminant_non_enum)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `enum_intrinsics_non_enums`
error: lint `clippy::panic_params` has been renamed to `non_fmt_panics`
- --> $DIR/rename.rs:104:9
+ --> $DIR/rename.rs:106:9
|
LL | #![warn(clippy::panic_params)]
| ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `non_fmt_panics`
error: lint `clippy::positional_named_format_parameters` has been renamed to `named_arguments_used_positionally`
- --> $DIR/rename.rs:105:9
+ --> $DIR/rename.rs:107:9
|
LL | #![warn(clippy::positional_named_format_parameters)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `named_arguments_used_positionally`
error: lint `clippy::temporary_cstring_as_ptr` has been renamed to `temporary_cstring_as_ptr`
- --> $DIR/rename.rs:106:9
+ --> $DIR/rename.rs:108:9
|
LL | #![warn(clippy::temporary_cstring_as_ptr)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `temporary_cstring_as_ptr`
error: lint `clippy::undropped_manually_drops` has been renamed to `undropped_manually_drops`
- --> $DIR/rename.rs:107:9
+ --> $DIR/rename.rs:109:9
|
LL | #![warn(clippy::undropped_manually_drops)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `undropped_manually_drops`
error: lint `clippy::unknown_clippy_lints` has been renamed to `unknown_lints`
- --> $DIR/rename.rs:108:9
+ --> $DIR/rename.rs:110:9
|
LL | #![warn(clippy::unknown_clippy_lints)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unknown_lints`
error: lint `clippy::unused_label` has been renamed to `unused_labels`
- --> $DIR/rename.rs:109:9
+ --> $DIR/rename.rs:111:9
|
LL | #![warn(clippy::unused_label)]
| ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unused_labels`
-error: aborting due to 56 previous errors
+error: lint `clippy::vtable_address_comparisons` has been renamed to `ambiguous_wide_pointer_comparisons`
+ --> $DIR/rename.rs:112:9
+ |
+LL | #![warn(clippy::vtable_address_comparisons)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `ambiguous_wide_pointer_comparisons`
+
+error: aborting due to 58 previous errors
diff --git a/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr b/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr
index 636d88fcd..662188bba 100644
--- a/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr
+++ b/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr
@@ -4,5 +4,5 @@ error: usage of deprecated attribute
LL | #[clippy::cyclomatic_complexity = "1"]
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `cognitive_complexity`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/repeat_vec_with_capacity.fixed b/src/tools/clippy/tests/ui/repeat_vec_with_capacity.fixed
new file mode 100644
index 000000000..2afe2f433
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repeat_vec_with_capacity.fixed
@@ -0,0 +1,38 @@
+#![warn(clippy::repeat_vec_with_capacity)]
+
+fn main() {
+ {
+ (0..123).map(|_| Vec::<()>::with_capacity(42)).collect::<Vec<_>>();
+ //~^ ERROR: repeating `Vec::with_capacity` using `vec![x; n]`, which does not retain capacity
+ }
+
+ {
+ let n = 123;
+ (0..n).map(|_| Vec::<()>::with_capacity(42)).collect::<Vec<_>>();
+ //~^ ERROR: repeating `Vec::with_capacity` using `vec![x; n]`, which does not retain capacity
+ }
+
+ {
+ macro_rules! from_macro {
+ ($x:expr) => {
+ vec![$x; 123];
+ };
+ }
+ // vec expansion is from another macro, don't lint
+ from_macro!(Vec::<()>::with_capacity(42));
+ }
+
+ {
+ std::iter::repeat_with(|| Vec::<()>::with_capacity(42));
+ //~^ ERROR: repeating `Vec::with_capacity` using `iter::repeat`, which does not retain capacity
+ }
+
+ {
+ macro_rules! from_macro {
+ ($x:expr) => {
+ std::iter::repeat($x)
+ };
+ }
+ from_macro!(Vec::<()>::with_capacity(42));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/repeat_vec_with_capacity.rs b/src/tools/clippy/tests/ui/repeat_vec_with_capacity.rs
new file mode 100644
index 000000000..659f2a395
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repeat_vec_with_capacity.rs
@@ -0,0 +1,38 @@
+#![warn(clippy::repeat_vec_with_capacity)]
+
+fn main() {
+ {
+ vec![Vec::<()>::with_capacity(42); 123];
+ //~^ ERROR: repeating `Vec::with_capacity` using `vec![x; n]`, which does not retain capacity
+ }
+
+ {
+ let n = 123;
+ vec![Vec::<()>::with_capacity(42); n];
+ //~^ ERROR: repeating `Vec::with_capacity` using `vec![x; n]`, which does not retain capacity
+ }
+
+ {
+ macro_rules! from_macro {
+ ($x:expr) => {
+ vec![$x; 123];
+ };
+ }
+ // vec expansion is from another macro, don't lint
+ from_macro!(Vec::<()>::with_capacity(42));
+ }
+
+ {
+ std::iter::repeat(Vec::<()>::with_capacity(42));
+ //~^ ERROR: repeating `Vec::with_capacity` using `iter::repeat`, which does not retain capacity
+ }
+
+ {
+ macro_rules! from_macro {
+ ($x:expr) => {
+ std::iter::repeat($x)
+ };
+ }
+ from_macro!(Vec::<()>::with_capacity(42));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/repeat_vec_with_capacity.stderr b/src/tools/clippy/tests/ui/repeat_vec_with_capacity.stderr
new file mode 100644
index 000000000..10b5f1214
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repeat_vec_with_capacity.stderr
@@ -0,0 +1,40 @@
+error: repeating `Vec::with_capacity` using `vec![x; n]`, which does not retain capacity
+ --> $DIR/repeat_vec_with_capacity.rs:5:9
+ |
+LL | vec![Vec::<()>::with_capacity(42); 123];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: only the last `Vec` will have the capacity
+ = note: `-D clippy::repeat-vec-with-capacity` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::repeat_vec_with_capacity)]`
+help: if you intended to initialize multiple `Vec`s with an initial capacity, try
+ |
+LL | (0..123).map(|_| Vec::<()>::with_capacity(42)).collect::<Vec<_>>();
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: repeating `Vec::with_capacity` using `vec![x; n]`, which does not retain capacity
+ --> $DIR/repeat_vec_with_capacity.rs:11:9
+ |
+LL | vec![Vec::<()>::with_capacity(42); n];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: only the last `Vec` will have the capacity
+help: if you intended to initialize multiple `Vec`s with an initial capacity, try
+ |
+LL | (0..n).map(|_| Vec::<()>::with_capacity(42)).collect::<Vec<_>>();
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: repeating `Vec::with_capacity` using `iter::repeat`, which does not retain capacity
+ --> $DIR/repeat_vec_with_capacity.rs:26:9
+ |
+LL | std::iter::repeat(Vec::<()>::with_capacity(42));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: none of the yielded `Vec`s will have the requested capacity
+help: if you intended to create an iterator that yields `Vec`s with an initial capacity, try
+ |
+LL | std::iter::repeat_with(|| Vec::<()>::with_capacity(42));
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.fixed b/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
index fb2db6cf5..cf42b24b2 100644
--- a/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
@@ -3,6 +3,12 @@
fn main() {
let opt: Result<u32, &str> = Ok(1);
let _ = opt.ok();
+ //~^ ERROR: called `map_or(None, Some)` on a `Result` value
+ let _ = opt.ok();
+ //~^ ERROR: called `map_or_else(|_| None, Some)` on a `Result` value
+ #[rustfmt::skip]
+ let _ = opt.ok();
+ //~^ ERROR: called `map_or_else(|_| None, Some)` on a `Result` value
let rewrap = |s: u32| -> Option<u32> { Some(s) };
@@ -14,4 +20,5 @@ fn main() {
// return should not emit the lint
let opt: Result<u32, &str> = Ok(1);
_ = opt.map_or(None, |_x| Some(1));
+ let _ = opt.map_or_else(|a| a.parse::<u32>().ok(), Some);
}
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.rs b/src/tools/clippy/tests/ui/result_map_or_into_option.rs
index 06779a699..cdb45d6b8 100644
--- a/src/tools/clippy/tests/ui/result_map_or_into_option.rs
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.rs
@@ -3,6 +3,12 @@
fn main() {
let opt: Result<u32, &str> = Ok(1);
let _ = opt.map_or(None, Some);
+ //~^ ERROR: called `map_or(None, Some)` on a `Result` value
+ let _ = opt.map_or_else(|_| None, Some);
+ //~^ ERROR: called `map_or_else(|_| None, Some)` on a `Result` value
+ #[rustfmt::skip]
+ let _ = opt.map_or_else(|_| { None }, Some);
+ //~^ ERROR: called `map_or_else(|_| None, Some)` on a `Result` value
let rewrap = |s: u32| -> Option<u32> { Some(s) };
@@ -14,4 +20,5 @@ fn main() {
// return should not emit the lint
let opt: Result<u32, &str> = Ok(1);
_ = opt.map_or(None, |_x| Some(1));
+ let _ = opt.map_or_else(|a| a.parse::<u32>().ok(), Some);
}
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.stderr b/src/tools/clippy/tests/ui/result_map_or_into_option.stderr
index 9396ea4c0..3d6bfef48 100644
--- a/src/tools/clippy/tests/ui/result_map_or_into_option.stderr
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.stderr
@@ -1,4 +1,4 @@
-error: called `map_or(None, Some)` on a `Result` value. This can be done more directly by calling `ok()` instead
+error: called `map_or(None, Some)` on a `Result` value
--> $DIR/result_map_or_into_option.rs:5:13
|
LL | let _ = opt.map_or(None, Some);
@@ -7,5 +7,17 @@ LL | let _ = opt.map_or(None, Some);
= note: `-D clippy::result-map-or-into-option` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::result_map_or_into_option)]`
-error: aborting due to previous error
+error: called `map_or_else(|_| None, Some)` on a `Result` value
+ --> $DIR/result_map_or_into_option.rs:7:13
+ |
+LL | let _ = opt.map_or_else(|_| None, Some);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `ok` instead: `opt.ok()`
+
+error: called `map_or_else(|_| None, Some)` on a `Result` value
+ --> $DIR/result_map_or_into_option.rs:10:13
+ |
+LL | let _ = opt.map_or_else(|_| { None }, Some);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `ok` instead: `opt.ok()`
+
+error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/seek_from_current.stderr b/src/tools/clippy/tests/ui/seek_from_current.stderr
index 42eb342c1..4858cb82e 100644
--- a/src/tools/clippy/tests/ui/seek_from_current.stderr
+++ b/src/tools/clippy/tests/ui/seek_from_current.stderr
@@ -7,5 +7,5 @@ LL | f.seek(SeekFrom::Current(0))?;
= note: `-D clippy::seek-from-current` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::seek_from_current)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/self_named_constructors.stderr b/src/tools/clippy/tests/ui/self_named_constructors.stderr
index f299b860d..8083ff965 100644
--- a/src/tools/clippy/tests/ui/self_named_constructors.stderr
+++ b/src/tools/clippy/tests/ui/self_named_constructors.stderr
@@ -11,5 +11,5 @@ LL | | }
= note: `-D clippy::self-named-constructors` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::self_named_constructors)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/serde.stderr b/src/tools/clippy/tests/ui/serde.stderr
index e5d64e271..079ba42bd 100644
--- a/src/tools/clippy/tests/ui/serde.stderr
+++ b/src/tools/clippy/tests/ui/serde.stderr
@@ -13,5 +13,5 @@ LL | | }
= note: `-D clippy::serde-api-misuse` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::serde_api_misuse)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/should_panic_without_expect.stderr b/src/tools/clippy/tests/ui/should_panic_without_expect.stderr
index dfcef52a9..b13db83bd 100644
--- a/src/tools/clippy/tests/ui/should_panic_without_expect.stderr
+++ b/src/tools/clippy/tests/ui/should_panic_without_expect.stderr
@@ -10,5 +10,5 @@ note: the lint level is defined here
LL | #![deny(clippy::should_panic_without_expect)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/single_element_loop.fixed b/src/tools/clippy/tests/ui/single_element_loop.fixed
index a82eb6afc..4e59c7631 100644
--- a/src/tools/clippy/tests/ui/single_element_loop.fixed
+++ b/src/tools/clippy/tests/ui/single_element_loop.fixed
@@ -15,23 +15,19 @@ fn main() {
dbg!(item);
}
- {
- let item = &(0..5);
+ for item in 0..5 {
dbg!(item);
}
- {
- let item = &mut (0..5);
+ for item in 0..5 {
dbg!(item);
}
- {
- let item = 0..5;
+ for item in 0..5 {
dbg!(item);
}
- {
- let item = 0..5;
+ for item in 0..5 {
dbg!(item);
}
diff --git a/src/tools/clippy/tests/ui/single_element_loop.stderr b/src/tools/clippy/tests/ui/single_element_loop.stderr
index 603dd7406..952d70414 100644
--- a/src/tools/clippy/tests/ui/single_element_loop.stderr
+++ b/src/tools/clippy/tests/ui/single_element_loop.stderr
@@ -32,69 +32,29 @@ LL + dbg!(item);
LL + }
|
-error: for loop over a single element
- --> $DIR/single_element_loop.rs:16:5
- |
-LL | / for item in &[0..5] {
-LL | | dbg!(item);
-LL | | }
- | |_____^
- |
-help: try
- |
-LL ~ {
-LL + let item = &(0..5);
-LL + dbg!(item);
-LL + }
+error: this loops only once with `item` being `0..5`
+ --> $DIR/single_element_loop.rs:16:17
|
+LL | for item in &[0..5] {
+ | ^^^^^^^ help: did you mean to iterate over the range instead?: `0..5`
-error: for loop over a single element
- --> $DIR/single_element_loop.rs:20:5
- |
-LL | / for item in [0..5].iter_mut() {
-LL | | dbg!(item);
-LL | | }
- | |_____^
- |
-help: try
- |
-LL ~ {
-LL + let item = &mut (0..5);
-LL + dbg!(item);
-LL + }
+error: this loops only once with `item` being `0..5`
+ --> $DIR/single_element_loop.rs:20:17
|
+LL | for item in [0..5].iter_mut() {
+ | ^^^^^^^^^^^^^^^^^ help: did you mean to iterate over the range instead?: `0..5`
-error: for loop over a single element
- --> $DIR/single_element_loop.rs:24:5
- |
-LL | / for item in [0..5] {
-LL | | dbg!(item);
-LL | | }
- | |_____^
- |
-help: try
- |
-LL ~ {
-LL + let item = 0..5;
-LL + dbg!(item);
-LL + }
+error: this loops only once with `item` being `0..5`
+ --> $DIR/single_element_loop.rs:24:17
|
+LL | for item in [0..5] {
+ | ^^^^^^ help: did you mean to iterate over the range instead?: `0..5`
-error: for loop over a single element
- --> $DIR/single_element_loop.rs:28:5
- |
-LL | / for item in [0..5].into_iter() {
-LL | | dbg!(item);
-LL | | }
- | |_____^
- |
-help: try
- |
-LL ~ {
-LL + let item = 0..5;
-LL + dbg!(item);
-LL + }
+error: this loops only once with `item` being `0..5`
+ --> $DIR/single_element_loop.rs:28:17
|
+LL | for item in [0..5].into_iter() {
+ | ^^^^^^^^^^^^^^^^^^ help: did you mean to iterate over the range instead?: `0..5`
error: for loop over a single element
--> $DIR/single_element_loop.rs:47:5
diff --git a/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr
index cf5688a97..4738bef3a 100644
--- a/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr
+++ b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr
@@ -7,5 +7,5 @@ LL | let _ = std::str::from_utf8(&"Hello World!".as_bytes()[6..11]);
= note: `-D clippy::string-from-utf8-as-bytes` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::string_from_utf8_as_bytes)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/string_to_string.stderr b/src/tools/clippy/tests/ui/string_to_string.stderr
index 27a844315..f1f8e176b 100644
--- a/src/tools/clippy/tests/ui/string_to_string.stderr
+++ b/src/tools/clippy/tests/ui/string_to_string.stderr
@@ -8,5 +8,5 @@ LL | let mut v = message.to_string();
= note: `-D clippy::string-to-string` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::string_to_string)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/test_attr_in_doctest.rs b/src/tools/clippy/tests/ui/test_attr_in_doctest.rs
new file mode 100644
index 000000000..4c904f7a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/test_attr_in_doctest.rs
@@ -0,0 +1,51 @@
+/// This is a test for `#[test]` in doctests
+///
+/// # Examples
+///
+/// ```
+/// #[test]
+/// fn should_be_linted() {
+/// assert_eq!(1, 1);
+/// }
+/// ```
+///
+/// Make sure we catch multiple tests in one example,
+/// and show that we really parse the attr:
+///
+/// ```
+/// #[test]
+/// fn should_also_be_linted() {
+/// #[cfg(test)]
+/// assert!(true);
+/// }
+///
+/// #[test]
+/// fn should_be_linted_too() {
+/// assert_eq!("#[test]", "
+/// #[test]
+/// ");
+/// }
+/// ```
+///
+/// We don't catch examples that aren't run:
+///
+/// ```ignore
+/// #[test]
+/// fn ignored() { todo!() }
+/// ```
+///
+/// ```no_run
+/// #[test]
+/// fn ignored() { todo!() }
+/// ```
+///
+/// ```compile_fail
+/// #[test]
+/// fn ignored() { Err(()) }
+/// ```
+///
+/// ```txt
+/// #[test]
+/// fn not_even_rust() { panic!("Ouch") }
+/// ```
+fn test_attr_in_doctests() {}
diff --git a/src/tools/clippy/tests/ui/test_attr_in_doctest.stderr b/src/tools/clippy/tests/ui/test_attr_in_doctest.stderr
new file mode 100644
index 000000000..605259f3b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/test_attr_in_doctest.stderr
@@ -0,0 +1,29 @@
+error: unit tests in doctest are not executed
+ --> $DIR/test_attr_in_doctest.rs:6:5
+ |
+LL | /// #[test]
+ | _____^
+LL | | /// fn should_be_linted() {
+ | |_______________________^
+ |
+ = note: `-D clippy::test-attr-in-doctest` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::test_attr_in_doctest)]`
+
+error: unit tests in doctest are not executed
+ --> $DIR/test_attr_in_doctest.rs:16:5
+ |
+LL | /// #[test]
+ | _____^
+LL | | /// fn should_also_be_linted() {
+ | |____________________________^
+
+error: unit tests in doctest are not executed
+ --> $DIR/test_attr_in_doctest.rs:22:5
+ |
+LL | /// #[test]
+ | _____^
+LL | | /// fn should_be_linted_too() {
+ | |___________________________^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/tests_outside_test_module.stderr b/src/tools/clippy/tests/ui/tests_outside_test_module.stderr
index 112d6ce1f..ec0cdea83 100644
--- a/src/tools/clippy/tests/ui/tests_outside_test_module.stderr
+++ b/src/tools/clippy/tests/ui/tests_outside_test_module.stderr
@@ -8,5 +8,5 @@ LL | fn my_test() {}
= note: `-D clippy::tests-outside-test-module` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::tests_outside_test_module)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/track-diagnostics.stderr b/src/tools/clippy/tests/ui/track-diagnostics.stderr
index 39418d359..131adfd58 100644
--- a/src/tools/clippy/tests/ui/track-diagnostics.stderr
+++ b/src/tools/clippy/tests/ui/track-diagnostics.stderr
@@ -5,6 +5,6 @@ LL | const S: A = B;
| ^ expected `A`, found `B`
-Ztrack-diagnostics: created at compiler/rustc_infer/src/infer/error_reporting/mod.rs:LL:CC
-error: aborting due to previous error
+error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0308`.
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed
index 19abced98..4e145693c 100644
--- a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed
@@ -43,6 +43,9 @@ fn transmute_ptr_to_ptr() {
//~^ ERROR: transmute from a reference to a reference
let _: &GenericParam<f32> = &*(&GenericParam { t: 1u32 } as *const GenericParam<u32> as *const GenericParam<f32>);
//~^ ERROR: transmute from a reference to a reference
+ let u8_ref: &u8 = &0u8;
+ let u64_ref: &u64 = unsafe { &*(u8_ref as *const u8 as *const u64) };
+ //~^ ERROR: transmute from a reference to a reference
}
// these are recommendations for solving the above; if these lint we need to update
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs
index abba2b8e5..086aadc36 100644
--- a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs
@@ -43,6 +43,9 @@ fn transmute_ptr_to_ptr() {
//~^ ERROR: transmute from a reference to a reference
let _: &GenericParam<f32> = std::mem::transmute(&GenericParam { t: 1u32 });
//~^ ERROR: transmute from a reference to a reference
+ let u8_ref: &u8 = &0u8;
+ let u64_ref: &u64 = unsafe { std::mem::transmute(u8_ref) };
+ //~^ ERROR: transmute from a reference to a reference
}
// these are recommendations for solving the above; if these lint we need to update
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr
index 564339c06..9f8599921 100644
--- a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr
@@ -37,5 +37,11 @@ error: transmute from a reference to a reference
LL | let _: &GenericParam<f32> = std::mem::transmute(&GenericParam { t: 1u32 });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(&GenericParam { t: 1u32 } as *const GenericParam<u32> as *const GenericParam<f32>)`
-error: aborting due to 6 previous errors
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ptr_to_ptr.rs:47:38
+ |
+LL | let u64_ref: &u64 = unsafe { std::mem::transmute(u8_ref) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(u8_ref as *const u8 as *const u64)`
+
+error: aborting due to 7 previous errors
diff --git a/src/tools/clippy/tests/ui/transmute_ref_to_ref.rs b/src/tools/clippy/tests/ui/transmute_ref_to_ref.rs
new file mode 100644
index 000000000..e7f35c574
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ref_to_ref.rs
@@ -0,0 +1,18 @@
+//@no-rustfix
+
+#![deny(clippy::transmute_ptr_to_ptr)]
+#![allow(dead_code)]
+
+fn main() {
+ unsafe {
+ let single_u64: &[u64] = &[0xDEAD_BEEF_DEAD_BEEF];
+ let bools: &[bool] = unsafe { std::mem::transmute(single_u64) };
+ //~^ ERROR: transmute from a reference to a reference
+ let a: &[u32] = &[0x12345678, 0x90ABCDEF, 0xFEDCBA09, 0x87654321];
+ let b: &[u8] = unsafe { std::mem::transmute(a) };
+ //~^ ERROR: transmute from a reference to a reference
+ let bytes = &[1u8, 2u8, 3u8, 4u8] as &[u8];
+ let alt_slice: &[u32] = unsafe { core::mem::transmute(bytes) };
+ //~^ ERROR: transmute from a reference to a reference
+ }
+}
diff --git a/src/tools/clippy/tests/ui/transmute_ref_to_ref.stderr b/src/tools/clippy/tests/ui/transmute_ref_to_ref.stderr
new file mode 100644
index 000000000..cc6b156b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ref_to_ref.stderr
@@ -0,0 +1,26 @@
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ref_to_ref.rs:9:39
+ |
+LL | let bools: &[bool] = unsafe { std::mem::transmute(single_u64) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(single_u64 as *const [u64] as *const [bool])`
+ |
+note: the lint level is defined here
+ --> $DIR/transmute_ref_to_ref.rs:3:9
+ |
+LL | #![deny(clippy::transmute_ptr_to_ptr)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ref_to_ref.rs:12:33
+ |
+LL | let b: &[u8] = unsafe { std::mem::transmute(a) };
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(a as *const [u32] as *const [u8])`
+
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ref_to_ref.rs:15:42
+ |
+LL | let alt_slice: &[u32] = unsafe { core::mem::transmute(bytes) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(bytes as *const [u8] as *const [u32])`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/types.stderr b/src/tools/clippy/tests/ui/types.stderr
index b253cf338..f7473e1c5 100644
--- a/src/tools/clippy/tests/ui/types.stderr
+++ b/src/tools/clippy/tests/ui/types.stderr
@@ -7,5 +7,5 @@ LL | let c_i64: i64 = c as i64;
= note: `-D clippy::cast-lossless` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::cast_lossless)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/uninhabited_references.rs b/src/tools/clippy/tests/ui/uninhabited_references.rs
new file mode 100644
index 000000000..cd07b590a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/uninhabited_references.rs
@@ -0,0 +1,22 @@
+#![warn(clippy::uninhabited_references)]
+#![feature(never_type)]
+
+fn ret_uninh_ref() -> &'static std::convert::Infallible {
+ unsafe { std::mem::transmute(&()) }
+}
+
+macro_rules! ret_something {
+ ($name:ident, $ty:ty) => {
+ fn $name(x: &$ty) -> &$ty {
+ &*x
+ }
+ };
+}
+
+ret_something!(id_u32, u32);
+ret_something!(id_never, !);
+
+fn main() {
+ let x = ret_uninh_ref();
+ let _ = *x;
+}
diff --git a/src/tools/clippy/tests/ui/uninhabited_references.stderr b/src/tools/clippy/tests/ui/uninhabited_references.stderr
new file mode 100644
index 000000000..2cdf320b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/uninhabited_references.stderr
@@ -0,0 +1,39 @@
+error: dereferencing a reference to an uninhabited type would be undefined behavior
+ --> $DIR/uninhabited_references.rs:4:23
+ |
+LL | fn ret_uninh_ref() -> &'static std::convert::Infallible {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::uninhabited-references` implied by `-D warnings`
+ = help: to override `-D warnings` add `#[allow(clippy::uninhabited_references)]`
+
+error: dereferencing a reference to an uninhabited type would be undefined behavior
+ --> $DIR/uninhabited_references.rs:10:30
+ |
+LL | fn $name(x: &$ty) -> &$ty {
+ | ^^^^
+...
+LL | ret_something!(id_never, !);
+ | --------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `ret_something` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: dereferencing a reference to an uninhabited type is undefined behavior
+ --> $DIR/uninhabited_references.rs:11:14
+ |
+LL | &*x
+ | ^^
+...
+LL | ret_something!(id_never, !);
+ | --------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `ret_something` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: dereferencing a reference to an uninhabited type is undefined behavior
+ --> $DIR/uninhabited_references.rs:21:13
+ |
+LL | let _ = *x;
+ | ^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr
index 221efeb50..736a68ab1 100644
--- a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr
+++ b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr
@@ -12,5 +12,5 @@ LL - println!("val='{}'", var);
LL + println!("val='{var}'");
|
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/unknown_attribute.stderr b/src/tools/clippy/tests/ui/unknown_attribute.stderr
index 618c5980d..edad35d15 100644
--- a/src/tools/clippy/tests/ui/unknown_attribute.stderr
+++ b/src/tools/clippy/tests/ui/unknown_attribute.stderr
@@ -4,5 +4,5 @@ error: usage of unknown attribute
LL | #[clippy::unknown]
| ^^^^^^^
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/unnecessary_fallible_conversions.stderr b/src/tools/clippy/tests/ui/unnecessary_fallible_conversions.stderr
index b918fdf77..26b152515 100644
--- a/src/tools/clippy/tests/ui/unnecessary_fallible_conversions.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_fallible_conversions.stderr
@@ -4,6 +4,7 @@ error: use of a fallible conversion when an infallible one could be used
LL | let _: i64 = 0i32.try_into().unwrap();
| ^^^^^^^^^^^^^^^^^^^ help: use: `into()`
|
+ = note: converting `i32` to `i64` cannot fail
= note: `-D clippy::unnecessary-fallible-conversions` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::unnecessary_fallible_conversions)]`
@@ -12,6 +13,8 @@ error: use of a fallible conversion when an infallible one could be used
|
LL | let _: i64 = 0i32.try_into().expect("can't happen");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `into()`
+ |
+ = note: converting `i32` to `i64` cannot fail
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_fallible_conversions_unfixable.stderr b/src/tools/clippy/tests/ui/unnecessary_fallible_conversions_unfixable.stderr
index 286decf8f..033de0e92 100644
--- a/src/tools/clippy/tests/ui/unnecessary_fallible_conversions_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_fallible_conversions_unfixable.stderr
@@ -4,6 +4,7 @@ error: use of a fallible conversion when an infallible one could be used
LL | let _: Result<Foo, _> = 0i64.try_into();
| ^^^^^^^^ help: use: `into`
|
+ = note: converting `i64` to `Foo` cannot fail
= note: `-D clippy::unnecessary-fallible-conversions` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::unnecessary_fallible_conversions)]`
@@ -12,30 +13,40 @@ error: use of a fallible conversion when an infallible one could be used
|
LL | let _: Result<Foo, _> = i64::try_into(0i64);
| ^^^^^^^^^^^^^ help: use: `Into::into`
+ |
+ = note: converting `i64` to `Foo` cannot fail
error: use of a fallible conversion when an infallible one could be used
--> $DIR/unnecessary_fallible_conversions_unfixable.rs:31:29
|
LL | let _: Result<Foo, _> = Foo::try_from(0i64);
| ^^^^^^^^^^^^^ help: use: `From::from`
+ |
+ = note: converting `i64` to `Foo` cannot fail
error: use of a fallible conversion when an infallible one could be used
--> $DIR/unnecessary_fallible_conversions_unfixable.rs:34:34
|
LL | let _: Result<i64, _> = 0i32.try_into();
| ^^^^^^^^ help: use: `into`
+ |
+ = note: converting `i32` to `i64` cannot fail
error: use of a fallible conversion when an infallible one could be used
--> $DIR/unnecessary_fallible_conversions_unfixable.rs:36:29
|
LL | let _: Result<i64, _> = i32::try_into(0i32);
| ^^^^^^^^^^^^^ help: use: `Into::into`
+ |
+ = note: converting `i32` to `i64` cannot fail
error: use of a fallible conversion when an infallible one could be used
--> $DIR/unnecessary_fallible_conversions_unfixable.rs:38:29
|
LL | let _: Result<i64, _> = <_>::try_from(0i32);
| ^^^^^^^^^^^^^ help: use: `From::from`
+ |
+ = note: converting `i32` to `i64` cannot fail
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
index 4778eaefd..66598f892 100644
--- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
@@ -6,6 +6,8 @@
#![allow(clippy::needless_borrow)]
#![allow(clippy::unnecessary_literal_unwrap)]
#![allow(clippy::unit_arg)]
+#![allow(arithmetic_overflow)]
+#![allow(unconditional_panic)]
use std::ops::Deref;
@@ -190,3 +192,79 @@ fn issue9485() {
// should not lint, is in proc macro
with_span!(span Some(42).unwrap_or_else(|| 2););
}
+
+fn issue9422(x: usize) -> Option<usize> {
+ (x >= 5).then(|| x - 5)
+ // (x >= 5).then_some(x - 5) // clippy suggestion panics
+}
+
+fn panicky_arithmetic_ops(x: usize, y: isize) {
+ #![allow(clippy::identity_op, clippy::eq_op)]
+
+ // See comments in `eager_or_lazy.rs` for the rules that this is meant to follow
+
+ let _x = false.then_some(i32::MAX + 1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(i32::MAX * 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(i32::MAX - 1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(i32::MIN - 1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some((1 + 2 * 3 - 2 / 3 + 9) << 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(255u8 << 7);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(255u8 << 8);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(255u8 >> 8);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 255u8 >> x);
+ let _x = false.then_some(i32::MAX + -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(-i32::MAX);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(-i32::MIN);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| -y);
+ let _x = false.then_some(255 >> -7);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(255 << -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(1 / 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(x << -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(x << 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| x + x);
+ let _x = false.then(|| x * x);
+ let _x = false.then(|| x - x);
+ let _x = false.then(|| x / x);
+ let _x = false.then(|| x % x);
+ let _x = false.then(|| x + 1);
+ let _x = false.then(|| 1 + x);
+
+ let _x = false.then_some(x / 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(x % 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| y / -1);
+ let _x = false.then_some(1 / -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(i32::MIN / -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| i32::MIN / x as i32);
+ let _x = false.then_some(i32::MIN / 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then_some(4 / 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 1 / x);
+
+ // const eval doesn't read variables, but floating point math never panics, so we can still emit a
+ // warning
+ let f1 = 1.0;
+ let f2 = 2.0;
+ let _x = false.then_some(f1 + f2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
index d4b7fd31b..5045fcd79 100644
--- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
@@ -6,6 +6,8 @@
#![allow(clippy::needless_borrow)]
#![allow(clippy::unnecessary_literal_unwrap)]
#![allow(clippy::unit_arg)]
+#![allow(arithmetic_overflow)]
+#![allow(unconditional_panic)]
use std::ops::Deref;
@@ -190,3 +192,79 @@ fn issue9485() {
// should not lint, is in proc macro
with_span!(span Some(42).unwrap_or_else(|| 2););
}
+
+fn issue9422(x: usize) -> Option<usize> {
+ (x >= 5).then(|| x - 5)
+ // (x >= 5).then_some(x - 5) // clippy suggestion panics
+}
+
+fn panicky_arithmetic_ops(x: usize, y: isize) {
+ #![allow(clippy::identity_op, clippy::eq_op)]
+
+ // See comments in `eager_or_lazy.rs` for the rules that this is meant to follow
+
+ let _x = false.then(|| i32::MAX + 1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| i32::MAX * 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| i32::MAX - 1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| i32::MIN - 1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| (1 + 2 * 3 - 2 / 3 + 9) << 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 255u8 << 7);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 255u8 << 8);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 255u8 >> 8);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 255u8 >> x);
+ let _x = false.then(|| i32::MAX + -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| -i32::MAX);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| -i32::MIN);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| -y);
+ let _x = false.then(|| 255 >> -7);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 255 << -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 1 / 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| x << -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| x << 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| x + x);
+ let _x = false.then(|| x * x);
+ let _x = false.then(|| x - x);
+ let _x = false.then(|| x / x);
+ let _x = false.then(|| x % x);
+ let _x = false.then(|| x + 1);
+ let _x = false.then(|| 1 + x);
+
+ let _x = false.then(|| x / 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| x % 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| y / -1);
+ let _x = false.then(|| 1 / -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| i32::MIN / -1);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| i32::MIN / x as i32);
+ let _x = false.then(|| i32::MIN / 0);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 4 / 2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+ let _x = false.then(|| 1 / x);
+
+ // const eval doesn't read variables, but floating point math never panics, so we can still emit a
+ // warning
+ let f1 = 1.0;
+ let f2 = 2.0;
+ let _x = false.then(|| f1 + f2);
+ //~^ ERROR: unnecessary closure used with `bool::then`
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr
index 1b0db4759..466664aee 100644
--- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr
@@ -1,5 +1,5 @@
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:69:13
+ --> $DIR/unnecessary_lazy_eval.rs:71:13
|
LL | let _ = opt.unwrap_or_else(|| 2);
| ^^^^--------------------
@@ -10,7 +10,7 @@ LL | let _ = opt.unwrap_or_else(|| 2);
= help: to override `-D warnings` add `#[allow(clippy::unnecessary_lazy_evaluations)]`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:70:13
+ --> $DIR/unnecessary_lazy_eval.rs:72:13
|
LL | let _ = opt.unwrap_or_else(|| astronomers_pi);
| ^^^^---------------------------------
@@ -18,7 +18,7 @@ LL | let _ = opt.unwrap_or_else(|| astronomers_pi);
| help: use `unwrap_or(..)` instead: `unwrap_or(astronomers_pi)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:71:13
+ --> $DIR/unnecessary_lazy_eval.rs:73:13
|
LL | let _ = opt.unwrap_or_else(|| ext_str.some_field);
| ^^^^-------------------------------------
@@ -26,7 +26,7 @@ LL | let _ = opt.unwrap_or_else(|| ext_str.some_field);
| help: use `unwrap_or(..)` instead: `unwrap_or(ext_str.some_field)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:73:13
+ --> $DIR/unnecessary_lazy_eval.rs:75:13
|
LL | let _ = opt.and_then(|_| ext_opt);
| ^^^^---------------------
@@ -34,7 +34,7 @@ LL | let _ = opt.and_then(|_| ext_opt);
| help: use `and(..)` instead: `and(ext_opt)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:74:13
+ --> $DIR/unnecessary_lazy_eval.rs:76:13
|
LL | let _ = opt.or_else(|| ext_opt);
| ^^^^-------------------
@@ -42,7 +42,7 @@ LL | let _ = opt.or_else(|| ext_opt);
| help: use `or(..)` instead: `or(ext_opt)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:75:13
+ --> $DIR/unnecessary_lazy_eval.rs:77:13
|
LL | let _ = opt.or_else(|| None);
| ^^^^----------------
@@ -50,7 +50,7 @@ LL | let _ = opt.or_else(|| None);
| help: use `or(..)` instead: `or(None)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:76:13
+ --> $DIR/unnecessary_lazy_eval.rs:78:13
|
LL | let _ = opt.get_or_insert_with(|| 2);
| ^^^^------------------------
@@ -58,7 +58,7 @@ LL | let _ = opt.get_or_insert_with(|| 2);
| help: use `get_or_insert(..)` instead: `get_or_insert(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:77:13
+ --> $DIR/unnecessary_lazy_eval.rs:79:13
|
LL | let _ = opt.ok_or_else(|| 2);
| ^^^^----------------
@@ -66,7 +66,7 @@ LL | let _ = opt.ok_or_else(|| 2);
| help: use `ok_or(..)` instead: `ok_or(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:78:13
+ --> $DIR/unnecessary_lazy_eval.rs:80:13
|
LL | let _ = nested_tuple_opt.unwrap_or_else(|| Some((1, 2)));
| ^^^^^^^^^^^^^^^^^-------------------------------
@@ -74,7 +74,7 @@ LL | let _ = nested_tuple_opt.unwrap_or_else(|| Some((1, 2)));
| help: use `unwrap_or(..)` instead: `unwrap_or(Some((1, 2)))`
error: unnecessary closure used with `bool::then`
- --> $DIR/unnecessary_lazy_eval.rs:79:13
+ --> $DIR/unnecessary_lazy_eval.rs:81:13
|
LL | let _ = cond.then(|| astronomers_pi);
| ^^^^^-----------------------
@@ -82,7 +82,7 @@ LL | let _ = cond.then(|| astronomers_pi);
| help: use `then_some(..)` instead: `then_some(astronomers_pi)`
error: unnecessary closure used with `bool::then`
- --> $DIR/unnecessary_lazy_eval.rs:80:13
+ --> $DIR/unnecessary_lazy_eval.rs:82:13
|
LL | let _ = true.then(|| -> _ {});
| ^^^^^----------------
@@ -90,7 +90,7 @@ LL | let _ = true.then(|| -> _ {});
| help: use `then_some(..)` instead: `then_some({})`
error: unnecessary closure used with `bool::then`
- --> $DIR/unnecessary_lazy_eval.rs:81:13
+ --> $DIR/unnecessary_lazy_eval.rs:83:13
|
LL | let _ = true.then(|| {});
| ^^^^^-----------
@@ -98,7 +98,7 @@ LL | let _ = true.then(|| {});
| help: use `then_some(..)` instead: `then_some({})`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:85:13
+ --> $DIR/unnecessary_lazy_eval.rs:87:13
|
LL | let _ = Some(1).unwrap_or_else(|| *r);
| ^^^^^^^^---------------------
@@ -106,7 +106,7 @@ LL | let _ = Some(1).unwrap_or_else(|| *r);
| help: use `unwrap_or(..)` instead: `unwrap_or(*r)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:87:13
+ --> $DIR/unnecessary_lazy_eval.rs:89:13
|
LL | let _ = Some(1).unwrap_or_else(|| *b);
| ^^^^^^^^---------------------
@@ -114,7 +114,7 @@ LL | let _ = Some(1).unwrap_or_else(|| *b);
| help: use `unwrap_or(..)` instead: `unwrap_or(*b)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:89:13
+ --> $DIR/unnecessary_lazy_eval.rs:91:13
|
LL | let _ = Some(1).as_ref().unwrap_or_else(|| &r);
| ^^^^^^^^^^^^^^^^^---------------------
@@ -122,7 +122,7 @@ LL | let _ = Some(1).as_ref().unwrap_or_else(|| &r);
| help: use `unwrap_or(..)` instead: `unwrap_or(&r)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:90:13
+ --> $DIR/unnecessary_lazy_eval.rs:92:13
|
LL | let _ = Some(1).as_ref().unwrap_or_else(|| &b);
| ^^^^^^^^^^^^^^^^^---------------------
@@ -130,7 +130,7 @@ LL | let _ = Some(1).as_ref().unwrap_or_else(|| &b);
| help: use `unwrap_or(..)` instead: `unwrap_or(&b)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:93:13
+ --> $DIR/unnecessary_lazy_eval.rs:95:13
|
LL | let _ = Some(10).unwrap_or_else(|| 2);
| ^^^^^^^^^--------------------
@@ -138,7 +138,7 @@ LL | let _ = Some(10).unwrap_or_else(|| 2);
| help: use `unwrap_or(..)` instead: `unwrap_or(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:94:13
+ --> $DIR/unnecessary_lazy_eval.rs:96:13
|
LL | let _ = Some(10).and_then(|_| ext_opt);
| ^^^^^^^^^---------------------
@@ -146,7 +146,7 @@ LL | let _ = Some(10).and_then(|_| ext_opt);
| help: use `and(..)` instead: `and(ext_opt)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:95:28
+ --> $DIR/unnecessary_lazy_eval.rs:97:28
|
LL | let _: Option<usize> = None.or_else(|| ext_opt);
| ^^^^^-------------------
@@ -154,7 +154,7 @@ LL | let _: Option<usize> = None.or_else(|| ext_opt);
| help: use `or(..)` instead: `or(ext_opt)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:96:13
+ --> $DIR/unnecessary_lazy_eval.rs:98:13
|
LL | let _ = None.get_or_insert_with(|| 2);
| ^^^^^------------------------
@@ -162,7 +162,7 @@ LL | let _ = None.get_or_insert_with(|| 2);
| help: use `get_or_insert(..)` instead: `get_or_insert(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:97:35
+ --> $DIR/unnecessary_lazy_eval.rs:99:35
|
LL | let _: Result<usize, usize> = None.ok_or_else(|| 2);
| ^^^^^----------------
@@ -170,7 +170,7 @@ LL | let _: Result<usize, usize> = None.ok_or_else(|| 2);
| help: use `ok_or(..)` instead: `ok_or(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:98:28
+ --> $DIR/unnecessary_lazy_eval.rs:100:28
|
LL | let _: Option<usize> = None.or_else(|| None);
| ^^^^^----------------
@@ -178,7 +178,7 @@ LL | let _: Option<usize> = None.or_else(|| None);
| help: use `or(..)` instead: `or(None)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:101:13
+ --> $DIR/unnecessary_lazy_eval.rs:103:13
|
LL | let _ = deep.0.unwrap_or_else(|| 2);
| ^^^^^^^--------------------
@@ -186,7 +186,7 @@ LL | let _ = deep.0.unwrap_or_else(|| 2);
| help: use `unwrap_or(..)` instead: `unwrap_or(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:102:13
+ --> $DIR/unnecessary_lazy_eval.rs:104:13
|
LL | let _ = deep.0.and_then(|_| ext_opt);
| ^^^^^^^---------------------
@@ -194,7 +194,7 @@ LL | let _ = deep.0.and_then(|_| ext_opt);
| help: use `and(..)` instead: `and(ext_opt)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:103:13
+ --> $DIR/unnecessary_lazy_eval.rs:105:13
|
LL | let _ = deep.0.or_else(|| None);
| ^^^^^^^----------------
@@ -202,7 +202,7 @@ LL | let _ = deep.0.or_else(|| None);
| help: use `or(..)` instead: `or(None)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:104:13
+ --> $DIR/unnecessary_lazy_eval.rs:106:13
|
LL | let _ = deep.0.get_or_insert_with(|| 2);
| ^^^^^^^------------------------
@@ -210,7 +210,7 @@ LL | let _ = deep.0.get_or_insert_with(|| 2);
| help: use `get_or_insert(..)` instead: `get_or_insert(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:105:13
+ --> $DIR/unnecessary_lazy_eval.rs:107:13
|
LL | let _ = deep.0.ok_or_else(|| 2);
| ^^^^^^^----------------
@@ -218,7 +218,7 @@ LL | let _ = deep.0.ok_or_else(|| 2);
| help: use `ok_or(..)` instead: `ok_or(2)`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:135:28
+ --> $DIR/unnecessary_lazy_eval.rs:137:28
|
LL | let _: Option<usize> = None.or_else(|| Some(3));
| ^^^^^-------------------
@@ -226,7 +226,7 @@ LL | let _: Option<usize> = None.or_else(|| Some(3));
| help: use `or(..)` instead: `or(Some(3))`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:136:13
+ --> $DIR/unnecessary_lazy_eval.rs:138:13
|
LL | let _ = deep.0.or_else(|| Some(3));
| ^^^^^^^-------------------
@@ -234,7 +234,7 @@ LL | let _ = deep.0.or_else(|| Some(3));
| help: use `or(..)` instead: `or(Some(3))`
error: unnecessary closure used to substitute value for `Option::None`
- --> $DIR/unnecessary_lazy_eval.rs:137:13
+ --> $DIR/unnecessary_lazy_eval.rs:139:13
|
LL | let _ = opt.or_else(|| Some(3));
| ^^^^-------------------
@@ -242,7 +242,7 @@ LL | let _ = opt.or_else(|| Some(3));
| help: use `or(..)` instead: `or(Some(3))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:143:13
+ --> $DIR/unnecessary_lazy_eval.rs:145:13
|
LL | let _ = res2.unwrap_or_else(|_| 2);
| ^^^^^---------------------
@@ -250,7 +250,7 @@ LL | let _ = res2.unwrap_or_else(|_| 2);
| help: use `unwrap_or(..)` instead: `unwrap_or(2)`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:144:13
+ --> $DIR/unnecessary_lazy_eval.rs:146:13
|
LL | let _ = res2.unwrap_or_else(|_| astronomers_pi);
| ^^^^^----------------------------------
@@ -258,7 +258,7 @@ LL | let _ = res2.unwrap_or_else(|_| astronomers_pi);
| help: use `unwrap_or(..)` instead: `unwrap_or(astronomers_pi)`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:145:13
+ --> $DIR/unnecessary_lazy_eval.rs:147:13
|
LL | let _ = res2.unwrap_or_else(|_| ext_str.some_field);
| ^^^^^--------------------------------------
@@ -266,7 +266,7 @@ LL | let _ = res2.unwrap_or_else(|_| ext_str.some_field);
| help: use `unwrap_or(..)` instead: `unwrap_or(ext_str.some_field)`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:167:35
+ --> $DIR/unnecessary_lazy_eval.rs:169:35
|
LL | let _: Result<usize, usize> = res.and_then(|_| Err(2));
| ^^^^--------------------
@@ -274,7 +274,7 @@ LL | let _: Result<usize, usize> = res.and_then(|_| Err(2));
| help: use `and(..)` instead: `and(Err(2))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:168:35
+ --> $DIR/unnecessary_lazy_eval.rs:170:35
|
LL | let _: Result<usize, usize> = res.and_then(|_| Err(astronomers_pi));
| ^^^^---------------------------------
@@ -282,7 +282,7 @@ LL | let _: Result<usize, usize> = res.and_then(|_| Err(astronomers_pi));
| help: use `and(..)` instead: `and(Err(astronomers_pi))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:169:35
+ --> $DIR/unnecessary_lazy_eval.rs:171:35
|
LL | let _: Result<usize, usize> = res.and_then(|_| Err(ext_str.some_field));
| ^^^^-------------------------------------
@@ -290,7 +290,7 @@ LL | let _: Result<usize, usize> = res.and_then(|_| Err(ext_str.some_field))
| help: use `and(..)` instead: `and(Err(ext_str.some_field))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:171:35
+ --> $DIR/unnecessary_lazy_eval.rs:173:35
|
LL | let _: Result<usize, usize> = res.or_else(|_| Ok(2));
| ^^^^------------------
@@ -298,7 +298,7 @@ LL | let _: Result<usize, usize> = res.or_else(|_| Ok(2));
| help: use `or(..)` instead: `or(Ok(2))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:172:35
+ --> $DIR/unnecessary_lazy_eval.rs:174:35
|
LL | let _: Result<usize, usize> = res.or_else(|_| Ok(astronomers_pi));
| ^^^^-------------------------------
@@ -306,7 +306,7 @@ LL | let _: Result<usize, usize> = res.or_else(|_| Ok(astronomers_pi));
| help: use `or(..)` instead: `or(Ok(astronomers_pi))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:173:35
+ --> $DIR/unnecessary_lazy_eval.rs:175:35
|
LL | let _: Result<usize, usize> = res.or_else(|_| Ok(ext_str.some_field));
| ^^^^-----------------------------------
@@ -314,7 +314,7 @@ LL | let _: Result<usize, usize> = res.or_else(|_| Ok(ext_str.some_field));
| help: use `or(..)` instead: `or(Ok(ext_str.some_field))`
error: unnecessary closure used to substitute value for `Result::Err`
- --> $DIR/unnecessary_lazy_eval.rs:174:35
+ --> $DIR/unnecessary_lazy_eval.rs:176:35
|
LL | let _: Result<usize, usize> = res.
| ___________________________________^
@@ -328,5 +328,189 @@ LL | | or_else(|_| Ok(ext_str.some_field));
| |
| help: use `or(..)` instead: `or(Ok(ext_str.some_field))`
-error: aborting due to 40 previous errors
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:206:14
+ |
+LL | let _x = false.then(|| i32::MAX + 1);
+ | ^^^^^^---------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MAX + 1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:208:14
+ |
+LL | let _x = false.then(|| i32::MAX * 2);
+ | ^^^^^^---------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MAX * 2)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:210:14
+ |
+LL | let _x = false.then(|| i32::MAX - 1);
+ | ^^^^^^---------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MAX - 1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:212:14
+ |
+LL | let _x = false.then(|| i32::MIN - 1);
+ | ^^^^^^---------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MIN - 1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:214:14
+ |
+LL | let _x = false.then(|| (1 + 2 * 3 - 2 / 3 + 9) << 2);
+ | ^^^^^^-------------------------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some((1 + 2 * 3 - 2 / 3 + 9) << 2)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:216:14
+ |
+LL | let _x = false.then(|| 255u8 << 7);
+ | ^^^^^^-------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(255u8 << 7)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:218:14
+ |
+LL | let _x = false.then(|| 255u8 << 8);
+ | ^^^^^^-------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(255u8 << 8)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:220:14
+ |
+LL | let _x = false.then(|| 255u8 >> 8);
+ | ^^^^^^-------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(255u8 >> 8)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:223:14
+ |
+LL | let _x = false.then(|| i32::MAX + -1);
+ | ^^^^^^----------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MAX + -1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:225:14
+ |
+LL | let _x = false.then(|| -i32::MAX);
+ | ^^^^^^------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(-i32::MAX)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:227:14
+ |
+LL | let _x = false.then(|| -i32::MIN);
+ | ^^^^^^------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(-i32::MIN)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:230:14
+ |
+LL | let _x = false.then(|| 255 >> -7);
+ | ^^^^^^------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(255 >> -7)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:232:14
+ |
+LL | let _x = false.then(|| 255 << -1);
+ | ^^^^^^------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(255 << -1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:234:14
+ |
+LL | let _x = false.then(|| 1 / 0);
+ | ^^^^^^--------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(1 / 0)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:236:14
+ |
+LL | let _x = false.then(|| x << -1);
+ | ^^^^^^----------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(x << -1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:238:14
+ |
+LL | let _x = false.then(|| x << 2);
+ | ^^^^^^---------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(x << 2)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:248:14
+ |
+LL | let _x = false.then(|| x / 0);
+ | ^^^^^^--------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(x / 0)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:250:14
+ |
+LL | let _x = false.then(|| x % 0);
+ | ^^^^^^--------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(x % 0)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:253:14
+ |
+LL | let _x = false.then(|| 1 / -1);
+ | ^^^^^^---------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(1 / -1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:255:14
+ |
+LL | let _x = false.then(|| i32::MIN / -1);
+ | ^^^^^^----------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MIN / -1)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:258:14
+ |
+LL | let _x = false.then(|| i32::MIN / 0);
+ | ^^^^^^---------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(i32::MIN / 0)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:260:14
+ |
+LL | let _x = false.then(|| 4 / 2);
+ | ^^^^^^--------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(4 / 2)`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:268:14
+ |
+LL | let _x = false.then(|| f1 + f2);
+ | ^^^^^^----------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(f1 + f2)`
+
+error: aborting due to 63 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.fixed b/src/tools/clippy/tests/ui/unnecessary_operation.fixed
index d0c0298ef..463412dae 100644
--- a/src/tools/clippy/tests/ui/unnecessary_operation.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.fixed
@@ -7,6 +7,9 @@
)]
#![warn(clippy::unnecessary_operation)]
+use std::fmt::Display;
+use std::ops::Shl;
+
struct Tuple(i32);
struct Struct {
field: i32,
@@ -50,6 +53,19 @@ fn get_drop_struct() -> DropStruct {
DropStruct { field: 0 }
}
+struct Cout;
+
+impl<T> Shl<T> for Cout
+where
+ T: Display,
+{
+ type Output = Self;
+ fn shl(self, rhs: T) -> Self::Output {
+ println!("{}", rhs);
+ self
+ }
+}
+
fn main() {
get_number();
get_number();
@@ -87,4 +103,7 @@ fn main() {
($($e:expr),*) => {{ $($e;)* }}
}
use_expr!(isize::MIN / -(one() as isize), i8::MIN / -one());
+
+ // Issue #11885
+ Cout << 16;
}
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.rs b/src/tools/clippy/tests/ui/unnecessary_operation.rs
index e8e3a2d56..f0d28e289 100644
--- a/src/tools/clippy/tests/ui/unnecessary_operation.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.rs
@@ -7,6 +7,9 @@
)]
#![warn(clippy::unnecessary_operation)]
+use std::fmt::Display;
+use std::ops::Shl;
+
struct Tuple(i32);
struct Struct {
field: i32,
@@ -50,6 +53,19 @@ fn get_drop_struct() -> DropStruct {
DropStruct { field: 0 }
}
+struct Cout;
+
+impl<T> Shl<T> for Cout
+where
+ T: Display,
+{
+ type Output = Self;
+ fn shl(self, rhs: T) -> Self::Output {
+ println!("{}", rhs);
+ self
+ }
+}
+
fn main() {
Tuple(get_number());
Struct { field: get_number() };
@@ -91,4 +107,7 @@ fn main() {
($($e:expr),*) => {{ $($e;)* }}
}
use_expr!(isize::MIN / -(one() as isize), i8::MIN / -one());
+
+ // Issue #11885
+ Cout << 16;
}
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.stderr b/src/tools/clippy/tests/ui/unnecessary_operation.stderr
index fbe495f51..eeee9ad60 100644
--- a/src/tools/clippy/tests/ui/unnecessary_operation.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.stderr
@@ -1,5 +1,5 @@
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:54:5
+ --> $DIR/unnecessary_operation.rs:70:5
|
LL | Tuple(get_number());
| ^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
@@ -8,103 +8,103 @@ LL | Tuple(get_number());
= help: to override `-D warnings` add `#[allow(clippy::unnecessary_operation)]`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:55:5
+ --> $DIR/unnecessary_operation.rs:71:5
|
LL | Struct { field: get_number() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:56:5
+ --> $DIR/unnecessary_operation.rs:72:5
|
LL | Struct { ..get_struct() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_struct();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:57:5
+ --> $DIR/unnecessary_operation.rs:73:5
|
LL | Enum::Tuple(get_number());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:58:5
+ --> $DIR/unnecessary_operation.rs:74:5
|
LL | Enum::Struct { field: get_number() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:59:5
+ --> $DIR/unnecessary_operation.rs:75:5
|
LL | 5 + get_number();
| ^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:60:5
+ --> $DIR/unnecessary_operation.rs:76:5
|
LL | *&get_number();
| ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:61:5
+ --> $DIR/unnecessary_operation.rs:77:5
|
LL | &get_number();
| ^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:62:5
+ --> $DIR/unnecessary_operation.rs:78:5
|
LL | (5, 6, get_number());
| ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;6;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:63:5
+ --> $DIR/unnecessary_operation.rs:79:5
|
LL | get_number()..;
| ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:64:5
+ --> $DIR/unnecessary_operation.rs:80:5
|
LL | ..get_number();
| ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:65:5
+ --> $DIR/unnecessary_operation.rs:81:5
|
LL | 5..get_number();
| ^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:66:5
+ --> $DIR/unnecessary_operation.rs:82:5
|
LL | [42, get_number()];
| ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:67:5
+ --> $DIR/unnecessary_operation.rs:83:5
|
LL | [42, 55][get_usize()];
| ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42, 55].len() > get_usize());`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:68:5
+ --> $DIR/unnecessary_operation.rs:84:5
|
LL | (42, get_number()).1;
| ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:69:5
+ --> $DIR/unnecessary_operation.rs:85:5
|
LL | [get_number(); 55];
| ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:70:5
+ --> $DIR/unnecessary_operation.rs:86:5
|
LL | [42; 55][get_usize()];
| ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42; 55].len() > get_usize());`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:71:5
+ --> $DIR/unnecessary_operation.rs:87:5
|
LL | / {
LL | | get_number()
@@ -112,7 +112,7 @@ LL | | };
| |______^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:74:5
+ --> $DIR/unnecessary_operation.rs:90:5
|
LL | / FooString {
LL | | s: String::from("blah"),
diff --git a/src/tools/clippy/tests/ui/upper_case_acronyms.fixed b/src/tools/clippy/tests/ui/upper_case_acronyms.fixed
index 460567b09..a8023ed00 100644
--- a/src/tools/clippy/tests/ui/upper_case_acronyms.fixed
+++ b/src/tools/clippy/tests/ui/upper_case_acronyms.fixed
@@ -59,4 +59,12 @@ enum Yaml {
Str(String),
}
+// test for issue #7708
+enum AllowOnField {
+ Disallow,
+ //~^ ERROR: name `DISALLOW` contains a capitalized acronym
+ #[allow(clippy::upper_case_acronyms)]
+ ALLOW,
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/upper_case_acronyms.rs b/src/tools/clippy/tests/ui/upper_case_acronyms.rs
index 6a20aee62..c4711b87e 100644
--- a/src/tools/clippy/tests/ui/upper_case_acronyms.rs
+++ b/src/tools/clippy/tests/ui/upper_case_acronyms.rs
@@ -59,4 +59,12 @@ enum YAML {
Str(String),
}
+// test for issue #7708
+enum AllowOnField {
+ DISALLOW,
+ //~^ ERROR: name `DISALLOW` contains a capitalized acronym
+ #[allow(clippy::upper_case_acronyms)]
+ ALLOW,
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/upper_case_acronyms.stderr b/src/tools/clippy/tests/ui/upper_case_acronyms.stderr
index c57b325e9..009c53c72 100644
--- a/src/tools/clippy/tests/ui/upper_case_acronyms.stderr
+++ b/src/tools/clippy/tests/ui/upper_case_acronyms.stderr
@@ -67,5 +67,11 @@ error: name `YAML` contains a capitalized acronym
LL | enum YAML {
| ^^^^ help: consider making the acronym lowercase, except the initial letter: `Yaml`
-error: aborting due to 11 previous errors
+error: name `DISALLOW` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:64:5
+ |
+LL | DISALLOW,
+ | ^^^^^^^^ help: consider making the acronym lowercase, except the initial letter: `Disallow`
+
+error: aborting due to 12 previous errors
diff --git a/src/tools/clippy/tests/ui/vec.fixed b/src/tools/clippy/tests/ui/vec.fixed
index bcbca971a..81b8bd7da 100644
--- a/src/tools/clippy/tests/ui/vec.fixed
+++ b/src/tools/clippy/tests/ui/vec.fixed
@@ -176,3 +176,37 @@ fn below() {
let _: String = a;
}
}
+
+fn func_needing_vec(_bar: usize, _baz: Vec<usize>) {}
+fn func_not_needing_vec(_bar: usize, _baz: usize) {}
+
+fn issue11861() {
+ macro_rules! this_macro_needs_vec {
+ ($x:expr) => {{
+ func_needing_vec($x.iter().sum(), $x);
+ for _ in $x {}
+ }};
+ }
+ macro_rules! this_macro_doesnt_need_vec {
+ ($x:expr) => {{ func_not_needing_vec($x.iter().sum(), $x.iter().sum()) }};
+ }
+
+ // Do not lint the next line
+ this_macro_needs_vec!(vec![1]);
+ this_macro_doesnt_need_vec!([1]); //~ ERROR: useless use of `vec!`
+
+ macro_rules! m {
+ ($x:expr) => {
+ fn f2() {
+ let _x: Vec<i32> = $x;
+ }
+ fn f() {
+ let _x = $x;
+ $x.starts_with(&[]);
+ }
+ };
+ }
+
+ // should not lint
+ m!(vec![1]);
+}
diff --git a/src/tools/clippy/tests/ui/vec.rs b/src/tools/clippy/tests/ui/vec.rs
index 087425585..5aca9b292 100644
--- a/src/tools/clippy/tests/ui/vec.rs
+++ b/src/tools/clippy/tests/ui/vec.rs
@@ -176,3 +176,37 @@ fn below() {
let _: String = a;
}
}
+
+fn func_needing_vec(_bar: usize, _baz: Vec<usize>) {}
+fn func_not_needing_vec(_bar: usize, _baz: usize) {}
+
+fn issue11861() {
+ macro_rules! this_macro_needs_vec {
+ ($x:expr) => {{
+ func_needing_vec($x.iter().sum(), $x);
+ for _ in $x {}
+ }};
+ }
+ macro_rules! this_macro_doesnt_need_vec {
+ ($x:expr) => {{ func_not_needing_vec($x.iter().sum(), $x.iter().sum()) }};
+ }
+
+ // Do not lint the next line
+ this_macro_needs_vec!(vec![1]);
+ this_macro_doesnt_need_vec!(vec![1]); //~ ERROR: useless use of `vec!`
+
+ macro_rules! m {
+ ($x:expr) => {
+ fn f2() {
+ let _x: Vec<i32> = $x;
+ }
+ fn f() {
+ let _x = $x;
+ $x.starts_with(&[]);
+ }
+ };
+ }
+
+ // should not lint
+ m!(vec![1]);
+}
diff --git a/src/tools/clippy/tests/ui/vec.stderr b/src/tools/clippy/tests/ui/vec.stderr
index fc261838f..c9018f94f 100644
--- a/src/tools/clippy/tests/ui/vec.stderr
+++ b/src/tools/clippy/tests/ui/vec.stderr
@@ -115,5 +115,11 @@ error: useless use of `vec!`
LL | for a in vec![String::new(), String::new()] {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can use an array directly: `[String::new(), String::new()]`
-error: aborting due to 19 previous errors
+error: useless use of `vec!`
+ --> $DIR/vec.rs:196:33
+ |
+LL | this_macro_doesnt_need_vec!(vec![1]);
+ | ^^^^^^^ help: you can use an array directly: `[1]`
+
+error: aborting due to 20 previous errors
diff --git a/src/tools/clippy/tests/ui/vec_box_sized.fixed b/src/tools/clippy/tests/ui/vec_box_sized.fixed
deleted file mode 100644
index 4363d2224..000000000
--- a/src/tools/clippy/tests/ui/vec_box_sized.fixed
+++ /dev/null
@@ -1,57 +0,0 @@
-#![allow(dead_code)]
-
-struct SizedStruct(i32);
-struct UnsizedStruct([i32]);
-struct BigStruct([i32; 10000]);
-
-/// The following should trigger the lint
-mod should_trigger {
- use super::SizedStruct;
- const C: Vec<i32> = Vec::new();
- static S: Vec<i32> = Vec::new();
-
- struct StructWithVecBox {
- sized_type: Vec<SizedStruct>,
- }
-
- struct A(Vec<SizedStruct>);
- struct B(Vec<Vec<u32>>);
-}
-
-/// The following should not trigger the lint
-mod should_not_trigger {
- use super::{BigStruct, UnsizedStruct};
-
- struct C(Vec<Box<UnsizedStruct>>);
- struct D(Vec<Box<BigStruct>>);
-
- struct StructWithVecBoxButItsUnsized {
- unsized_type: Vec<Box<UnsizedStruct>>,
- }
-
- struct TraitVec<T: ?Sized> {
- // Regression test for #3720. This was causing an ICE.
- inner: Vec<Box<T>>,
- }
-}
-
-mod inner_mod {
- mod inner {
- pub struct S;
- }
-
- mod inner2 {
- use super::inner::S;
-
- pub fn f() -> Vec<S> {
- vec![]
- }
- }
-}
-
-// https://github.com/rust-lang/rust-clippy/issues/11417
-fn in_closure() {
- let _ = |_: Vec<Box<dyn ToString>>| {};
-}
-
-fn main() {}
diff --git a/src/tools/clippy/tests/ui/vec_box_sized.rs b/src/tools/clippy/tests/ui/vec_box_sized.rs
index f4e27fe4b..49eaf8e06 100644
--- a/src/tools/clippy/tests/ui/vec_box_sized.rs
+++ b/src/tools/clippy/tests/ui/vec_box_sized.rs
@@ -1,12 +1,28 @@
+//@no-rustfix
+
#![allow(dead_code)]
+#![feature(allocator_api)]
+
+use std::alloc::{AllocError, Allocator, Layout};
+use std::ptr::NonNull;
struct SizedStruct(i32);
struct UnsizedStruct([i32]);
struct BigStruct([i32; 10000]);
+struct DummyAllocator;
+unsafe impl Allocator for DummyAllocator {
+ fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+ todo!()
+ }
+ unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+ todo!()
+ }
+}
+
/// The following should trigger the lint
mod should_trigger {
- use super::SizedStruct;
+ use super::{DummyAllocator, SizedStruct};
const C: Vec<Box<i32>> = Vec::new();
static S: Vec<Box<i32>> = Vec::new();
@@ -16,11 +32,21 @@ mod should_trigger {
struct A(Vec<Box<SizedStruct>>);
struct B(Vec<Vec<Box<(u32)>>>);
+
+ fn allocator_global_defined_vec() -> Vec<Box<i32>, std::alloc::Global> {
+ Vec::new()
+ }
+ fn allocator_global_defined_box() -> Vec<Box<i32, std::alloc::Global>> {
+ Vec::new()
+ }
+ fn allocator_match() -> Vec<Box<i32, DummyAllocator>, DummyAllocator> {
+ Vec::new_in(DummyAllocator)
+ }
}
/// The following should not trigger the lint
mod should_not_trigger {
- use super::{BigStruct, UnsizedStruct};
+ use super::{BigStruct, DummyAllocator, UnsizedStruct};
struct C(Vec<Box<UnsizedStruct>>);
struct D(Vec<Box<BigStruct>>);
@@ -33,6 +59,13 @@ mod should_not_trigger {
// Regression test for #3720. This was causing an ICE.
inner: Vec<Box<T>>,
}
+
+ fn allocator_mismatch() -> Vec<Box<i32, DummyAllocator>> {
+ Vec::new()
+ }
+ fn allocator_mismatch_2() -> Vec<Box<i32>, DummyAllocator> {
+ Vec::new_in(DummyAllocator)
+ }
}
mod inner_mod {
diff --git a/src/tools/clippy/tests/ui/vec_box_sized.stderr b/src/tools/clippy/tests/ui/vec_box_sized.stderr
index 9118f284b..d6479271f 100644
--- a/src/tools/clippy/tests/ui/vec_box_sized.stderr
+++ b/src/tools/clippy/tests/ui/vec_box_sized.stderr
@@ -1,5 +1,5 @@
error: `Vec<T>` is already on the heap, the boxing is unnecessary
- --> $DIR/vec_box_sized.rs:10:14
+ --> $DIR/vec_box_sized.rs:26:14
|
LL | const C: Vec<Box<i32>> = Vec::new();
| ^^^^^^^^^^^^^ help: try: `Vec<i32>`
@@ -8,34 +8,52 @@ LL | const C: Vec<Box<i32>> = Vec::new();
= help: to override `-D warnings` add `#[allow(clippy::vec_box)]`
error: `Vec<T>` is already on the heap, the boxing is unnecessary
- --> $DIR/vec_box_sized.rs:11:15
+ --> $DIR/vec_box_sized.rs:27:15
|
LL | static S: Vec<Box<i32>> = Vec::new();
| ^^^^^^^^^^^^^ help: try: `Vec<i32>`
error: `Vec<T>` is already on the heap, the boxing is unnecessary
- --> $DIR/vec_box_sized.rs:14:21
+ --> $DIR/vec_box_sized.rs:30:21
|
LL | sized_type: Vec<Box<SizedStruct>>,
| ^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<SizedStruct>`
error: `Vec<T>` is already on the heap, the boxing is unnecessary
- --> $DIR/vec_box_sized.rs:17:14
+ --> $DIR/vec_box_sized.rs:33:14
|
LL | struct A(Vec<Box<SizedStruct>>);
| ^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<SizedStruct>`
error: `Vec<T>` is already on the heap, the boxing is unnecessary
- --> $DIR/vec_box_sized.rs:18:18
+ --> $DIR/vec_box_sized.rs:34:18
|
LL | struct B(Vec<Vec<Box<(u32)>>>);
| ^^^^^^^^^^^^^^^ help: try: `Vec<u32>`
error: `Vec<T>` is already on the heap, the boxing is unnecessary
- --> $DIR/vec_box_sized.rs:46:23
+ --> $DIR/vec_box_sized.rs:36:42
+ |
+LL | fn allocator_global_defined_vec() -> Vec<Box<i32>, std::alloc::Global> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<i32>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:39:42
+ |
+LL | fn allocator_global_defined_box() -> Vec<Box<i32, std::alloc::Global>> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<i32>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:42:29
+ |
+LL | fn allocator_match() -> Vec<Box<i32, DummyAllocator>, DummyAllocator> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<i32>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:79:23
|
LL | pub fn f() -> Vec<Box<S>> {
| ^^^^^^^^^^^ help: try: `Vec<S>`
-error: aborting due to 6 previous errors
+error: aborting due to 9 previous errors
diff --git a/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr b/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr
index 715c9923b..c16ba4e52 100644
--- a/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr
+++ b/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr
@@ -10,5 +10,5 @@ LL | v.resize(0, 5);
= note: `-D clippy::vec-resize-to-zero` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::vec_resize_to_zero)]`
-error: aborting due to previous error
+error: aborting due to 1 previous error
diff --git a/src/tools/clippy/tests/ui/vtable_address_comparisons.rs b/src/tools/clippy/tests/ui/vtable_address_comparisons.rs
deleted file mode 100644
index 75647c027..000000000
--- a/src/tools/clippy/tests/ui/vtable_address_comparisons.rs
+++ /dev/null
@@ -1,52 +0,0 @@
-use std::fmt::Debug;
-use std::ptr;
-use std::rc::Rc;
-use std::sync::Arc;
-
-#[warn(clippy::vtable_address_comparisons)]
-#[allow(clippy::borrow_as_ptr)]
-
-fn main() {
- let a: *const dyn Debug = &1 as &dyn Debug;
- let b: *const dyn Debug = &1 as &dyn Debug;
-
- // These should fail:
- let _ = a == b;
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
- let _ = a != b;
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
- let _ = a < b;
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
- let _ = a <= b;
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
- let _ = a > b;
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
- let _ = a >= b;
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
- ptr::eq(a, b);
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
-
- let a = &1 as &dyn Debug;
- let b = &1 as &dyn Debug;
- ptr::eq(a, b);
- //~^ ERROR: comparing trait object pointers compares a non-unique vtable address
-
- // These should be fine:
- let a = &1;
- ptr::eq(a, a);
-
- let a = Rc::new(1);
- Rc::ptr_eq(&a, &a);
-
- let a = Arc::new(1);
- Arc::ptr_eq(&a, &a);
-
- let a: Rc<dyn Debug> = Rc::new(1);
- Rc::ptr_eq(&a, &a);
-
- let a: Arc<dyn Debug> = Arc::new(1);
- Arc::ptr_eq(&a, &a);
-
- let a: &[u8] = b"";
- ptr::eq(a, a);
-}
diff --git a/src/tools/clippy/tests/ui/vtable_address_comparisons.stderr b/src/tools/clippy/tests/ui/vtable_address_comparisons.stderr
deleted file mode 100644
index 83c82f379..000000000
--- a/src/tools/clippy/tests/ui/vtable_address_comparisons.stderr
+++ /dev/null
@@ -1,68 +0,0 @@
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:14:13
- |
-LL | let _ = a == b;
- | ^^^^^^
- |
- = help: consider extracting and comparing data pointers only
- = note: `-D clippy::vtable-address-comparisons` implied by `-D warnings`
- = help: to override `-D warnings` add `#[allow(clippy::vtable_address_comparisons)]`
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:16:13
- |
-LL | let _ = a != b;
- | ^^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:18:13
- |
-LL | let _ = a < b;
- | ^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:20:13
- |
-LL | let _ = a <= b;
- | ^^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:22:13
- |
-LL | let _ = a > b;
- | ^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:24:13
- |
-LL | let _ = a >= b;
- | ^^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:26:5
- |
-LL | ptr::eq(a, b);
- | ^^^^^^^^^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: comparing trait object pointers compares a non-unique vtable address
- --> $DIR/vtable_address_comparisons.rs:31:5
- |
-LL | ptr::eq(a, b);
- | ^^^^^^^^^^^^^
- |
- = help: consider extracting and comparing data pointers only
-
-error: aborting due to 8 previous errors
-
diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml
index 419b3c30d..96085bcf9 100644
--- a/src/tools/clippy/triagebot.toml
+++ b/src/tools/clippy/triagebot.toml
@@ -19,6 +19,7 @@ new_pr = true
[assign]
contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md"
+users_on_vacation = ["blyxyas"]
[assign.owners]
"/.github" = ["@flip1995"]
@@ -33,5 +34,4 @@ contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIB
"@dswij",
"@Jarcho",
"@blyxyas",
- "@Centri3",
]
diff --git a/src/tools/collect-license-metadata/src/main.rs b/src/tools/collect-license-metadata/src/main.rs
index ca2a6f4b8..cbe94af35 100644
--- a/src/tools/collect-license-metadata/src/main.rs
+++ b/src/tools/collect-license-metadata/src/main.rs
@@ -6,6 +6,16 @@ use crate::licenses::LicensesInterner;
use anyhow::Error;
use std::path::PathBuf;
+// Some directories have too many slight license differences that'd result in a
+// huge report, and could be considered a standalone project anyway. Those
+// directories are "condensed" into a single licensing block for ease of
+// reading, merging the licensing information.
+//
+// For every `(dir, file)`, every file in `dir` is considered to have the
+// license info of `file`.
+const CONDENSED_DIRECTORIES: &[(&str, &str)] =
+ &[("./src/llvm-project/", "./src/llvm-project/README.md")];
+
fn main() -> Result<(), Error> {
let reuse_exe: PathBuf = std::env::var_os("REUSE_EXE").expect("Missing REUSE_EXE").into();
let dest: PathBuf = std::env::var_os("DEST").expect("Missing DEST").into();
diff --git a/src/tools/collect-license-metadata/src/path_tree.rs b/src/tools/collect-license-metadata/src/path_tree.rs
index 709d91897..fc8756d9a 100644
--- a/src/tools/collect-license-metadata/src/path_tree.rs
+++ b/src/tools/collect-license-metadata/src/path_tree.rs
@@ -4,7 +4,7 @@
//! passes over the tree to remove redundant information.
use crate::licenses::{License, LicenseId, LicensesInterner};
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
#[derive(serde::Serialize)]
@@ -12,6 +12,7 @@ use std::path::{Path, PathBuf};
pub(crate) enum Node<L> {
Root { children: Vec<Node<L>> },
Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
+ CondensedDirectory { name: PathBuf, licenses: Vec<L> },
File { name: PathBuf, license: L },
Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
Empty,
@@ -57,9 +58,9 @@ impl Node<LicenseId> {
Node::Directory { name, mut children, license: None } => {
directories.entry(name).or_insert_with(Vec::new).append(&mut children);
}
- file @ Node::File { .. } => {
- files.push(file);
- }
+ file @ Node::File { .. } => files.push(file),
+ // Propagate condensed directories as-is.
+ condensed @ Node::CondensedDirectory { .. } => files.push(condensed),
Node::Empty => {}
Node::Root { .. } => {
panic!("can't have a root inside another element");
@@ -86,6 +87,7 @@ impl Node<LicenseId> {
}
Node::Empty => {}
Node::File { .. } => {}
+ Node::CondensedDirectory { .. } => {}
Node::Group { .. } => {
panic!("Group should not be present at this stage");
}
@@ -132,6 +134,7 @@ impl Node<LicenseId> {
}
}
Node::File { .. } => {}
+ Node::CondensedDirectory { .. } => {}
Node::Group { .. } => panic!("group should not be present at this stage"),
Node::Empty => {}
}
@@ -174,6 +177,9 @@ impl Node<LicenseId> {
Node::Directory { name: child_child_name, .. } => {
*child_child_name = child_name.join(&child_child_name);
}
+ Node::CondensedDirectory { name: child_child_name, .. } => {
+ *child_child_name = child_name.join(&child_child_name);
+ }
Node::File { name: child_child_name, .. } => {
*child_child_name = child_name.join(&child_child_name);
}
@@ -188,6 +194,7 @@ impl Node<LicenseId> {
}
Node::Empty => {}
Node::File { .. } => {}
+ Node::CondensedDirectory { .. } => {}
Node::Group { .. } => panic!("Group should not be present at this stage"),
}
}
@@ -255,6 +262,7 @@ impl Node<LicenseId> {
}
}
Node::File { .. } => {}
+ Node::CondensedDirectory { .. } => {}
Node::Group { .. } => panic!("FileGroup should not be present at this stage"),
Node::Empty => {}
}
@@ -270,6 +278,7 @@ impl Node<LicenseId> {
}
children.retain(|child| !matches!(child, Node::Empty));
}
+ Node::CondensedDirectory { .. } => {}
Node::Group { .. } => {}
Node::File { .. } => {}
Node::Empty => {}
@@ -293,7 +302,24 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
// Ensure reproducibility of all future steps.
input.sort();
- for (path, license) in input {
+ let mut condensed_directories = BTreeMap::new();
+ 'outer: for (path, license) in input {
+ // Files in condensed directories are handled separately.
+ for (condensed_directory, allowed_file) in super::CONDENSED_DIRECTORIES {
+ if path.starts_with(condensed_directory) {
+ if path.as_path() == Path::new(allowed_file) {
+ // The licence on our allowed file is used to represent the entire directory
+ condensed_directories
+ .entry(*condensed_directory)
+ .or_insert_with(BTreeSet::new)
+ .insert(license);
+ } else {
+ // don't add the file
+ }
+ continue 'outer;
+ }
+ }
+
let mut node = Node::File { name: path.file_name().unwrap().into(), license };
for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
node = Node::Directory {
@@ -306,6 +332,22 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
children.push(node);
}
+ for (path, licenses) in condensed_directories {
+ let path = Path::new(path);
+ let mut node = Node::CondensedDirectory {
+ name: path.file_name().unwrap().into(),
+ licenses: licenses.iter().copied().collect(),
+ };
+ for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
+ node = Node::Directory {
+ name: component.as_os_str().into(),
+ children: vec![node],
+ license: None,
+ };
+ }
+ children.push(node);
+ }
+
Node::Root { children }
}
@@ -334,6 +376,10 @@ pub(crate) fn expand_interned_licenses(
Node::Group { files, directories, license } => {
Node::Group { files, directories, license: interner.resolve(license) }
}
+ Node::CondensedDirectory { name, licenses } => Node::CondensedDirectory {
+ name,
+ licenses: licenses.into_iter().map(|license| interner.resolve(license)).collect(),
+ },
Node::Empty => Node::Empty,
}
}
diff --git a/src/tools/collect-license-metadata/src/reuse.rs b/src/tools/collect-license-metadata/src/reuse.rs
index d6b3772ba..a5d01935f 100644
--- a/src/tools/collect-license-metadata/src/reuse.rs
+++ b/src/tools/collect-license-metadata/src/reuse.rs
@@ -17,9 +17,11 @@ pub(crate) fn collect(
let mut result = Vec::new();
for file in document.file_information {
+ let concluded_license = file.concluded_license.expect("File should have licence info");
+ let copyright_text = file.copyright_text.expect("File should have copyright text");
let license = interner.intern(License {
- spdx: file.concluded_license.to_string(),
- copyright: file.copyright_text.split('\n').map(|s| s.into()).collect(),
+ spdx: concluded_license.to_string(),
+ copyright: copyright_text.split('\n').map(|s| s.into()).collect(),
});
result.push((file.file_name.into(), license));
@@ -30,7 +32,7 @@ pub(crate) fn collect(
fn obtain_spdx_document(reuse_exe: &Path) -> Result<String, Error> {
let output = Command::new(reuse_exe)
- .args(&["spdx", "--add-license-concluded", "--creator-person=bors"])
+ .args(&["--include-submodules", "spdx", "--add-license-concluded", "--creator-person=bors"])
.stdout(Stdio::piped())
.spawn()?
.wait_with_output()?;
diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs
index 1e9684555..c45c0b3c6 100644
--- a/src/tools/compiletest/src/common.rs
+++ b/src/tools/compiletest/src/common.rs
@@ -242,6 +242,9 @@ pub struct Config {
/// Run ignored tests
pub run_ignored: bool,
+ /// Whether to run tests with `ignore-debug` header
+ pub with_debug_assertions: bool,
+
/// Only run tests that match these filters
pub filters: Vec<String>,
@@ -410,9 +413,6 @@ impl Config {
pub fn matches_arch(&self, arch: &str) -> bool {
self.target_cfg().arch == arch ||
- // Shorthand for convenience. The arch for
- // asmjs-unknown-emscripten is actually wasm32.
- (arch == "asmjs" && self.target.starts_with("asmjs")) ||
// Matching all the thumb variants as one can be convenient.
// (thumbv6m, thumbv7em, thumbv7m, etc.)
(arch == "thumb" && self.target.starts_with("thumb"))
@@ -479,6 +479,7 @@ impl TargetCfgs {
let mut targets: HashMap<String, TargetCfg> = serde_json::from_str(&rustc_output(
config,
&["--print=all-target-specs-json", "-Zunstable-options"],
+ Default::default(),
))
.unwrap();
@@ -491,16 +492,33 @@ impl TargetCfgs {
let mut all_families = HashSet::new();
let mut all_pointer_widths = HashSet::new();
- // Handle custom target specs, which are not included in `--print=all-target-specs-json`.
- if config.target.ends_with(".json") {
- targets.insert(
- config.target.clone(),
- serde_json::from_str(&rustc_output(
- config,
- &["--print=target-spec-json", "-Zunstable-options", "--target", &config.target],
- ))
- .unwrap(),
- );
+ // If current target is not included in the `--print=all-target-specs-json` output,
+ // we check whether it is a custom target from the user or a synthetic target from bootstrap.
+ if !targets.contains_key(&config.target) {
+ let mut envs: HashMap<String, String> = HashMap::new();
+
+ if let Ok(t) = std::env::var("RUST_TARGET_PATH") {
+ envs.insert("RUST_TARGET_PATH".into(), t);
+ }
+
+ // This returns false only when the target is neither a synthetic target
+ // nor a custom target from the user, indicating it is most likely invalid.
+ if config.target.ends_with(".json") || !envs.is_empty() {
+ targets.insert(
+ config.target.clone(),
+ serde_json::from_str(&rustc_output(
+ config,
+ &[
+ "--print=target-spec-json",
+ "-Zunstable-options",
+ "--target",
+ &config.target,
+ ],
+ envs,
+ ))
+ .unwrap(),
+ );
+ }
}
for (target, cfg) in targets.iter() {
@@ -545,7 +563,9 @@ impl TargetCfgs {
// code below extracts them from `--print=cfg`: make sure to only override fields that can
// actually be changed with `-C` flags.
for config in
- rustc_output(config, &["--print=cfg", "--target", &config.target]).trim().lines()
+ rustc_output(config, &["--print=cfg", "--target", &config.target], Default::default())
+ .trim()
+ .lines()
{
let (name, value) = config
.split_once("=\"")
@@ -624,11 +644,12 @@ pub enum Endian {
Big,
}
-fn rustc_output(config: &Config, args: &[&str]) -> String {
+fn rustc_output(config: &Config, args: &[&str], envs: HashMap<String, String>) -> String {
let mut command = Command::new(&config.rustc_path);
add_dylib_path(&mut command, iter::once(&config.compile_lib_path));
command.args(&config.target_rustcflags).args(args);
command.env("RUSTC_BOOTSTRAP", "1");
+ command.envs(envs);
let output = match command.output() {
Ok(output) => output,
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
index d6516cff6..f85f9e674 100644
--- a/src/tools/compiletest/src/header.rs
+++ b/src/tools/compiletest/src/header.rs
@@ -632,7 +632,7 @@ fn iter_header_extra(
it(None, directive, 0);
}
- let comment = if testfile.extension().map(|e| e == "rs") == Some(true) { "//" } else { "#" };
+ let comment = if testfile.extension().is_some_and(|e| e == "rs") { "//" } else { "#" };
let mut rdr = BufReader::new(rdr);
let mut ln = String::new();
diff --git a/src/tools/compiletest/src/header/cfg.rs b/src/tools/compiletest/src/header/cfg.rs
index 77c2866b3..e2a04b7e5 100644
--- a/src/tools/compiletest/src/header/cfg.rs
+++ b/src/tools/compiletest/src/header/cfg.rs
@@ -146,8 +146,7 @@ pub(super) fn parse_cfg_name_directive<'a>(
}
// `wasm32-bare` is an alias to refer to just wasm32-unknown-unknown
- // (in contrast to `wasm32` which also matches non-bare targets like
- // asmjs-unknown-emscripten).
+ // (in contrast to `wasm32` which also matches non-bare targets)
condition! {
name: "wasm32-bare",
condition: config.target == "wasm32-unknown-unknown",
@@ -155,11 +154,6 @@ pub(super) fn parse_cfg_name_directive<'a>(
}
condition! {
- name: "asmjs",
- condition: config.target.starts_with("asmjs"),
- message: "when the architecture is asm.js",
- }
- condition! {
name: "thumb",
condition: config.target.starts_with("thumb"),
message: "when the architecture is part of the Thumb family"
@@ -196,8 +190,8 @@ pub(super) fn parse_cfg_name_directive<'a>(
}
condition! {
name: "debug",
- condition: cfg!(debug_assertions),
- message: "when building with debug assertions",
+ condition: config.with_debug_assertions,
+ message: "when running tests with `ignore-debug` header",
}
condition! {
name: config.debugger.as_ref().map(|d| d.to_str()),
diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs
index 85e745bed..295134c78 100644
--- a/src/tools/compiletest/src/header/tests.rs
+++ b/src/tools/compiletest/src/header/tests.rs
@@ -398,8 +398,6 @@ fn ignore_arch() {
("x86_64-unknown-linux-gnu", "x86_64"),
("i686-unknown-linux-gnu", "x86"),
("nvptx64-nvidia-cuda", "nvptx64"),
- ("asmjs-unknown-emscripten", "wasm32"),
- ("asmjs-unknown-emscripten", "asmjs"),
("thumbv7m-none-eabi", "thumb"),
];
for (target, arch) in archs {
@@ -492,9 +490,6 @@ fn wasm_special() {
("wasm32-unknown-unknown", "wasm32", true),
("wasm32-unknown-unknown", "wasm32-bare", true),
("wasm32-unknown-unknown", "wasm64", false),
- ("asmjs-unknown-emscripten", "emscripten", true),
- ("asmjs-unknown-emscripten", "wasm32", true),
- ("asmjs-unknown-emscripten", "wasm32-bare", false),
("wasm32-unknown-emscripten", "emscripten", true),
("wasm32-unknown-emscripten", "wasm32", true),
("wasm32-unknown-emscripten", "wasm32-bare", false),
diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs
index bb09c03ef..5a80b9121 100644
--- a/src/tools/compiletest/src/lib.rs
+++ b/src/tools/compiletest/src/lib.rs
@@ -81,6 +81,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
)
.optopt("", "run", "whether to execute run-* tests", "auto | always | never")
.optflag("", "ignored", "run tests marked as ignored")
+ .optflag("", "with-debug-assertions", "whether to run tests with `ignore-debug` header")
.optmulti("", "skip", "skip tests matching SUBSTRING. Can be passed multiple times", "SUBSTRING")
.optflag("", "exact", "filters match exactly")
.optopt(
@@ -203,6 +204,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
let src_base = opt_path(matches, "src-base");
let run_ignored = matches.opt_present("ignored");
+ let with_debug_assertions = matches.opt_present("with-debug-assertions");
let mode = matches.opt_str("mode").unwrap().parse().expect("invalid mode");
let has_tidy = if mode == Mode::Rustdoc {
Command::new("tidy")
@@ -238,6 +240,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
suite: matches.opt_str("suite").unwrap(),
debugger: None,
run_ignored,
+ with_debug_assertions,
filters: matches.free.clone(),
skip: matches.opt_strs("skip"),
filter_exact: matches.opt_present("exact"),
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
index 63e8ba7c7..5d53a4d28 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
@@ -20,12 +20,11 @@ use regex::{Captures, Regex};
use rustfix::{apply_suggestions, get_suggestions_from_json, Filter};
use std::borrow::Cow;
-use std::collections::hash_map::DefaultHasher;
use std::collections::{HashMap, HashSet};
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs::{self, create_dir_all, File, OpenOptions};
-use std::hash::{Hash, Hasher};
+use std::hash::{DefaultHasher, Hash, Hasher};
use std::io::prelude::*;
use std::io::{self, BufReader};
use std::iter;
@@ -475,14 +474,12 @@ impl<'test> TestCx<'test> {
self.fatal("missing --coverage-dump");
};
- let proc_res = self.compile_test_and_save_ir();
+ let (proc_res, llvm_ir_path) = self.compile_test_and_save_ir();
if !proc_res.status.success() {
self.fatal_proc_rec("compilation failed!", &proc_res);
}
drop(proc_res);
- let llvm_ir_path = self.output_base_name().with_extension("ll");
-
let mut dump_command = Command::new(coverage_dump_path);
dump_command.arg(llvm_ir_path);
let proc_res = self.run_command_to_procres(&mut dump_command);
@@ -2546,10 +2543,10 @@ impl<'test> TestCx<'test> {
rustc.args(&["-Zpolonius"]);
}
Some(CompareMode::NextSolver) => {
- rustc.args(&["-Ztrait-solver=next"]);
+ rustc.args(&["-Znext-solver"]);
}
Some(CompareMode::NextSolverCoherence) => {
- rustc.args(&["-Ztrait-solver=next-coherence"]);
+ rustc.args(&["-Znext-solver=coherence"]);
}
Some(CompareMode::SplitDwarf) if self.config.target.contains("windows") => {
rustc.args(&["-Csplit-debuginfo=unpacked", "-Zunstable-options"]);
@@ -2786,10 +2783,54 @@ impl<'test> TestCx<'test> {
proc_res.fatal(None, || on_failure(*self));
}
+ fn get_output_file(&self, extension: &str) -> TargetLocation {
+ let thin_lto = self.props.compile_flags.iter().any(|s| s.ends_with("lto=thin"));
+ if thin_lto {
+ TargetLocation::ThisDirectory(self.output_base_dir())
+ } else {
+ // This works with both `--emit asm` (as default output name for the assembly)
+ // and `ptx-linker` because the latter can write output at requested location.
+ let output_path = self.output_base_name().with_extension(extension);
+ let output_file = TargetLocation::ThisFile(output_path.clone());
+ output_file
+ }
+ }
+
+ fn get_filecheck_file(&self, extension: &str) -> PathBuf {
+ let thin_lto = self.props.compile_flags.iter().any(|s| s.ends_with("lto=thin"));
+ if thin_lto {
+ let name = self.testpaths.file.file_stem().unwrap().to_str().unwrap();
+ let canonical_name = name.replace('-', "_");
+ let mut output_file = None;
+ for entry in self.output_base_dir().read_dir().unwrap() {
+ if let Ok(entry) = entry {
+ let entry_path = entry.path();
+ let entry_file = entry_path.file_name().unwrap().to_str().unwrap();
+ if entry_file.starts_with(&format!("{}.{}", name, canonical_name))
+ && entry_file.ends_with(extension)
+ {
+ assert!(
+ output_file.is_none(),
+ "thinlto doesn't support multiple cgu tests"
+ );
+ output_file = Some(entry_file.to_string());
+ }
+ }
+ }
+ if let Some(output_file) = output_file {
+ self.output_base_dir().join(output_file)
+ } else {
+ self.output_base_name().with_extension(extension)
+ }
+ } else {
+ self.output_base_name().with_extension(extension)
+ }
+ }
+
// codegen tests (using FileCheck)
- fn compile_test_and_save_ir(&self) -> ProcRes {
- let output_file = TargetLocation::ThisDirectory(self.output_base_dir());
+ fn compile_test_and_save_ir(&self) -> (ProcRes, PathBuf) {
+ let output_file = self.get_output_file("ll");
let input_file = &self.testpaths.file;
let rustc = self.make_compile_args(
input_file,
@@ -2800,15 +2841,13 @@ impl<'test> TestCx<'test> {
Vec::new(),
);
- self.compose_and_run_compiler(rustc, None)
+ let proc_res = self.compose_and_run_compiler(rustc, None);
+ let output_path = self.get_filecheck_file("ll");
+ (proc_res, output_path)
}
fn compile_test_and_save_assembly(&self) -> (ProcRes, PathBuf) {
- // This works with both `--emit asm` (as default output name for the assembly)
- // and `ptx-linker` because the latter can write output at requested location.
- let output_path = self.output_base_name().with_extension("s");
-
- let output_file = TargetLocation::ThisFile(output_path.clone());
+ let output_file = self.get_output_file("s");
let input_file = &self.testpaths.file;
let mut emit = Emit::None;
@@ -2838,7 +2877,9 @@ impl<'test> TestCx<'test> {
Vec::new(),
);
- (self.compose_and_run_compiler(rustc, None), output_path)
+ let proc_res = self.compose_and_run_compiler(rustc, None);
+ let output_path = self.get_filecheck_file("s");
+ (proc_res, output_path)
}
fn verify_with_filecheck(&self, output: &Path) -> ProcRes {
@@ -2871,7 +2912,7 @@ impl<'test> TestCx<'test> {
self.fatal("missing --llvm-filecheck");
}
- let proc_res = self.compile_test_and_save_ir();
+ let (proc_res, output_path) = self.compile_test_and_save_ir();
if !proc_res.status.success() {
self.fatal_proc_rec("compilation failed!", &proc_res);
}
@@ -2879,8 +2920,6 @@ impl<'test> TestCx<'test> {
if let Some(PassMode::Build) = self.pass_mode() {
return;
}
-
- let output_path = self.output_base_name().with_extension("ll");
let proc_res = self.verify_with_filecheck(&output_path);
if !proc_res.status.success() {
self.fatal_proc_rec("verification with 'FileCheck' failed", &proc_res);
@@ -3702,9 +3741,7 @@ impl<'test> TestCx<'test> {
let stderr_bits = format!("{}bit.stderr", self.config.get_pointer_width());
let (stderr_kind, stdout_kind) = match output_kind {
TestOutput::Compile => (
- {
- if self.props.stderr_per_bitwidth { &stderr_bits } else { UI_STDERR }
- },
+ { if self.props.stderr_per_bitwidth { &stderr_bits } else { UI_STDERR } },
UI_STDOUT,
),
TestOutput::Run => (UI_RUN_STDERR, UI_RUN_STDOUT),
@@ -4251,15 +4288,18 @@ impl<'test> TestCx<'test> {
let mut seen_allocs = indexmap::IndexSet::new();
// The alloc-id appears in pretty-printed allocations.
- let re = Regex::new(r"╾─*a(lloc)?([0-9]+)(\+0x[0-9]+)?─*╼").unwrap();
+ let re =
+ Regex::new(r"╾─*a(lloc)?([0-9]+)(\+0x[0-9]+)?(<imm>)?( \([0-9]+ ptr bytes\))?─*╼")
+ .unwrap();
normalized = re
.replace_all(&normalized, |caps: &Captures<'_>| {
// Renumber the captured index.
let index = caps.get(2).unwrap().as_str().to_string();
let (index, _) = seen_allocs.insert_full(index);
let offset = caps.get(3).map_or("", |c| c.as_str());
+ let imm = caps.get(4).map_or("", |c| c.as_str());
// Do not bother keeping it pretty, just make it deterministic.
- format!("╾ALLOC{index}{offset}╼")
+ format!("╾ALLOC{index}{offset}{imm}╼")
})
.into_owned();
diff --git a/src/tools/error_index_generator/main.rs b/src/tools/error_index_generator/main.rs
index 62a58576d..865d7172c 100644
--- a/src/tools/error_index_generator/main.rs
+++ b/src/tools/error_index_generator/main.rs
@@ -1,6 +1,7 @@
#![feature(rustc_private)]
extern crate rustc_driver;
+extern crate rustc_log;
extern crate rustc_session;
use std::env;
@@ -172,8 +173,8 @@ fn parse_args() -> (OutputFormat, PathBuf) {
fn main() {
let handler =
- rustc_session::EarlyErrorHandler::new(rustc_session::config::ErrorOutputType::default());
- rustc_driver::init_env_logger(&handler, "RUST_LOG");
+ rustc_session::EarlyDiagCtxt::new(rustc_session::config::ErrorOutputType::default());
+ rustc_driver::init_logger(&handler, rustc_log::LoggerConfig::from_env("RUST_LOG"));
let (format, dst) = parse_args();
let result = main_with_result(format, &dst);
if let Err(e) = result {
diff --git a/src/tools/generate-copyright/src/main.rs b/src/tools/generate-copyright/src/main.rs
index 60c771676..558e87290 100644
--- a/src/tools/generate-copyright/src/main.rs
+++ b/src/tools/generate-copyright/src/main.rs
@@ -1,4 +1,5 @@
use anyhow::Error;
+use std::collections::BTreeSet;
use std::io::Write;
use std::path::PathBuf;
@@ -26,7 +27,7 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
}
}
Node::Directory { name, children, license } => {
- render_license(&prefix, std::iter::once(name), license, buffer)?;
+ render_license(&prefix, std::iter::once(name), license.iter(), buffer)?;
if !children.is_empty() {
writeln!(buffer, "{prefix}")?;
writeln!(buffer, "{prefix}*Exceptions:*")?;
@@ -36,11 +37,19 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
}
}
}
+ Node::CondensedDirectory { name, licenses } => {
+ render_license(&prefix, std::iter::once(name), licenses.iter(), buffer)?;
+ }
Node::Group { files, directories, license } => {
- render_license(&prefix, directories.iter().chain(files.iter()), license, buffer)?;
+ render_license(
+ &prefix,
+ directories.iter().chain(files.iter()),
+ std::iter::once(license),
+ buffer,
+ )?;
}
Node::File { name, license } => {
- render_license(&prefix, std::iter::once(name), license, buffer)?;
+ render_license(&prefix, std::iter::once(name), std::iter::once(license), buffer)?;
}
}
@@ -50,15 +59,26 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
fn render_license<'a>(
prefix: &str,
names: impl Iterator<Item = &'a String>,
- license: &License,
+ licenses: impl Iterator<Item = &'a License>,
buffer: &mut Vec<u8>,
) -> Result<(), Error> {
+ let mut spdxs = BTreeSet::new();
+ let mut copyrights = BTreeSet::new();
+ for license in licenses {
+ spdxs.insert(&license.spdx);
+ for copyright in &license.copyright {
+ copyrights.insert(copyright);
+ }
+ }
+
for name in names {
writeln!(buffer, "{prefix}**`{name}`** ")?;
}
- writeln!(buffer, "{prefix}License: `{}` ", license.spdx)?;
- for (i, copyright) in license.copyright.iter().enumerate() {
- let suffix = if i == license.copyright.len() - 1 { "" } else { " " };
+ for spdx in spdxs.iter() {
+ writeln!(buffer, "{prefix}License: `{spdx}` ")?;
+ }
+ for (i, copyright) in copyrights.iter().enumerate() {
+ let suffix = if i == copyrights.len() - 1 { "" } else { " " };
writeln!(buffer, "{prefix}Copyright: {copyright}{suffix}")?;
}
@@ -74,7 +94,8 @@ struct Metadata {
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node {
Root { children: Vec<Node> },
- Directory { name: String, children: Vec<Node>, license: License },
+ Directory { name: String, children: Vec<Node>, license: Option<License> },
+ CondensedDirectory { name: String, licenses: Vec<License> },
File { name: String, license: License },
Group { files: Vec<String>, directories: Vec<String>, license: License },
}
diff --git a/src/tools/generate-windows-sys/Cargo.toml b/src/tools/generate-windows-sys/Cargo.toml
index 9821677a1..d8a7a06ef 100644
--- a/src/tools/generate-windows-sys/Cargo.toml
+++ b/src/tools/generate-windows-sys/Cargo.toml
@@ -4,4 +4,4 @@ version = "0.1.0"
edition = "2021"
[dependencies.windows-bindgen]
-version = "0.51.1"
+version = "0.52.0"
diff --git a/src/tools/jsondocck/Cargo.toml b/src/tools/jsondocck/Cargo.toml
index ccabe6483..6326a9b1e 100644
--- a/src/tools/jsondocck/Cargo.toml
+++ b/src/tools/jsondocck/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
-jsonpath_lib = "0.2"
+jsonpath_lib = "0.3"
getopts = "0.2"
regex = "1.4"
shlex = "1.0"
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index bd6554bf8..227d1db0e 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -28,21 +28,15 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.71"
+version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
-
-[[package]]
-name = "anymap"
-version = "1.0.0-beta.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
[[package]]
name = "arbitrary"
-version = "1.3.0"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e"
+checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110"
[[package]]
name = "arrayvec"
@@ -51,17 +45,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi 0.1.19",
- "libc",
- "winapi",
-]
-
-[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -89,8 +72,8 @@ dependencies = [
"cfg",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"profile",
+ "rust-analyzer-salsa",
"rustc-hash",
- "salsa",
"stdx",
"syntax",
"test-utils",
@@ -107,9 +90,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.3.2"
+version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
+checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "byteorder"
@@ -137,9 +120,9 @@ dependencies = [
[[package]]
name = "cargo_metadata"
-version = "0.15.4"
+version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a"
+checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037"
dependencies = [
"camino",
"cargo-platform",
@@ -177,32 +160,32 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
-version = "0.92.0"
+version = "0.95.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff5053a8a42dbff5279a82423946fc56dc1253b76cf211b2b3c14b3aad4e1281"
+checksum = "329427f28cd2bddaacd47c4dcd3d7082d315c61fb164394c690fe98c1b6ee9d3"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn",
"synstructure",
]
[[package]]
name = "chalk-ir"
-version = "0.92.0"
+version = "0.95.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a56de2146a8ed0fcd54f4bd50db852f1de4eac9e1efe568494f106c21b77d2a"
+checksum = "9e1e1659238bd598d0f7dbc5034cf1ff46010a3d6827704c9ed443c8359cb484"
dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.4.1",
"chalk-derive",
"lazy_static",
]
[[package]]
name = "chalk-recursive"
-version = "0.92.0"
+version = "0.95.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cc09e6e9531f3544989ef89b189e80fbc7ad9e2f73f1c5e03ddc9ffb0527463"
+checksum = "b3e0bff0ba1bed11407384fcec0353aeb6888901e63cb47d04505ec47adad847"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -213,14 +196,14 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.92.0"
+version = "0.95.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b392e02b4c81ec76d3748da839fc70a5539b83d27c9030668463d34d5110b860"
+checksum = "eb9c46d501cf83732a91056c0c846ae7a16d6b3c67a6a6bb5e9cc0a2e91563b6"
dependencies = [
"chalk-derive",
"chalk-ir",
"ena",
- "indexmap 1.9.3",
+ "indexmap",
"itertools",
"petgraph",
"rustc-hash",
@@ -233,7 +216,7 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5080df6b0f0ecb76cab30808f00d937ba725cebe266a3da8cd89dff92f2a9916"
dependencies = [
- "nix",
+ "nix 0.26.2",
"winapi",
]
@@ -293,7 +276,7 @@ dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
- "memoffset 0.9.0",
+ "memoffset",
"scopeguard",
]
@@ -307,34 +290,44 @@ dependencies = [
]
[[package]]
+name = "ctrlc"
+version = "3.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82e95fbd621905b854affdc67943b043a0fbb6ed7385fd5a25650d19a8a6cfdf"
+dependencies = [
+ "nix 0.27.1",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
name = "dashmap"
-version = "5.4.0"
+version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
+checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
- "hashbrown 0.12.3",
+ "hashbrown",
"lock_api",
"once_cell",
- "parking_lot_core 0.9.6",
+ "parking_lot_core",
]
[[package]]
name = "derive_arbitrary"
-version = "1.3.1"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8"
+checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn",
]
[[package]]
name = "dissimilar"
-version = "1.0.6"
+version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e"
+checksum = "86e3bdc80eee6e16b2b6b0f87fbc98c04bee3455e35174c0de1a125d0688c632"
[[package]]
name = "dot"
@@ -350,9 +343,9 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
[[package]]
name = "either"
-version = "1.8.1"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
+checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "ena"
@@ -393,9 +386,9 @@ dependencies = [
[[package]]
name = "fixedbitset"
-version = "0.2.0"
+version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
+checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
@@ -455,33 +448,15 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "hashbrown"
-version = "0.12.3"
+version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-
-[[package]]
-name = "hashbrown"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
-dependencies = [
- "unicode-segmentation",
-]
-
-[[package]]
-name = "hermit-abi"
-version = "0.1.19"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
-dependencies = [
- "libc",
-]
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "hermit-abi"
@@ -518,10 +493,9 @@ dependencies = [
name = "hir-def"
version = "0.0.0"
dependencies = [
- "anymap",
"arrayvec",
"base-db",
- "bitflags 2.3.2",
+ "bitflags 2.4.1",
"cfg",
"cov-mark",
"dashmap",
@@ -529,11 +503,9 @@ dependencies = [
"either",
"expect-test",
"fst",
- "hashbrown 0.12.3",
+ "hashbrown",
"hir-expand",
- "hkalbasi-rustc-ap-rustc_abi",
- "hkalbasi-rustc-ap-rustc_index",
- "indexmap 2.0.0",
+ "indexmap",
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -541,7 +513,7 @@ dependencies = [
"mbe",
"once_cell",
"profile",
- "ra-ap-rustc_parse_format",
+ "rustc-dependencies",
"rustc-hash",
"smallvec",
"stdx",
@@ -561,7 +533,7 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
- "hashbrown 0.12.3",
+ "hashbrown",
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -583,7 +555,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.3.2",
+ "bitflags 2.4.1",
"chalk-derive",
"chalk-ir",
"chalk-recursive",
@@ -594,7 +566,6 @@ dependencies = [
"expect-test",
"hir-def",
"hir-expand",
- "hkalbasi-rustc-ap-rustc_index",
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -604,6 +575,7 @@ dependencies = [
"oorandom",
"profile",
"project-model",
+ "rustc-dependencies",
"rustc-hash",
"scoped-tls",
"smallvec",
@@ -618,27 +590,6 @@ dependencies = [
]
[[package]]
-name = "hkalbasi-rustc-ap-rustc_abi"
-version = "0.0.20221221"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adabaadad9aa7576f97af02241cdf5554d62fb3d51a84cb05d77ba28edd3013f"
-dependencies = [
- "bitflags 1.3.2",
- "hkalbasi-rustc-ap-rustc_index",
- "tracing",
-]
-
-[[package]]
-name = "hkalbasi-rustc-ap-rustc_index"
-version = "0.0.20221221"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4d3c48474e09afb0f5efbd6f758e05411699301a113c47d454d28ec7059d00e"
-dependencies = [
- "arrayvec",
- "smallvec",
-]
-
-[[package]]
name = "home"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -651,6 +602,7 @@ dependencies = [
name = "ide"
version = "0.0.0"
dependencies = [
+ "arrayvec",
"cfg",
"cov-mark",
"crossbeam-channel",
@@ -729,10 +681,10 @@ dependencies = [
"expect-test",
"fst",
"hir",
- "indexmap 2.0.0",
+ "indexmap",
"itertools",
"limit",
- "line-index 0.1.0-pre.1",
+ "line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
"once_cell",
@@ -802,22 +754,12 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.9.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
-dependencies = [
- "autocfg",
- "hashbrown 0.12.3",
-]
-
-[[package]]
-name = "indexmap"
-version = "2.0.0"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
+checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
"equivalent",
- "hashbrown 0.14.0",
+ "hashbrown",
]
[[package]]
@@ -841,29 +783,20 @@ dependencies = [
]
[[package]]
-name = "instant"
-version = "0.1.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
-dependencies = [
- "cfg-if",
-]
-
-[[package]]
name = "intern"
version = "0.0.0"
dependencies = [
"dashmap",
- "hashbrown 0.12.3",
+ "hashbrown",
"rustc-hash",
"triomphe",
]
[[package]]
name = "itertools"
-version = "0.10.5"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
dependencies = [
"either",
]
@@ -918,9 +851,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.146"
+version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
+checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "libloading"
@@ -948,9 +881,7 @@ version = "0.0.0"
[[package]]
name = "line-index"
-version = "0.1.0-pre.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce"
+version = "0.1.1"
dependencies = [
"nohash-hasher",
"text-size",
@@ -958,7 +889,9 @@ dependencies = [
[[package]]
name = "line-index"
-version = "0.1.0"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67d61795376ae2683928c218fda7d7d7db136fd38c06b7552904667f0d55580a"
dependencies = [
"nohash-hasher",
"text-size",
@@ -1000,22 +933,23 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "lsp-server"
version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
dependencies = [
"crossbeam-channel",
"log",
- "lsp-types",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
-version = "0.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
+version = "0.7.5"
dependencies = [
"crossbeam-channel",
+ "ctrlc",
"log",
+ "lsp-types",
"serde",
"serde_json",
]
@@ -1050,9 +984,9 @@ dependencies = [
[[package]]
name = "memchr"
-version = "2.5.0"
+version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "memmap2"
@@ -1065,15 +999,6 @@ dependencies = [
[[package]]
name = "memoffset"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "memoffset"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
@@ -1122,11 +1047,11 @@ dependencies = [
[[package]]
name = "miow"
-version = "0.5.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
+checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
dependencies = [
- "windows-sys 0.42.0",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -1142,6 +1067,17 @@ dependencies = [
]
[[package]]
+name = "nix"
+version = "0.27.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
+dependencies = [
+ "bitflags 2.4.1",
+ "cfg-if",
+ "libc",
+]
+
+[[package]]
name = "nohash-hasher"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1153,7 +1089,7 @@ version = "6.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
dependencies = [
- "bitflags 2.3.2",
+ "bitflags 2.4.1",
"crossbeam-channel",
"filetime",
"fsevent-sys",
@@ -1168,12 +1104,11 @@ dependencies = [
[[package]]
name = "nu-ansi-term"
-version = "0.46.0"
+version = "0.49.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68"
dependencies = [
- "overload",
- "winapi",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -1182,7 +1117,7 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
- "hermit-abi 0.2.6",
+ "hermit-abi",
"libc",
]
@@ -1217,57 +1152,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
[[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
-[[package]]
-name = "parking_lot"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
-dependencies = [
- "instant",
- "lock_api",
- "parking_lot_core 0.8.6",
-]
-
-[[package]]
name = "parking_lot"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
- "parking_lot_core 0.9.6",
+ "parking_lot_core",
]
[[package]]
name = "parking_lot_core"
-version = "0.8.6"
+version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
+checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
dependencies = [
"cfg-if",
- "instant",
"libc",
- "redox_syscall 0.2.16",
+ "redox_syscall 0.4.1",
"smallvec",
- "winapi",
-]
-
-[[package]]
-name = "parking_lot_core"
-version = "0.9.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
-dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall 0.2.16",
- "smallvec",
- "windows-sys 0.42.0",
+ "windows-targets",
]
[[package]]
@@ -1277,7 +1181,7 @@ dependencies = [
"drop_bomb",
"expect-test",
"limit",
- "ra-ap-rustc_lexer",
+ "rustc-dependencies",
"sourcegen",
"stdx",
]
@@ -1319,12 +1223,12 @@ dependencies = [
[[package]]
name = "petgraph"
-version = "0.5.1"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
+checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
dependencies = [
"fixedbitset",
- "indexmap 1.9.3",
+ "indexmap",
]
[[package]]
@@ -1337,6 +1241,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
+ "base-db",
+ "indexmap",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object 0.32.0",
"paths",
@@ -1345,6 +1252,7 @@ dependencies = [
"serde_json",
"snap",
"stdx",
+ "text-size",
"tracing",
"triomphe",
"tt",
@@ -1389,9 +1297,9 @@ version = "0.0.0"
[[package]]
name = "proc-macro2"
-version = "1.0.60"
+version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
+checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
@@ -1435,9 +1343,9 @@ dependencies = [
[[package]]
name = "protobuf"
-version = "3.1.0"
+version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ee4a7d8b91800c8f167a6268d1a1026607368e1adc84e98fe044aeb905302f7"
+checksum = "b55bad9126f378a853655831eb7363b7b01b81d19f8cb1218861086ca4a1a61e"
dependencies = [
"once_cell",
"protobuf-support",
@@ -1446,9 +1354,9 @@ dependencies = [
[[package]]
name = "protobuf-support"
-version = "3.1.0"
+version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ca157fe12fc7ee2e315f2f735e27df41b3d97cdd70ea112824dac1ffb08ee1c"
+checksum = "a5d4d7b8601c814cfb36bcebb79f0e61e45e1e93640cf778837833bbed05c372"
dependencies = [
"thiserror",
]
@@ -1483,20 +1391,44 @@ dependencies = [
]
[[package]]
+name = "ra-ap-rustc_abi"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa"
+dependencies = [
+ "bitflags 1.3.2",
+ "ra-ap-rustc_index",
+ "tracing",
+]
+
+[[package]]
name = "ra-ap-rustc_index"
-version = "0.10.0"
+version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07b5fa61d34da18e148dc3a81f654488ea07f40938d8aefb17f8b64bb78c6120"
+checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24"
dependencies = [
"arrayvec",
+ "ra-ap-rustc_index_macros",
"smallvec",
]
[[package]]
+name = "ra-ap-rustc_index_macros"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.10.0"
+version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2e2f6b48422e4eed5218277ab7cc9733e60dd8f3167f4f36a49a0cafe4dc195"
+checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29"
dependencies = [
"unicode-properties",
"unicode-xid",
@@ -1504,9 +1436,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.10.0"
+version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3c7369ad01cc79f9e3513c9f6a6326f6b980100e4862a7ac71b9991c88108bb"
+checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@@ -1514,9 +1446,9 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.7.0"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
+checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1"
dependencies = [
"either",
"rayon-core",
@@ -1524,43 +1456,41 @@ dependencies = [
[[package]]
name = "rayon-core"
-version = "1.11.0"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
+checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
dependencies = [
- "crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
- "num_cpus",
]
[[package]]
name = "redox_syscall"
-version = "0.2.16"
+version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
-version = "0.3.5"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "rowan"
-version = "0.15.11"
+version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf"
+checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
dependencies = [
"countme",
- "hashbrown 0.12.3",
- "memoffset 0.8.0",
+ "hashbrown",
+ "memoffset",
"rustc-hash",
"text-size",
]
@@ -1584,20 +1514,20 @@ dependencies = [
"ide-ssr",
"itertools",
"load-cargo",
- "lsp-server 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lsp-server 0.7.4",
"lsp-types",
"mbe",
"mimalloc",
- "mio",
"nohash-hasher",
"num_cpus",
"oorandom",
- "parking_lot 0.12.1",
- "parking_lot_core 0.9.6",
+ "parking_lot",
+ "parser",
"proc-macro-api",
"profile",
"project-model",
"rayon",
+ "rustc-dependencies",
"rustc-hash",
"scip",
"serde",
@@ -1615,59 +1545,69 @@ dependencies = [
"triomphe",
"vfs",
"vfs-notify",
+ "walkdir",
"winapi",
"xflags",
"xshell",
]
[[package]]
-name = "rustc-demangle"
-version = "0.1.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
-
-[[package]]
-name = "rustc-hash"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
-
-[[package]]
-name = "ryu"
-version = "1.0.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
-
-[[package]]
-name = "salsa"
-version = "0.17.0-pre.2"
+name = "rust-analyzer-salsa"
+version = "0.17.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
+checksum = "16c42b8737c320578b441a82daf7cdf8d897468de64e8a774fa54b53a50b6cc0"
dependencies = [
- "crossbeam-utils",
- "indexmap 1.9.3",
+ "indexmap",
"lock_api",
"log",
"oorandom",
- "parking_lot 0.11.2",
+ "parking_lot",
+ "rust-analyzer-salsa-macros",
"rustc-hash",
- "salsa-macros",
"smallvec",
]
[[package]]
-name = "salsa-macros"
-version = "0.17.0-pre.2"
+name = "rust-analyzer-salsa-macros"
+version = "0.17.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac6c2e352df550bf019da7b16164ed2f7fa107c39653d1311d1bba42d1582ff7"
+checksum = "db72b0883f3592ade2be15a10583c75e0b269ec26e1190800fda2e2ce5ae6634"
dependencies = [
"heck",
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
+
+[[package]]
+name = "rustc-dependencies"
+version = "0.0.0"
+dependencies = [
+ "ra-ap-rustc_abi",
+ "ra-ap-rustc_index",
+ "ra-ap-rustc_lexer",
+ "ra-ap-rustc_parse_format",
]
[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "ryu"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
+
+[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1678,9 +1618,9 @@ dependencies = [
[[package]]
name = "scip"
-version = "0.1.1"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2bfbb10286f69fad7c78db71004b7839bf957788359fe0c479f029f9849136b"
+checksum = "3e84d21062a3ba08d58870c8c36b0c005b2b2261c6ad1bf7042585427c781883"
dependencies = [
"protobuf",
]
@@ -1708,31 +1648,31 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.156"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "314b5b092c0ade17c00142951e50ced110ec27cea304b1037c6969246c2469a4"
+checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.156"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7e29c4601e36bcec74a223228dce795f4cd3616341a4af93520ca1a837c087d"
+checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn",
]
[[package]]
name = "serde_json"
-version = "1.0.97"
+version = "1.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a"
+checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
dependencies = [
- "indexmap 1.9.3",
+ "indexmap",
"itoa",
"ryu",
"serde",
@@ -1746,7 +1686,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn",
]
[[package]]
@@ -1799,6 +1739,7 @@ dependencies = [
"always-assert",
"backtrace",
"crossbeam-channel",
+ "itertools",
"jod-thread",
"libc",
"miow",
@@ -1807,20 +1748,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.109"
+version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "syn"
-version = "2.0.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
+checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [
"proc-macro2",
"quote",
@@ -1835,7 +1765,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn",
"unicode-xid",
]
@@ -1846,16 +1776,16 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
- "indexmap 2.0.0",
+ "indexmap",
"itertools",
"once_cell",
"parser",
"proc-macro2",
"profile",
"quote",
- "ra-ap-rustc_lexer",
"rayon",
"rowan",
+ "rustc-dependencies",
"rustc-hash",
"smol_str",
"sourcegen",
@@ -1887,9 +1817,9 @@ dependencies = [
[[package]]
name = "text-size"
-version = "1.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
+checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
[[package]]
name = "thiserror"
@@ -1908,7 +1838,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn",
]
[[package]]
@@ -1992,11 +1922,10 @@ dependencies = [
[[package]]
name = "tracing"
-version = "0.1.37"
+version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
+checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
- "cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
@@ -2004,20 +1933,20 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.26"
+version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn",
]
[[package]]
name = "tracing-core"
-version = "0.1.31"
+version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
"valuable",
@@ -2025,20 +1954,20 @@ dependencies = [
[[package]]
name = "tracing-log"
-version = "0.1.3"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
- "lazy_static",
"log",
+ "once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
-version = "0.3.17"
+version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
+checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
dependencies = [
"sharded-slab",
"thread_local",
@@ -2048,11 +1977,10 @@ dependencies = [
[[package]]
name = "tracing-tree"
-version = "0.2.3"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f9742d8df709837409dbb22aa25dd7769c260406f20ff48a2320b80a4a6aed0"
+checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675"
dependencies = [
- "atty",
"nu-ansi-term",
"tracing-core",
"tracing-log",
@@ -2061,9 +1989,9 @@ dependencies = [
[[package]]
name = "triomphe"
-version = "0.1.8"
+version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1ee9bd9239c339d714d657fac840c6d2a4f9c45f4f9ec7b0975113458be78db"
+checksum = "d0c5a71827ac326072b6405552093e2ad2accd25a32fd78d4edc82d98c7f2409"
[[package]]
name = "tt"
@@ -2071,6 +1999,7 @@ version = "0.0.0"
dependencies = [
"smol_str",
"stdx",
+ "text-size",
]
[[package]]
@@ -2122,12 +2051,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f91c8b21fbbaa18853c3d0801c78f4fc94cdb976699bb03e832e75f7fd22f0"
[[package]]
-name = "unicode-segmentation"
-version = "1.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
-
-[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2162,7 +2085,7 @@ name = "vfs"
version = "0.0.0"
dependencies = [
"fst",
- "indexmap 2.0.0",
+ "indexmap",
"nohash-hasher",
"paths",
"rustc-hash",
@@ -2375,18 +2298,18 @@ checksum = "f58e7b3ca8977093aae6b87b6a7730216fc4c53a6530bab5c43a783cd810c1a8"
[[package]]
name = "xshell"
-version = "0.2.3"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "962c039b3a7b16cf4e9a4248397c6585c07547412e7d6a6e035389a802dcfe90"
+checksum = "ce2107fe03e558353b4c71ad7626d58ed82efaf56c54134228608893c77023ad"
dependencies = [
"xshell-macros",
]
[[package]]
name = "xshell-macros"
-version = "0.2.3"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1dbabb1cbd15a1d6d12d9ed6b35cc6777d4af87ab3ba155ea37215f20beab80c"
+checksum = "7e2c411759b501fb9501aac2b1b2d287a6e93e5bdcf13c25306b23e1b716dd0e"
[[package]]
name = "xtask"
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index cab88fc18..1213979c3 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -42,7 +42,7 @@ debug = 0
# ungrammar = { path = "../ungrammar" }
-# salsa = { path = "../salsa" }
+# rust-analyzer-salsa = { path = "../salsa" }
[workspace.dependencies]
# local crates
@@ -79,33 +79,50 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
+rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }
# local crates that aren't published to crates.io. These should not have versions.
proc-macro-test = { path = "./crates/proc-macro-test" }
# In-tree crates that are published separately and follow semver. See lib/README.md
-line-index = { version = "0.1.0-pre.1" }
+line-index = { version = "0.1.1" }
la-arena = { version = "0.3.1" }
lsp-server = { version = "0.7.4" }
# non-local crates
+anyhow = "1.0.75"
+bitflags = "2.4.1"
+cargo_metadata = "0.18.1"
+dissimilar = "1.0.7"
+either = "1.9.0"
+hashbrown = { version = "0.14", features = [
+ "inline-more",
+], default-features = false }
+indexmap = "2.1.0"
+itertools = "0.12.0"
+libc = "0.2.150"
+nohash-hasher = "0.2.0"
+rayon = "1.8.0"
+rust-analyzer-salsa = "0.17.0-pre.4"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.192", features = ["derive"] }
+serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [
"const_new",
"union",
"const_generics",
] }
smol_str = "0.2.0"
-nohash-hasher = "0.2.0"
-text-size = "1.1.0"
-serde = { version = "1.0.156", features = ["derive"] }
-serde_json = "1.0.96"
-triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
-# can't upgrade due to dashmap depending on 0.12.3 currently
-hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false }
-
-rustc_lexer = { version = "0.10.0", package = "ra-ap-rustc_lexer" }
-rustc_parse_format = { version = "0.10.0", package = "ra-ap-rustc_parse_format", default-features = false }
+text-size = "1.1.1"
+tracing = "0.1.40"
+tracing-tree = "0.3.0"
+tracing-subscriber = { version = "0.3.18", default-features = false, features = [
+ "registry",
+ "fmt",
+ "tracing-log",
+] }
+triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
+xshell = "0.2.5"
-# Upstream broke this for us so we can't update it
-rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
-rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false }
+# We need to freeze the version of the crate, as the raw-api feature is considered unstable
+dashmap = { version = "=5.5.3", features = ["raw-api"] }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index 171c113a9..393ffe155 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -12,12 +12,10 @@ rust-version.workspace = true
doctest = false
[dependencies]
-salsa = "0.17.0-pre.2"
-rustc-hash = "1.1.0"
-
-triomphe.workspace = true
-
la-arena.workspace = true
+rust-analyzer-salsa.workspace = true
+rustc-hash.workspace = true
+triomphe.workspace = true
# local deps
cfg.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
index 3f5ccb621..bfdd21555 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -8,14 +8,15 @@ use test_utils::{
ESCAPED_CURSOR_MARKER,
};
use triomphe::Arc;
-use tt::token_id::{Leaf, Subtree, TokenTree};
+use tt::{Leaf, Subtree, TokenTree};
use vfs::{file_set::FileSet, VfsPath};
use crate::{
input::{CrateName, CrateOrigin, LangCrateOrigin},
- Change, CrateDisplayName, CrateGraph, CrateId, Dependency, Edition, Env, FileId, FilePosition,
- FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacros, ReleaseChannel,
- SourceDatabaseExt, SourceRoot, SourceRootId,
+ span::SpanData,
+ Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
+ FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
+ ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
};
pub const WORKSPACE: SourceRootId = SourceRootId(0);
@@ -134,7 +135,7 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
- let mut file_id = FileId(0);
+ let mut file_id = FileId::from_raw(0);
let mut roots = Vec::new();
let mut file_position = None;
@@ -209,7 +210,7 @@ impl ChangeFixture {
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
files.push(file_id);
- file_id.0 += 1;
+ file_id = FileId::from_raw(file_id.index() + 1);
}
if crates.is_empty() {
@@ -237,7 +238,12 @@ impl ChangeFixture {
crate_graph
.add_dep(
from_id,
- Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude),
+ Dependency::with_prelude(
+ CrateName::new(&to).unwrap(),
+ to_id,
+ prelude,
+ DependencyKind::Normal,
+ ),
)
.unwrap();
}
@@ -249,7 +255,7 @@ impl ChangeFixture {
if let Some(mini_core) = mini_core {
let core_file = file_id;
- file_id.0 += 1;
+ file_id = FileId::from_raw(file_id.index() + 1);
let mut fs = FileSet::default();
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
@@ -275,7 +281,14 @@ impl ChangeFixture {
for krate in all_crates {
crate_graph
- .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate))
+ .add_dep(
+ krate,
+ Dependency::new(
+ CrateName::new("core").unwrap(),
+ core_crate,
+ DependencyKind::Normal,
+ ),
+ )
.unwrap();
}
}
@@ -283,7 +296,6 @@ impl ChangeFixture {
let mut proc_macros = ProcMacros::default();
if !proc_macro_names.is_empty() {
let proc_lib_file = file_id;
- file_id.0 += 1;
proc_macro_defs.extend(default_test_proc_macros());
let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs);
@@ -317,7 +329,11 @@ impl ChangeFixture {
crate_graph
.add_dep(
krate,
- Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate),
+ Dependency::new(
+ CrateName::new("proc_macros").unwrap(),
+ proc_macros_crate,
+ DependencyKind::Normal,
+ ),
)
.unwrap();
}
@@ -523,10 +539,13 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
- subtree: &Subtree,
- _: Option<&Subtree>,
+ subtree: &Subtree<SpanData>,
+ _: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@@ -537,10 +556,13 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
- _: &Subtree,
- attrs: Option<&Subtree>,
+ _: &Subtree<SpanData>,
+ attrs: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@@ -552,11 +574,14 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
- input: &Subtree,
- _: Option<&Subtree>,
+ input: &Subtree<SpanData>,
+ _: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
- fn traverse(input: &Subtree) -> Subtree {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
+ fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
for tt in input.token_trees.iter().rev() {
let tt = match tt {
@@ -579,13 +604,16 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
- input: &Subtree,
- _: Option<&Subtree>,
+ input: &Subtree<SpanData>,
+ _: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
- fn traverse(input: &Subtree) -> Subtree {
+ fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let token_trees = input
.token_trees
.iter()
@@ -597,7 +625,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Subtree { delimiter: input.delimiter, token_trees }
}
- fn modify_leaf(leaf: &Leaf) -> Leaf {
+ fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
let mut leaf = leaf.clone();
match &mut leaf {
Leaf::Literal(it) => {
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index b75c7079b..c2472363a 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
-use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
+use crate::span::SpanData;
+
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@@ -155,6 +156,10 @@ impl CrateOrigin {
pub fn is_local(&self) -> bool {
matches!(self, CrateOrigin::Local { .. })
}
+
+ pub fn is_lib(&self) -> bool {
+ matches!(self, CrateOrigin::Library { .. })
+ }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -238,6 +243,9 @@ impl CrateDisplayName {
}
}
+// FIXME: These should not be defined in here? Why does base db know about proc-macros
+// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@@ -251,12 +259,16 @@ pub enum ProcMacroKind {
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
- subtree: &Subtree,
- attrs: Option<&Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError>;
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
+#[derive(Debug)]
pub enum ProcMacroExpansionError {
Panic(String),
/// Things like "proc macro server was killed by OOM".
@@ -318,11 +330,69 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
- // FIXME: These things should not be per crate! These are more per workspace crate graph level things
+ // FIXME: These things should not be per crate! These are more per workspace crate graph level
+ // things. This info does need to be somewhat present though as to prevent deduplication from
+ // happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
+impl CrateData {
+ /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
+ pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
+ // This method has some obscure bits. These are mostly there to be compliant with
+ // some patches. References to the patches are given.
+ if self.root_file_id != other.root_file_id {
+ return false;
+ }
+
+ if self.display_name != other.display_name {
+ return false;
+ }
+
+ if self.is_proc_macro != other.is_proc_macro {
+ return false;
+ }
+
+ if self.edition != other.edition {
+ return false;
+ }
+
+ if self.version != other.version {
+ return false;
+ }
+
+ let mut opts = self.cfg_options.difference(&other.cfg_options);
+ if let Some(it) = opts.next() {
+ // Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs.
+ // https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894
+ if it.to_string() != "rust_analyzer" {
+ return false;
+ }
+
+ if let Some(_) = opts.next() {
+ return false;
+ }
+ }
+
+ if self.env != other.env {
+ return false;
+ }
+
+ let slf_deps = self.dependencies.iter();
+ let other_deps = other.dependencies.iter();
+
+ if ignore_dev_deps {
+ return slf_deps
+ .clone()
+ .filter(|it| it.kind != DependencyKind::Dev)
+ .eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev));
+ }
+
+ slf_deps.eq(other_deps)
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
Edition2015,
@@ -350,26 +420,43 @@ impl Env {
}
}
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum DependencyKind {
+ Normal,
+ Dev,
+ Build,
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Dependency {
pub crate_id: CrateId,
pub name: CrateName,
+ kind: DependencyKind,
prelude: bool,
}
impl Dependency {
- pub fn new(name: CrateName, crate_id: CrateId) -> Self {
- Self { name, crate_id, prelude: true }
+ pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self {
+ Self { name, crate_id, prelude: true, kind }
}
- pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
- Self { name, crate_id, prelude }
+ pub fn with_prelude(
+ name: CrateName,
+ crate_id: CrateId,
+ prelude: bool,
+ kind: DependencyKind,
+ ) -> Self {
+ Self { name, crate_id, prelude, kind }
}
/// Whether this dependency is to be added to the depending crate's extern prelude.
pub fn is_prelude(&self) -> bool {
self.prelude
}
+
+ pub fn kind(&self) -> DependencyKind {
+ self.kind
+ }
}
impl CrateGraph {
@@ -573,23 +660,46 @@ impl CrateGraph {
pub fn extend(&mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths) {
let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
-
for topo in topo {
let crate_data = &mut other.arena[topo];
+
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
-
- let res = self.arena.iter().find_map(
- |(id, data)| {
- if data == crate_data {
- Some(id)
- } else {
- None
+ let res = self.arena.iter().find_map(|(id, data)| {
+ match (&data.origin, &crate_data.origin) {
+ (a, b) if a == b => {
+ if data.eq_ignoring_origin_and_deps(&crate_data, false) {
+ return Some((id, false));
+ }
+ }
+ (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
+ | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => {
+ // If the origins differ, check if the two crates are equal without
+ // considering the dev dependencies, if they are, they most likely are in
+ // different loaded workspaces which may cause issues. We keep the local
+ // version and discard the library one as the local version may have
+ // dev-dependencies that we want to keep resolving. See #15656 for more
+ // information.
+ if data.eq_ignoring_origin_and_deps(&crate_data, true) {
+ return Some((id, if a.is_local() { false } else { true }));
+ }
}
- },
- );
- if let Some(res) = res {
+ (_, _) => return None,
+ }
+
+ None
+ });
+
+ if let Some((res, should_update_lib_to_local)) = res {
id_map.insert(topo, res);
+ if should_update_lib_to_local {
+ assert!(self.arena[res].origin.is_lib());
+ assert!(crate_data.origin.is_local());
+ self.arena[res].origin = crate_data.origin.clone();
+
+ // Move local's dev dependencies into the newly-local-formerly-lib crate.
+ self.arena[res].dependencies = crate_data.dependencies.clone();
+ }
} else {
let id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, id);
@@ -635,9 +745,11 @@ impl CrateGraph {
match (cfg_if, std) {
(Some(cfg_if), Some(std)) => {
self.arena[cfg_if].dependencies.clear();
- self.arena[std]
- .dependencies
- .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
+ self.arena[std].dependencies.push(Dependency::new(
+ CrateName::new("cfg_if").unwrap(),
+ cfg_if,
+ DependencyKind::Normal,
+ ));
true
}
_ => false,
@@ -657,6 +769,8 @@ impl ops::Index<CrateId> for CrateGraph {
}
impl CrateData {
+ /// Add a dependency to `self` without checking whether it already
+ /// exists among `self.dependencies`.
fn add_dep(&mut self, dep: Dependency) {
self.dependencies.push(dep)
}
@@ -758,7 +872,7 @@ impl fmt::Display for CyclicDependenciesError {
#[cfg(test)]
mod tests {
- use crate::CrateOrigin;
+ use crate::{CrateOrigin, DependencyKind};
use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
@@ -766,7 +880,7 @@ mod tests {
fn detect_cyclic_dependency_indirect() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -779,7 +893,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -792,7 +906,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
- FileId(3u32),
+ FileId::from_raw(3u32),
Edition2018,
None,
None,
@@ -805,13 +919,22 @@ mod tests {
None,
);
assert!(graph
- .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .add_dep(
+ crate2,
+ Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1))
+ .add_dep(
+ crate3,
+ Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal)
+ )
.is_err());
}
@@ -819,7 +942,7 @@ mod tests {
fn detect_cyclic_dependency_direct() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -832,7 +955,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -845,10 +968,16 @@ mod tests {
None,
);
assert!(graph
- .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate2,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_err());
}
@@ -856,7 +985,7 @@ mod tests {
fn it_works() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -869,7 +998,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -882,7 +1011,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
- FileId(3u32),
+ FileId::from_raw(3u32),
Edition2018,
None,
None,
@@ -895,10 +1024,16 @@ mod tests {
None,
);
assert!(graph
- .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .add_dep(
+ crate2,
+ Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal)
+ )
.is_ok());
}
@@ -906,7 +1041,7 @@ mod tests {
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -919,7 +1054,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -934,12 +1069,20 @@ mod tests {
assert!(graph
.add_dep(
crate1,
- Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
+ Dependency::new(
+ CrateName::normalize_dashes("crate-name-with-dashes"),
+ crate2,
+ DependencyKind::Normal
+ )
)
.is_ok());
assert_eq!(
graph[crate1].dependencies,
- vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)]
+ vec![Dependency::new(
+ CrateName::new("crate_name_with_dashes").unwrap(),
+ crate2,
+ DependencyKind::Normal
+ )]
);
}
}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index af204e44e..57e793436 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -1,10 +1,11 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod input;
mod change;
pub mod fixture;
+pub mod span;
use std::panic;
@@ -16,9 +17,9 @@ pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
- Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
- ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
- ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
+ ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
+ ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@@ -67,20 +68,19 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
- // Parses the file into the syntax tree.
- #[salsa::invoke(parse_query)]
+ /// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
- /// The crate graph.
+ /// The proc macros.
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
}
-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)
diff --git a/src/tools/rust-analyzer/crates/base-db/src/span.rs b/src/tools/rust-analyzer/crates/base-db/src/span.rs
new file mode 100644
index 000000000..d8990eb7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/span.rs
@@ -0,0 +1,208 @@
+//! File and span related types.
+// FIXME: This should probably be moved into its own crate.
+use std::fmt;
+
+use salsa::InternId;
+use tt::SyntaxContext;
+use vfs::FileId;
+
+pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
+
+// The first index is always the root node's AstId
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
+ la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+
+pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct SyntaxContextId(InternId);
+
+impl fmt::Debug for SyntaxContextId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if *self == Self::SELF_REF {
+ f.debug_tuple("SyntaxContextId")
+ .field(&{
+ #[derive(Debug)]
+ #[allow(non_camel_case_types)]
+ struct SELF_REF;
+ SELF_REF
+ })
+ .finish()
+ } else {
+ f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+ }
+ }
+}
+crate::impl_intern_key!(SyntaxContextId);
+
+impl fmt::Display for SyntaxContextId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.0.as_u32())
+ }
+}
+
+impl SyntaxContext for SyntaxContextId {
+ const DUMMY: Self = Self::ROOT;
+}
+// inherent trait impls please tyvm
+impl SyntaxContextId {
+ pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
+ // veykril(HACK): FIXME salsa doesn't allow us to fetch the id of the current input to be allocated, so
+ // we need a special value that behaves as the current context.
+ pub const SELF_REF: Self =
+ SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
+
+ pub fn is_root(self) -> bool {
+ self == Self::ROOT
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct SpanAnchor {
+ pub file_id: FileId,
+ pub ast_id: ErasedFileAstId,
+}
+
+impl fmt::Debug for SpanAnchor {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
+ }
+}
+
+impl tt::SpanAnchor for SpanAnchor {
+ const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID };
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+///
+/// The two variants are encoded in a single u32 which are differentiated by the MSB.
+/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
+/// `MacroCallId`.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HirFileId(u32);
+
+impl From<HirFileId> for u32 {
+ fn from(value: HirFileId) -> Self {
+ value.0
+ }
+}
+
+impl From<MacroCallId> for HirFileId {
+ fn from(value: MacroCallId) -> Self {
+ value.as_file()
+ }
+}
+
+impl fmt::Debug for HirFileId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.repr().fmt(f)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFileId {
+ pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MacroCallId(salsa::InternId);
+crate::impl_intern_key!(MacroCallId);
+
+impl MacroCallId {
+ pub fn as_file(self) -> HirFileId {
+ MacroFileId { macro_call_id: self }.into()
+ }
+
+ pub fn as_macro_file(self) -> MacroFileId {
+ MacroFileId { macro_call_id: self }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub enum HirFileIdRepr {
+ FileId(FileId),
+ MacroFile(MacroFileId),
+}
+
+impl fmt::Debug for HirFileIdRepr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(),
+ Self::MacroFile(arg0) => {
+ f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
+ }
+ }
+ }
+}
+
+impl From<FileId> for HirFileId {
+ fn from(id: FileId) -> Self {
+ _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+ assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index());
+ HirFileId(id.index())
+ }
+}
+
+impl From<MacroFileId> for HirFileId {
+ fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
+ _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+ let id = id.as_u32();
+ assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
+ HirFileId(id | Self::MACRO_FILE_TAG_MASK)
+ }
+}
+
+impl HirFileId {
+ const ASSERT_MAX_FILE_ID_IS_SAME: () =
+ [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
+
+ const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+ const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+
+ #[inline]
+ pub fn is_macro(self) -> bool {
+ self.0 & Self::MACRO_FILE_TAG_MASK != 0
+ }
+
+ #[inline]
+ pub fn macro_file(self) -> Option<MacroFileId> {
+ match self.0 & Self::MACRO_FILE_TAG_MASK {
+ 0 => None,
+ _ => Some(MacroFileId {
+ macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+ }),
+ }
+ }
+
+ #[inline]
+ pub fn file_id(self) -> Option<FileId> {
+ match self.0 & Self::MACRO_FILE_TAG_MASK {
+ 0 => Some(FileId::from_raw(self.0)),
+ _ => None,
+ }
+ }
+
+ #[inline]
+ pub fn repr(self) -> HirFileIdRepr {
+ match self.0 & Self::MACRO_FILE_TAG_MASK {
+ 0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)),
+ _ => HirFileIdRepr::MacroFile(MacroFileId {
+ macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+ }),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index ed3808972..4324584df 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -23,8 +23,8 @@ oorandom = "11.1.3"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.3.0"
-derive_arbitrary = "1.3.1"
+arbitrary = "1.3.2"
+derive_arbitrary = "1.3.2"
# local deps
mbe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
index 0aeb0b050..6b178e7b0 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -1,6 +1,6 @@
//! cfg defines conditional compiling options, `cfg` attribute parser and evaluator
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod cfg_expr;
mod dnf;
@@ -58,6 +58,13 @@ impl CfgOptions {
self.enabled.insert(CfgAtom::KeyValue { key, value });
}
+ pub fn difference<'a>(
+ &'a self,
+ other: &'a CfgOptions,
+ ) -> impl Iterator<Item = &'a CfgAtom> + 'a {
+ self.enabled.difference(&other.enabled)
+ }
+
pub fn apply_diff(&mut self, diff: CfgDiff) {
for atom in diff.enable {
self.enabled.insert(atom);
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
index bdc3f854e..c7ac1af93 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -1,37 +1,31 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{ast, AstNode};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
fn check_dnf(input: &str, expect: Expect) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
index e7f7adc78..4322d2d96 100644
--- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -12,9 +12,9 @@ rust-version.workspace = true
doctest = false
[dependencies]
+cargo_metadata.workspace = true
crossbeam-channel = "0.5.8"
-tracing = "0.1.37"
-cargo_metadata = "0.15.4"
+tracing.workspace = true
rustc-hash = "1.1.0"
serde_json.workspace = true
serde.workspace = true
diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
index 2de719af9..68faca51e 100644
--- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
@@ -2,7 +2,7 @@
//! another compatible command (f.x. clippy) in a background thread and provide
//! LSP diagnostics based on the output of the command.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
ffi::OsString,
@@ -50,6 +50,7 @@ pub enum FlycheckConfig {
extra_args: Vec<String>,
extra_env: FxHashMap<String, String>,
ansi_color_output: bool,
+ target_dir: Option<PathBuf>,
},
CustomCommand {
command: String,
@@ -308,6 +309,7 @@ impl FlycheckActor {
features,
extra_env,
ansi_color_output,
+ target_dir,
} => {
let mut cmd = Command::new(toolchain::cargo());
cmd.arg(command);
@@ -340,6 +342,9 @@ impl FlycheckActor {
cmd.arg(features.join(" "));
}
}
+ if let Some(target_dir) = target_dir {
+ cmd.arg("--target-dir").arg(target_dir);
+ }
cmd.envs(extra_env);
(cmd, extra_args)
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index 8cf61ee04..2d1745176 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -12,29 +12,24 @@ rust-version.workspace = true
doctest = false
[dependencies]
-anymap = "1.0.0-beta.2"
arrayvec = "0.7.2"
-bitflags = "2.1.0"
+bitflags.workspace = true
cov-mark = "2.0.0-pre.1"
-# We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.4.0", features = ["raw-api"] }
+dashmap.workspace = true
drop_bomb = "0.1.5"
-either = "1.7.0"
+either.workspace = true
fst = { version = "0.4.7", default-features = false }
-indexmap = "2.0.0"
-itertools = "0.10.5"
+indexmap.workspace = true
+itertools.workspace = true
la-arena.workspace = true
once_cell = "1.17.0"
rustc-hash = "1.1.0"
-tracing = "0.1.35"
+tracing.workspace = true
smallvec.workspace = true
hashbrown.workspace = true
triomphe.workspace = true
-rustc_abi.workspace = true
-rustc_index.workspace = true
-rustc_parse_format.workspace = true
-
+rustc-dependencies.workspace = true
# local deps
stdx.workspace = true
@@ -48,8 +43,12 @@ cfg.workspace = true
tt.workspace = true
limit.workspace = true
+
[dev-dependencies]
expect-test = "1.4.0"
# local deps
test-utils.workspace = true
+
+[features]
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index c6454eb9e..942b28fc1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -215,6 +215,10 @@ impl Attrs {
self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
}
+ pub fn export_name(&self) -> Option<&SmolStr> {
+ self.by_key("export_name").string_value()
+ }
+
pub fn is_proc_macro(&self) -> bool {
self.by_key("proc_macro").exists()
}
@@ -417,6 +421,7 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
+ db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::TypeParamId(it) => {
@@ -424,11 +429,16 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
+ db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
- RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
+ RawAttrs::from_attrs_owner(
+ db.upcast(),
+ src.with_value(&src.value[it.local_id]),
+ db.span_map(src.file_id).as_ref(),
+ )
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs
index 2ae3cd2a9..48a596f7f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs
@@ -2,7 +2,7 @@
//!
//! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`.
//!
-//! It was last synchronized with upstream commit e29821ff85a2a3000d226f99f62f89464028d5d6.
+//! It was last synchronized with upstream commit c3def263a44e07e09ae6d57abfc8650227fb4972.
//!
//! The macros were adjusted to only expand to the attribute name, since that is all we need to do
//! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
@@ -240,7 +240,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
template!(List: "address, kcfi, memory, thread"), DuplicatesOk,
experimental!(no_sanitize)
),
- gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, experimental!(coverage)),
+ gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)),
ungated!(
doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
@@ -364,7 +364,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
allow_internal_unsafe, Normal, template!(Word), WarnFollowing,
"allow_internal_unsafe side-steps the unsafe_code lint",
),
- ungated!(rustc_safe_intrinsic, Normal, template!(Word), DuplicatesOk),
rustc_attr!(rustc_allowed_through_unstable_modules, Normal, template!(Word), WarnFollowing,
"rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \
through unstable paths"),
@@ -453,6 +452,12 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
ErrorFollowing,
INTERNAL_UNSTABLE
),
+ rustc_attr!(
+ rustc_confusables, Normal,
+ template!(List: r#""name1", "name2", ..."#),
+ ErrorFollowing,
+ INTERNAL_UNSTABLE,
+ ),
// Enumerates "identity-like" conversion methods to suggest on type mismatch.
rustc_attr!(
rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
@@ -488,6 +493,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(
rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
),
+ // Ensure the argument to this function is &&str during const-check.
+ rustc_attr!(
+ rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
// ==========================================================================
// Internal attributes, Layout related:
@@ -521,6 +530,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference."
),
rustc_attr!(
+ rustc_never_returns_null_ptr, Normal, template!(Word), ErrorFollowing,
+ "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers."
+ ),
+ rustc_attr!(
rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
"#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
),
@@ -533,7 +546,11 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
),
rustc_attr!(
- rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false,
+ rustc_deny_explicit_impl,
+ AttributeType::Normal,
+ template!(List: "implement_via_object = (true|false)"),
+ ErrorFollowing,
+ @only_local: true,
"#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls"
),
rustc_attr!(
@@ -614,6 +631,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_doc_primitive, Normal, template!(NameValueStr: "primitive name"), ErrorFollowing,
r#"`rustc_doc_primitive` is a rustc internal attribute"#,
),
+ rustc_attr!(
+ rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
+ ),
// ==========================================================================
// Internal attributes, Testing:
@@ -625,13 +646,16 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+ rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
rustc_attr!(
TEST, rustc_error, Normal,
- template!(Word, List: "delay_span_bug_from_inside_query"), WarnFollowingWordOnly
+ template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly
),
- rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
rustc_attr!(
TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs
index e4c8d446a..0f98a4ec9 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs
@@ -1,17 +1,20 @@
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
+use base_db::FileId;
+use hir_expand::span::{RealSpanMap, SpanMapRef};
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(
+ tt.syntax(),
+ SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))),
+ );
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
index c0baf6011..db28c6731 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -57,7 +57,7 @@ pub struct Body {
pub type ExprPtr = AstPtr<ast::Expr>;
pub type ExprSource = InFile<ExprPtr>;
-pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
+pub type PatPtr = AstPtr<Either<ast::Pat, ast::SelfParam>>;
pub type PatSource = InFile<PatPtr>;
pub type LabelPtr = AstPtr<ast::Label>;
@@ -95,6 +95,8 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+ format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
+
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
@@ -356,12 +358,12 @@ impl BodySourceMap {
}
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
- let src = node.map(|it| Either::Left(AstPtr::new(it)));
+ let src = node.map(|it| AstPtr::new(it).wrap_left());
self.pat_map.get(&src).cloned()
}
pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
- let src = node.map(|it| Either::Right(AstPtr::new(it)));
+ let src = node.map(|it| AstPtr::new(it).wrap_right());
self.pat_map.get(&src).cloned()
}
@@ -387,6 +389,14 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
+ pub fn implicit_format_args(
+ &self,
+ node: InFile<&ast::FormatArgsExpr>,
+ ) -> Option<&[(syntax::TextRange, Name)]> {
+ let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
+ self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
+ }
+
/// Get a reference to the body source map's diagnostics.
pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics
@@ -403,8 +413,10 @@ impl BodySourceMap {
field_map_back,
pat_field_map_back,
expansions,
+ format_args_template_map,
diagnostics,
} = self;
+ format_args_template_map.shrink_to_fit();
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index cc02df80a..c6a909320 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -196,16 +196,12 @@ impl ExprCollector<'_> {
if let Some(self_param) =
param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
{
- let ptr = AstPtr::new(&self_param);
- let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
- name![self],
- BindingAnnotation::new(
- self_param.mut_token().is_some() && self_param.amp_token().is_none(),
- false,
- ),
- );
- let param_pat =
- self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, Either::Right(ptr));
+ let is_mutable =
+ self_param.mut_token().is_some() && self_param.amp_token().is_none();
+ let ptr = AstPtr::new(&Either::Right(self_param));
+ let binding_id: la_arena::Idx<Binding> =
+ self.alloc_binding(name![self], BindingAnnotation::new(is_mutable, false));
+ let param_pat = self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, ptr);
self.add_definition_to_binding(binding_id, param_pat);
self.body.params.push(param_pat);
}
@@ -1029,7 +1025,7 @@ impl ExprCollector<'_> {
let id = collector(self, Some(expansion.tree()));
self.ast_id_map = prev_ast_id_map;
- self.expander.exit(self.db, mark);
+ self.expander.exit(mark);
id
}
None => collector(self, None),
@@ -1260,8 +1256,8 @@ impl ExprCollector<'_> {
(Some(id), Pat::Bind { id, subpat })
};
- let ptr = AstPtr::new(&pat);
- let pat = self.alloc_pat(pattern, Either::Left(ptr));
+ let ptr = AstPtr::new(&Either::Left(pat));
+ let pat = self.alloc_pat(pattern, ptr);
if let Some(binding_id) = binding {
self.add_definition_to_binding(binding_id, pat);
}
@@ -1395,7 +1391,7 @@ impl ExprCollector<'_> {
ast::Pat::MacroPat(mac) => match mac.macro_call() {
Some(call) => {
let macro_ptr = AstPtr::new(&call);
- let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
+ let src = self.expander.to_source(AstPtr::new(&Either::Left(pat)));
let pat =
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
@@ -1430,8 +1426,8 @@ impl ExprCollector<'_> {
Pat::Range { start, end }
}
};
- let ptr = AstPtr::new(&pat);
- self.alloc_pat(pattern, Either::Left(ptr))
+ let ptr = AstPtr::new(&Either::Left(pat));
+ self.alloc_pat(pattern, ptr)
}
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>, binding_list: &mut BindingList) -> PatId {
@@ -1601,13 +1597,25 @@ impl ExprCollector<'_> {
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
+ let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
- Some((s, is_direct_literal)) => {
- format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| {
- self.alloc_expr_desugared(Expr::Path(Path::from(name)))
- })
- }
- None => FormatArgs { template: Default::default(), arguments: args.finish() },
+ Some((s, is_direct_literal)) => format_args::parse(
+ &s,
+ fmt_snippet,
+ args,
+ is_direct_literal,
+ |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
+ |name, span| {
+ if let Some(span) = span {
+ mappings.push((span, name.clone()))
+ }
+ },
+ ),
+ None => FormatArgs {
+ template: Default::default(),
+ arguments: args.finish(),
+ orphans: Default::default(),
+ },
};
// Create a list of all _unique_ (argument, format trait) combinations.
@@ -1746,18 +1754,26 @@ impl ExprCollector<'_> {
});
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
id: None,
- statements: Box::default(),
+ // We collect the unused expressions here so that we still infer them instead of
+ // dropping them out of the expression tree
+ statements: fmt
+ .orphans
+ .into_iter()
+ .map(|expr| Statement::Expr { expr, has_semi: true })
+ .collect(),
tail: Some(unsafe_arg_new),
});
- self.alloc_expr(
+ let idx = self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false,
},
syntax_ptr,
- )
+ );
+ self.source_map.format_args_template_map.insert(idx, mappings);
+ idx
}
/// Generate a hir expression for a format_args placeholder specification.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
index fad4d7a4d..6ecf1c20d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
@@ -54,7 +54,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('(');
- body.params.iter().zip(&db.function_data(it).params).for_each(|(&param, ty)| {
+ body.params.iter().zip(db.function_data(it).params.iter()).for_each(|(&param, ty)| {
p.print_pat(param);
p.buf.push(':');
p.print_type_ref(ty);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
index 2a90a09f2..baca293e2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -1,7 +1,6 @@
//! Name resolution for expressions.
use hir_expand::name::Name;
-use la_arena::{Arena, Idx, IdxRange, RawIdx};
-use rustc_hash::FxHashMap;
+use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx};
use triomphe::Arc;
use crate::{
@@ -17,7 +16,7 @@ pub type ScopeId = Idx<ScopeData>;
pub struct ExprScopes {
scopes: Arena<ScopeData>,
scope_entries: Arena<ScopeEntry>,
- scope_by_expr: FxHashMap<ExprId, ScopeId>,
+ scope_by_expr: ArenaMap<ExprId, ScopeId>,
}
#[derive(Debug, PartialEq, Eq)]
@@ -77,10 +76,10 @@ impl ExprScopes {
}
pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
- self.scope_by_expr.get(&expr).copied()
+ self.scope_by_expr.get(expr).copied()
}
- pub fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
+ pub fn scope_by_expr(&self) -> &ArenaMap<ExprId, ScopeId> {
&self.scope_by_expr
}
}
@@ -94,7 +93,7 @@ impl ExprScopes {
let mut scopes = ExprScopes {
scopes: Arena::default(),
scope_entries: Arena::default(),
- scope_by_expr: FxHashMap::default(),
+ scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
};
let mut root = scopes.root_scope();
scopes.add_params_bindings(body, root, &body.params);
@@ -476,10 +475,7 @@ fn foo() {
.pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap())
.unwrap();
- let local_name = pat_src.value.either(
- |it| it.syntax_node_ptr().to_node(file.syntax()),
- |it| it.syntax_node_ptr().to_node(file.syntax()),
- );
+ let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
index 1658757d2..2b432dfbb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
@@ -143,7 +143,6 @@ mod m {
#[test]
fn desugar_builtin_format_args() {
- // Regression test for a path resolution bug introduced with inner item handling.
let (db, body, def) = lower(
r#"
//- minicore: fmt
@@ -161,7 +160,7 @@ fn main() {
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
- "\"hello ", " ", " friends, we ", " ", "", "\"",
+ "hello ", " ", " friends, we ", " ", "",
],
&[
builtin#lang(Argument::new_display)(
@@ -221,3 +220,115 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
+
+#[test]
+fn test_macro_hygiene() {
+ let (db, body, def) = lower(
+ r##"
+//- minicore: fmt, from
+//- /main.rs
+mod error;
+
+use crate::error::error;
+
+fn main() {
+ // _ = forces body expansion instead of block def map expansion
+ _ = error!("Failed to resolve path `{}`", node.text());
+}
+//- /error.rs
+macro_rules! _error {
+ ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+macro_rules! _intermediate {
+ ($arg:expr) => {$crate::error::SsrError::new($arg)}
+}
+pub(crate) use _intermediate as intermediate;
+
+pub struct SsrError(pub(crate) core::fmt::Arguments);
+
+impl SsrError {
+ pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
+ SsrError(message.into())
+ }
+}
+"##,
+ );
+
+ assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+ expect![[r#"
+ fn main() {
+ _ = $crate::error::SsrError::new(
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "Failed to resolve path `", "`",
+ ],
+ &[
+ builtin#lang(Argument::new_display)(
+ &node.text(),
+ ),
+ ],
+ &[
+ builtin#lang(Placeholder::new)(
+ 0usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ),
+ ],
+ unsafe {
+ builtin#lang(UnsafeArg::new)()
+ },
+ ),
+ );
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def))
+}
+
+#[test]
+fn regression_10300() {
+ let (db, body, def) = lower(
+ r#"
+//- minicore: concat, panic
+mod private {
+ pub use core::concat;
+}
+
+macro_rules! m {
+ () => {
+ panic!(concat!($crate::private::concat!("cc")));
+ };
+}
+
+fn f() {
+ m!();
+}
+"#,
+ );
+
+ let (_, source_map) = db.body_with_source_map(def.into());
+ assert_eq!(source_map.diagnostics(), &[]);
+
+ for (_, def_map) in body.blocks(&db) {
+ assert_eq!(def_map.diagnostics(), &[]);
+ }
+
+ expect![[r#"
+ fn f() {
+ $crate::panicking::panic_fmt(
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "cc",
+ ],
+ &[],
+ &[],
+ unsafe {
+ builtin#lang(UnsafeArg::new)()
+ },
+ ),
+ );
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
index 4cfd318a4..c82d2347d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -5,8 +5,7 @@
//! node for a *child*, and get its hir.
use either::Either;
-use hir_expand::HirFileId;
-use syntax::ast::HasDocComments;
+use hir_expand::{attrs::collect_attrs, HirFileId};
use crate::{
db::DefDatabase,
@@ -118,8 +117,8 @@ impl ChildBySource for ItemScope {
|(ast_id, calls)| {
let adt = ast_id.to_node(db.upcast());
calls.for_each(|(attr_id, call_id, calls)| {
- if let Some(Either::Left(attr)) =
- adt.doc_comments_and_attrs().nth(attr_id.ast_index())
+ if let Some((_, Either::Left(attr))) =
+ collect_attrs(&adt).nth(attr_id.ast_index())
{
res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
index 68defa385..635d13f24 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -15,9 +15,7 @@ use crate::{
attr::Attrs,
db::DefDatabase,
expander::{Expander, Mark},
- item_tree::{
- self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, Param, TreeId,
- },
+ item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id, macro_id_to_def_id,
nameres::{
attr_resolution::ResolvedAttr,
@@ -36,7 +34,7 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FunctionData {
pub name: Name,
- pub params: Vec<Interned<TypeRef>>,
+ pub params: Box<[Interned<TypeRef>]>,
pub ret_type: Interned<TypeRef>,
pub attrs: Attrs,
pub visibility: RawVisibility,
@@ -69,7 +67,7 @@ impl FunctionData {
let is_varargs = enabled_params
.clone()
.next_back()
- .map_or(false, |param| matches!(item_tree[param], Param::Varargs));
+ .map_or(false, |param| item_tree[param].type_ref.is_none());
let mut flags = func.flags;
if is_varargs {
@@ -105,10 +103,7 @@ impl FunctionData {
name: func.name.clone(),
params: enabled_params
.clone()
- .filter_map(|id| match &item_tree[id] {
- Param::Normal(ty) => Some(ty.clone()),
- Param::Varargs => None,
- })
+ .filter_map(|id| item_tree[id].type_ref.clone())
.collect(),
ret_type: func.ret_type.clone(),
attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),
@@ -182,7 +177,7 @@ pub struct TypeAliasData {
pub rustc_has_incoherent_inherent_impls: bool,
pub rustc_allow_incoherent_impl: bool,
/// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
- pub bounds: Vec<Interned<TypeBound>>,
+ pub bounds: Box<[Interned<TypeBound>]>,
}
impl TypeAliasData {
@@ -215,7 +210,7 @@ impl TypeAliasData {
is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
rustc_has_incoherent_inherent_impls,
rustc_allow_incoherent_impl,
- bounds: typ.bounds.to_vec(),
+ bounds: typ.bounds.clone(),
})
}
}
@@ -332,6 +327,7 @@ pub struct ImplData {
pub self_ty: Interned<TypeRef>,
pub items: Vec<AssocItemId>,
pub is_negative: bool,
+ pub is_unsafe: bool,
// box it as the vec is usually empty anyways
pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
}
@@ -353,6 +349,7 @@ impl ImplData {
let target_trait = impl_def.target_trait.clone();
let self_ty = impl_def.self_ty.clone();
let is_negative = impl_def.is_negative;
+ let is_unsafe = impl_def.is_unsafe;
let mut collector =
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
@@ -362,7 +359,14 @@ impl ImplData {
let items = items.into_iter().map(|(_, item)| item).collect();
(
- Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
+ Arc::new(ImplData {
+ target_trait,
+ self_ty,
+ items,
+ is_negative,
+ is_unsafe,
+ attribute_calls,
+ }),
diagnostics.into(),
)
}
@@ -659,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
MacroCallKind::Attr {
ast_id,
- attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+ attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@@ -702,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
}
AssocItem::MacroCall(call) => {
let file_id = self.expander.current_file_id();
- let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
+ let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
let module = self.expander.module.local_id;
let resolver = |path| {
@@ -721,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
match macro_call_as_call_id(
self.db.upcast(),
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
+ call_site,
expand_to,
self.expander.module.krate(),
resolver,
@@ -789,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
self.collect(&item_tree, tree_id, &iter);
- self.expander.exit(self.db, mark);
+ self.expander.exit(mark);
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
index 224f7328f..b163112db 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
@@ -11,7 +11,7 @@ use hir_expand::{
};
use intern::Interned;
use la_arena::{Arena, ArenaMap};
-use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+use rustc_dependencies::abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
use syntax::ast::{self, HasName, HasVisibility};
use triomphe::Arc;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
index 63138aa6a..a59bbf7e2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
@@ -29,8 +29,8 @@ use std::{
ops::{Index, IndexMut},
};
-use anymap::Map;
use rustc_hash::FxHashMap;
+use stdx::anymap::Map;
pub struct Key<K, V, P = (K, V)> {
_phantom: PhantomData<(K, V, P)>,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
index 6db8398bc..398f116d8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
@@ -4,21 +4,21 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
- attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
- InFile, MacroCallId, UnresolvedMacro,
+ attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId,
+ InFile, MacroCallId,
};
use limit::Limit;
use syntax::{ast, Parse, SyntaxNode};
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
- MacroId, ModuleId,
+ MacroId, ModuleId, UnresolvedMacro,
};
#[derive(Debug)]
pub struct Expander {
cfg_options: CfgOptions,
- hygiene: Hygiene,
+ span_map: SpanMap,
krate: CrateId,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
@@ -41,7 +41,7 @@ impl Expander {
recursion_depth: 0,
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
- hygiene: Hygiene::new(db.upcast(), current_file_id),
+ span_map: db.span_map(current_file_id),
krate: module.krate,
}
}
@@ -94,8 +94,8 @@ impl Expander {
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
- pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
- self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+ pub fn exit(&mut self, mut mark: Mark) {
+ self.span_map = mark.span_map;
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -110,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
- LowerCtx::new(db, &self.hygiene, self.current_file_id)
+ LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -118,7 +118,7 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
- Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
+ Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
@@ -130,8 +130,8 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
- let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
- Path::from_src(path, &ctx)
+ let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
+ Path::from_src(&ctx, path)
}
fn within_limit<F, T: ast::AstNode>(
@@ -174,10 +174,11 @@ impl Expander {
let parse = value.cast::<T>()?;
self.recursion_depth += 1;
- self.hygiene = Hygiene::new(db.upcast(), file_id);
+ let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
let mark = Mark {
file_id: old_file_id,
+ span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"),
};
Some((mark, parse))
@@ -190,5 +191,6 @@ impl Expander {
#[derive(Debug)]
pub struct Mark {
file_id: HirFileId,
+ span_map: SpanMap,
bomb: DropBomb,
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
index b9c5ff727..13af0b021 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -21,9 +21,10 @@ pub fn find_path(
item: ItemInNs,
from: ModuleId,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
let _p = profile::span("find_path");
- find_path_inner(db, item, from, None, prefer_no_std)
+ find_path_inner(db, item, from, None, prefer_no_std, prefer_prelude)
}
pub fn find_path_prefixed(
@@ -32,9 +33,10 @@ pub fn find_path_prefixed(
from: ModuleId,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
let _p = profile::span("find_path_prefixed");
- find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std)
+ find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std, prefer_prelude)
}
#[derive(Copy, Clone, Debug)]
@@ -88,6 +90,7 @@ fn find_path_inner(
from: ModuleId,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
// - if the item is a builtin, it's in scope
if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
@@ -109,6 +112,7 @@ fn find_path_inner(
MAX_PATH_LEN,
prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate),
+ prefer_prelude,
)
.map(|(item, _)| item);
}
@@ -134,6 +138,7 @@ fn find_path_inner(
from,
prefixed,
prefer_no_std,
+ prefer_prelude,
) {
let data = db.enum_data(variant.parent);
path.push_segment(data.variants[variant.local_id].name.clone());
@@ -156,6 +161,7 @@ fn find_path_inner(
from,
prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate),
+ prefer_prelude,
scope_name,
)
.map(|(item, _)| item)
@@ -171,6 +177,7 @@ fn find_path_for_module(
max_len: usize,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<(ModPath, Stability)> {
if max_len == 0 {
return None;
@@ -236,6 +243,7 @@ fn find_path_for_module(
from,
prefixed,
prefer_no_std,
+ prefer_prelude,
scope_name,
)
}
@@ -316,6 +324,7 @@ fn calculate_best_path(
from: ModuleId,
mut prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
scope_name: Option<Name>,
) -> Option<(ModPath, Stability)> {
if max_len <= 1 {
@@ -351,11 +360,14 @@ fn calculate_best_path(
best_path_len - 1,
prefixed,
prefer_no_std,
+ prefer_prelude,
) {
path.0.push_segment(name);
let new_path = match best_path.take() {
- Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+ Some(best_path) => {
+ select_best_path(best_path, path, prefer_no_std, prefer_prelude)
+ }
None => path,
};
best_path_len = new_path.0.len();
@@ -367,18 +379,18 @@ fn calculate_best_path(
// too (unless we can't name it at all). It could *also* be (re)exported by the same crate
// that wants to import it here, but we always prefer to use the external path here.
- let crate_graph = db.crate_graph();
- let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| {
+ for dep in &db.crate_graph()[from.krate].dependencies {
let import_map = db.import_map(dep.crate_id);
- import_map.import_info_for(item).and_then(|info| {
+ let Some(import_info_for) = import_map.import_info_for(item) else { continue };
+ for info in import_info_for {
if info.is_doc_hidden {
// the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate
- return None;
+ continue;
}
// Determine best path for containing module and append last segment from `info`.
// FIXME: we should guide this to look up the path locally, or from the same crate again?
- let (mut path, path_stability) = find_path_for_module(
+ let Some((mut path, path_stability)) = find_path_for_module(
db,
def_map,
visited_modules,
@@ -388,22 +400,26 @@ fn calculate_best_path(
max_len - 1,
prefixed,
prefer_no_std,
- )?;
+ prefer_prelude,
+ ) else {
+ continue;
+ };
cov_mark::hit!(partially_imported);
path.push_segment(info.name.clone());
- Some((
+
+ let path_with_stab = (
path,
zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }),
- ))
- })
- });
+ );
- for path in extern_paths {
- let new_path = match best_path.take() {
- Some(best_path) => select_best_path(best_path, path, prefer_no_std),
- None => path,
- };
- update_best_path(&mut best_path, new_path);
+ let new_path_with_stab = match best_path.take() {
+ Some(best_path) => {
+ select_best_path(best_path, path_with_stab, prefer_no_std, prefer_prelude)
+ }
+ None => path_with_stab,
+ };
+ update_best_path(&mut best_path, new_path_with_stab);
+ }
}
}
if let Some(module) = item.module(db) {
@@ -420,17 +436,39 @@ fn calculate_best_path(
}
}
+/// Select the best (most relevant) path between two paths.
+/// This accounts for stability, path length, and whether std should be chosen over
+/// alloc/core paths, as well as whether prelude-like paths should be preferred or ignored.
fn select_best_path(
- old_path: (ModPath, Stability),
- new_path: (ModPath, Stability),
+ old_path @ (_, old_stability): (ModPath, Stability),
+ new_path @ (_, new_stability): (ModPath, Stability),
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> (ModPath, Stability) {
- match (old_path.1, new_path.1) {
+ match (old_stability, new_stability) {
(Stable, Unstable) => return old_path,
(Unstable, Stable) => return new_path,
_ => {}
}
const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc];
+
+ let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| {
+ let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude);
+ let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude);
+ match (new_has_prelude, old_has_prelude, prefer_prelude) {
+ (true, false, true) | (false, true, false) => new_path,
+ (true, false, false) | (false, true, true) => old_path,
+ // no prelude difference in the paths, so pick the smaller one
+ (true, true, _) | (false, false, _) => {
+ if new_path.0.len() < old_path.0.len() {
+ new_path
+ } else {
+ old_path
+ }
+ }
+ }
+ };
+
match (old_path.0.segments().first(), new_path.0.segments().first()) {
(Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => {
let rank = match prefer_no_std {
@@ -451,23 +489,11 @@ fn select_best_path(
let orank = rank(old);
match nrank.cmp(&orank) {
Ordering::Less => old_path,
- Ordering::Equal => {
- if new_path.0.len() < old_path.0.len() {
- new_path
- } else {
- old_path
- }
- }
+ Ordering::Equal => choose(new_path, old_path),
Ordering::Greater => new_path,
}
}
- _ => {
- if new_path.0.len() < old_path.0.len() {
- new_path
- } else {
- old_path
- }
- }
+ _ => choose(new_path, old_path),
}
}
@@ -560,7 +586,7 @@ fn find_local_import_locations(
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
- use hir_expand::hygiene::Hygiene;
+ use hir_expand::db::ExpandDatabase;
use syntax::ast::AstNode;
use crate::test_db::TestDB;
@@ -570,13 +596,20 @@ mod tests {
/// `code` needs to contain a cursor marker; checks that `find_path` for the
/// item the `path` refers to returns that same path when called from the
/// module the cursor is in.
- fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option<PrefixKind>) {
+ #[track_caller]
+ fn check_found_path_(
+ ra_fixture: &str,
+ path: &str,
+ prefix_kind: Option<PrefixKind>,
+ prefer_prelude: bool,
+ ) {
let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos);
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
- let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
+ let mod_path =
+ ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();
let def_map = module.def_map(&db);
let resolved = def_map
@@ -589,11 +622,17 @@ mod tests {
)
.0
.take_types()
- .unwrap();
-
- let found_path =
- find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind, false);
- assert_eq!(found_path, Some(mod_path), "{prefix_kind:?}");
+ .expect("path does not resolve to a type");
+
+ let found_path = find_path_inner(
+ &db,
+ ItemInNs::Types(resolved),
+ module,
+ prefix_kind,
+ false,
+ prefer_prelude,
+ );
+ assert_eq!(found_path, Some(mod_path), "on kind: {prefix_kind:?}");
}
fn check_found_path(
@@ -603,10 +642,23 @@ mod tests {
absolute: &str,
self_prefixed: &str,
) {
- check_found_path_(ra_fixture, unprefixed, None);
- check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain));
- check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate));
- check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf));
+ check_found_path_(ra_fixture, unprefixed, None, false);
+ check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain), false);
+ check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate), false);
+ check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf), false);
+ }
+
+ fn check_found_path_prelude(
+ ra_fixture: &str,
+ unprefixed: &str,
+ prefixed: &str,
+ absolute: &str,
+ self_prefixed: &str,
+ ) {
+ check_found_path_(ra_fixture, unprefixed, None, true);
+ check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain), true);
+ check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate), true);
+ check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf), true);
}
#[test]
@@ -1421,4 +1473,34 @@ pub mod error {
"std::error::Error",
);
}
+
+ #[test]
+ fn respects_prelude_setting() {
+ let ra_fixture = r#"
+//- /main.rs crate:main deps:krate
+$0
+//- /krate.rs crate:krate
+pub mod prelude {
+ pub use crate::foo::*;
+}
+
+pub mod foo {
+ pub struct Foo;
+}
+"#;
+ check_found_path(
+ ra_fixture,
+ "krate::foo::Foo",
+ "krate::foo::Foo",
+ "krate::foo::Foo",
+ "krate::foo::Foo",
+ );
+ check_found_path_prelude(
+ ra_fixture,
+ "krate::prelude::Foo",
+ "krate::prelude::Foo",
+ "krate::prelude::Foo",
+ "krate::prelude::Foo",
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index 1e2535a8a..f5324f052 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -21,7 +21,7 @@ use crate::{
db::DefDatabase,
dyn_map::{keys, DynMap},
expander::Expander,
- item_tree::{AttrOwner, ItemTree},
+ item_tree::ItemTree,
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
src::{HasChildSource, HasSource},
@@ -222,12 +222,11 @@ impl GenericParams {
let module = loc.container.module(db);
let func_data = db.function_data(id);
- // Don't create an `Expander` nor call `loc.source(db)` if not needed since this
- // causes a reparse after the `ItemTree` has been created.
- let mut expander = Lazy::new(|| {
- (module.def_map(db), Expander::new(db, loc.source(db).file_id, module))
- });
- for param in &func_data.params {
+ // Don't create an `Expander` if not needed since this
+ // could cause a reparse after the `ItemTree` has been created due to the spanmap.
+ let mut expander =
+ Lazy::new(|| (module.def_map(db), Expander::new(db, loc.id.file_id(), module)));
+ for param in func_data.params.iter() {
generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
}
@@ -250,7 +249,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
- add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+ add_param_attrs: impl FnMut(
+ Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+ ast::GenericParam,
+ ),
) {
if let Some(params) = node.generic_param_list() {
self.fill_params(lower_ctx, params, add_param_attrs)
@@ -275,7 +277,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
- mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+ mut add_param_attrs: impl FnMut(
+ Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+ ast::GenericParam,
+ ),
) {
for type_or_const_param in params.type_or_const_params() {
match type_or_const_param {
@@ -297,7 +302,7 @@ impl GenericParams {
type_param.type_bound_list(),
Either::Left(type_ref),
);
- add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
+ add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
}
ast::TypeOrConstParam::Const(const_param) => {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -310,7 +315,7 @@ impl GenericParams {
default: ConstRef::from_const_param(lower_ctx, &const_param),
};
let idx = self.type_or_consts.alloc(param.into());
- add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
+ add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
}
}
}
@@ -325,7 +330,7 @@ impl GenericParams {
lifetime_param.type_bound_list(),
Either::Right(lifetime_ref),
);
- add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
+ add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
}
}
@@ -433,7 +438,7 @@ impl GenericParams {
let ctx = expander.ctx(db);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
- exp.1.exit(db, mark);
+ exp.1.exit(mark);
}
}
});
@@ -518,7 +523,7 @@ fn file_id_and_params_of(
(src.file_id, src.value.generic_param_list())
}
// We won't be using this ID anyway
- GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
+ GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
index 75025a984..7fc33abc7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
@@ -2,10 +2,11 @@
use std::mem;
use hir_expand::name::Name;
-use rustc_parse_format as parse;
+use rustc_dependencies::parse_format as parse;
+use stdx::TupleExt;
use syntax::{
ast::{self, IsString},
- AstToken, SmolStr, TextRange,
+ SmolStr, TextRange, TextSize,
};
use crate::hir::ExprId;
@@ -14,6 +15,7 @@ use crate::hir::ExprId;
pub struct FormatArgs {
pub template: Box<[FormatArgsPiece]>,
pub arguments: FormatArguments,
+ pub orphans: Vec<ExprId>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -170,15 +172,18 @@ pub(crate) fn parse(
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId,
+ mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs {
- let text = s.text();
+ let text = s.text_without_quotes();
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = u32::from(offsets.quotes.0.len()) - 1;
- (raw != 0).then_some(raw as usize)
+ // subtract 1 for the `r` prefix
+ (raw != 0).then(|| raw as usize - 1)
}
None => None,
};
+
let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@@ -193,12 +198,17 @@ pub(crate) fn parse(
let is_source_literal = parser.is_source_literal;
if !parser.errors.is_empty() {
// FIXME: Diagnose
- return FormatArgs { template: Default::default(), arguments: args.finish() };
+ return FormatArgs {
+ template: Default::default(),
+ arguments: args.finish(),
+ orphans: vec![],
+ };
}
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
+ - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@@ -230,9 +240,10 @@ pub(crate) fn parse(
Err(index)
}
}
- ArgRef::Name(name, _span) => {
+ ArgRef::Name(name, span) => {
let name = Name::new_text_dont_use(SmolStr::new(name));
if let Some((index, _)) = args.by_name(&name) {
+ record_usage(name, span);
// Name found in `args`, so we resolve it to its index.
if index < args.explicit_args().len() {
// Mark it as used, if it was an explicit argument.
@@ -246,6 +257,7 @@ pub(crate) fn parse(
// disabled (see RFC #2795)
// FIXME: Diagnose
}
+ record_usage(name.clone(), span);
Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy
@@ -413,7 +425,11 @@ pub(crate) fn parse(
// FIXME: Diagnose
}
- FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() }
+ FormatArgs {
+ template: template.into_boxed_slice(),
+ arguments: args.finish(),
+ orphans: unused.into_iter().map(TupleExt::head).collect(),
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index 44b7f1b4f..26d333f9a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -1,49 +1,49 @@
//! A map of all publicly exported items in a crate.
-use std::collections::hash_map::Entry;
use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId;
-use fst::{self, Streamer};
+use fst::{self, raw::IndexedValue, Streamer};
use hir_expand::name::Name;
use indexmap::IndexMap;
use itertools::Itertools;
-use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
+use rustc_hash::{FxHashSet, FxHasher};
+use smallvec::SmallVec;
+use stdx::format_to;
use triomphe::Arc;
-use crate::item_scope::ImportOrExternCrate;
use crate::{
- db::DefDatabase, item_scope::ItemInNs, nameres::DefMap, visibility::Visibility, AssocItemId,
- ModuleDefId, ModuleId, TraitId,
+ db::DefDatabase,
+ item_scope::{ImportOrExternCrate, ItemInNs},
+ nameres::DefMap,
+ visibility::Visibility,
+ AssocItemId, ModuleDefId, ModuleId, TraitId,
};
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
-// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should
-// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`.
/// Item import details stored in the `ImportMap`.
-#[derive(Debug, Clone, Eq, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ImportInfo {
/// A name that can be used to import the item, relative to the crate's root.
pub name: Name,
/// The module containing this item.
pub container: ModuleId,
- /// Whether the import is a trait associated item or not.
- pub is_trait_assoc_item: bool,
/// Whether this item is annotated with `#[doc(hidden)]`.
pub is_doc_hidden: bool,
/// Whether this item is annotated with `#[unstable(..)]`.
pub is_unstable: bool,
}
+type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
+
/// A map from publicly exported items to its name.
///
/// Reexports of items are taken into account, ie. if something is exported under multiple
/// names, the one with the shortest import path will be used.
#[derive(Default)]
pub struct ImportMap {
- map: FxIndexMap<ItemInNs, ImportInfo>,
-
+ map: ImportMapIndex,
/// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
/// values returned by running `fst`.
///
@@ -54,7 +54,25 @@ pub struct ImportMap {
fst: fst::Map<Vec<u8>>,
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
+enum IsTraitAssocItem {
+ Yes,
+ No,
+}
+
impl ImportMap {
+ pub fn dump(&self, db: &dyn DefDatabase) -> String {
+ let mut out = String::new();
+ for (k, v) in self.map.iter() {
+ format_to!(out, "{:?} ({:?}) -> ", k, v.1);
+ for v in &v.0 {
+ format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
+ }
+ format_to!(out, "\n");
+ }
+ out
+ }
+
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query");
@@ -63,30 +81,40 @@ impl ImportMap {
let mut importables: Vec<_> = map
.iter()
// We've only collected items, whose name cannot be tuple field.
- .map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+ .flat_map(|(&item, (info, is_assoc))| {
+ info.iter().map(move |info| {
+ (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase())
+ })
+ })
.collect();
- importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
+ importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| {
+ lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc))
+ });
+ importables.dedup();
// Build the FST, taking care not to insert duplicate values.
let mut builder = fst::MapBuilder::memory();
- let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1);
- for (start_idx, (_, name)) in iter {
+ let iter = importables
+ .iter()
+ .enumerate()
+ .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs);
+ for (start_idx, (_, _, name)) in iter {
let _ = builder.insert(name, start_idx as u64);
}
Arc::new(ImportMap {
map,
fst: builder.into_map(),
- importables: importables.into_iter().map(|(item, _)| item).collect(),
+ importables: importables.into_iter().map(|(item, _, _)| item).collect(),
})
}
- pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
- self.map.get(&item)
+ pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> {
+ self.map.get(&item).map(|(info, _)| &**info)
}
}
-fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> {
+fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
let _p = profile::span("collect_import_map");
let def_map = db.crate_def_map(krate);
@@ -94,11 +122,13 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
// We look only into modules that are public(ly reexported), starting with the crate root.
let root = def_map.module_id(DefMap::ROOT);
- let mut worklist = vec![(root, 0)];
- // Records items' minimum module depth.
- let mut depth_map = FxHashMap::default();
+ let mut worklist = vec![root];
+ let mut visited = FxHashSet::default();
- while let Some((module, depth)) = worklist.pop() {
+ while let Some(module) = worklist.pop() {
+ if !visited.insert(module) {
+ continue;
+ }
let ext_def_map;
let mod_data = if module.krate == krate {
&def_map[module.local_id]
@@ -126,62 +156,18 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
ItemInNs::Macros(id) => Some(id.into()),
}
};
- let status @ (is_doc_hidden, is_unstable) =
- attr_id.map_or((false, false), |attr_id| {
- let attrs = db.attrs(attr_id);
- (attrs.has_doc_hidden(), attrs.is_unstable())
- });
+ let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
+ let attrs = db.attrs(attr_id);
+ (attrs.has_doc_hidden(), attrs.is_unstable())
+ });
let import_info = ImportInfo {
name: name.clone(),
container: module,
- is_trait_assoc_item: false,
is_doc_hidden,
is_unstable,
};
- match depth_map.entry(item) {
- Entry::Vacant(entry) => _ = entry.insert((depth, status)),
- Entry::Occupied(mut entry) => {
- let &(occ_depth, (occ_is_doc_hidden, occ_is_unstable)) = entry.get();
- (depth, occ_depth);
- let overwrite = match (
- is_doc_hidden,
- occ_is_doc_hidden,
- is_unstable,
- occ_is_unstable,
- ) {
- // no change of hiddeness or unstableness
- (true, true, true, true)
- | (true, true, false, false)
- | (false, false, true, true)
- | (false, false, false, false) => depth < occ_depth,
-
- // either less hidden or less unstable, accept
- (true, true, false, true)
- | (false, true, true, true)
- | (false, true, false, true)
- | (false, true, false, false)
- | (false, false, false, true) => true,
- // more hidden or unstable, discard
- (true, true, true, false)
- | (true, false, true, true)
- | (true, false, true, false)
- | (true, false, false, false)
- | (false, false, true, false) => false,
-
- // exchanges doc(hidden) for unstable (and vice-versa),
- (true, false, false, true) | (false, true, true, false) => {
- depth < occ_depth
- }
- };
- if !overwrite {
- continue;
- }
- entry.insert((depth, status));
- }
- }
-
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
collect_trait_assoc_items(
db,
@@ -192,13 +178,14 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
);
}
- map.insert(item, import_info);
+ let (infos, _) =
+ map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No));
+ infos.reserve_exact(1);
+ infos.push(import_info);
- // If we've just added a module, descend into it. We might traverse modules
- // multiple times, but only if the module depth is smaller (else we `continue`
- // above).
+ // If we've just added a module, descend into it.
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
- worklist.push((mod_id, depth + 1));
+ worklist.push(mod_id);
}
}
}
@@ -209,7 +196,7 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
fn collect_trait_assoc_items(
db: &dyn DefDatabase,
- map: &mut FxIndexMap<ItemInNs, ImportInfo>,
+ map: &mut ImportMapIndex,
tr: TraitId,
is_type_in_ns: bool,
trait_import_info: &ImportInfo,
@@ -236,11 +223,14 @@ fn collect_trait_assoc_items(
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
- is_trait_assoc_item: true,
is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
};
- map.insert(assoc_item, assoc_item_info);
+
+ let (infos, _) =
+ map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes));
+ infos.reserve_exact(1);
+ infos.push(assoc_item_info);
}
}
@@ -258,10 +248,13 @@ impl fmt::Debug for ImportMap {
let mut importable_names: Vec<_> = self
.map
.iter()
- .map(|(item, _)| match item {
- ItemInNs::Types(it) => format!("- {it:?} (t)",),
- ItemInNs::Values(it) => format!("- {it:?} (v)",),
- ItemInNs::Macros(it) => format!("- {it:?} (m)",),
+ .map(|(item, (infos, _))| {
+ let l = infos.len();
+ match item {
+ ItemInNs::Types(it) => format!("- {it:?} (t) [{l}]",),
+ ItemInNs::Values(it) => format!("- {it:?} (v) [{l}]",),
+ ItemInNs::Macros(it) => format!("- {it:?} (m) [{l}]",),
+ }
})
.collect();
@@ -271,13 +264,15 @@ impl fmt::Debug for ImportMap {
}
/// A way to match import map contents against the search query.
-#[derive(Debug)]
+#[derive(Copy, Clone, Debug)]
enum SearchMode {
/// Import map entry should strictly match the query string.
Exact,
/// Import map entry should contain all letters from the query string,
/// in the same order, but not necessary adjacent.
Fuzzy,
+ /// Import map entry should match the query string by prefix.
+ Prefix,
}
/// Three possible ways to search for the name in associated and/or other items.
@@ -319,6 +314,14 @@ impl Query {
Self { search_mode: SearchMode::Fuzzy, ..self }
}
+ pub fn prefix(self) -> Self {
+ Self { search_mode: SearchMode::Prefix, ..self }
+ }
+
+ pub fn exact(self) -> Self {
+ Self { search_mode: SearchMode::Exact, ..self }
+ }
+
/// Specifies whether we want to include associated items in the result.
pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self {
Self { assoc_mode, ..self }
@@ -334,33 +337,39 @@ impl Query {
Self { case_sensitive: true, ..self }
}
- fn import_matches(
- &self,
- db: &dyn DefDatabase,
- import: &ImportInfo,
- enforce_lowercase: bool,
- ) -> bool {
- let _p = profile::span("import_map::Query::import_matches");
- match (import.is_trait_assoc_item, self.assoc_mode) {
- (true, AssocSearchMode::Exclude) => return false,
- (false, AssocSearchMode::AssocItemsOnly) => return false,
- _ => {}
+ fn matches_assoc_mode(&self, is_trait_assoc_item: IsTraitAssocItem) -> bool {
+ match (is_trait_assoc_item, self.assoc_mode) {
+ (IsTraitAssocItem::Yes, AssocSearchMode::Exclude)
+ | (IsTraitAssocItem::No, AssocSearchMode::AssocItemsOnly) => false,
+ _ => true,
}
+ }
- let mut input = import.name.display(db.upcast()).to_string();
+ /// Checks whether the import map entry matches the query.
+ fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
+ let _p = profile::span("import_map::Query::import_matches");
+
+ // FIXME: Can we get rid of the alloc here?
+ let input = import.name.to_smol_str();
+ let mut _s_slot;
let case_insensitive = enforce_lowercase || !self.case_sensitive;
- if case_insensitive {
- input.make_ascii_lowercase();
- }
+ let input = if case_insensitive {
+ _s_slot = String::from(input);
+ _s_slot.make_ascii_lowercase();
+ &*_s_slot
+ } else {
+ &*input
+ };
let query_string = if case_insensitive { &self.lowercased } else { &self.query };
match self.search_mode {
- SearchMode::Exact => &input == query_string,
+ SearchMode::Exact => input == *query_string,
+ SearchMode::Prefix => input.starts_with(query_string),
SearchMode::Fuzzy => {
let mut input_chars = input.chars();
for query_char in query_string.chars() {
- if input_chars.find(|&it| it == query_char).is_none() {
+ if !input_chars.any(|it| it == query_char) {
return false;
}
}
@@ -376,11 +385,12 @@ impl Query {
pub fn search_dependencies(
db: &dyn DefDatabase,
krate: CrateId,
- query: Query,
+ ref query: Query,
) -> FxHashSet<ItemInNs> {
let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
let graph = db.crate_graph();
+
let import_maps: Vec<_> =
graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
@@ -394,31 +404,57 @@ pub fn search_dependencies(
let mut stream = op.union();
let mut res = FxHashSet::default();
+ let mut common_importable_data_scratch = vec![];
+ // FIXME: Improve this; it's rather unreadable and does a duplicate amount of work
while let Some((_, indexed_values)) = stream.next() {
- for indexed_value in indexed_values {
- let import_map = &import_maps[indexed_value.index];
- let importables = &import_map.importables[indexed_value.value as usize..];
-
- let common_importable_data = &import_map.map[&importables[0]];
- if !query.import_matches(db, common_importable_data, true) {
+ for &IndexedValue { index, value } in indexed_values {
+ let import_map = &import_maps[index];
+ let importables @ [importable, ..] = &import_map.importables[value as usize..] else {
+ continue;
+ };
+ let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
+ if !query.matches_assoc_mode(is_trait_assoc_item) {
continue;
}
- // Name shared by the importable items in this group.
- let common_importable_name =
- common_importable_data.name.to_smol_str().to_ascii_lowercase();
- // Add the items from this name group. Those are all subsequent items in
- // `importables` whose name match `common_importable_name`.
- let iter = importables
- .iter()
- .copied()
- .take_while(|item| {
- common_importable_name
- == import_map.map[item].name.to_smol_str().to_ascii_lowercase()
- })
- .filter(|item| {
- !query.case_sensitive // we've already checked the common importables name case-insensitively
- || query.import_matches(db, &import_map.map[item], false)
+ // Fetch all the known names of this importable item (to handle import aliases/renames)
+ common_importable_data_scratch.extend(
+ importable_data
+ .iter()
+ .filter(|&info| query.import_matches(info, true))
+ // Name shared by the importable items in this group.
+ .map(|info| info.name.to_smol_str()),
+ );
+ if common_importable_data_scratch.is_empty() {
+ continue;
+ }
+ common_importable_data_scratch.sort();
+ common_importable_data_scratch.dedup();
+
+ let iter =
+ common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
+ // Add the items from this name group. Those are all subsequent items in
+ // `importables` whose name match `common_importable_name`.
+
+ importables
+ .iter()
+ .copied()
+ .take_while(move |item| {
+ let &(ref import_infos, assoc_mode) = &import_map.map[item];
+ query.matches_assoc_mode(assoc_mode)
+ && import_infos.iter().any(|info| {
+ info.name
+ .to_smol_str()
+ .eq_ignore_ascii_case(&common_importable_name)
+ })
+ })
+ .filter(move |item| {
+ !query.case_sensitive || {
+ // we've already checked the common importables name case-insensitively
+ let &(ref import_infos, _) = &import_map.map[item];
+ import_infos.iter().any(|info| query.import_matches(info, false))
+ }
+ })
});
res.extend(iter);
@@ -445,6 +481,7 @@ mod tests {
let mut importable_paths: Vec<_> = self
.map
.iter()
+ .flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info)))
.map(|(item, info)| {
let path = render_path(db, info);
let ns = match item {
@@ -483,7 +520,7 @@ mod tests {
let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
Some(assoc_item_path) => (assoc_item_path, "a"),
None => (
- render_path(&db, dependency_imports.import_info_for(dependency)?),
+ render_path(&db, &dependency_imports.import_info_for(dependency)?[0]),
match dependency {
ItemInNs::Types(ModuleDefId::FunctionId(_))
| ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
@@ -531,7 +568,12 @@ mod tests {
.items
.iter()
.find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
- Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast())))
+ // FIXME: This should check all import infos, not just the first
+ Some(format!(
+ "{}::{}",
+ render_path(db, &trait_info[0]),
+ assoc_item_name.display(db.upcast())
+ ))
}
fn check(ra_fixture: &str, expect: Expect) {
@@ -607,6 +649,7 @@ mod tests {
main:
- publ1 (t)
- real_pu2 (t)
+ - real_pu2::Pub (t)
- real_pub (t)
- real_pub::Pub (t)
"#]],
@@ -632,6 +675,7 @@ mod tests {
- sub (t)
- sub::Def (t)
- sub::subsub (t)
+ - sub::subsub::Def (t)
"#]],
);
}
@@ -731,7 +775,9 @@ mod tests {
- module (t)
- module::S (t)
- module::S (v)
+ - module::module (t)
- sub (t)
+ - sub::module (t)
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index 7c11fb9d1..ce83cb435 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -112,6 +112,7 @@ pub struct ItemScope {
#[derive(Debug, PartialEq, Eq)]
struct DeriveMacroInvocation {
attr_id: AttrId,
+ /// The `#[derive]` call
attr_call_id: MacroCallId,
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
@@ -401,6 +402,14 @@ impl ItemScope {
})
}
+ pub fn derive_macro_invoc(
+ &self,
+ ast_id: AstId<ast::Adt>,
+ attr_id: AttrId,
+ ) -> Option<MacroCallId> {
+ Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id)
+ }
+
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index 4c812b62a..3d2cddffa 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -38,17 +38,15 @@ mod tests;
use std::{
fmt::{self, Debug},
hash::{Hash, Hasher},
- marker::PhantomData,
ops::Index,
};
use ast::{AstNode, HasName, StructKind};
-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
- hygiene::Hygiene,
name::{name, AsName, Name},
ExpandTo, HirFileId, InFile,
};
@@ -108,18 +106,13 @@ impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
let syntax = db.parse_or_expand(file_id);
- if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
- {
- // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
- return Default::default();
- }
let ctx = lower::Ctx::new(db, file_id);
let mut top_attrs = None;
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
- top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
+ top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
ctx.lower_module_items(&file)
},
ast::MacroItems(items) => {
@@ -131,6 +124,9 @@ impl ItemTree {
ctx.lower_macro_stmts(stmts)
},
_ => {
+ if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) {
+ return Default::default();
+ }
panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
},
}
@@ -340,34 +336,37 @@ pub trait ItemTreeNode: Clone {
fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
}
-pub struct FileItemTreeId<N: ItemTreeNode> {
- index: Idx<N>,
- _p: PhantomData<N>,
+pub struct FileItemTreeId<N: ItemTreeNode>(Idx<N>);
+
+impl<N: ItemTreeNode> FileItemTreeId<N> {
+ pub fn index(&self) -> Idx<N> {
+ self.0
+ }
}
impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
fn clone(&self) -> Self {
- Self { index: self.index, _p: PhantomData }
+ Self(self.0)
}
}
impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}
impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
fn eq(&self, other: &FileItemTreeId<N>) -> bool {
- self.index == other.index
+ self.0 == other.0
}
}
impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}
impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
fn hash<H: Hasher>(&self, state: &mut H) {
- self.index.hash(state)
+ self.0.hash(state)
}
}
impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.index.fmt(f)
+ self.0.fmt(f)
}
}
@@ -548,7 +547,7 @@ impl Index<RawVisibilityId> for ItemTree {
impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
type Output = N;
fn index(&self, id: FileItemTreeId<N>) -> &N {
- N::lookup(self, id.index)
+ N::lookup(self, id.index())
}
}
@@ -613,10 +612,17 @@ pub struct Function {
pub(crate) flags: FnFlags,
}
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub enum Param {
- Normal(Interned<TypeRef>),
- Varargs,
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Param {
+ /// This is [`None`] for varargs
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub ast_id: ParamAstId,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ParamAstId {
+ Param(FileAstId<ast::Param>),
+ SelfParam(FileAstId<ast::SelfParam>),
}
bitflags::bitflags! {
@@ -702,6 +708,7 @@ pub struct Impl {
pub target_trait: Option<Interned<TraitRef>>,
pub self_ty: Interned<TypeRef>,
pub is_negative: bool,
+ pub is_unsafe: bool,
pub items: Box<[AssocItem]>,
pub ast_id: FileAstId<ast::Impl>,
}
@@ -739,6 +746,7 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
+ pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@@ -768,9 +776,9 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
- let hygiene = Hygiene::new(db.upcast(), file_id);
- let (_, source_map) =
- lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+ let span_map = db.span_map(file_id);
+ let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
+ .expect("failed to lower use tree");
source_map[index].clone()
}
/// Maps a `UseTree` contained in this import back to its AST node.
@@ -783,8 +791,10 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
- let hygiene = Hygiene::new(db.upcast(), file_id);
- lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
+ let span_map = db.span_map(file_id);
+ lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
+ .expect("failed to lower use tree")
+ .1
}
}
@@ -917,23 +927,23 @@ impl ModItem {
pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
match self {
- ModItem::Use(it) => tree[it.index].ast_id().upcast(),
- ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
- ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
- ModItem::Function(it) => tree[it.index].ast_id().upcast(),
- ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
- ModItem::Union(it) => tree[it.index].ast_id().upcast(),
- ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
- ModItem::Const(it) => tree[it.index].ast_id().upcast(),
- ModItem::Static(it) => tree[it.index].ast_id().upcast(),
- ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
- ModItem::TraitAlias(it) => tree[it.index].ast_id().upcast(),
- ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
- ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
- ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
- ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
- ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
- ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Use(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::ExternCrate(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::ExternBlock(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Function(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Struct(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Union(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Enum(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Const(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Static(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Trait(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::TraitAlias(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Impl(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::TypeAlias(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Mod(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index e4702c113..83a2790ce 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -2,18 +2,19 @@
use std::collections::hash_map::Entry;
-use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
+use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
+ LocalLifetimeParamId, LocalTypeOrConstParamId,
};
use super::*;
fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
- FileItemTreeId { index, _p: PhantomData }
+ FileItemTreeId(index)
}
pub(super) struct Ctx<'a> {
@@ -33,8 +34,8 @@ impl<'a> Ctx<'a> {
}
}
- pub(super) fn hygiene(&self) -> &Hygiene {
- self.body_ctx.hygiene()
+ pub(super) fn span_map(&self) -> SpanMapRef<'_> {
+ self.body_ctx.span_map()
}
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@@ -79,7 +80,7 @@ impl<'a> Ctx<'a> {
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree
.attrs
- .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
+ .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
self.tree.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@@ -109,8 +110,7 @@ impl<'a> Ctx<'a> {
}
fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
- let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
- let item: ModItem = match item {
+ let mod_item: ModItem = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@@ -129,10 +129,10 @@ impl<'a> Ctx<'a> {
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
};
+ let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
+ self.add_attrs(mod_item.into(), attrs);
- self.add_attrs(item.into(), attrs);
-
- Some(item)
+ Some(mod_item)
}
fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@@ -146,21 +146,32 @@ impl<'a> Ctx<'a> {
}
}
- fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
- match item {
+ fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
+ let item: AssocItem = match item_node {
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
- }
+ }?;
+ let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
+ self.add_attrs(
+ match item {
+ AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
+ AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
+ AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
+ AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
+ },
+ attrs,
+ );
+ Some(item)
}
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
+ let ast_id = self.source_ast_id_map.ast_id(strukt);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind());
- let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().structs.alloc(res)))
}
@@ -184,7 +195,10 @@ impl<'a> Ctx<'a> {
for field in fields.fields() {
if let Some(data) = self.lower_record_field(&field) {
let idx = self.data().fields.alloc(data);
- self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
+ self.add_attrs(
+ idx.into(),
+ RawAttrs::new(self.db.upcast(), &field, self.span_map()),
+ );
}
}
let end = self.next_field_idx();
@@ -205,7 +219,7 @@ impl<'a> Ctx<'a> {
for (i, field) in fields.fields().enumerate() {
let data = self.lower_tuple_field(i, &field);
let idx = self.data().fields.alloc(data);
- self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
+ self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map()));
}
let end = self.next_field_idx();
IdxRange::new(start..end)
@@ -222,12 +236,12 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
+ let ast_id = self.source_ast_id_map.ast_id(union);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
};
- let ast_id = self.source_ast_id_map.ast_id(union);
let res = Union { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().unions.alloc(res)))
}
@@ -235,12 +249,12 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
+ let ast_id = self.source_ast_id_map.ast_id(enum_);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
};
- let ast_id = self.source_ast_id_map.ast_id(enum_);
let res = Enum { name, visibility, generic_params, variants, ast_id };
Some(id(self.data().enums.alloc(res)))
}
@@ -252,7 +266,7 @@ impl<'a> Ctx<'a> {
let idx = self.data().variants.alloc(data);
self.add_attrs(
idx.into(),
- RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
+ RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
);
}
}
@@ -295,24 +309,37 @@ impl<'a> Ctx<'a> {
}
}
};
- let ty = Interned::new(self_type);
- let idx = self.data().params.alloc(Param::Normal(ty));
+ let type_ref = Interned::new(self_type);
+ let ast_id = self.source_ast_id_map.ast_id(&self_param);
+ let idx = self.data().params.alloc(Param {
+ type_ref: Some(type_ref),
+ ast_id: ParamAstId::SelfParam(ast_id),
+ });
self.add_attrs(
idx.into(),
- RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
+ RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
);
has_self_param = true;
}
for param in param_list.params() {
+ let ast_id = self.source_ast_id_map.ast_id(&param);
let idx = match param.dotdotdot_token() {
- Some(_) => self.data().params.alloc(Param::Varargs),
+ Some(_) => self
+ .data()
+ .params
+ .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref);
- self.data().params.alloc(Param::Normal(ty))
+ self.data()
+ .params
+ .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
}
};
- self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
+ self.add_attrs(
+ idx.into(),
+ RawAttrs::new(self.db.upcast(), &param, self.span_map()),
+ );
}
}
let end_param = self.next_param_idx();
@@ -382,16 +409,9 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias);
- let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
- let res = TypeAlias {
- name,
- visibility,
- bounds: bounds.into_boxed_slice(),
- generic_params,
- type_ref,
- ast_id,
- };
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
+ let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
Some(id(self.data().type_aliases.alloc(res)))
}
@@ -438,23 +458,17 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
+ let ast_id = self.source_ast_id_map.ast_id(trait_def);
let generic_params =
self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some();
- let ast_id = self.source_ast_id_map.ast_id(trait_def);
let items = trait_def
.assoc_item_list()
.into_iter()
.flat_map(|list| list.assoc_items())
- .filter_map(|item| {
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- self.lower_assoc_item(&item).map(|item| {
- self.add_attrs(ModItem::from(item).into(), attrs);
- item
- })
- })
+ .filter_map(|item_node| self.lower_assoc_item(&item_node))
.collect();
let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@@ -467,17 +481,18 @@ impl<'a> Ctx<'a> {
) -> Option<FileItemTreeId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def);
+ let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let generic_params = self.lower_generic_params(
HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
trait_alias_def,
);
- let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let alias = TraitAlias { name, visibility, generic_params, ast_id };
Some(id(self.data().trait_aliases.alloc(alias)))
}
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
+ let ast_id = self.source_ast_id_map.ast_id(impl_def);
// Note that trait impls don't get implicit `Self` unlike traits, because here they are a
// type alias rather than a type parameter, so this is handled by the resolver.
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
@@ -487,28 +502,24 @@ impl<'a> Ctx<'a> {
let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr));
let self_ty = self.lower_type_ref(&impl_def.self_ty()?);
let is_negative = impl_def.excl_token().is_some();
+ let is_unsafe = impl_def.unsafe_token().is_some();
// We cannot use `assoc_items()` here as that does not include macro calls.
let items = impl_def
.assoc_item_list()
.into_iter()
.flat_map(|it| it.assoc_items())
- .filter_map(|item| {
- let assoc = self.lower_assoc_item(&item)?;
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- self.add_attrs(ModItem::from(assoc).into(), attrs);
- Some(assoc)
- })
+ .filter_map(|item| self.lower_assoc_item(&item))
.collect();
- let ast_id = self.source_ast_id_map.ast_id(impl_def);
- let res = Impl { generic_params, target_trait, self_ty, is_negative, items, ast_id };
+ let res =
+ Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res)))
}
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
- let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
+ let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
let res = Use { visibility, ast_id, use_tree };
Some(id(self.data().uses.alloc(res)))
@@ -530,10 +541,16 @@ impl<'a> Ctx<'a> {
}
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
- let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
+ let span_map = self.span_map();
+ let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
- let res = MacroCall { path, ast_id, expand_to };
+ let res = MacroCall {
+ path,
+ ast_id,
+ expand_to,
+ call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
+ };
Some(id(self.data().macro_calls.alloc(res)))
}
@@ -565,15 +582,15 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too.
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- let id: ModItem = match item {
- ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
- ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
- ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
- ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
+ let mod_item: ModItem = match &item {
+ ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
+ ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
+ ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
+ ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
- self.add_attrs(id.into(), attrs);
- Some(id)
+ let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
+ self.add_attrs(mod_item.into(), attrs);
+ Some(mod_item)
})
.collect()
});
@@ -605,12 +622,16 @@ impl<'a> Ctx<'a> {
generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
}
- let add_param_attrs = |item, param| {
- let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
+ let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
+ param| {
+ let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.span_map());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`.
- match self.tree.attrs.entry(item) {
+ match self.tree.attrs.entry(match item {
+ Either::Right(id) => id.into(),
+ Either::Left(id) => id.into(),
+ }) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
@@ -625,18 +646,19 @@ impl<'a> Ctx<'a> {
Interned::new(generics)
}
- fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Vec<Interned<TypeBound>> {
+ fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Box<[Interned<TypeBound>]> {
match node.type_bound_list() {
Some(bound_list) => bound_list
.bounds()
.map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it)))
.collect(),
- None => Vec::new(),
+ None => Box::default(),
}
}
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
- let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
+ let vis =
+ RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map());
self.data().vis.alloc(vis)
}
@@ -714,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
struct UseTreeLowering<'a> {
db: &'a dyn DefDatabase,
- hygiene: &'a Hygiene,
+ span_map: SpanMapRef<'a>,
mapping: Arena<ast::UseTree>,
}
@@ -727,7 +749,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => {
- match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
+ match ModPath::from_src(self.db.upcast(), path, self.span_map) {
Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere
}
@@ -746,7 +768,7 @@ impl UseTreeLowering<'_> {
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
- Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
+ Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
None => None,
};
let alias = tree.rename().map(|a| {
@@ -782,10 +804,10 @@ impl UseTreeLowering<'_> {
pub(crate) fn lower_use_tree(
db: &dyn DefDatabase,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
tree: ast::UseTree,
) -> Option<(UseTree, Arena<ast::UseTree>)> {
- let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
+ let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
let tree = lowering.lower_use_tree(tree)?;
Some((tree, lowering.mapping))
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 417bd37c8..244111d20 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -261,15 +261,15 @@ impl Printer<'_> {
self.indented(|this| {
for param in params.clone() {
this.print_attrs_of(param, "\n");
- match &this.tree[param] {
- Param::Normal(ty) => {
+ match &this.tree[param].type_ref {
+ Some(ty) => {
if flags.contains(FnFlags::HAS_SELF_PARAM) {
w!(this, "self: ");
}
this.print_type_ref(ty);
wln!(this, ",");
}
- Param::Varargs => {
+ None => {
wln!(this, "...");
}
};
@@ -388,8 +388,18 @@ impl Printer<'_> {
wln!(self);
}
ModItem::Impl(it) => {
- let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
- &self.tree[it];
+ let Impl {
+ target_trait,
+ self_ty,
+ is_negative,
+ is_unsafe,
+ items,
+ generic_params,
+ ast_id: _,
+ } = &self.tree[it];
+ if *is_unsafe {
+ w!(self, "unsafe");
+ }
w!(self, "impl");
self.print_generic_params(generic_params);
w!(self, " ");
@@ -447,7 +457,7 @@ impl Printer<'_> {
}
}
ModItem::MacroCall(it) => {
- let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
+ let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db.upcast()));
}
ModItem::MacroRules(it) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
index 4180f8172..96c65b941 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -370,3 +370,15 @@ struct S<#[cfg(never)] T>;
"#]],
)
}
+
+#[test]
+fn pub_self() {
+ check(
+ r#"
+pub(self) struct S;
+ "#,
+ expect![[r#"
+ pub(self) struct S;
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 3f87fe62b..b5333861c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -7,7 +7,8 @@
//! Note that `hir_def` is a work in progress, so not all of the above is
//! actually true.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
macro_rules! eprintln {
@@ -48,7 +49,7 @@ pub mod visibility;
pub mod find_path;
pub mod import_map;
-pub use rustc_abi as layout;
+pub use rustc_dependencies::abi as layout;
use triomphe::Arc;
#[cfg(test)]
@@ -62,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe},
};
-use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
+use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
@@ -71,18 +72,18 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
- hygiene::Hygiene,
+ name::Name,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
- MacroDefId, MacroDefKind, UnresolvedMacro,
+ MacroDefId, MacroDefKind,
};
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use stdx::impl_from;
-use syntax::ast;
+use syntax::{ast, AstNode};
-use ::tt::token_id as tt;
+pub use hir_expand::tt;
use crate::{
builtin_type::BuiltinType,
@@ -150,7 +151,7 @@ impl TryFrom<ModuleId> for CrateRootModuleId {
}
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ModuleId {
krate: CrateId,
/// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the
@@ -173,6 +174,18 @@ impl ModuleId {
self.krate
}
+ pub fn name(self, db: &dyn db::DefDatabase) -> Option<Name> {
+ let def_map = self.def_map(db);
+ let parent = def_map[self.local_id].parent?;
+ def_map[parent].children.iter().find_map(|(name, module_id)| {
+ if *module_id == self.local_id {
+ Some(name.clone())
+ } else {
+ None
+ }
+ })
+ }
+
pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
@@ -498,10 +511,7 @@ impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId);
impl MacroId {
pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool {
- match self {
- MacroId::ProcMacroId(it) => it.lookup(db).kind == ProcMacroKind::Attr,
- _ => false,
- }
+ matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr)
}
}
@@ -559,6 +569,8 @@ pub struct ConstBlockLoc {
pub root: hir::ExprId,
}
+/// Something that holds types, required for the current const arg lowering implementation as they
+/// need to be able to query where they are defined.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum TypeOwnerId {
FunctionId(FunctionId),
@@ -571,9 +583,6 @@ pub enum TypeOwnerId {
TypeAliasId(TypeAliasId),
ImplId(ImplId),
EnumVariantId(EnumVariantId),
- // FIXME(const-generic-body): ModuleId should not be a type owner. This needs to be fixed to make `TypeOwnerId` actually
- // useful for assigning ids to in type consts.
- ModuleId(ModuleId),
}
impl TypeOwnerId {
@@ -587,9 +596,7 @@ impl TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
- TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
- return None
- }
+ TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
})
}
}
@@ -604,8 +611,7 @@ impl_from!(
TraitAliasId,
TypeAliasId,
ImplId,
- EnumVariantId,
- ModuleId
+ EnumVariantId
for TypeOwnerId
);
@@ -703,12 +709,15 @@ pub struct InTypeConstLoc {
pub id: AstId<ast::ConstArg>,
/// The thing this const arg appears in
pub owner: TypeOwnerId,
- pub thing: Box<dyn OpaqueInternableThing>,
+ // FIXME(const-generic-body): The expected type should not be
+ pub expected_ty: Box<dyn OpaqueInternableThing>,
}
impl PartialEq for InTypeConstLoc {
fn eq(&self, other: &Self) -> bool {
- self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing
+ self.id == other.id
+ && self.owner == other.owner
+ && &*self.expected_ty == &*other.expected_ty
}
}
@@ -1031,7 +1040,6 @@ impl HasModule for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
TypeOwnerId::ImplId(it) => it.lookup(db).container,
TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
- TypeOwnerId::ModuleId(it) => *it,
}
}
}
@@ -1155,16 +1163,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
- let h = Hygiene::new(db, self.file_id);
- let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
+ let span_map = db.span_map(self.file_id);
+ let path =
+ self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
let Some(path) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
+ let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
+
macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+ call_site,
expands_to,
krate,
resolver,
@@ -1189,17 +1201,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
+ call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
- macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
+ macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
.map(|res| res.value)
}
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
+ call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1211,7 +1225,7 @@ fn macro_call_as_call_id_with_eager(
let res = match def.kind {
MacroDefKind::BuiltInEager(..) => {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
- expand_eager_macro_input(db, krate, macro_call, def, &|path| {
+ expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
eager_resolver(path).filter(MacroDefId::is_fn_like)
})
}
@@ -1220,6 +1234,7 @@ fn macro_call_as_call_id_with_eager(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+ call_site,
)),
err: None,
},
@@ -1304,6 +1319,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
+ call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@@ -1318,6 +1334,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos,
derive_attr_index,
},
+ call_site,
);
Ok((macro_id, def_id, call_id))
}
@@ -1330,15 +1347,13 @@ fn attr_macro_as_call_id(
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
- Some(AttrInput::TokenTree(tt)) => (
- {
- let mut tt = tt.0.clone();
- tt.delimiter = tt::Delimiter::UNSPECIFIED;
- tt
- },
- tt.1.clone(),
- ),
- _ => (tt::Subtree::empty(), Default::default()),
+ Some(AttrInput::TokenTree(tt)) => {
+ let mut tt = tt.as_ref().clone();
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ Some(tt)
+ }
+
+ _ => None,
};
def.as_lazy_macro(
@@ -1346,11 +1361,18 @@ fn attr_macro_as_call_id(
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
- attr_args: Arc::new(arg),
+ attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
+ macro_attr.ctxt,
)
}
+
+#[derive(Debug)]
+pub struct UnresolvedMacro {
+ pub path: hir_expand::mod_path::ModPath,
+}
+
intern::impl_internable!(
crate::type_ref::TypeRef,
crate::type_ref::TraitRef,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
index 52781d988..a3505b65f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
@@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode},
- hygiene::Hygiene,
+ span::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile,
};
use syntax::ast;
@@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path};
pub struct LowerCtx<'a> {
pub db: &'a dyn DefDatabase,
- hygiene: Hygiene,
+ span_map: SpanMap,
+ // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways.
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
}
impl<'a> LowerCtx<'a> {
- pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self {
- LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) }
+ pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
+ LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
}
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx {
db,
- hygiene: Hygiene::new(db.upcast(), file_id),
+ span_map: db.span_map(file_id),
ast_id_map: Some((file_id, OnceCell::new())),
}
}
- pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
- LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
+ pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
+ LowerCtx { db, span_map, ast_id_map: None }
}
- pub(crate) fn hygiene(&self) -> &Hygiene {
- &self.hygiene
+ pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
+ self.span_map.as_ref()
}
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
- Path::from_src(ast, self)
+ Path::from_src(self, ast)
}
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 4aedb22c6..514219ee7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -17,7 +17,7 @@ fn main() { column!(); }
#[rustc_builtin_macro]
macro_rules! column {() => {}}
-fn main() { 0 as u32; }
+fn main() { 0u32; }
"#]],
);
}
@@ -74,7 +74,7 @@ fn main() { line!() }
#[rustc_builtin_macro]
macro_rules! line {() => {}}
-fn main() { 0 as u32 }
+fn main() { 0u32 }
"#]],
);
}
@@ -468,12 +468,12 @@ macro_rules! concat_bytes {}
fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
"##,
- expect![[r##"
+ expect![[r#"
#[rustc_builtin_macro]
macro_rules! concat_bytes {}
fn main() { [b'A', 66, 67, 68, b'E', 70]; }
-"##]],
+"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index d09062132..9bf2a50d5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
fn token_mapping_smoke_test() {
check(
r#"
-// +tokenids
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
@@ -24,26 +23,22 @@ macro_rules! f {
};
}
-// +tokenids
+// +spans+syntaxctxt
f!(struct MyTraitMap2);
"#,
- expect![[r##"
-// call ids will be shifted by Shift(30)
-// +tokenids
-macro_rules! f {#0
- (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
- struct#10 $#11ident#12 {#13
- map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
- }#13
- }#9;#29
-}#0
-
-// // +tokenids
-// f!(struct#1 MyTraitMap2#2);
-struct#10 MyTraitMap2#32 {#13
- map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
-}#13
-"##]],
+ expect![[r#"
+macro_rules! f {
+ ( struct $ident:ident ) => {
+ struct $ident {
+ map: ::std::collections::HashSet<()>,
+ }
+ };
+}
+
+struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2#
+ map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
+}#FileId(0):1@132..133\2#
+"#]],
);
}
@@ -53,49 +48,42 @@ fn token_mapping_floats() {
// (and related issues)
check(
r#"
-// +tokenids
+// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
-// +tokenids
+// +spans+syntaxctxt
f! {
fn main() {
1;
1.0;
+ ((1,),).0.0;
let x = 1;
}
}
"#,
- expect![[r##"
-// call ids will be shifted by Shift(18)
-// +tokenids
-macro_rules! f {#0
- (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
- $#12(#13$#14tt#15)#13*#16
- }#11;#17
-}#0
-
-// // +tokenids
-// f! {
-// fn#1 main#2() {
-// 1#5;#6
-// 1.0#7;#8
-// let#9 x#10 =#11 1#12;#13
-// }
-// }
-fn#19 main#20(#21)#21 {#22
- 1#23;#24
- 1.0#25;#26
- let#27 x#28 =#29 1#30;#31
-}#22
+ expect![[r#"
+// +spans+syntaxctxt
+macro_rules! f {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0#
+ 1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0#
+ 1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0#
+ (#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0#
+ let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0#
+}#FileId(0):2@110..111\0#
-"##]],
+
+"#]],
);
}
@@ -105,53 +93,86 @@ fn eager_expands_with_unresolved_within() {
r#"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
+macro_rules! concat {}
+macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+}
fn main(foo: ()) {
- format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ concat!("hello", identity!("world"), unresolved!(), identity!("!"));
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
+macro_rules! concat {}
+macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+}
fn main(foo: ()) {
- builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ /* error: unresolved macro unresolved */"helloworld!";
}
"##]],
);
}
#[test]
-fn token_mapping_eager() {
+fn concat_spans() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
-
+macro_rules! concat {}
macro_rules! identity {
- ($expr:expr) => { $expr };
+ ($tt:tt) => {
+ $tt
+ }
}
fn main(foo: ()) {
- format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat {}
+ macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+ }
+
+ fn main(foo: ()) {
+ concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!"));
+ }
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
-
+macro_rules! concat {}
macro_rules! identity {
- ($expr:expr) => { $expr };
+ ($tt:tt) => {
+ $tt
+ }
}
fn main(foo: ()) {
- // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
-builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat {}
+ macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+ }
+
+ fn main(foo: ()) {
+ /* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#;
+ }
}
"##]],
@@ -159,6 +180,29 @@ builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_
}
#[test]
+fn token_mapping_across_files() {
+ check(
+ r#"
+//- /lib.rs
+#[macro_use]
+mod foo;
+
+mk_struct/*+spans+syntaxctxt*/!(Foo with u32);
+//- /foo.rs
+macro_rules! mk_struct {
+ ($foo:ident with $ty:ty) => { struct $foo($ty); }
+}
+"#,
+ expect![[r#"
+#[macro_use]
+mod foo;
+
+struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
+"#]],
+ );
+}
+
+#[test]
fn float_field_access_macro_input() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index b416f45ff..71ba49721 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -13,37 +13,97 @@ fn test_vec() {
check(
r#"
macro_rules! vec {
- ($($item:expr),*) => {{
- let mut v = Vec::new();
- $( v.push($item); )*
- v
- }};
+ () => (
+ $crate::__rust_force_expr!($crate::vec::Vec::new())
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+ );
+ ($($x:expr),+ $(,)?) => (
+ $crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
+ #[rustc_box]
+ $crate::boxed::Box::new([$($x),+])
+ ))
+ );
+}
+
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
}
+
fn main() {
vec!();
vec![1u32,2];
+ vec![a.];
}
"#,
expect![[r#"
macro_rules! vec {
- ($($item:expr),*) => {{
- let mut v = Vec::new();
- $( v.push($item); )*
- v
- }};
+ () => (
+ $crate::__rust_force_expr!($crate::vec::Vec::new())
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+ );
+ ($($x:expr),+ $(,)?) => (
+ $crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
+ #[rustc_box]
+ $crate::boxed::Box::new([$($x),+])
+ ))
+ );
}
+
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
+}
+
fn main() {
- {
- let mut v = Vec::new();
- v
+ $crate::__rust_force_expr!($crate:: vec:: Vec:: new());
+ $crate::__rust_force_expr!(<[_]>:: into_vec(#[rustc_box]$crate:: boxed:: Box:: new([1u32, 2])));
+ /* error: expected Expr */$crate::__rust_force_expr!($crate:: vec:: from_elem((a.), $n));
+}
+"#]],
+ );
+    // FIXME we should have testing infra for multi-level expansion tests
+ check(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
};
- {
- let mut v = Vec::new();
- v.push(1u32);
- v.push(2);
- v
+}
+
+fn main() {
+ __rust_force_expr!(crate:: vec:: Vec:: new());
+ __rust_force_expr!(<[_]>:: into_vec(#[rustc_box] crate:: boxed:: Box:: new([1u32, 2])));
+ __rust_force_expr/*+errors*/!(crate:: vec:: from_elem((a.), $n));
+}
+"#,
+ expect![[r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
};
}
+
+fn main() {
+ (crate ::vec::Vec::new());
+ (<[_]>::into_vec(#[rustc_box] crate ::boxed::Box::new([1u32, 2])));
+ /* error: expected Expr *//* parse error: expected field name or number */
+/* parse error: expected expression */
+/* parse error: expected R_PAREN */
+/* parse error: expected COMMA */
+/* parse error: expected expression, item or let statement */
+(crate ::vec::from_elem((a.), $n));
+}
"#]],
);
}
@@ -970,3 +1030,63 @@ builtin #format_args ("{}", &[0 2]);
"##]],
);
}
+
+#[test]
+fn eager_concat_line() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat {}
+
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! line {}
+
+fn main() {
+ concat!("event ", line!());
+}
+
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat {}
+
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! line {}
+
+fn main() {
+ "event 0u32";
+}
+
+"##]],
+ );
+}
+
+#[test]
+fn eager_concat_bytes_panic() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+ let x = concat_bytes!(2);
+}
+
+"#,
+ expect![[r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+ let x = /* error: unexpected token in input */[];
+}
+
+"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index 8adced4e0..be2a503d8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -16,21 +16,16 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
-use ::mbe::TokenMap;
-use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
+use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
-use hir_expand::{
- db::{DeclarativeMacroExpander, ExpandDatabase},
- AstId, InFile, MacroFile,
-};
+use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt};
use stdx::format_to;
use syntax::{
ast::{self, edit::IndentLevel},
- AstNode, SyntaxElement,
- SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
- SyntaxNode, TextRange, T,
+ AstNode,
+ SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
+ SyntaxNode, T,
};
-use tt::token_id::{Subtree, TokenId};
use crate::{
db::DefDatabase,
@@ -39,6 +34,7 @@ use crate::{
resolver::HasResolver,
src::HasSource,
test_db::TestDB,
+ tt::Subtree,
AdtId, AsMacroCall, Lookup, ModuleDefId,
};
@@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let mut text_edits = Vec::new();
let mut expansions = Vec::new();
- for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
- let mut show_token_ids = false;
- for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
- show_token_ids |= comment.to_string().contains("+tokenids");
- }
- if !show_token_ids {
- continue;
- }
-
- let call_offset = macro_.syntax().text_range().start().into();
- let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
- let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
-
- let DeclarativeMacroExpander { mac, def_site_token_map } =
- &*db.decl_macro_expander(krate, ast_id);
- assert_eq!(mac.err(), None);
- let tt = match &macro_ {
- ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
- ast::Macro::MacroDef(_) => unimplemented!(""),
- };
-
- let tt_start = tt.syntax().text_range().start();
- tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
- |token| {
- let range = token.text_range().checked_sub(tt_start).unwrap();
- if let Some(id) = def_site_token_map.token_by_range(range) {
- let offset = (range.end() + tt_start).into();
- text_edits.push((offset..offset, format!("#{}", id.0)));
- }
- },
- );
- text_edits.push((
- call_offset..call_offset,
- format!("// call ids will be shifted by {:?}\n", mac.shift()),
- ));
- }
-
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let macro_call = InFile::new(source.file_id, &macro_call);
let res = macro_call
@@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
})
.unwrap();
let macro_call_id = res.value.unwrap();
- let macro_file = MacroFile { macro_call_id };
+ let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file);
expansion_result.err = expansion_result.err.or(res.err);
- expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
+ expansions.push((macro_call.value.clone(), expansion_result));
}
- for (call, exp, arg) in expansions.into_iter().rev() {
+ for (call, exp) in expansions.into_iter().rev() {
let mut tree = false;
let mut expect_errors = false;
- let mut show_token_ids = false;
+ let mut show_spans = false;
+ let mut show_ctxt = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors");
- show_token_ids |= comment.to_string().contains("+tokenids");
+ show_spans |= comment.to_string().contains("+spans");
+ show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let mut expn_text = String::new();
@@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} else {
assert!(
parse.errors().is_empty(),
- "parse errors in expansion: \n{:#?}",
- parse.errors()
+ "parse errors in expansion: \n{:#?}\n```\n{}\n```",
+ parse.errors(),
+ parse.syntax_node(),
);
}
let pp = pretty_print_macro_expansion(
parse.syntax_node(),
- show_token_ids.then_some(&*token_map),
+ SpanMapRef::ExpansionSpanMap(&token_map),
+ show_spans,
+ show_ctxt,
);
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
@@ -185,27 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
let range = call.syntax().text_range();
let range: Range<usize> = range.into();
-
- if show_token_ids {
- if let Some((tree, map, _)) = arg.value.as_deref() {
- let tt_range = call.token_tree().unwrap().syntax().text_range();
- let mut ranges = Vec::new();
- extract_id_ranges(&mut ranges, map, tree);
- for (range, id) in ranges {
- let idx = (tt_range.start() + range.end()).into();
- text_edits.push((idx..idx, format!("#{}", id.0)));
- }
- }
- text_edits.push((range.start..range.start, "// ".into()));
- call.to_string().match_indices('\n').for_each(|(offset, _)| {
- let offset = offset + 1 + range.start;
- text_edits.push((offset..offset, "// ".into()));
- });
- text_edits.push((range.end..range.end, "\n".into()));
- text_edits.push((range.end..range.end, expn_text));
- } else {
- text_edits.push((range, expn_text));
- }
+ text_edits.push((range, expn_text));
}
text_edits.sort_by_key(|(range, _)| range.start);
@@ -226,19 +170,43 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
_ => None,
};
+
if let Some(src) = src {
- if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
- let pp = pretty_print_macro_expansion(src.value, None);
- format_to!(expanded_text, "\n{}", pp)
+ if let Some(file_id) = src.file_id.macro_file() {
+ if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
+ let call = file_id.call_node(&db);
+ let mut show_spans = false;
+ let mut show_ctxt = false;
+ for comment in
+ call.value.children_with_tokens().filter(|it| it.kind() == COMMENT)
+ {
+ show_spans |= comment.to_string().contains("+spans");
+ show_ctxt |= comment.to_string().contains("+syntaxctxt");
+ }
+ let pp = pretty_print_macro_expansion(
+ src.value,
+ db.span_map(src.file_id).as_ref(),
+ show_spans,
+ show_ctxt,
+ );
+ format_to!(expanded_text, "\n{}", pp)
+ }
}
}
}
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
- if src.file_id.is_builtin_derive(&db) {
- let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
- format_to!(expanded_text, "\n{}", pp)
+ if let Some(macro_file) = src.file_id.macro_file() {
+ if macro_file.is_builtin_derive(&db) {
+ let pp = pretty_print_macro_expansion(
+ src.value.syntax().clone(),
+ db.span_map(macro_file.into()).as_ref(),
+ false,
+ false,
+ );
+ format_to!(expanded_text, "\n{}", pp)
+ }
}
}
@@ -246,20 +214,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text);
}
-fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
- tree.token_trees.iter().for_each(|tree| match tree {
- tt::TokenTree::Leaf(leaf) => {
- let id = match leaf {
- tt::Leaf::Literal(it) => it.span,
- tt::Leaf::Punct(it) => it.span,
- tt::Leaf::Ident(it) => it.span,
- };
- ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
- }
- tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
- });
-}
-
fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') {
return pp;
@@ -276,7 +230,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
res
}
-fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
+fn pretty_print_macro_expansion(
+ expn: SyntaxNode,
+ map: SpanMapRef<'_>,
+ show_spans: bool,
+ show_ctxt: bool,
+) -> String {
let mut res = String::new();
let mut prev_kind = EOF;
let mut indent_level = 0;
@@ -322,10 +281,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
}
prev_kind = curr_kind;
format_to!(res, "{}", token);
- if let Some(map) = map {
- if let Some(id) = map.token_by_range(token.text_range()) {
- format_to!(res, "#{}", id.0);
+ if show_spans || show_ctxt {
+ let span = map.span_for_range(token.text_range());
+ format_to!(res, "#");
+ if show_spans {
+ format_to!(
+ res,
+ "{:?}:{:?}@{:?}",
+ span.anchor.file_id,
+ span.anchor.ast_id.into_raw(),
+ span.range,
+ );
+ }
+ if show_ctxt {
+ format_to!(res, "\\{}", span.ctx);
}
+ format_to!(res, "#");
}
}
res
@@ -342,6 +313,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree,
_: Option<&Subtree>,
_: &base_db::Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 822bdcc12..060b8aa8c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -94,6 +94,41 @@ fn foo() {
}
#[test]
+fn macro_rules_in_attr() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+ check(
+ r#"
+//- proc_macros: identity
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+id! {
+ #[proc_macros::identity]
+ impl Foo for WrapBj {
+ async fn foo(&self) {
+ self.id().await;
+ }
+ }
+}
+"#,
+ expect![[r#"
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+#[proc_macros::identity] impl Foo for WrapBj {
+ async fn foo(&self ) {
+ self .id().await ;
+ }
+}
+"#]],
+ );
+}
+
+#[test]
fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
@@ -127,3 +162,27 @@ macro_rules! id {
"#]],
);
}
+
+#[test]
+fn float_attribute_mapping() {
+ check(
+ r#"
+//- proc_macros: identity
+//+spans+syntaxctxt
+#[proc_macros::identity]
+fn foo(&self) {
+ self.0. 1;
+}
+"#,
+ expect![[r#"
+//+spans+syntaxctxt
+#[proc_macros::identity]
+fn foo(&self) {
+ self.0. 1;
+}
+
+fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0#
+ self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0#
+}#FileId(0):1@76..77\0#"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 2d4586146..b3a10a386 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem};
-use base_db::{CrateId, Dependency, Edition, FileId};
+use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@@ -14,7 +14,6 @@ use hir_expand::{
builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
- hygiene::Hygiene,
name::{name, AsName, Name},
proc_macro::ProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
.enumerate()
.map(|(idx, it)| {
// FIXME: a hacky way to create a Name from string.
- let name =
- tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
+ let name = tt::Ident {
+ text: it.name.clone(),
+ span: tt::SpanData {
+ range: syntax::TextRange::empty(syntax::TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContextId::ROOT,
+ },
+ };
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect())
@@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro,
- hygienes: FxHashMap::default(),
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@@ -212,9 +219,23 @@ struct MacroDirective {
#[derive(Clone, Debug, Eq, PartialEq)]
enum MacroDirectiveKind {
- FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
- Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
- Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
+ FnLike {
+ ast_id: AstIdWithPath<ast::MacroCall>,
+ expand_to: ExpandTo,
+ call_site: SyntaxContextId,
+ },
+ Derive {
+ ast_id: AstIdWithPath<ast::Adt>,
+ derive_attr: AttrId,
+ derive_pos: usize,
+ call_site: SyntaxContextId,
+ },
+ Attr {
+ ast_id: AstIdWithPath<ast::Item>,
+ attr: Attr,
+ mod_item: ModItem,
+ /* is this needed? */ tree: TreeId,
+ },
}
/// Walks the tree of module recursively
@@ -242,12 +263,6 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
- /// `Hygiene` cache, because `Hygiene` construction is expensive.
- ///
- /// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
- /// However, `DefCollector` still needs to lower paths in attributes, in particular those in
- /// derive meta item list.
- hygienes: FxHashMap<HirFileId, Hygiene>,
}
impl DefCollector<'_> {
@@ -315,12 +330,11 @@ impl DefCollector<'_> {
}
if *attr_name == hir_expand::name![feature] {
- let hygiene = &Hygiene::new_unhygienic();
let features = attr
- .parse_path_comma_token_tree(self.db.upcast(), hygiene)
+ .parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
- .filter_map(|feat| match feat.segments() {
+ .filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
@@ -471,7 +485,7 @@ impl DefCollector<'_> {
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
- attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+ attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@@ -1119,10 +1133,11 @@ impl DefCollector<'_> {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind {
- MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
let call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
+ *call_site,
*expand_to,
self.def_map.krate,
resolver_def_id,
@@ -1134,12 +1149,13 @@ impl DefCollector<'_> {
return false;
}
}
- MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
*derive_attr,
*derive_pos as u32,
+ *call_site,
self.def_map.krate,
resolver,
);
@@ -1212,7 +1228,7 @@ impl DefCollector<'_> {
};
if matches!(
def,
- MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
+ MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
if expander.is_derive()
) {
// Resolved to `#[derive]`
@@ -1234,22 +1250,10 @@ impl DefCollector<'_> {
};
let ast_id = ast_id.with_value(ast_adt_id);
- let extend_unhygenic;
- let hygiene = if file_id.is_macro() {
- self.hygienes
- .entry(file_id)
- .or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
- } else {
- // Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
- // when we're in an oridinary (non-macro) file.
- extend_unhygenic = Hygiene::new_unhygienic();
- &extend_unhygenic
- };
-
- match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
+ match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
let mut len = 0;
- for (idx, path) in derive_macros.enumerate() {
+ for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
@@ -1258,6 +1262,7 @@ impl DefCollector<'_> {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
+ call_site,
},
container: directive.container,
});
@@ -1414,11 +1419,12 @@ impl DefCollector<'_> {
for directive in &self.unresolved_macros {
match &directive.kind {
- MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
+ *call_site,
*expand_to,
self.def_map.krate,
|path| {
@@ -1444,7 +1450,7 @@ impl DefCollector<'_> {
));
}
}
- MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> {
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
let mut single_imports = Vec::new();
- let hygiene = Hygiene::new_unhygienic();
for attr in macro_use_attrs {
- let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
+ let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros.
self.def_collector.import_macros_from_extern_crate(
@@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> {
);
return;
};
- for path in paths {
+ for (path, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> {
let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => {
// FIXME: a hacky way to create a Name from string.
- name =
- tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
+ name = tt::Ident {
+ text: it.clone(),
+ span: tt::SpanData {
+ range: syntax::TextRange::empty(syntax::TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .as_name();
&name
}
None => {
@@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> {
}
}
- fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
- let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
+ fn collect_macro_call(
+ &mut self,
+ &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
+ container: ItemContainerId,
+ ) {
+ let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> {
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
- mac.expand_to,
+ call_site,
+ expand_to,
self.def_collector.def_map.krate,
|path| {
path.as_ident().and_then(|name| {
@@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
- kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
+ kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site },
container,
});
}
@@ -2363,7 +2383,6 @@ mod tests {
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro: false,
- hygienes: FxHashMap::default(),
};
collector.seed_with_top_level();
collector.collect();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 2dcc2c30f..c45200e2d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,7 +1,7 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
-use hir_expand::name::Name;
+use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use syntax::SmolStr;
@@ -66,14 +66,14 @@ impl ModDir {
attr_path: Option<&SmolStr>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();
- let orig_file_id = file_id.original_file(db.upcast());
+ let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
let mut candidate_files = ArrayVec::<_, 2>::new();
match attr_path {
Some(attr_path) => {
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
}
- None if file_id.is_include_macro(db.upcast()) => {
+ None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => {
candidate_files.push(format!("{}.rs", name.display(db.upcast())));
candidate_files.push(format!("{}/mod.rs", name.display(db.upcast())));
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
index 460a908b6..be3438e42 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -96,8 +96,8 @@ impl DefMap {
let types = result.take_types()?;
match types {
ModuleDefId::ModuleId(m) => Visibility::Module(m),
+ // error: visibility needs to refer to module
_ => {
- // error: visibility needs to refer to module
return None;
}
}
@@ -183,15 +183,6 @@ impl DefMap {
shadow: BuiltinShadowMode,
expected_macro_subns: Option<MacroSubNs>,
) -> ResolvePathResult {
- let graph = db.crate_graph();
- let _cx = stdx::panic_context::enter(format!(
- "DefMap {:?} crate_name={:?} block={:?} path={}",
- self.krate,
- graph[self.krate].display_name,
- self.block,
- path.display(db.upcast())
- ));
-
let mut segments = path.segments().iter().enumerate();
let mut curr_per_ns = match path.kind {
PathKind::DollarCrate(krate) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
index e7cc44b04..b2ffbbe4c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
use triomphe::Arc;
-use crate::{db::DefDatabase, test_db::TestDB};
-
-use super::DefMap;
+use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index 4a86f88e5..78cb78e83 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -1,13 +1,19 @@
-use base_db::SourceDatabaseExt;
+use base_db::{SourceDatabase, SourceDatabaseExt};
use triomphe::Arc;
-use crate::{db::DefDatabase, AdtId, ModuleDefId};
-
-use super::*;
+use crate::{
+ db::DefDatabase,
+ nameres::tests::{TestDB, WithFixture},
+ AdtId, ModuleDefId,
+};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
- let krate = db.test_crate();
+ let krate = {
+ let crate_graph = db.crate_graph();
+ // Some of these tests use minicore/proc-macros which will be injected as the first crate
+ crate_graph.iter().last().unwrap()
+ };
{
let events = db.log_executed(|| {
db.crate_def_map(krate);
@@ -28,84 +34,199 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
fn typing_inside_a_function_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
- //- /lib.rs
- mod foo;$0
+//- /lib.rs
+mod foo;$0
- use crate::foo::bar::Baz;
+use crate::foo::bar::Baz;
- enum E { A, B }
- use E::*;
+enum E { A, B }
+use E::*;
- fn foo() -> i32 {
- 1 + 1
- }
+fn foo() -> i32 {
+ 1 + 1
+}
- #[cfg(never)]
- fn no() {}
- //- /foo/mod.rs
- pub mod bar;
+#[cfg(never)]
+fn no() {}
+//- /foo/mod.rs
+pub mod bar;
- //- /foo/bar.rs
- pub struct Baz;
- ",
+//- /foo/bar.rs
+pub struct Baz;
+",
r"
- mod foo;
+mod foo;
- use crate::foo::bar::Baz;
+use crate::foo::bar::Baz;
- enum E { A, B }
- use E::*;
+enum E { A, B }
+use E::*;
- fn foo() -> i32 { 92 }
+fn foo() -> i32 { 92 }
- #[cfg(never)]
- fn no() {}
- ",
+#[cfg(never)]
+fn no() {}
+",
);
}
#[test]
fn typing_inside_a_macro_should_not_invalidate_def_map() {
- let (mut db, pos) = TestDB::with_position(
+ check_def_map_is_not_recomputed(
r"
- //- /lib.rs
- macro_rules! m {
- ($ident:ident) => {
- fn f() {
- $ident + $ident;
- };
- }
- }
- mod foo;
+//- /lib.rs
+macro_rules! m {
+ ($ident:ident) => {
+ fn f() {
+ $ident + $ident;
+ };
+ }
+}
+mod foo;
- //- /foo/mod.rs
- pub mod bar;
+//- /foo/mod.rs
+pub mod bar;
- //- /foo/bar.rs
- $0
- m!(X);
- ",
+//- /foo/bar.rs
+$0
+m!(X);
+
+pub struct S {}
+",
+ r"
+m!(Y);
+
+pub struct S {}
+",
);
- let krate = db.test_crate();
- {
- let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
- let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
- assert_eq!(module_data.scope.resolutions().count(), 1);
- });
- assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
+}
+
+#[test]
+fn typing_inside_an_attribute_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: identity
+//- /lib.rs
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+#[proc_macros::identity]
+fn f() {}
+",
+ r"
+#[proc_macros::identity]
+fn f() { foo }
+",
+ );
+}
+
+#[test]
+fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: identity
+//- /lib.rs
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+#[proc_macros::identity]
+fn f() {}
+",
+ r"
+#[proc_macros::identity(foo)]
+fn f() {}
+",
+ );
+}
+#[test]
+fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: identity, derive_identity
+//- /lib.rs
+macro_rules! m {
+ ($ident:ident) => {
+ fn fm() {
+ $ident + $ident;
+ };
}
- db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
+}
+mod foo;
- {
- let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
- let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
- assert_eq!(module_data.scope.resolutions().count(), 1);
- });
- assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+fn f() {}
+
+m!(X);
+macro_rules! m2 {
+ ($ident:ident) => {
+ fn f2() {
+ $ident + $ident;
+ };
}
}
+m2!(X);
+
+#[proc_macros::identity]
+#[derive(proc_macros::DeriveIdentity)]
+pub struct S {}
+",
+ r"
+fn f() {0}
+
+m!(X);
+macro_rules! m2 {
+ ($ident:ident) => {
+ fn f2() {
+ $ident + $ident;
+ };
+ }
+}
+m2!(X);
+
+#[proc_macros::identity]
+#[derive(proc_macros::DeriveIdentity)]
+pub struct S {}
+",
+ );
+}
+
+#[test]
+fn typing_inside_a_derive_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: derive_identity
+//- minicore:derive
+//- /lib.rs
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+#[derive(proc_macros::DeriveIdentity)]
+#[allow()]
+struct S;
+",
+ r"
+#[derive(proc_macros::DeriveIdentity)]
+#[allow(dead_code)]
+struct S;
+",
+ );
+}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_expansions() {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
index 3894172a5..215c49d4c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
- pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
- lower::lower_path(path, ctx)
+ pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
+ lower::lower_path(ctx, path)
}
/// Converts a known mod path to `Path`.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index abd817893..39f1b6f1c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -4,8 +4,10 @@ use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef};
-use either::Either;
-use hir_expand::name::{name, AsName};
+use hir_expand::{
+ mod_path::resolve_crate_root,
+ name::{name, AsName},
+};
use intern::Interned;
use syntax::ast::{self, AstNode, HasTypeBounds};
@@ -16,12 +18,12 @@ use crate::{
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
-pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
+pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
let mut generic_args = Vec::new();
- let hygiene = ctx.hygiene();
+ let span_map = ctx.span_map();
loop {
let segment = path.segment()?;
@@ -31,31 +33,31 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
- // FIXME: this should just return name
- match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
- Either::Left(name) => {
- let args = segment
- .generic_arg_list()
- .and_then(|it| lower_generic_args(ctx, it))
- .or_else(|| {
- lower_generic_args_from_fn_path(
- ctx,
- segment.param_list(),
- segment.ret_type(),
- )
- })
- .map(Interned::new);
- if let Some(_) = args {
- generic_args.resize(segments.len(), None);
- generic_args.push(args);
- }
- segments.push(name);
- }
- Either::Right(crate_id) => {
- kind = PathKind::DollarCrate(crate_id);
- break;
- }
+ if name_ref.text() == "$crate" {
+ break kind = resolve_crate_root(
+ ctx.db.upcast(),
+ span_map.span_for_range(name_ref.syntax().text_range()).ctx,
+ )
+ .map(PathKind::DollarCrate)
+ .unwrap_or(PathKind::Crate);
+ }
+ let name = name_ref.as_name();
+ let args = segment
+ .generic_arg_list()
+ .and_then(|it| lower_generic_args(ctx, it))
+ .or_else(|| {
+ lower_generic_args_from_fn_path(
+ ctx,
+ segment.param_list(),
+ segment.ret_type(),
+ )
+ })
+ .map(Interned::new);
+ if let Some(_) = args {
+ generic_args.resize(segments.len(), None);
+ generic_args.push(args);
}
+ segments.push(name);
}
ast::PathSegmentKind::SelfTypeKw => {
segments.push(name![Self]);
@@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
- Path::from_src(trait_ref.path()?, ctx)?
+ Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
@@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// We follow what it did anyway :)
if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
- if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
- kind = PathKind::DollarCrate(crate_id);
+ let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
+ if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
+ if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
+ Some(crate_root) => PathKind::DollarCrate(crate_root),
+ None => PathKind::Crate,
+ }
+ }
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 50da9ed06..2ac1516ec 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -588,6 +588,24 @@ impl Resolver {
_ => None,
})
}
+
+ pub fn type_owner(&self) -> Option<TypeOwnerId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::BlockScope(_) => None,
+ &Scope::GenericParams { def, .. } => Some(def.into()),
+ &Scope::ImplDefScope(id) => Some(id.into()),
+ &Scope::AdtScope(adt) => Some(adt.into()),
+ Scope::ExprScope(it) => Some(it.owner.into()),
+ })
+ }
+
+ pub fn impl_def(&self) -> Option<ImplId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::ImplDefScope(def) => Some(*def),
+ _ => None,
+ })
+ }
+
/// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
#[must_use]
pub fn update_to_inner_scope(
@@ -1071,7 +1089,6 @@ impl HasResolver for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.resolver(db),
TypeOwnerId::ImplId(it) => it.resolver(db),
TypeOwnerId::EnumVariantId(it) => it.resolver(db),
- TypeOwnerId::ModuleId(it) => it.resolver(db),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index a6befc8a8..f4a6b61f7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -34,6 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
index 30f48de61..f5803653c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
@@ -2,7 +2,7 @@
use std::iter;
-use hir_expand::{hygiene::Hygiene, InFile};
+use hir_expand::{span::SpanMapRef, InFile};
use la_arena::ArenaMap;
use syntax::ast;
use triomphe::Arc;
@@ -34,22 +34,22 @@ impl RawVisibility {
db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>,
) -> RawVisibility {
- Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
+ Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
}
- pub(crate) fn from_ast_with_hygiene(
+ pub(crate) fn from_ast_with_span_map(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> RawVisibility {
- Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
+ Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map)
}
- pub(crate) fn from_ast_with_hygiene_and_default(
+ pub(crate) fn from_ast_with_span_map_and_default(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
default: RawVisibility,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> RawVisibility {
let node = match node {
None => return default,
@@ -57,7 +57,7 @@ impl RawVisibility {
};
match node.kind() {
ast::VisibilityKind::In(path) => {
- let path = ModPath::from_src(db.upcast(), path, hygiene);
+ let path = ModPath::from_src(db.upcast(), path, span_map);
let path = match path {
None => return RawVisibility::private(),
Some(path) => path,
@@ -73,7 +73,7 @@ impl RawVisibility {
RawVisibility::Module(path)
}
ast::VisibilityKind::PubSelf => {
- let path = ModPath::from_kind(PathKind::Plain);
+ let path = ModPath::from_kind(PathKind::Super(0));
RawVisibility::Module(path)
}
ast::VisibilityKind::Pub => RawVisibility::Public,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 1f27204c1..361bbec43 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -13,11 +13,11 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-tracing = "0.1.35"
-either = "1.7.0"
+tracing.workspace = true
+either.workspace = true
rustc-hash = "1.1.0"
la-arena.workspace = true
-itertools = "0.10.5"
+itertools.workspace = true
hashbrown.workspace = true
smallvec.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
index 1906ed15b..be0b72f9d 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -12,11 +12,40 @@ use std::{
marker::PhantomData,
};
-use la_arena::{Arena, Idx};
+use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use crate::db;
+
+pub use base_db::span::ErasedFileAstId;
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = crate::InFile<FileAstId<N>>;
+
+impl<N: AstIdNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
+ crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+ db.ast_id_map(self.file_id).get(self.value)
+ }
+}
+
+pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+ db.ast_id_map(self.file_id).get_erased(self.value)
+ }
+}
+
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
-pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@@ -99,7 +126,7 @@ register_ast_id_node! {
TraitAlias,
TypeAlias,
Use,
- AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+ AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg, Param, SelfParam
}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
@@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
+
+ // make sure to allocate the root node
+ if !should_alloc_id(node.kind()) {
+ res.alloc(node);
+ }
// By walking the tree in breadth-first order we make sure that parents
// get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
@@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| {
if should_alloc_id(it.kind()) {
res.alloc(&it);
- true
+ TreeOrder::BreadthFirst
} else {
- false
+ TreeOrder::DepthFirst
}
});
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@@ -155,6 +187,11 @@ impl AstIdMap {
res
}
+ /// The [`AstId`] of the root node
+ pub fn root(&self) -> SyntaxNodePtr {
+ self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
+ }
+
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
- pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+ pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
@@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish()
}
+#[derive(Copy, Clone, PartialEq, Eq)]
+enum TreeOrder {
+ BreadthFirst,
+ DepthFirst,
+}
+
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth first orders. Nodes for which
-/// `f` returns true are visited breadth-first, all the other nodes are explored
-/// depth-first.
+/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
+/// [`TreeOrder::DepthFirst`].
///
/// In other words, the size of the bfs queue is bound by the number of "true"
/// nodes.
-fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
+fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
@@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() {
match event {
syntax::WalkEvent::Enter(node) => {
- if f(node.clone()) {
+ if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children());
preorder.skip_subtree();
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index 0ec2422b3..b8fc30c91 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,19 +1,19 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops};
-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
-use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
db::ExpandDatabase,
- hygiene::Hygiene,
mod_path::ModPath,
+ span::SpanMapRef,
tt::{self, Subtree},
InFile,
};
@@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
- pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
- let entries = collect_attrs(owner)
- .filter_map(|(id, attr)| match attr {
- Either::Left(attr) => {
- attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
- }
- Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
- id,
- input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
- path: Interned::new(ModPath::from(crate::name!(doc))),
- }),
- })
- .collect::<Vec<_>>();
- // FIXME: use `Arc::from_iter` when it becomes available
- let entries: Arc<[Attr]> = Arc::from(entries);
+ pub fn new(
+ db: &dyn ExpandDatabase,
+ owner: &dyn ast::HasAttrs,
+ span_map: SpanMapRef<'_>,
+ ) -> Self {
+ let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
+ Either::Left(attr) => {
+ attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+ }
+ Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+ id,
+ input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+ path: Interned::new(ModPath::from(crate::name!(doc))),
+ ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+ }),
+ });
+ let entries: Arc<[Attr]> = Arc::from_iter(entries);
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
- pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
- let hygiene = Hygiene::new(db, owner.file_id);
- Self::new(db, owner.value, &hygiene)
+ pub fn from_attrs_owner(
+ db: &dyn ExpandDatabase,
+ owner: InFile<&dyn ast::HasAttrs>,
+ span_map: SpanMapRef<'_>,
+ ) -> Self {
+ Self::new(db, owner.value, span_map)
}
pub fn merge(&self, other: Self) -> Self {
@@ -71,19 +76,13 @@ impl RawAttrs {
(Some(a), Some(b)) => {
let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
Self {
- entries: Some(Arc::from(
- a.iter()
- .cloned()
- .chain(b.iter().map(|it| {
- let mut it = it.clone();
- it.id.id = it.id.ast_index() as u32 + last_ast_index
- | (it.id.cfg_attr_index().unwrap_or(0) as u32)
- << AttrId::AST_INDEX_BITS;
- it
- }))
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- )),
+ entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
+ let mut it = it.clone();
+ it.id.id = it.id.ast_index() as u32 + last_ast_index
+ | (it.id.cfg_attr_index().unwrap_or(0) as u32)
+ << AttrId::AST_INDEX_BITS;
+ it
+ })))),
}
}
}
@@ -100,51 +99,43 @@ impl RawAttrs {
}
let crate_graph = db.crate_graph();
- let new_attrs = Arc::from(
- self.iter()
- .flat_map(|attr| -> SmallVec<[_; 1]> {
- let is_cfg_attr =
- attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
- if !is_cfg_attr {
- return smallvec![attr.clone()];
- }
-
- let subtree = match attr.token_tree_value() {
- Some(it) => it,
- _ => return smallvec![attr.clone()],
- };
+ let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
+ let is_cfg_attr =
+ attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+ if !is_cfg_attr {
+ return smallvec![attr.clone()];
+ }
- let (cfg, parts) = match parse_cfg_attr_input(subtree) {
- Some(it) => it,
- None => return smallvec![attr.clone()],
+ let subtree = match attr.token_tree_value() {
+ Some(it) => it,
+ _ => return smallvec![attr.clone()],
+ };
+
+ let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+ Some(it) => it,
+ None => return smallvec![attr.clone()],
+ };
+ let index = attr.id;
+ let attrs =
+ parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
+ let tree = Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: attr.to_vec(),
};
- let index = attr.id;
- let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
- |(idx, attr)| {
- let tree = Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: attr.to_vec(),
- };
- // FIXME hygiene
- let hygiene = Hygiene::new_unhygienic();
- Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
- },
- );
-
- let cfg_options = &crate_graph[krate].cfg_options;
- let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
- let cfg = CfgExpr::parse(&cfg);
- if cfg_options.check(&cfg) == Some(false) {
- smallvec![]
- } else {
- cov_mark::hit!(cfg_attr_active);
-
- attrs.collect()
- }
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
+ });
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+ let cfg = CfgExpr::parse(&cfg);
+ if cfg_options.check(&cfg) == Some(false) {
+ smallvec![]
+ } else {
+ cov_mark::hit!(cfg_attr_active);
+
+ attrs.collect()
+ }
+ }));
RawAttrs { entries: Some(new_attrs) }
}
@@ -185,21 +176,23 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
+ pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
+ // FIXME: This is losing span
Literal(SmolStr),
/// `#[attr(subtree)]`
- TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
+ TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
- AttrInput::TokenTree(tt) => tt.0.fmt(f),
+ AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@@ -208,10 +201,10 @@ impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
ast: ast::Meta,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
- let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
+ let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@@ -219,24 +212,20 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
- let (tree, map) = syntax_node_to_token_tree(tt.syntax());
- Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
+ let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
+ Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
- Some(Attr { id, path, input })
+ Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
}
- fn from_tt(
- db: &dyn ExpandDatabase,
- tt: &tt::Subtree,
- hygiene: &Hygiene,
- id: AttrId,
- ) -> Option<Attr> {
- let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+ fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
+ // FIXME: Unecessary roundtrip tt -> ast -> tt
+ let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
- Self::from_src(db, ast, hygiene, id)
+ Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
}
pub fn path(&self) -> &ModPath {
@@ -256,7 +245,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
+ AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@@ -267,7 +256,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => Some(&tt.0),
+ AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
@@ -276,8 +265,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
- hygiene: &'a Hygiene,
- ) -> Option<impl Iterator<Item = ModPath> + 'a> {
+ ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -290,12 +278,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
- // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
+ // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
+ // here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: tts.into_iter().cloned().collect(),
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: tts.to_vec(),
};
- let (parse, _) =
+ let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@@ -304,7 +293,11 @@ impl Attr {
return None;
}
let path = meta.path()?;
- ModPath::from_src(db, path, hygiene)
+ let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
+ Some((
+ ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
+ call_site,
+ ))
});
Some(paths)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 4ee12e2f2..de58a495f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -1,16 +1,22 @@
//! Builtin attributes.
+use base_db::{
+ span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+};
+use syntax::{TextRange, TextSize};
+
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
- ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
+ ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander {
$($variant),*
}
impl BuiltinAttrExpander {
- pub fn expand(
+ pub fn $expand_fn(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
}
}
-register_builtin! {
+register_builtin! { expand:
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
@@ -77,9 +83,8 @@ fn dummy_attr_expand(
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
-/// #[Foo]
-/// #[bar::Bar]
-/// ();
+/// #![Foo]
+/// #![bar::Bar]
/// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item.
@@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
- _ => return ExpandResult::ok(tt::Subtree::empty()),
+ MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
+ attr_args
+ }
+ _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
};
- pseudo_derive_attr_expansion(tt, derives)
+ pseudo_derive_attr_expansion(tt, derives, loc.call_site)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
+ call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span: tt::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: call_site,
+ },
}))
};
@@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));
+ token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
- token_trees.push(mk_leaf('('));
- token_trees.push(mk_leaf(')'));
- token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index ecc8b407a..410aa4d28 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -1,16 +1,16 @@
//! Builtin derives.
-use ::tt::Ident;
-use base_db::{CrateOrigin, LangCrateOrigin};
+use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use itertools::izip;
-use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
+ hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
- tt::{self, TokenId},
+ span::SpanMapRef,
+ tt,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
- token_map: &TokenMap,
+ token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
- expander(db, id, tt, token_map)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, span, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -70,19 +73,19 @@ enum VariantShape {
Unit,
}
-fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
- (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
+fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
+ (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
}
impl VariantShape {
- fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
- self.as_pattern_map(path, |it| quote!(#it))
+ fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
+ self.as_pattern_map(path, span, |it| quote!(span => #it))
}
- fn field_names(&self) -> Vec<tt::Ident> {
+ fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
- VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
+ VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
}
@@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map(
&self,
path: tt::Subtree,
+ span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let mapped = field_map(it);
- quote! { #it : #mapped , }
+ quote! {span => #it : #mapped , }
});
- quote! {
+ quote! {span =>
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
- let fields = tuple_field_iterator(n).map(|it| {
+ let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
- quote! {
+ quote! {span =>
#mapped ,
}
});
- quote! {
+ quote! {span =>
#path ( ##fields )
}
}
@@ -117,7 +121,7 @@ impl VariantShape {
}
}
- fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
+ fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@@ -139,17 +143,17 @@ enum AdtShape {
}
impl AdtShape {
- fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
- self.as_pattern_map(name, |it| quote!(#it))
+ fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
+ self.as_pattern_map(name, |it| quote!(span =>#it), span)
}
- fn field_names(&self) -> Vec<Vec<tt::Ident>> {
+ fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self {
AdtShape::Struct(s) => {
- vec![s.field_names()]
+ vec![s.field_names(span)]
}
AdtShape::Enum { variants, .. } => {
- variants.iter().map(|(_, fields)| fields.field_names()).collect()
+ variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
}
AdtShape::Union => {
never!("using fields of union in derive is always wrong");
@@ -162,18 +166,21 @@ impl AdtShape {
&self,
name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+ span: SpanData,
) -> Vec<tt::Subtree> {
match self {
AdtShape::Struct(s) => {
- vec![s.as_pattern_map(quote! { #name }, field_map)]
+ vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
}
AdtShape::Enum { variants, .. } => variants
.iter()
- .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
+ .map(|(v, fields)| {
+ fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
+ })
.collect(),
AdtShape::Union => {
never!("pattern matching on union is always wrong");
- vec![quote! { un }]
+ vec![quote! {span => un }]
}
}
}
@@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
-fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
- let (name, generic_param_list, shape) = match &adt {
+fn parse_adt(
+ tm: SpanMapRef<'_>,
+ adt: &ast::Adt,
+ call_site: SpanData,
+) -> Result<BasicAdtInfo, ExpandError> {
+ let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
- mbe::syntax_node_to_token_tree(it.syntax()).0
+ mbe::syntax_node_to_token_tree(it.syntax(), tm)
+ }
+ None => {
+ tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}
- None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
- it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+ it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
}
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
- .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
- .unwrap_or_else(tt::Subtree::empty);
+ .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
+ .unwrap_or_else(|| {
+ tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+ });
Some(ty)
} else {
None
@@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
- .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+ .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect();
- let name_token = name_to_token(&tm, name)?;
+ let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
-fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
+fn name_to_token(
+ token_map: SpanMapRef<'_>,
+ name: Option<ast::Name>,
+) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
- let name_token_id =
- token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
- let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
+ let span = token_map.span_for_range(name.syntax().text_range());
+ let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
@@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
+ // FIXME: use
+ invoc_span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let info = match parse_adt(tm, tt) {
+ let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
+ e,
+ )
+ }
};
let trait_body = make_trait_body(&info);
let mut where_block = vec![];
@@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone();
if let Some(b) = bound {
let ident = ident.clone();
- where_block.push(quote! { #ident : #b , });
+ where_block.push(quote! {invoc_span => #ident : #b , });
}
if let Some(ty) = param_ty {
- (quote! { const #ident : #ty , }, quote! { #ident_ , })
+ (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else {
let bound = trait_path.clone();
- (quote! { #ident : #bound , }, quote! { #ident_ , })
+ (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
}
})
.unzip();
@@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone();
let bound = trait_path.clone();
- quote! { #it : #bound , }
+ quote! {invoc_span => #it : #bound , }
}));
let name = info.name;
- let expanded = quote! {
+ let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
-fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
+fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene works for builtin derive macro
// such that $crate can be used here.
let cg = db.crate_graph();
@@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core);
- quote! { crate }
+ quote! {span => crate }
} else {
- quote! { core }
+ quote! {span => core }
};
tt.token_trees[0].clone()
@@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn clone(&self) -> Self {
#star self
}
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn clone(&self) -> Self {
match #star self {}
}
};
}
let name = &adt.name;
- let patterns = adt.shape.as_pattern(name);
- let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
+ let patterns = adt.shape.as_pattern(span, name);
+ let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
- let fat_arrow = fat_arrow();
- quote! {
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
#pat #fat_arrow #expr,
}
});
- quote! {
+ quote! {span =>
fn clone(&self) -> Self {
match self {
##arms
@@ -451,53 +469,56 @@ fn clone_expand(
})
}
-/// This function exists since `quote! { => }` doesn't work.
-fn fat_arrow() -> ::tt::Subtree<TokenId> {
- let eq =
- tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
- quote! { #eq> }
+/// This function exists since `quote! {span => => }` doesn't work.
+fn fat_arrow(span: SpanData) -> tt::Subtree {
+ let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
+ quote! {span => #eq> }
}
-/// This function exists since `quote! { && }` doesn't work.
-fn and_and() -> ::tt::Subtree<TokenId> {
- let and =
- tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
- quote! { #and& }
+/// This function exists since `quote! {span => && }` doesn't work.
+fn and_and(span: SpanData) -> tt::Subtree {
+ let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
+ quote! {span => #and& }
}
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
- fields
- .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
+ fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ )
}
AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
- quote!(#adt_name :: #name),
- |_| quote!(#krate::default::Default::default()),
+ quote!(span =>#adt_name :: #name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
)
} else {
// FIXME: Return expand error here
- quote!()
+ quote!(span =>)
}
}
AdtShape::Union => {
// FIXME: Return expand error here
- quote!()
+ quote!(span =>)
}
};
- quote! {
+ quote! {span =>
fn default() -> Self {
#body
}
@@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let x_string = it.to_string();
- quote! {
+ quote! {span =>
.field(#x_string, & #it)
}
});
- quote! {
+ quote! {span =>
f.debug_struct(#name) ##for_fields .finish()
}
}
VariantShape::Tuple(n) => {
- let for_fields = tuple_field_iterator(*n).map(|it| {
- quote! {
+ let for_fields = tuple_field_iterator(span, *n).map(|it| {
+ quote! {span =>
.field( & #it)
}
});
- quote! {
+ quote! {span =>
f.debug_tuple(#name) ##for_fields .finish()
}
}
- VariantShape::Unit => quote! {
+ VariantShape::Unit => quote! {span =>
f.write_str(#name)
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
}
@@ -553,20 +571,20 @@ fn debug_expand(
}
let arms = match &adt.shape {
AdtShape::Struct(fields) => {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let name = &adt.name;
- let pat = fields.as_pattern(quote!(#name));
+ let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields);
- vec![quote! { #pat #fat_arrow #expr }]
+ vec![quote! {span => #pat #fat_arrow #expr }]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(name, v)| {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let adt_name = &adt.name;
- let pat = v.as_pattern(quote!(#adt_name :: #name));
+ let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v);
- quote! {
+ quote! {span =>
#pat #fat_arrow #expr ,
}
})
@@ -576,7 +594,7 @@ fn debug_expand(
vec![]
}
};
- quote! {
+ quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self {
##arms
@@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote! {};
+ return quote! {span =>};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
}
- let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
- |(pat, names)| {
- let expr = {
- let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
- quote! { {
- ##it
- } }
- };
- let fat_arrow = fat_arrow();
- quote! {
- #pat #fat_arrow #expr ,
- }
- },
- );
+ let arms =
+ adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
+ |(pat, names)| {
+ let expr = {
+ let it =
+ names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
+ quote! {span => {
+ ##it
+ } }
+ };
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
+ #pat #fat_arrow #expr ,
+ }
+ },
+ );
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
- quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+ quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else {
- quote! {}
+ quote! {span =>}
};
- quote! {
+ quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant
match self {
@@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote! {};
+ return quote! {span =>};
}
let name = &adt.name;
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let body = match &*names {
[] => {
- quote!(true)
+ quote!(span =>true)
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
- let t1 = Ident::new(format!("{}_self", it.text), it.span);
- let t2 = Ident::new(format!("{}_other", it.text), it.span);
- let and_and = and_and();
- quote!(#and_and #t1 .eq( #t2 ))
+ let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
+ let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
+ let and_and = and_and(span);
+ quote!(span =>#and_and #t1 .eq( #t2 ))
});
let first = {
- let t1 = Ident::new(format!("{}_self", first.text), first.span);
- let t2 = Ident::new(format!("{}_other", first.text), first.span);
- quote!(#t1 .eq( #t2 ))
+ let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
+ let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
+ quote!(span =>#t1 .eq( #t2 ))
};
- quote!(#first ##rest)
+ quote!(span =>#first ##rest)
}
};
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
- quote! {
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
fn eq(&self, other: &Self) -> bool {
match (self, other) {
##arms
@@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
+ span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
- let self_patterns = adt.shape.as_pattern_map(name, |it| {
- let t = Ident::new(format!("{}_self", it.text), it.span);
- quote!(#t)
- });
- let other_patterns = adt.shape.as_pattern_map(name, |it| {
- let t = Ident::new(format!("{}_other", it.text), it.span);
- quote!(#t)
- });
+ let self_patterns = adt.shape.as_pattern_map(
+ name,
+ |it| {
+ let t = tt::Ident::new(format!("{}_self", it.text), it.span);
+ quote!(span =>#t)
+ },
+ span,
+ );
+ let other_patterns = adt.shape.as_pattern_map(
+ name,
+ |it| {
+ let t = tt::Ident::new(format!("{}_other", it.text), it.span);
+ quote!(span =>#t)
+ },
+ span,
+ );
(self_patterns, other_patterns)
}
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
+ span: SpanData,
) -> tt::Subtree {
- let fat_arrow1 = fat_arrow();
- let fat_arrow2 = fat_arrow();
- quote! {
+ let fat_arrow1 = fat_arrow(span);
+ let fat_arrow2 = fat_arrow(span);
+ quote! {span =>
match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 {
#rest
@@ -745,34 +775,34 @@ fn ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote!();
+ return quote!(span =>);
}
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
- let mut body = quote!(#krate::cmp::Ordering::Equal);
+ let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
- let t1 = Ident::new(format!("{}_self", f.text), f.span);
- let t2 = Ident::new(format!("{}_other", f.text), f.span);
- body = compare(krate, quote!(#t1), quote!(#t2), body);
+ let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
- let fat_arrow = fat_arrow();
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ let fat_arrow = fat_arrow(span);
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
- let mut body = quote! {
+ let fat_arrow = fat_arrow(span);
+ let mut body = quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal
}
};
if matches!(&adt.shape, AdtShape::Enum { .. }) {
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
- body = compare(krate, left, right, body);
+ let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+ let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
+ body = compare(krate, left, right, body, span);
}
- quote! {
+ quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
}
@@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
+ span: SpanData,
) -> tt::Subtree {
- let fat_arrow1 = fat_arrow();
- let fat_arrow2 = fat_arrow();
- quote! {
+ let fat_arrow1 = fat_arrow(span);
+ let fat_arrow2 = fat_arrow(span);
+ quote! {span =>
match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest
@@ -807,37 +839,39 @@ fn partial_ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote!();
+ return quote!(span =>);
}
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
+ let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+ let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
- let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
+ let mut body =
+ quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
- let t1 = Ident::new(format!("{}_self", f.text), f.span);
- let t2 = Ident::new(format!("{}_other", f.text), f.span);
- body = compare(krate, quote!(#t1), quote!(#t2), body);
+ let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
- let fat_arrow = fat_arrow();
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ let fat_arrow = fat_arrow(span);
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let body = compare(
krate,
left,
right,
- quote! {
+ quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
}
},
+ span,
);
- quote! {
+ quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index 30b19b6e5..c8f04bfee 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -1,17 +1,24 @@
//! Builtin macro
-use base_db::{AnchoredPath, Edition, FileId};
+use base_db::{
+ span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ AnchoredPath, Edition, FileId,
+};
use cfg::CfgExpr;
use either::Either;
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
+use itertools::Itertools;
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
- db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
- MacroCallLoc,
+ db::ExpandDatabase,
+ hygiene::span_with_def_site_ctxt,
+ name, quote,
+ tt::{self, DelimSpan},
+ ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@@ -36,7 +43,10 @@ macro_rules! register_builtin {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
- expander(db, id, tt)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, tt, span)
}
}
@@ -44,13 +54,16 @@ macro_rules! register_builtin {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
- arg_id: MacroCallId,
+ id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
- expander(db, arg_id, tt)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, tt, span)
}
}
@@ -78,7 +91,7 @@ pub fn find_builtin_macro(
register_builtin! {
LAZY:
- (column, Column) => column_expand,
+ (column, Column) => line_expand,
(file, File) => file_expand,
(line, Line) => line_expand,
(module_path, ModulePath) => module_path_expand,
@@ -109,99 +122,108 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand
}
-const DOLLAR_CRATE: tt::Ident =
- tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
+fn mk_pound(span: SpanData) -> tt::Subtree {
+ crate::quote::IntoTt::to_subtree(
+ vec![crate::tt::Leaf::Punct(crate::tt::Punct {
+ char: '#',
+ spacing: crate::tt::Spacing::Alone,
+ span: span,
+ })
+ .into()],
+ span,
+ )
+}
fn module_path_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Just return a dummy result.
- ExpandResult::ok(quote! { "module::path" })
+ ExpandResult::ok(quote! {span =>
+ "module::path"
+ })
}
fn line_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
- let expanded = quote! {
- 0 as u32
- };
-
- ExpandResult::ok(expanded)
+ // Note that `line!` and `column!` will never be implemented properly, as they are by definition
+ // not incremental
+ ExpandResult::ok(tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+ text: "0u32".into(),
+ span,
+ }))],
+ })
}
fn log_syntax_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn trace_macros_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn stringify_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees);
- let expanded = quote! {
+ let expanded = quote! {span =>
#pretty
};
ExpandResult::ok(expanded)
}
-fn column_expand(
- _db: &dyn ExpandDatabase,
- _id: MacroCallId,
- _tt: &tt::Subtree,
-) -> ExpandResult<tt::Subtree> {
- // dummy implementation for type-checking purposes
- let expanded = quote! {
- 0 as u32
- };
-
- ExpandResult::ok(expanded)
-}
-
fn assert_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ',');
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span,
}))],
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
- quote! {{
+ quote! {span =>{
if !(#cond) {
- #DOLLAR_CRATE::panic!(##panic_args);
+ #dollar_crate::panic!(##panic_args);
}
}}
}
- [] => quote! {{}},
+ [] => quote! {span =>{}},
};
ExpandResult::ok(expanded)
@@ -211,12 +233,13 @@ fn file_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
- let expanded = quote! {
+ let expanded = quote! {span =>
#file_name
};
@@ -227,16 +250,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- format_args_expand_general(db, id, tt, "")
+ format_args_expand_general(db, id, tt, "", span)
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- format_args_expand_general(db, id, tt, "\\n")
+ format_args_expand_general(db, id, tt, "\\n", span)
}
fn format_args_expand_general(
@@ -245,11 +270,12 @@ fn format_args_expand_general(
tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- let pound = quote! {@PUNCT '#'};
+ let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
- return ExpandResult::ok(quote! {
+ return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
}
@@ -258,25 +284,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
-
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
- let krate = DOLLAR_CRATE.clone();
- literals.push(quote!(#krate::format_args!(#lit);));
+ let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
+ literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
- let pound = quote! {@PUNCT '#'};
- let expanded = quote! {
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
@@ -288,20 +314,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
- let expanded = if enabled { quote!(true) } else { quote!(false) };
+ let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded)
}
@@ -309,13 +337,15 @@ fn panic_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
- quote!(#DOLLAR_CRATE::panic::panic_2021!)
+ quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
- quote!(#DOLLAR_CRATE::panic::panic_2015!)
+ quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments
@@ -327,13 +357,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
- quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
+ quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else {
- quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
+ quote!(span =>#dollar_crate::panic::unreachable_2015!)
};
// Pass the original arguments
@@ -363,6 +395,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@@ -372,13 +405,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"),
};
- ExpandResult { value: quote! {}, err: Some(err) }
+ ExpandResult { value: quote! {span =>}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
@@ -418,13 +452,14 @@ fn concat_expand(
}
}
}
- ExpandResult { value: quote!(#text), err }
+ ExpandResult { value: quote!(span =>#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
@@ -457,8 +492,25 @@ fn concat_bytes_expand(
}
}
}
- let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: quote!([#ident]), err }
+ let value = tt::Subtree {
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+ token_trees: {
+ Itertools::intersperse_with(
+ bytes.into_iter().map(|it| {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
+ }),
+ || {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ span,
+ }))
+ },
+ )
+ .collect()
+ },
+ };
+ ExpandResult { value, err }
}
fn concat_bytes_expand_subtree(
@@ -491,6 +543,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
@@ -505,8 +558,9 @@ fn concat_idents_expand(
}
}
}
- let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: quote!(#ident), err }
+ // FIXME merge spans
+ let ident = tt::Ident { text: ident.into(), span };
+ ExpandResult { value: quote!(span =>#ident), err }
}
fn relative_file(
@@ -541,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
- _tt: &tt::Subtree,
+ tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- match db.include_expand(arg_id) {
- Ok((res, _)) => ExpandResult::ok(res.0.clone()),
- Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
+ let file_id = match include_input_to_file_id(db, arg_id, tt) {
+ Ok(it) => it,
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
+ };
+ match parse_to_token_tree(
+ SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ SyntaxContextId::ROOT,
+ &db.file_text(file_id),
+ ) {
+ Some(it) => ExpandResult::ok(it),
+ None => ExpandResult::new(
+ tt::Subtree::empty(DelimSpan { open: span, close: span }),
+ ExpandError::other("failed to parse included file"),
+ ),
}
}
-pub(crate) fn include_arg_to_tt(
+pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
-) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
- let loc = db.lookup_intern_macro_call(arg_id);
- let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
- panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
- };
- let path = parse_string(&arg.0)?;
- let file_id = relative_file(db, *arg_id, &path, false)?;
-
- let (subtree, map) =
- parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
- Ok((triomphe::Arc::new((subtree, map)), file_id))
+ arg: &tt::Subtree,
+) -> Result<FileId, ExpandError> {
+ relative_file(db, arg_id, &parse_string(arg)?, false)
}
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
- tt: &tt::Subtree,
+ _tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- if let Err(e) = parse_string(tt) {
- return ExpandResult::new(tt::Subtree::empty(), e);
- }
-
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
- span: tt::TokenId::unspecified(),
+ span,
}))],
};
ExpandResult::ok(res)
@@ -589,10 +646,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
// FIXME: we're not able to read excluded files (which is most of them because
@@ -602,14 +662,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
- return ExpandResult::ok(quote!(""));
+ return ExpandResult::ok(quote!(span =>""));
}
};
let text = db.file_text(file_id);
let text = &*text;
- ExpandResult::ok(quote!(#text))
+ ExpandResult::ok(quote!(span =>#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@@ -621,10 +681,13 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
let mut err = None;
@@ -641,7 +704,7 @@ fn env_expand(
// `include!("foo.rs"), which might go to infinite loop
"UNRESOLVED_ENV_VAR".to_string()
});
- let expanded = quote! { #s };
+ let expanded = quote! {span => #s };
ExpandResult { value: expanded, err }
}
@@ -650,15 +713,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
- None => quote! { ::core::option::Option::None::<&str> },
- Some(s) => quote! { ::core::option::Option::Some(#s) },
+ None => quote! {span => ::core::option::Option::None::<&str> },
+ Some(s) => quote! {span => ::core::option::Option::Some(#s) },
};
ExpandResult::ok(expanded)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 5292a5fa1..935669d49 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.
-use base_db::{salsa, CrateId, Edition, SourceDatabase};
+use base_db::{
+ salsa::{self, debug::DebugQueryTable},
+ span::SyntaxContextId,
+ CrateId, Edition, FileId, SourceDatabase,
+};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
- ast::{self, HasAttrs, HasDocComments},
- AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+ ast::{self, HasAttrs},
+ AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
- ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
- builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
- BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
- ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
- MacroDefKind, MacroFile, ProcMacroExpander,
+ ast_id_map::AstIdMap,
+ attrs::{collect_attrs, RawAttrs},
+ builtin_attr_macro::pseudo_derive_attr_expansion,
+ builtin_fn_macro::EagerExpander,
+ fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
+ hygiene::{apply_mark, SyntaxContextData, Transparency},
+ span::{RealSpanMap, SpanMap, SpanMapRef},
+ tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
+ ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
- pub mac: mbe::DeclarativeMacro,
- pub def_site_token_map: mbe::TokenMap,
+ pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
+ pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
- pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ pub fn expand(
+ &self,
+ db: &dyn ExpandDatabase,
+ tt: tt::Subtree,
+ call_id: MacroCallId,
+ ) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
- None => self.mac.expand(tt).map_err(Into::into),
+ None => self
+ .mac
+ .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
+ .map_err(Into::into),
}
}
- pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
- self.mac.map_id_down(token_id)
- }
-
- pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- self.mac.map_id_up(token_id)
+ pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ match self.mac.err() {
+ Some(e) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan::DUMMY),
+ ExpandError::other(format!("invalid macro definition: {e}")),
+ ),
+ None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
+ }
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
+ /// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
-// FIXME: Get rid of these methods
-impl TokenExpander {
- pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- match self {
- TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
- TokenExpander::BuiltIn(..)
- | TokenExpander::BuiltInEager(..)
- | TokenExpander::BuiltInAttr(..)
- | TokenExpander::BuiltInDerive(..)
- | TokenExpander::ProcMacro(..) => id,
- }
- }
-
- pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- match self {
- TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
- TokenExpander::BuiltIn(..)
- | TokenExpander::BuiltInEager(..)
- | TokenExpander::BuiltInAttr(..)
- | TokenExpander::BuiltInDerive(..)
- | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
- }
- }
-}
-
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached
fn parse_macro_expansion(
&self,
- macro_file: MacroFile,
- ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
+ macro_file: MacroFileId,
+ ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
+ #[salsa::transparent]
+ fn span_map(&self, file_id: HirFileId) -> SpanMap;
+
+ fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+ #[salsa::interned]
+ fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
- /// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
- fn macro_arg(
- &self,
- id: MacroCallId,
- ) -> ValueResult<
- Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
- Arc<Box<[SyntaxError]>>,
- >;
- /// Extracts syntax node, corresponding to a macro call. That's a firewall
+ fn setup_syntax_context_root(&self) -> ();
+ #[salsa::transparent]
+ fn dump_syntax_contexts(&self) -> String;
+
+ /// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
- fn macro_arg_node(
+ fn macro_arg(
&self,
id: MacroCallId,
- ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+ ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
-
- /// Expand macro call to a token tree.
- // This query is LRU cached
- fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
- #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
- fn include_expand(
- &self,
- arg_id: MacroCallId,
- ) -> Result<
- (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
- ExpandError,
- >;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
+}
- fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
+#[inline]
+pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+ HirFileIdRepr::MacroFile(m) => {
+ SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
+ }
+ }
+}
+
+pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
+ Arc::new(RealSpanMap::from_file(db, file_id))
}
/// This expands the given macro call, but with different arguments. This is
@@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
- let token_range = token_to_map.text_range();
+
+ let span_map = RealSpanMap::absolute(FileId::BOGUS);
+ let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
- let censor = censor_for_macro_input(&loc, speculative_args);
- let mut fixups = fixup::fixup_syntax(speculative_args);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- speculative_args,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
+ let (mut tt, undo_info) = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ let censor = censor_for_macro_input(&loc, speculative_args);
+ let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+ fixups.append.retain(|it, _| match it {
+ syntax::NodeOrToken::Node(it) => !censor.contains(it),
+ syntax::NodeOrToken::Token(_) => true,
+ });
+ fixups.remove.extend(censor);
+ (
+ mbe::syntax_node_to_token_tree_modified(
+ speculative_args,
+ span_map,
+ fixups.append,
+ fixups.remove,
+ ),
+ fixups.undo_info,
+ )
+ }
+ };
- let (attr_arg, token_id) = match loc.kind {
+ let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@@ -204,65 +216,51 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
- item.doc_comments_and_attrs()
+ collect_attrs(&item)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
}?;
match attr.token_tree() {
Some(token_tree) => {
- let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
- tree.delimiter = tt::Delimiter::unspecified();
-
- let shift = mbe::Shift::new(&tt);
- shift.shift_all(&mut tree);
-
- let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
- let attr_input_start =
- token_tree.left_delimiter_token()?.text_range().start();
- let range = token_range.checked_sub(attr_input_start)?;
- let token_id = shift.shift(map.token_by_range(range)?);
- Some(token_id)
- } else {
- None
- };
- (Some(tree), token_id)
- }
- _ => (None, None),
- }
- }
- _ => (None, None),
- };
- let token_id = match token_id {
- Some(token_id) => token_id,
- // token wasn't inside an attribute input so it has to be in the general macro input
- None => {
- let range = token_range.checked_sub(speculative_args.text_range().start())?;
- let token_id = spec_args_tmap.token_by_range(range)?;
- match loc.def.kind {
- MacroDefKind::Declarative(it) => {
- db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+ let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
+ tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+
+ Some(tree)
}
- _ => token_id,
+ _ => None,
}
}
+ _ => None,
};
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
- tt.delimiter = tt::Delimiter::unspecified();
- expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ let call_site = loc.span(db);
+ expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &tt,
+ attr_arg.as_ref(),
+ call_site,
+ call_site,
+ call_site,
+ )
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
- pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
+ pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
}
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
- expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+ expander.expand(db, actual_macro_call, &adt, span_map)
+ }
+ MacroDefKind::Declarative(it) => {
+ db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
- MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
- let expand_to = macro_expand_to(db, actual_macro_call);
- fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
+ let expand_to = loc.expand_to();
+
+ fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
- .ranges_by_token(token_id, token_to_map.kind())
+ .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
+ HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
}
}
+// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
+// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
- macro_file: MacroFile,
-) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+ macro_file: MacroFileId,
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion");
- let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
-
- let expand_to = macro_expand_to(db, macro_file.macro_call_id);
-
- tracing::debug!("expanded = {}", tt.as_debug_string());
- tracing::debug!("kind = {:?}", expand_to);
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let expand_to = loc.expand_to();
+ let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@@ -333,51 +331,138 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
- db.parse_macro_expansion(MacroFile { macro_call_id })
+ db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
+fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
+ }
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let (parse, map) = db.parse_macro_expansion(macro_file).value;
+ (parse, SpanMap::ExpansionSpanMap(map))
+ }
+ }
+}
+
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
-) -> ValueResult<
- Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
- Arc<Box<[SyntaxError]>>,
-> {
- let loc = db.lookup_intern_macro_call(id);
-
- if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
- return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
- }
-
- let ValueResult { value, err } = db.macro_arg_node(id);
- let Some(arg) = value else {
- return ValueResult { value: None, err };
+ // FIXME: consider the following by putting fixup info into eager call info args
+ // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
+) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
+ let mismatched_delimiters = |arg: &SyntaxNode| {
+ let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
+ let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
+ let well_formed_tt =
+ matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
+ if !well_formed_tt {
+ // Don't expand malformed (unbalanced) macro invocations. This is
+ // less than ideal, but trying to expand unbalanced macro calls
+ // sometimes produces pathological, deeply nested code which breaks
+ // all kinds of things.
+ //
+ // Some day, we'll have explicit recursion counters for all
+ // recursive things, at which point this code might be removed.
+ cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+ Some(Arc::new(Box::new([SyntaxError::new(
+ "unbalanced token tree".to_owned(),
+ arg.text_range(),
+ )]) as Box<[_]>))
+ } else {
+ None
+ }
};
+ let loc = db.lookup_intern_macro_call(id);
+ if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
+ .then(|| loc.eager.as_deref())
+ .flatten()
+ {
+ ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
+ } else {
+ let (parse, map) = parse_with_map(db, loc.kind.file_id());
+ let root = parse.syntax_node();
+
+ let syntax = match loc.kind {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = &ast_id.to_ptr(db).to_node(&root);
+ let offset = node.syntax().text_range().start();
+ match node.token_tree() {
+ Some(tt) => {
+ let tt = tt.syntax();
+ if let Some(e) = mismatched_delimiters(tt) {
+ return ValueResult::only_err(e);
+ }
+ tt.clone()
+ }
+ None => {
+ return ValueResult::only_err(Arc::new(Box::new([
+ SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
+ ])));
+ }
+ }
+ }
+ MacroCallKind::Derive { ast_id, .. } => {
+ ast_id.to_ptr(db).to_node(&root).syntax().clone()
+ }
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
+ };
+ let (mut tt, undo_info) = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ let censor = censor_for_macro_input(&loc, &syntax);
+ let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+ fixups.append.retain(|it, _| match it {
+ syntax::NodeOrToken::Node(it) => !censor.contains(it),
+ syntax::NodeOrToken::Token(_) => true,
+ });
+ fixups.remove.extend(censor);
+ {
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map.as_ref(),
+ fixups.append.clone(),
+ fixups.remove.clone(),
+ );
+ reverse_fixups(&mut tt, &fixups.undo_info);
+ }
+ (
+ mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map,
+ fixups.append,
+ fixups.remove,
+ ),
+ fixups.undo_info,
+ )
+ }
+ };
- let node = SyntaxNode::new_root(arg);
- let censor = censor_for_macro_input(&loc, &node);
- let mut fixups = fixup::fixup_syntax(&node);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- &node,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
+ if loc.def.is_proc_macro() {
+ // proc macros expect their inputs without parentheses, MBEs expect it with them included
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ }
- if loc.def.is_proc_macro() {
- // proc macros expect their inputs without parentheses, MBEs expect it with them included
- tt.delimiter = tt::Delimiter::unspecified();
- }
- let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
- match err {
- Some(err) => ValueResult::new(val, err),
- None => ValueResult::ok(val),
+ if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
+ match parse.errors() {
+ [] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
+ errors => ValueResult::new(
+ Some((Arc::new(tt), undo_info)),
+ // Box::<[_]>::from(res.errors()), not stable yet
+ Arc::new(errors.to_vec().into_boxed_slice()),
+ ),
+ }
+ } else {
+ ValueResult::ok(Some((Arc::new(tt), undo_info)))
+ }
}
}
+// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
@@ -403,10 +488,9 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
- ast::Item::cast(node.clone())?
- .doc_comments_and_attrs()
+ collect_attrs(&ast::Item::cast(node.clone())?)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.map(|attr| attr.syntax().clone())
.into_iter()
.collect()
@@ -417,103 +501,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
-fn macro_arg_node(
- db: &dyn ExpandDatabase,
- id: MacroCallId,
-) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
- let err = || -> Arc<Box<[_]>> {
- Arc::new(Box::new([SyntaxError::new_at_offset(
- "invalid macro call".to_owned(),
- syntax::TextSize::from(0),
- )]))
- };
- let loc = db.lookup_intern_macro_call(id);
- let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
- let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
- Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
- } else {
- loc.kind
- .arg(db)
- .and_then(|arg| ast::TokenTree::cast(arg.value))
- .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
- };
- match res {
- Some(res) if res.errors().is_empty() => res.syntax_node(),
- Some(res) => {
- return ValueResult::new(
- Some(res.syntax_node().green().into()),
- // Box::<[_]>::from(res.errors()), not stable yet
- Arc::new(res.errors().to_vec().into_boxed_slice()),
- );
- }
- None => return ValueResult::only_err(err()),
- }
- } else {
- match loc.kind.arg(db) {
- Some(res) => res.value,
- None => return ValueResult::only_err(err()),
- }
- };
- if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
- let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
- let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
- let well_formed_tt =
- matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
- if !well_formed_tt {
- // Don't expand malformed (unbalanced) macro invocations. This is
- // less than ideal, but trying to expand unbalanced macro calls
- // sometimes produces pathological, deeply nested code which breaks
- // all kinds of things.
- //
- // Some day, we'll have explicit recursion counters for all
- // recursive things, at which point this code might be removed.
- cov_mark::hit!(issue9358_bad_macro_stack_overflow);
- return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
- "unbalanced token tree".to_owned(),
- arg.text_range(),
- )])));
- }
- }
- ValueResult::ok(Some(arg.green().into()))
-}
-
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
- let (mac, def_site_token_map) = match id.to_node(db) {
- ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
- Some(arg) => {
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
- (mac, def_site_token_map)
- }
- None => (
- mbe::DeclarativeMacro::from_err(
+ let (root, map) = parse_with_map(db, id.file_id);
+ let root = root.syntax_node();
+
+ let transparency = |node| {
+ // ... would be nice to have the item tree here
+ let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+ match &*attrs
+ .iter()
+ .find(|it| {
+ it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
+ })?
+ .token_tree_value()?
+ .token_trees
+ {
+ [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
+ "transparent" => Some(Transparency::Transparent),
+ "semitransparent" => Some(Transparency::SemiTransparent),
+ "opaque" => Some(Transparency::Opaque),
+ _ => None,
+ },
+ _ => None,
+ }
+ };
+
+ let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
+ ast::Macro::MacroRules(macro_rules) => (
+ match macro_rules.token_tree() {
+ Some(arg) => {
+ let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+ mac
+ }
+ None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
- Default::default(),
- ),
- },
- ast::Macro::MacroDef(macro_def) => match macro_def.body() {
- Some(arg) => {
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
- (mac, def_site_token_map)
- }
- None => (
- mbe::DeclarativeMacro::from_err(
+ },
+ transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+ ),
+ ast::Macro::MacroDef(macro_def) => (
+ match macro_def.body() {
+ Some(arg) => {
+ let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+ mac
+ }
+ None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
- Default::default(),
- ),
- },
+ },
+ transparency(&macro_def).unwrap_or(Transparency::Opaque),
+ ),
};
- Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+ Arc::new(DeclarativeMacroExpander { mac, transparency })
}
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@@ -529,39 +577,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
-fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
+fn macro_expand(
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+ loc: MacroCallLoc,
+) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
- let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
- MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+ MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
- let arg = db.macro_arg_node(id).value.unwrap();
-
- let node = SyntaxNode::new_root(arg);
- let censor = censor_for_macro_input(&loc, &node);
- let mut fixups = fixup::fixup_syntax(&node);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
- &node,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
-
- // this cast is a bit sus, can we avoid losing the typedness here?
- let adt = ast::Adt::cast(node).unwrap();
- let mut res = expander.expand(db, id, &adt, &tmap);
- fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
- res
+ let (root, map) = parse_with_map(db, loc.kind.file_id());
+ let root = root.syntax_node();
+ let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
+ let node = ast_id.to_ptr(db).to_node(&root);
+
+ // FIXME: Use censoring
+ let _censor = censor_for_macro_input(&loc, node.syntax());
+ expander.expand(db, macro_call_id, &node, map.as_ref())
}
_ => {
- let ValueResult { value, err } = db.macro_arg(id);
- let Some(macro_arg) = value else {
+ let ValueResult { value, err } = db.macro_arg(macro_call_id);
+ let Some((macro_arg, undo_info)) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -570,12 +610,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
- let (arg, arg_tm, undo_info) = &*macro_arg;
- let mut res = match loc.def.kind {
+ let arg = &*macro_arg;
+ match loc.def.kind {
MacroDefKind::Declarative(id) => {
- db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+ db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
+ }
+ MacroDefKind::BuiltIn(it, _) => {
+ it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
- MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through
@@ -583,11 +625,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we want to just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
- let mut arg = arg.clone();
- fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
-
return ExpandResult {
- value: Arc::new(arg),
+ value: macro_arg.clone(),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@@ -600,12 +639,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
- MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
- MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+ MacroDefKind::BuiltInEager(it, _) => {
+ it.expand(db, macro_call_id, &arg).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInAttr(it, _) => {
+ let mut res = it.expand(db, macro_call_id, &arg);
+ fixup::reverse_fixups(&mut res.value, &undo_info);
+ res
+ }
_ => unreachable!(),
- };
- fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
- res
+ }
}
};
@@ -614,9 +657,12 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
err = error.clone().or(err);
}
- // Set a hard limit for the expanded tt
- if let Err(value) = check_tt_count(&tt) {
- return value;
+ // Skip checking token tree limit for include! macro call
+ if !loc.def.is_include() {
+ // Set a hard limit for the expanded tt
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
+ }
}
ExpandResult { value: Arc::new(tt), err }
@@ -624,10 +670,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
- let Some(macro_arg) = db.macro_arg(id).value else {
+ let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -636,47 +682,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
- let (arg_tt, arg_tm, undo_info) = &*macro_arg;
-
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => {
- let mut attr_args = attr_args.0.clone();
- mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
- Some(attr_args)
- }
+ MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
_ => None,
};
- let ExpandResult { value: mut tt, err } =
- expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+ let call_site = loc.span(db);
+ let ExpandResult { value: mut tt, err } = expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &macro_arg,
+ attr_arg,
+ // FIXME
+ call_site,
+ call_site,
+ // FIXME
+ call_site,
+ );
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
- fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
+ fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err }
}
-fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
- Arc::new(HygieneFrame::new(db, file_id))
-}
-
-fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
- db.lookup_intern_macro_call(id).expand_to()
-}
-
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
-) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@@ -692,7 +735,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![],
}),
err: Some(ExpandError::other(format!(
@@ -705,3 +748,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
+
+fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
+ db.intern_syntax_context(SyntaxContextData::root());
+}
+
+fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
+ let mut s = String::from("Expansions:");
+ let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
+ entries.sort_by_key(|e| e.key);
+ for e in entries {
+ let id = e.key;
+ let expn_data = e.value.as_ref().unwrap();
+ s.push_str(&format!(
+ "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+ id,
+ expn_data.kind.file_id(),
+ expn_data.call_site,
+ SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+ expn_data.kind.descr(),
+ ));
+ }
+
+ s.push_str("\n\nSyntaxContexts:\n");
+ let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
+ entries.sort_by_key(|e| e.key);
+ for e in entries {
+ struct SyntaxContextDebug<'a>(
+ &'a dyn ExpandDatabase,
+ SyntaxContextId,
+ &'a SyntaxContextData,
+ );
+
+ impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.2.fancy_debug(self.1, self.0, f)
+ }
+ }
+ stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+ }
+ s
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index 4110f2847..8d55240ae 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -18,18 +18,17 @@
//!
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
-use base_db::CrateId;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
+use base_db::{span::SyntaxContextId, CrateId};
+use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
- hygiene::Hygiene,
mod_path::ModPath,
- EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind,
+ span::SpanMapRef,
+ EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
+ call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
@@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
+ call_site,
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
- // we need this map here as the expansion of the eager input fake file loses whitespace ...
- let mut ws_mapping = FxHashMap::default();
- if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
- ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
- Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
- }));
- }
+
+ let mut arg_map = ExpansionSpanMap::empty();
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
- &Hygiene::new(db, macro_call.file_id),
+ &arg_exp_map,
+ &mut arg_map,
+ TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
+ call_site,
resolver,
)
};
let err = parse_err.or(err);
+ if cfg!(debug_assertions) {
+ arg_map.finish();
+ }
- let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+ let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
- let (mut subtree, expanded_eager_input_token_map) =
- mbe::syntax_node_to_token_tree(&expanded_eager_input);
+ let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
- let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
- let mut ids_used = FxHashSet::default();
- let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
- // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
- // so we need to remap them to the original input of the eager macro.
- subtree.visit_ids(&mut |id| {
- // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
-
- if let Some(range) = expanded_eager_input_token_map
- .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
- {
- // remap from expanded eager input to eager input expansion
- if let Some(og_range) = mapping.get(&range) {
- // remap from eager input expansion to original eager input
- if let Some(&og_range) = ws_mapping.get(og_range) {
- if let Some(og_token) = og_tmap.token_by_range(og_range) {
- ids_used.insert(og_token);
- return og_token;
- }
- }
- }
- }
- tt::TokenId::UNSPECIFIED
- });
- og_tmap.filter(|id| ids_used.contains(&id));
- og_tmap
- } else {
- Default::default()
- };
- subtree.delimiter = crate::tt::Delimiter::unspecified();
+ subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let loc = MacroCallLoc {
def,
krate,
- eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((subtree, og_tmap)),
- arg_id,
- error: err.clone(),
- })),
+ eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+ call_site,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
-) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
+ call_site: SyntaxContextId,
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
- let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
+ let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@@ -146,57 +116,59 @@ fn lazy_expand(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
- hygiene: &Hygiene,
+ span_map: &ExpansionSpanMap,
+ expanded_map: &mut ExpansionSpanMap,
+ mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
+ call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
+) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
- let mut mapping = FxHashMap::default();
let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions
let mut error = None;
- let mut offset = 0i32;
- let apply_offset = |it: TextSize, offset: i32| {
- TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
- };
let mut children = original.preorder_with_tokens();
// Collect replacement
while let Some(child) = children.next() {
- let WalkEvent::Enter(child) = child else { continue };
let call = match child {
- syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+ WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => {
children.skip_subtree();
it
}
- None => continue,
+ _ => continue,
},
- syntax::NodeOrToken::Token(t) => {
- mapping.insert(
- TextRange::new(
- apply_offset(t.text_range().start(), offset),
- apply_offset(t.text_range().end(), offset),
- ),
- t.text_range(),
- );
+ WalkEvent::Enter(_) => continue,
+ WalkEvent::Leave(child) => {
+ if let SyntaxElement::Token(t) = child {
+ let start = t.text_range().start();
+ offset += t.text_range().len();
+ expanded_map.push(offset, span_map.span_at(start));
+ }
continue;
}
};
- let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+
+ let def = match call
+ .path()
+ .and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
+ {
Some(path) => match macro_resolver(path.clone()) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+ offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
+ offset += call.syntax().text_range().len();
continue;
}
};
@@ -207,29 +179,22 @@ fn eager_macro_recur(
krate,
curr.with_value(call.clone()),
def,
+ call_site,
macro_resolver,
);
match value {
Some(call_id) => {
- let ExpandResult { value, err: err2 } =
+ let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
- if let Some(tt) = call.token_tree() {
- let call_tt_start = tt.syntax().text_range().start();
- let call_start =
- apply_offset(call.syntax().text_range().start(), offset);
- if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
- mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
- value
- .1
- .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
- .map(|r| (r + call_start, range + call_tt_start))
- }));
- }
- }
+ map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
+ let syntax_node = parse.syntax_node();
ExpandResult {
- value: Some(value.0.syntax_node().clone_for_update()),
+ value: Some((
+ syntax_node.clone_for_update(),
+ offset + syntax_node.text_range().len(),
+ )),
err: err.or(err2),
}
}
@@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
- lazy_expand(db, &def, curr.with_value(call.clone()), krate);
- let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
- Some(db.decl_macro_expander(def.krate, ast_id))
- } else {
- None
- };
+ lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
// replace macro inside
- let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
- &hygiene,
+ &tm,
+ expanded_map,
+ offset,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,
+ call_site,
macro_resolver,
);
let err = err.or(error);
- if let Some(tt) = call.token_tree() {
- let call_tt_start = tt.syntax().text_range().start();
- let call_start = apply_offset(call.syntax().text_range().start(), offset);
- if let Some((_tt, arg_map, _)) = parse
- .file_id
- .macro_file()
- .and_then(|id| db.macro_arg(id.macro_call_id).value)
- .as_deref()
- {
- mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
- tm.first_range_by_token(
- decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
- syntax::SyntaxKind::TOMBSTONE,
- )
- .map(|r| (r + call_start, range + call_tt_start))
- }));
- }
- }
- // FIXME: Do we need to re-use _m here?
- ExpandResult { value: value.map(|(n, _m)| n), err }
+ ExpandResult { value, err }
}
};
if err.is_some() {
@@ -288,16 +231,18 @@ fn eager_macro_recur(
}
// check if the whole original syntax is replaced
if call.syntax() == &original {
- return ExpandResult { value: value.zip(Some(mapping)), err: error };
+ return ExpandResult { value, err: error };
}
- if let Some(insert) = value {
- offset += u32::from(insert.text_range().len()) as i32
- - u32::from(call.syntax().text_range().len()) as i32;
- replacements.push((call, insert));
+ match value {
+ Some((insert, new_offset)) => {
+ replacements.push((call, insert));
+ offset = new_offset;
+ }
+ None => offset += call.syntax().text_range().len(),
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
- ExpandResult { value: Some((original, mapping)), err: error }
+ ExpandResult { value: Some((original, offset)), err: error }
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
new file mode 100644
index 000000000..89f0685d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -0,0 +1,375 @@
+//! Things to wrap other things in file ids.
+use std::iter;
+
+use base_db::{
+ span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
+ FileId, FileRange,
+};
+use either::Either;
+use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
+
+use crate::{db, ExpansionInfo, MacroFileIdExt};
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFileWrapper<FileKind, T> {
+ pub file_id: FileKind,
+ pub value: T,
+}
+pub type InFile<T> = InFileWrapper<HirFileId, T>;
+pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
+pub type InRealFile<T> = InFileWrapper<FileId, T>;
+
+impl<FileKind, T> InFileWrapper<FileKind, T> {
+ pub fn new(file_id: FileKind, value: T) -> Self {
+ Self { file_id, value }
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
+ InFileWrapper::new(self.file_id, f(self.value))
+ }
+}
+
+impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
+ pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
+ InFileWrapper::new(self.file_id, value)
+ }
+
+ pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
+ self.with_value(&self.value)
+ }
+}
+
+impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
+ pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+ fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
+ InFile { file_id: file_id.into(), value }
+ }
+}
+
+impl<T> From<InRealFile<T>> for InFile<T> {
+ fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
+ InFile { file_id: file_id.into(), value }
+ }
+}
+
+// region:transpose impls
+
+impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
+ pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
+ Some(InFileWrapper::new(self.file_id, self.value?))
+ }
+}
+
+impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
+ pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
+ match self.value {
+ Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
+ Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
+ }
+ }
+}
+
+// endregion:transpose impls
+
+trait FileIdToSyntax: Copy {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
+}
+
+impl FileIdToSyntax for FileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse(self).syntax_node()
+ }
+}
+impl FileIdToSyntax for MacroFileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse_macro_expansion(self).value.0.syntax_node()
+ }
+}
+impl FileIdToSyntax for HirFileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse_or_expand(self)
+ }
+}
+
+#[allow(private_bounds)]
+impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
+ pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ FileIdToSyntax::file_syntax(self.file_id, db)
+ }
+}
+
+impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
+ pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
+// region:specific impls
+
+impl InFile<&SyntaxNode> {
+ /// Skips the attributed item that caused the macro invocation we are climbing up
+ pub fn ancestors_with_macros_skip_attr_item(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let macro_file_id = node.file_id.macro_file()?;
+ let parent_node = macro_file_id.call_node(db);
+ if macro_file_id.is_attr_macro(db) {
+ // macro call was an attributed item, skip it
+ // FIXME: does this fail if this is a direct expansion of another macro?
+ parent_node.map(|node| node.parent()).transpose()
+ } else {
+ Some(parent_node)
+ }
+ }
+ };
+ iter::successors(succ(&self.cloned()), succ)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ ///
+ /// For attributes and derives, this will point back to the attribute only.
+ /// For the entire item use [`InFile::original_file_range_full`].
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some((res, ctxt)) =
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ {
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return res;
+ }
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some((res, ctxt)) =
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ {
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return res;
+ }
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range_with_body(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ }
+ }
+ }
+
+ pub fn original_syntax_node(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<InRealFile<SyntaxNode>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ let file_id = match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ return Some(InRealFile { file_id, value: self.value.clone() })
+ }
+ HirFileIdRepr::MacroFile(m) => m,
+ };
+ if !file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ let (FileRange { file_id, range }, ctx) =
+ ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if !ctx.is_root() {
+ return None;
+ }
+
+ let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let kind = self.value.kind();
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
+ let value = anc.ancestors().find(|it| it.kind() == kind)?;
+ Some(InRealFile::new(file_id, value))
+ }
+}
+
+impl InMacroFile<SyntaxToken> {
+ pub fn upmap_once(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
+ self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
+ }
+}
+
+impl InFile<SyntaxToken> {
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let (range, ctxt) = ExpansionInfo::new(db, mac_file)
+ .span_for_offset(db, self.value.text_range().start());
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return range;
+ }
+
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some(FileRange { file_id, range: self.value.text_range() })
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let (range, ctxt) = ExpansionInfo::new(db, mac_file)
+ .span_for_offset(db, self.value.text_range().start());
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ Some(range)
+ } else {
+ None
+ }
+ }
+ }
+ }
+}
+
+impl InMacroFile<TextSize> {
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
+ ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
+ }
+}
+
+impl InFile<TextRange> {
+ pub fn original_node_file_range(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> (FileRange, SyntaxContextId) {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ Some(it) => it,
+ None => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ (loc.kind.original_call_range(db), SyntaxContextId::ROOT)
+ }
+ }
+ }
+ }
+ }
+
+ pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ Some((it, SyntaxContextId::ROOT)) => it,
+ _ => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+ }
+ }
+
+ pub fn original_node_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
+ }
+ }
+ }
+}
+
+impl<N: AstNode> InFile<N> {
+ pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ let file_id = match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ return Some(InRealFile { file_id, value: self.value })
+ }
+ HirFileIdRepr::MacroFile(m) => m,
+ };
+ if !file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
+ .map_node_range_up(db, self.value.syntax().text_range())?;
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if !ctx.is_root() {
+ return None;
+ }
+
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
+ let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let value = anc.ancestors().find_map(N::cast)?;
+ Some(InRealFile::new(file_id, value))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index e6e8d8c02..346cd39a7 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -1,111 +1,126 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
-use std::mem;
-use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
-use rustc_hash::FxHashMap;
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData},
+ FileId,
+};
+use la_arena::RawIdx;
+use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
+use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
- match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+use triomphe::Arc;
+use tt::{Spacing, Span};
+
+use crate::{
+ span::SpanMapRef,
+ tt::{Ident, Leaf, Punct, Subtree},
};
-use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
- pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
+ pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
- pub(crate) token_map: TokenMap,
- pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
-#[derive(Debug, Default, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
- original: Box<[Subtree]>,
+ // FIXME: ThinArc<[Subtree]>
+ original: Option<Arc<Box<[Subtree]>>>,
+}
+
+impl SyntaxFixupUndoInfo {
+ pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
-const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+// censoring -> just don't convert the node
+// replacement -> censor + append
+// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
+// to remove later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
-pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
- let mut replace = FxHashMap::<SyntaxElement, _>::default();
+ let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
- let mut token_map = TokenMap::default();
- let mut next_id = 0;
+ let dummy_range = FIXUP_DUMMY_RANGE;
+ // We use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
+ // the index into the replacement vec — but only if the range's end points to !0.
+ let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
+ let fake_span = |range| SpanData {
+ range: dummy_range,
+ anchor: dummy_anchor,
+ ctx: span_map.span_for_range(range).ctx,
+ };
while let Some(event) = preorder.next() {
- let node = match event {
- syntax::WalkEvent::Enter(node) => node,
- syntax::WalkEvent::Leave(_) => continue,
- };
+ let syntax::WalkEvent::Enter(node) = event else { continue };
+ let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
+ remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
- let (original_tree, new_tmap, new_next_id) =
- mbe::syntax_node_to_token_tree_with_modifications(
- &node,
- mem::take(&mut token_map),
- next_id,
- Default::default(),
- Default::default(),
- );
- token_map = new_tmap;
- next_id = new_next_id;
+ let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
let idx = original.len() as u32;
original.push(original_tree);
- let replacement = SyntheticToken {
- kind: SyntaxKind::IDENT,
+ let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: node.text_range(),
- id: SyntheticTokenId(idx),
- };
- replace.insert(node.clone().into(), vec![replacement]);
+ span: SpanData {
+ range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
+ anchor: dummy_anchor,
+ ctx: span_map.span_for_range(node_range).ctx,
+ },
+ });
+ append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
+
// In some other situations, we can fix things by just appending some tokens.
- let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range),
+ }),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range),
+ }),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -117,28 +132,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -150,46 +162,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -201,29 +209,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -234,10 +239,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
- (SyntaxKind::UNDERSCORE, "_"),
- (SyntaxKind::IN_KW, "in"),
- (SyntaxKind::IDENT, "__ra_fixup")
- ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
+ "_",
+ "in",
+ "__ra_fixup"
+ ].map(|text|
+ Leaf::Ident(Ident {
+ text: text.into(),
+ span: fake_span(node_range)
+ }),
+ );
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@@ -248,18 +258,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -267,12 +276,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
+ let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups {
append,
- replace,
- token_map,
- next_id,
- undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
+ remove,
+ undo_info: SyntaxFixupUndoInfo {
+ original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
+ },
}
}
@@ -288,36 +298,57 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
-pub(crate) fn reverse_fixups(
- tt: &mut Subtree,
- token_map: &TokenMap,
- undo_info: &SyntaxFixupUndoInfo,
-) {
+pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
+ let Some(undo_info) = undo_info.original.as_deref() else { return };
+ let undo_info = &**undo_info;
+ if never!(
+ tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ ) {
+ tt.delimiter.close = SpanData::DUMMY;
+ tt.delimiter.open = SpanData::DUMMY;
+ }
+ reverse_fixups_(tt, undo_info);
+}
+
+fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
+ // delete all fake nodes
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
- token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
- }
- tt::TokenTree::Subtree(st) => {
- token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
+ let span = leaf.span();
+ let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+ let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+ is_real_leaf || is_replaced_node
}
+ tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
- reverse_fixups(&mut tt, token_map, undo_info);
+ if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ {
+ // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+ // might copy them if the proc-macro asks for it, so we need to filter those out
+ // here as well.
+ return SmallVec::new_const();
+ }
+ reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
- if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
- let original = undo_info.original[id.0 as usize].clone();
+ if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+ // we have a fake node here, we need to replace it again with the original
+ let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
} else {
SmallVec::from_const([original.into()])
}
} else {
+ // just a normal leaf
SmallVec::from_const([leaf.into()])
}
}
@@ -327,11 +358,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)]
mod tests {
+ use base_db::FileId;
use expect_test::{expect, Expect};
+ use triomphe::Arc;
- use crate::tt;
-
- use super::reverse_fixups;
+ use crate::{
+ fixup::reverse_fixups,
+ span::{RealSpanMap, SpanMap},
+ tt,
+ };
// The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`.
@@ -361,13 +396,13 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
- let fixups = super::fixup_syntax(&parsed.syntax_node());
- let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
+ let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
+ span_map.as_ref(),
fixups.append,
+ fixups.remove,
);
let actual = format!("{tt}\n");
@@ -383,14 +418,15 @@ mod tests {
parse.syntax_node()
);
- reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+ reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
- let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+ let original_as_tt =
+ mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!(
check_subtree_eq(&tt, &original_as_tt),
- "different token tree: {tt:?},\n{original_as_tt:?}"
+ "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
);
}
@@ -403,7 +439,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {for _ in __ra_fixup {}}
+fn foo () {for _ in __ra_fixup { }}
"#]],
)
}
@@ -431,7 +467,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {for bar in qux {}}
+fn foo () {for bar in qux { }}
"#]],
)
}
@@ -462,7 +498,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -494,7 +530,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -609,7 +645,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if a {}}
+fn foo () {if a { }}
"#]],
)
}
@@ -623,7 +659,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if __ra_fixup {}}
+fn foo () {if __ra_fixup { }}
"#]],
)
}
@@ -637,7 +673,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if __ra_fixup {} {}}
+fn foo () {if __ra_fixup {} { }}
"#]],
)
}
@@ -651,7 +687,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {while __ra_fixup {}}
+fn foo () {while __ra_fixup { }}
"#]],
)
}
@@ -665,7 +701,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {while foo {}}
+fn foo () {while foo { }}
"#]],
)
}
@@ -692,7 +728,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {loop {}}
+fn foo () {loop { }}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index ca65db113..7b03709ac 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -2,252 +2,247 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
-use base_db::CrateId;
-use db::TokenExpander;
-use either::Either;
-use mbe::Origin;
-use syntax::{
- ast::{self, HasDocComments},
- AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
-};
-use triomphe::Arc;
-
-use crate::{
- db::{self, ExpandDatabase},
- fixup,
- name::{AsName, Name},
- HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
-};
-
-#[derive(Clone, Debug)]
-pub struct Hygiene {
- frames: Option<HygieneFrames>,
+use std::iter;
+
+use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
+
+use crate::db::ExpandDatabase;
+
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
+pub struct SyntaxContextData {
+ pub outer_expn: Option<MacroCallId>,
+ pub outer_transparency: Transparency,
+ pub parent: SyntaxContextId,
+ /// This context, but with all transparent and semi-transparent expansions filtered away.
+ pub opaque: SyntaxContextId,
+ /// This context, but with all transparent expansions filtered away.
+ pub opaque_and_semitransparent: SyntaxContextId,
}
-impl Hygiene {
- pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
- Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
+impl std::fmt::Debug for SyntaxContextData {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("SyntaxContextData")
+ .field("outer_expn", &self.outer_expn)
+ .field("outer_transparency", &self.outer_transparency)
+ .field("parent", &self.parent)
+ .field("opaque", &self.opaque)
+ .field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
+ .finish()
}
+}
- pub fn new_unhygienic() -> Hygiene {
- Hygiene { frames: None }
+impl SyntaxContextData {
+ pub fn root() -> Self {
+ SyntaxContextData {
+ outer_expn: None,
+ outer_transparency: Transparency::Opaque,
+ parent: SyntaxContextId::ROOT,
+ opaque: SyntaxContextId::ROOT,
+ opaque_and_semitransparent: SyntaxContextId::ROOT,
+ }
}
- // FIXME: this should just return name
- pub fn name_ref_to_name(
- &self,
+ pub fn fancy_debug(
+ self,
+ self_id: SyntaxContextId,
db: &dyn ExpandDatabase,
- name_ref: ast::NameRef,
- ) -> Either<Name, CrateId> {
- if let Some(frames) = &self.frames {
- if name_ref.text() == "$crate" {
- if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
- return Either::Right(krate);
- }
+ f: &mut std::fmt::Formatter<'_>,
+ ) -> std::fmt::Result {
+ write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
+ match self.outer_expn {
+ Some(id) => {
+ write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
}
+ None => write!(f, "root")?,
}
-
- Either::Left(name_ref.as_name())
+ write!(f, ", {:?})", self.outer_transparency)
}
+}
- pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
- let mut token = path.syntax().first_token()?.text_range();
- let frames = self.frames.as_ref()?;
- let mut current = &frames.0;
-
- loop {
- let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
- if origin == Origin::Def {
- return if current.local_inner {
- frames.root_crate(db, path.syntax())
- } else {
- None
- };
- }
- current = current.call_site.as_ref()?;
- token = mapped.value;
- }
- }
+/// A property of a macro expansion that determines how identifiers
+/// produced by that expansion are resolved.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
+pub enum Transparency {
+ /// Identifier produced by a transparent expansion is always resolved at call-site.
+ /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
+ Transparent,
+ /// Identifier produced by a semi-transparent expansion may be resolved
+ /// either at call-site or at definition-site.
+ /// If it's a local variable, label or `$crate` then it's resolved at def-site.
+ /// Otherwise it's resolved at call-site.
+ /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
+ /// but that's an implementation detail.
+ SemiTransparent,
+ /// Identifier produced by an opaque expansion is always resolved at definition-site.
+ /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
+ Opaque,
}
-#[derive(Clone, Debug)]
-struct HygieneFrames(Arc<HygieneFrame>);
+pub fn span_with_def_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
+}
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct HygieneFrame {
- expansion: Option<HygieneInfo>,
+pub fn span_with_call_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
+}
- // Indicate this is a local inner macro
- local_inner: bool,
- krate: Option<CrateId>,
+pub fn span_with_mixed_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
+}
- call_site: Option<Arc<HygieneFrame>>,
- def_site: Option<Arc<HygieneFrame>>,
+fn span_with_ctxt_from_mark(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+ transparency: Transparency,
+) -> SpanData {
+ SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
-impl HygieneFrames {
- fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
- // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
- // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
- HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
+pub(super) fn apply_mark(
+ db: &dyn ExpandDatabase,
+ ctxt: SyntaxContextId,
+ call_id: MacroCallId,
+ transparency: Transparency,
+) -> SyntaxContextId {
+ if transparency == Transparency::Opaque {
+ return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
- fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
- let mut token = node.first_token()?.text_range();
- let mut result = self.0.krate;
- let mut current = self.0.clone();
-
- while let Some((mapped, origin)) =
- current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
- {
- result = current.krate;
-
- let site = match origin {
- Origin::Def => &current.def_site,
- Origin::Call => &current.call_site,
- };
+ let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
+ let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
+ call_site_ctxt.normalize_to_macros_2_0(db)
+ } else {
+ call_site_ctxt.normalize_to_macro_rules(db)
+ };
- let site = match site {
- None => break,
- Some(it) => it,
- };
-
- current = site.clone();
- token = mapped.value;
- }
+ if call_site_ctxt.is_root() {
+ return apply_mark_internal(db, ctxt, Some(call_id), transparency);
+ }
- result
+ // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
+ // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
+ //
+ // In this case, the tokens from the macros 1.0 definition inherit the hygiene
+ // at their invocation. That is, we pretend that the macros 1.0 definition
+ // was defined at its invocation (i.e., inside the macros 2.0 definition)
+ // so that the macros 2.0 definition remains hygienic.
+ //
+ // See the example at `test/ui/hygiene/legacy_interaction.rs`.
+ for (call_id, transparency) in ctxt.marks(db) {
+ call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
+ apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
-#[derive(Debug, Clone, PartialEq, Eq)]
-struct HygieneInfo {
- file: MacroFile,
- /// The start offset of the `macro_rules!` arguments or attribute input.
- attr_input_or_mac_def_start: Option<InFile<TextSize>>,
+fn apply_mark_internal(
+ db: &dyn ExpandDatabase,
+ ctxt: SyntaxContextId,
+ call_id: Option<MacroCallId>,
+ transparency: Transparency,
+) -> SyntaxContextId {
+ let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
+ let mut opaque = syntax_context_data.opaque;
+ let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+
+ if transparency >= Transparency::Opaque {
+ let parent = opaque;
+ let new_opaque = SyntaxContextId::SELF_REF;
+ // But we can't just grab the to be allocated ID either as that would not deduplicate
+ // things!
+ // So we need a new salsa store type here ...
+ opaque = db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque: new_opaque,
+ opaque_and_semitransparent: new_opaque,
+ });
+ }
+
+ if transparency >= Transparency::SemiTransparent {
+ let parent = opaque_and_semitransparent;
+ let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
+ opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque,
+ opaque_and_semitransparent: new_opaque_and_semitransparent,
+ });
+ }
- macro_def: TokenExpander,
- macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
- macro_arg_shift: mbe::Shift,
- exp_map: Arc<mbe::TokenMap>,
+ let parent = ctxt;
+ db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque,
+ opaque_and_semitransparent,
+ })
+}
+pub trait SyntaxContextExt {
+ fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
+ fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
+ fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
+ fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+ fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+ fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
-impl HygieneInfo {
- fn map_ident_up(
- &self,
- db: &dyn ExpandDatabase,
- token: TextRange,
- ) -> Option<(InFile<TextRange>, Origin)> {
- let token_id = self.exp_map.token_by_range(token)?;
- let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
- let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
-
- let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def_start?)
- }
- None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
- },
- _ => match origin {
- mbe::Origin::Call => {
- (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
- }
- mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
- (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
- (&expander.def_site_token_map, *tt)
- }
- _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
- },
- },
- };
-
- let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
- Some((tt.with_value(range + tt.value), origin))
+#[inline(always)]
+fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
+ match n {
+ SyntaxContextId::SELF_REF => p,
+ _ => n,
}
}
-fn make_hygiene_info(
- db: &dyn ExpandDatabase,
- macro_file: MacroFile,
- loc: &MacroCallLoc,
-) -> HygieneInfo {
- let def = loc.def.ast_id().left().and_then(|id| {
- let def_tt = match id.to_node(db) {
- ast::Macro::MacroRules(mac) => mac.token_tree()?,
- ast::Macro::MacroDef(mac) => mac.body()?,
- };
- Some(InFile::new(id.file_id, def_tt))
- });
- let attr_input_or_mac_def = def.or_else(|| match loc.kind {
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
- .token_tree()?;
- Some(InFile::new(ast_id.file_id, tt))
- }
- _ => None,
- });
-
- let macro_def = db.macro_expander(loc.def);
- let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
- });
-
- HygieneInfo {
- file: macro_file,
- attr_input_or_mac_def_start: attr_input_or_mac_def
- .map(|it| it.map(|tt| tt.syntax().text_range().start())),
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
- macro_arg,
- macro_def,
- exp_map,
+impl SyntaxContextExt for SyntaxContextId {
+ fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
+ handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
+ }
+ fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
+ handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
+ }
+ fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
+ db.lookup_intern_syntax_context(self).parent
+ }
+ fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+ let data = db.lookup_intern_syntax_context(self);
+ (data.outer_expn, data.outer_transparency)
+ }
+ fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+ let data = db.lookup_intern_syntax_context(*self);
+ *self = data.parent;
+ (data.outer_expn, data.outer_transparency)
+ }
+ fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
+ let mut marks = marks_rev(self, db).collect::<Vec<_>>();
+ marks.reverse();
+ marks
}
}
-impl HygieneFrame {
- pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
- let (info, krate, local_inner) = match file_id.macro_file() {
- None => (None, None, false),
- Some(macro_file) => {
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
- match loc.def.kind {
- MacroDefKind::Declarative(_) => {
- (info, Some(loc.def.krate), loc.def.local_inner)
- }
- MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
- MacroDefKind::BuiltInAttr(..) => (info, None, false),
- MacroDefKind::BuiltInDerive(..) => (info, None, false),
- MacroDefKind::BuiltInEager(..) => (info, None, false),
- MacroDefKind::ProcMacro(..) => (info, None, false),
- }
- }
- };
-
- let Some((info, calling_file)) = info else {
- return HygieneFrame {
- expansion: None,
- local_inner,
- krate,
- call_site: None,
- def_site: None,
- };
- };
-
- let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
- let call_site = Some(db.hygiene_frame(calling_file));
-
- HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
- }
+// FIXME: Make this a SyntaxContextExt method once we have RPIT
+pub fn marks_rev(
+ ctxt: SyntaxContextId,
+ db: &dyn ExpandDatabase,
+) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
+ iter::successors(Some(ctxt), move |&mark| {
+ Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
+ })
+ .map(|ctx| ctx.outer_mark(db))
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 4be55126b..d7819b315 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -4,7 +4,7 @@
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod db;
pub mod ast_id_map;
@@ -18,39 +18,59 @@ pub mod quote;
pub mod eager;
pub mod mod_path;
pub mod attrs;
+pub mod span;
+pub mod files;
mod fixup;
-use mbe::TokenMap;
-pub use mbe::{Origin, ValueResult};
-
-use ::tt::token_id as tt;
+use attrs::collect_attrs;
use triomphe::Arc;
-use std::{fmt, hash::Hash, iter};
+use std::{fmt, hash::Hash};
use base_db::{
- impl_intern_key,
- salsa::{self, InternId},
+ span::{HirFileIdRepr, SpanData, SyntaxContextId},
CrateId, FileId, FileRange, ProcMacroKind,
};
use either::Either;
use syntax::{
- algo::{self, skip_trivia_token},
- ast::{self, AstNode, HasDocComments},
- AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ ast::{self, AstNode},
+ SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use crate::{
- ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::TokenExpander,
+ fixup::SyntaxFixupUndoInfo,
mod_path::ModPath,
proc_macro::ProcMacroExpander,
+ span::{ExpansionSpanMap, SpanMap},
};
+pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId};
+pub use crate::files::{InFile, InMacroFile, InRealFile};
+
+pub use base_db::span::{HirFileId, MacroCallId, MacroFileId};
+pub use mbe::ValueResult;
+
+pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
+
+pub mod tt {
+ pub use base_db::span::SpanData;
+ pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor};
+
+ pub type Delimiter = ::tt::Delimiter<SpanData>;
+ pub type DelimSpan = ::tt::DelimSpan<SpanData>;
+ pub type Subtree = ::tt::Subtree<SpanData>;
+ pub type Leaf = ::tt::Leaf<SpanData>;
+ pub type Literal = ::tt::Literal<SpanData>;
+ pub type Punct = ::tt::Punct<SpanData>;
+ pub type Ident = ::tt::Ident<SpanData>;
+ pub type TokenTree = ::tt::TokenTree<SpanData>;
+}
+
pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -59,6 +79,7 @@ pub enum ExpandError {
Mbe(mbe::ExpandError),
RecursionOverflowPoisoned,
Other(Box<Box<str>>),
+ ProcMacroPanic(Box<Box<str>>),
}
impl ExpandError {
@@ -81,56 +102,24 @@ impl fmt::Display for ExpandError {
ExpandError::RecursionOverflowPoisoned => {
f.write_str("overflow expanding the original macro")
}
+ ExpandError::ProcMacroPanic(it) => {
+ f.write_str("proc-macro panicked: ")?;
+ f.write_str(it)
+ }
ExpandError::Other(it) => f.write_str(it),
}
}
}
-/// Input to the analyzer is a set of files, where each file is identified by
-/// `FileId` and contains source code. However, another source of source code in
-/// Rust are macros: each macro can be thought of as producing a "temporary
-/// file". To assign an id to such a file, we use the id of the macro call that
-/// produced the file. So, a `HirFileId` is either a `FileId` (source code
-/// written by user), or a `MacroCallId` (source code produced by macro).
-///
-/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
-/// containing the call plus the offset of the macro call in the file. Note that
-/// this is a recursive definition! However, the size_of of `HirFileId` is
-/// finite (because everything bottoms out at the real `FileId`) and small
-/// (`MacroCallId` uses the location interning. You can check details here:
-/// <https://en.wikipedia.org/wiki/String_interning>).
-///
-/// The two variants are encoded in a single u32 which are differentiated by the MSB.
-/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
-/// `MacroCallId`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct HirFileId(u32);
-
-impl fmt::Debug for HirFileId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.repr().fmt(f)
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroFile {
- pub macro_call_id: MacroCallId,
-}
-
-/// `MacroCallId` identifies a particular macro invocation, like
-/// `println!("Hello, {}", world)`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroCallId(salsa::InternId);
-impl_intern_key!(MacroCallId);
-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub def: MacroDefId,
- pub(crate) krate: CrateId,
+ pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
- eager: Option<Box<EagerCallInfo>>,
+ eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind,
+ pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -139,6 +128,7 @@ pub struct MacroDefId {
pub kind: MacroDefKind,
pub local_inner: bool,
pub allow_internal_unsafe: bool,
+ // pub def_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -152,9 +142,9 @@ pub enum MacroDefKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-struct EagerCallInfo {
+pub struct EagerCallInfo {
/// The expanded argument of the eager macro.
- arg: Arc<(tt::Subtree, TokenMap)>,
+ arg: Arc<tt::Subtree>,
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
@@ -178,7 +168,7 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
- attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+ attr_args: Option<Arc<tt::Subtree>>,
/// Syntactical index of the invoking `#[attribute]`.
///
/// Outer attributes are counted first, then inner attributes. This does not support
@@ -187,76 +177,68 @@ pub enum MacroCallKind {
},
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-enum HirFileIdRepr {
- FileId(FileId),
- MacroFile(MacroFile),
-}
+pub trait HirFileIdExt {
+ /// Returns the original file of this macro call hierarchy.
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId;
-impl From<FileId> for HirFileId {
- fn from(FileId(id): FileId) -> Self {
- assert!(id < Self::MAX_FILE_ID);
- HirFileId(id)
- }
-}
+ /// Returns the original file of this macro call hierarchy while going into the included file if
+ /// one of the calls comes from an `include!``.
+ fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId;
-impl From<MacroFile> for HirFileId {
- fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
- let id = id.as_u32();
- assert!(id < Self::MAX_FILE_ID);
- HirFileId(id | Self::MACRO_FILE_TAG_MASK)
- }
-}
+ /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+ fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
+
+ /// Return expansion information if it is a macro-expansion file
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>;
-impl HirFileId {
- const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
- const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+ fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
+ -> Option<InFile<ast::Attr>>;
+}
- /// For macro-expansion files, returns the file original source file the
- /// expansion originated from.
- pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
+impl HirFileIdExt for HirFileId {
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
let mut file_id = self;
loop {
match file_id.repr() {
HirFileIdRepr::FileId(id) => break id,
- HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
- file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
- Some(Ok((_, file))) => file.into(),
- _ => loc.kind.file_id(),
- }
+ HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
+ file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id();
}
}
}
}
- pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
- let mut level = 0;
- let mut curr = self;
- while let Some(macro_file) = curr.macro_file() {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
- level += 1;
- curr = loc.kind.file_id();
+ fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId {
+ loop {
+ match self.repr() {
+ base_db::span::HirFileIdRepr::FileId(id) => break id,
+ base_db::span::HirFileIdRepr::MacroFile(file) => {
+ let loc = db.lookup_intern_macro_call(file.macro_call_id);
+ if loc.def.is_include() {
+ if let Some(eager) = &loc.eager {
+ if let Ok(it) = builtin_fn_macro::include_input_to_file_id(
+ db,
+ file.macro_call_id,
+ &eager.arg,
+ ) {
+ break it;
+ }
+ }
+ }
+ self = loc.kind.file_id();
+ }
+ }
}
- level
}
- /// If this is a macro call, returns the syntax node of the call.
- pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
- let macro_file = self.macro_file()?;
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- Some(loc.to_node(db))
- }
-
- /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
+ fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop {
match call.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
- HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ HirFileIdRepr::FileId(file_id) => {
+ break Some(InRealFile { file_id, value: call.value })
+ }
+ HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
@@ -264,12 +246,11 @@ impl HirFileId {
}
/// Return expansion information if it is a macro-expansion file
- pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
- let macro_file = self.macro_file()?;
- ExpansionInfo::new(db, macro_file)
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
+ Some(ExpansionInfo::new(db, self.macro_file()?))
}
- pub fn as_builtin_derive_attr_node(
+ fn as_builtin_derive_attr_node(
&self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
@@ -281,104 +262,84 @@ impl HirFileId {
};
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
+}
- pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- matches!(
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
- MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
- )
- }
- None => false,
- }
- }
+pub trait MacroFileIdExt {
+ fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
+ /// If this is a macro call, returns the syntax node of the call.
+ fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode>;
- pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- matches!(
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
- MacroDefKind::BuiltInDerive(..)
- )
- }
- None => false,
- }
- }
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo;
+
+ fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+ fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an include macro
- pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
- }
- _ => false,
- }
+ fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+ fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
+ /// Return whether this file is an attr macro
+ fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+ /// Return whether this file is the pseudo expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
+}
+
+impl MacroFileIdExt for MacroFileId {
+ fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
+ db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
}
+ fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
+ let mut level = 0;
+ let mut macro_file = self;
+ loop {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
- }
- _ => false,
+ level += 1;
+ macro_file = match loc.kind.file_id().repr() {
+ HirFileIdRepr::FileId(_) => break level,
+ HirFileIdRepr::MacroFile(it) => it,
+ };
}
}
- /// Return whether this file is an attr macro
- pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.kind, MacroCallKind::Attr { .. })
- }
- _ => false,
- }
+ /// Return expansion information if it is a macro-expansion file
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo {
+ ExpansionInfo::new(db, self)
}
- /// Return whether this file is the pseudo expansion of the derive attribute.
- /// See [`crate::builtin_attr_macro::derive_attr_expand`].
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- loc.def.is_attribute_derive()
- }
- None => false,
- }
+ fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
}
- #[inline]
- pub fn is_macro(self) -> bool {
- self.0 & Self::MACRO_FILE_TAG_MASK != 0
+ fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::BuiltInDerive(..)
+ )
}
- #[inline]
- pub fn macro_file(self) -> Option<MacroFile> {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => None,
- _ => Some(MacroFile {
- macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
- }),
- }
+ fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+ db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
}
- #[inline]
- pub fn file_id(self) -> Option<FileId> {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => Some(FileId(self.0)),
- _ => None,
- }
+ fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
- fn repr(self) -> HirFileIdRepr {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => HirFileIdRepr::FileId(FileId(self.0)),
- _ => HirFileIdRepr::MacroFile(MacroFile {
- macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
- }),
- }
+ fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { .. })
+ }
+
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ loc.def.is_attribute_derive()
}
}
@@ -388,20 +349,35 @@ impl MacroDefId {
db: &dyn db::ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
+ call_site: SyntaxContextId,
) -> MacroCallId {
- db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+ db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
+ }
+
+ pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile<TextRange> {
+ match self.kind {
+ MacroDefKind::Declarative(id)
+ | MacroDefKind::BuiltIn(_, id)
+ | MacroDefKind::BuiltInAttr(_, id)
+ | MacroDefKind::BuiltInDerive(_, id)
+ | MacroDefKind::BuiltInEager(_, id) => {
+ id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
+ }
+ MacroDefKind::ProcMacro(_, _, id) => {
+ id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
+ }
+ }
}
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
- let id = match self.kind {
+ match self.kind {
MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id)
| MacroDefKind::BuiltInAttr(_, id)
| MacroDefKind::BuiltInDerive(_, id)
- | MacroDefKind::BuiltInEager(_, id) => id,
- };
- Either::Left(id)
+ | MacroDefKind::BuiltInEager(_, id) => Either::Left(id),
+ }
}
pub fn is_proc_macro(&self) -> bool {
@@ -443,6 +419,18 @@ impl MacroDefId {
}
impl MacroCallLoc {
+ pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData {
+ let ast_id = self.kind.erased_ast_id();
+ let file_id = self.kind.file_id();
+ let range = db.ast_id_map(file_id).get_erased(ast_id).text_range();
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range),
+ HirFileIdRepr::MacroFile(m) => {
+ db.parse_macro_expansion(m).value.1.span_at(range.start())
+ }
+ }
+ }
+
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
match self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
@@ -451,9 +439,9 @@ impl MacroCallLoc {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(derive_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -464,9 +452,9 @@ impl MacroCallLoc {
if self.def.is_attribute_derive() {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(invoc_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -483,20 +471,26 @@ impl MacroCallLoc {
match self.kind {
MacroCallKind::FnLike { expand_to, .. } => expand_to,
MacroCallKind::Derive { .. } => ExpandTo::Items,
- MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Items,
MacroCallKind::Attr { .. } => {
- // is this always correct?
+ // FIXME(stmt_expr_attributes)
ExpandTo::Items
}
}
}
}
-// FIXME: attribute indices do not account for nested `cfg_attr`
-
impl MacroCallKind {
+ fn descr(&self) -> &'static str {
+ match self {
+ MacroCallKind::FnLike { .. } => "macro call",
+ MacroCallKind::Derive { .. } => "derive macro",
+ MacroCallKind::Attr { .. } => "attribute macro",
+ }
+ }
+
/// Returns the file containing the macro invocation.
- fn file_id(&self) -> HirFileId {
+ pub fn file_id(&self) -> HirFileId {
match *self {
MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
| MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
@@ -504,6 +498,14 @@ impl MacroCallKind {
}
}
+ fn erased_ast_id(&self) -> ErasedFileAstId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(),
+ MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(),
+ MacroCallKind::Attr { ast_id: InFile { value, .. }, .. } => value.erase(),
+ }
+ }
+
/// Returns the original file range that best describes the location of this macro call.
///
/// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
@@ -548,242 +550,179 @@ impl MacroCallKind {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
- ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ collect_attrs(&ast_id.to_node(db))
.nth(derive_attr_index.ast_index())
.expect("missing derive")
+ .1
.expect_left("derive is a doc comment?")
.syntax()
.text_range()
}
// FIXME: handle `cfg_attr`
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .expect("missing attribute")
- .expect_left("attribute macro is a doc comment?")
- .syntax()
- .text_range(),
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ collect_attrs(&ast_id.to_node(db))
+ .nth(invoc_attr_index.ast_index())
+ .expect("missing attribute")
+ .1
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range()
+ }
};
FileRange { range, file_id }
}
- fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
+ fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile<Option<SyntaxNode>> {
match self {
- MacroCallKind::FnLike { ast_id, .. } => ast_id
- .to_in_file_node(db)
- .map(|it| Some(it.token_tree()?.syntax().clone()))
- .transpose(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone()))
+ }
MacroCallKind::Derive { ast_id, .. } => {
- Some(ast_id.to_in_file_node(db).syntax().cloned())
+ ast_id.to_in_file_node(db).syntax().cloned().map(Some)
}
MacroCallKind::Attr { ast_id, .. } => {
- Some(ast_id.to_in_file_node(db).syntax().cloned())
+ ast_id.to_in_file_node(db).syntax().cloned().map(Some)
}
}
}
}
-impl MacroCallId {
- pub fn as_file(self) -> HirFileId {
- MacroFile { macro_call_id: self }.into()
- }
-
- pub fn as_macro_file(self) -> MacroFile {
- MacroFile { macro_call_id: self }
- }
-}
-
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+// FIXME: can be expensive to create, we should check the use sites and maybe replace them with
+// simpler function calls if the map is only used once
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
- expanded: InMacroFile<SyntaxNode>,
+ pub expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
- arg: InFile<SyntaxNode>,
+ arg: InFile<Option<SyntaxNode>>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: TokenExpander,
- macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
- /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
- /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
- macro_arg_shift: mbe::Shift,
- exp_map: Arc<mbe::TokenMap>,
+ macro_arg: Arc<tt::Subtree>,
+ pub exp_map: Arc<ExpansionSpanMap>,
+ arg_map: SpanMap,
}
impl ExpansionInfo {
- pub fn expanded(&self) -> InFile<SyntaxNode> {
- self.expanded.clone().into()
+ pub fn expanded(&self) -> InMacroFile<SyntaxNode> {
+ self.expanded.clone()
}
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
- Some(self.arg.with_value(self.arg.value.parent()?))
+ Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
}
- /// Map a token down from macro input into the macro expansion.
- ///
- /// The inner workings of this function differ slightly depending on the type of macro we are dealing with:
- /// - declarative:
- /// For declarative macros, we need to accommodate for the macro definition site(which acts as a second unchanging input)
- /// , as tokens can mapped in and out of it.
- /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy
- /// way to map all the tokens.
- /// - attribute:
- /// Attributes have two different inputs, the input tokentree in the attribute node and the item
- /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here
- /// as well. Currently this is done by shifting the attribute input by the maximum id of the item.
- /// - function-like and derives:
- /// Both of these only have one simple call site input so no special handling is required here.
- pub fn map_token_down(
- &self,
- db: &dyn db::ExpandDatabase,
- item: Option<ast::Item>,
- token: InFile<&SyntaxToken>,
- // FIXME: use this for range mapping, so that we can resolve inline format args
- _relative_token_offset: Option<TextSize>,
- ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
- assert_eq!(token.file_id, self.arg.file_id);
- let token_id_in_attr_input = if let Some(item) = item {
- // check if we are mapping down in an attribute input
- // this is a special case as attributes can have two inputs
- let call_id = self.expanded.file_id.macro_call_id;
- let loc = db.lookup_intern_macro_call(call_id);
-
- let token_range = token.value.text_range();
- match &loc.kind {
- MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- let attr = item
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?;
- match attr.token_tree() {
- Some(token_tree)
- if token_tree.syntax().text_range().contains_range(token_range) =>
- {
- let attr_input_start =
- token_tree.left_delimiter_token()?.text_range().start();
- let relative_range =
- token.value.text_range().checked_sub(attr_input_start)?;
- // shift by the item's tree's max id
- let token_id = attr_args.1.token_by_range(relative_range)?;
-
- let token_id = if loc.def.is_attribute_derive() {
- // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
- token_id
- } else {
- self.macro_arg_shift.shift(token_id)
- };
- Some(token_id)
- }
- _ => None,
- }
- }
- _ => None,
- }
- } else {
- None
- };
-
- let token_id = match token_id_in_attr_input {
- Some(token_id) => token_id,
- // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual
- None => {
- let relative_range =
- token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
- let token_id = self.macro_arg.1.token_by_range(relative_range)?;
- // conditionally shift the id by a declarative macro definition
- self.macro_def.map_id_down(token_id)
- }
- };
-
+ /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
+ pub fn map_range_down<'a>(
+ &'a self,
+ span: SpanData,
+ ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
let tokens = self
.exp_map
- .ranges_by_token(token_id, token.value.kind())
+ .ranges_with_span(span)
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
- Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
+ Some(InMacroFile::new(self.expanded.file_id, tokens))
}
- /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
- pub fn map_token_up(
+ /// Looks up the span at the given offset.
+ pub fn span_for_offset(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ offset: TextSize,
+ ) -> (FileRange, SyntaxContextId) {
+ debug_assert!(self.expanded.value.text_range().contains(offset));
+ let span = self.exp_map.span_at(offset);
+ let anchor_offset = db
+ .ast_id_map(span.anchor.file_id.into())
+ .get_erased(span.anchor.ast_id)
+ .text_range()
+ .start();
+ (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+ }
+
+ /// Maps up the text range out of the expansion hierarchy back into the original file its from.
+ pub fn map_node_range_up(
&self,
db: &dyn db::ExpandDatabase,
- token: InFile<&SyntaxToken>,
- ) -> Option<(InFile<SyntaxToken>, Origin)> {
- assert_eq!(token.file_id, self.expanded.file_id.into());
- // Fetch the id through its text range,
- let token_id = self.exp_map.token_by_range(token.value.text_range())?;
- // conditionally unshifting the id to accommodate for macro-rules def site
- let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
- let call_id = self.expanded.file_id.macro_call_id;
- let loc = db.lookup_intern_macro_call(call_id);
-
- // Special case: map tokens from `include!` expansions to the included file
- if loc.def.is_include() {
- if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
- let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
- let source = db.parse(file_id);
-
- let token = source.syntax_node().covering_element(range).into_token()?;
-
- return Some((InFile::new(file_id.into(), token), Origin::Call));
+ range: TextRange,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ debug_assert!(self.expanded.value.text_range().contains_range(range));
+ let mut spans = self.exp_map.spans_for_range(range);
+ let SpanData { range, anchor, ctx } = spans.next()?;
+ let mut start = range.start();
+ let mut end = range.end();
+
+ for span in spans {
+ if span.anchor != anchor || span.ctx != ctx {
+ return None;
}
+ start = start.min(span.range.start());
+ end = end.max(span.range.end());
}
-
- // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
- let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => {
- if loc.def.is_attribute_derive() {
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- } else {
- // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input
- // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
- match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- }
- None => (&self.macro_arg.1, self.arg.clone()),
- }
- }
- }
- _ => match origin {
- mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
- mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
- (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
- (&expander.def_site_token_map, tt.syntax().cloned())
- }
- _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
- },
+ let anchor_offset =
+ db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some((
+ FileRange {
+ file_id: anchor.file_id,
+ range: TextRange::new(start, end) + anchor_offset,
},
- };
+ ctx,
+ ))
+ }
- let range = token_map.first_range_by_token(token_id, token.value.kind())?;
- let token =
- tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
- Some((tt.with_value(token), origin))
+ /// Maps up the text range out of the expansion into is macro call.
+ pub fn map_range_up_once(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ token: TextRange,
+ ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
+ debug_assert!(self.expanded.value.text_range().contains_range(token));
+ let span = self.exp_map.span_at(token.start());
+ match &self.arg_map {
+ SpanMap::RealSpanMap(_) => {
+ let file_id = span.anchor.file_id.into();
+ let anchor_offset =
+ db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
+ InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
+ }
+ SpanMap::ExpansionSpanMap(arg_map) => {
+ let arg_range = self
+ .arg
+ .value
+ .as_ref()
+ .map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range());
+ InFile::new(
+ self.arg.file_id,
+ arg_map
+ .ranges_with_span(span)
+ .filter(|range| range.intersect(arg_range).is_some())
+ .collect(),
+ )
+ }
+ }
}
- fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+ pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let arg_tt = loc.kind.arg(db)?;
+ let arg_tt = loc.kind.arg(db);
+ let arg_map = db.span_map(arg_tt.file_id);
let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
- let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
+ let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+ (
+ Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: Vec::new(),
+ }),
+ SyntaxFixupUndoInfo::NONE,
+ )
});
let def = loc.def.ast_id().left().and_then(|id| {
@@ -799,342 +738,27 @@ impl ExpansionInfo {
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ let tt = collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
+ .and_then(|x| Either::left(x.1))?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
- Some(ExpansionInfo {
+ ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
- })
- }
-}
-
-/// `AstId` points to an AST node in any file.
-///
-/// It is stable across reparses, and can be used as salsa key/value.
-pub type AstId<N> = InFile<FileAstId<N>>;
-
-impl<N: AstIdNode> AstId<N> {
- pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
- self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
- }
- pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
- InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
- }
- pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
- db.ast_id_map(self.file_id).get(self.value)
- }
-}
-
-pub type ErasedAstId = InFile<ErasedFileAstId>;
-
-impl ErasedAstId {
- pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
- db.ast_id_map(self.file_id).get_raw(self.value)
- }
-}
-
-/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
-///
-/// Typical usages are:
-///
-/// * `InFile<SyntaxNode>` -- syntax node in a file
-/// * `InFile<ast::FnDef>` -- ast node in a file
-/// * `InFile<TextSize>` -- offset in a file
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InFile<T> {
- pub file_id: HirFileId,
- pub value: T,
-}
-
-impl<T> InFile<T> {
- pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
- InFile { file_id, value }
- }
-
- pub fn with_value<U>(&self, value: U) -> InFile<U> {
- InFile::new(self.file_id, value)
- }
-
- pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
- InFile::new(self.file_id, f(self.value))
- }
-
- pub fn as_ref(&self) -> InFile<&T> {
- self.with_value(&self.value)
- }
-
- pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
- db.parse_or_expand(self.file_id)
- }
-}
-
-impl<T: Clone> InFile<&T> {
- pub fn cloned(&self) -> InFile<T> {
- self.with_value(self.value.clone())
- }
-}
-
-impl<T> InFile<Option<T>> {
- pub fn transpose(self) -> Option<InFile<T>> {
- let value = self.value?;
- Some(InFile::new(self.file_id, value))
- }
-}
-
-impl<L, R> InFile<Either<L, R>> {
- pub fn transpose(self) -> Either<InFile<L>, InFile<R>> {
- match self.value {
- Either::Left(l) => Either::Left(InFile::new(self.file_id, l)),
- Either::Right(r) => Either::Right(InFile::new(self.file_id, r)),
+ arg_map,
}
}
}
-impl InFile<&SyntaxNode> {
- pub fn ancestors_with_macros(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
- iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => node.file_id.call_node(db),
- })
- }
-
- /// Skips the attributed item that caused the macro invocation we are climbing up
- pub fn ancestors_with_macros_skip_attr_item(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
- let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => {
- let parent_node = node.file_id.call_node(db)?;
- if node.file_id.is_attr_macro(db) {
- // macro call was an attributed item, skip it
- // FIXME: does this fail if this is a direct expansion of another macro?
- parent_node.map(|node| node.parent()).transpose()
- } else {
- Some(parent_node)
- }
- }
- };
- iter::successors(succ(&self.cloned()), succ)
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- ///
- /// For attributes and derives, this will point back to the attribute only.
- /// For the entire item use [`InFile::original_file_range_full`].
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range(db)
- }
- }
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range_with_body(db)
- }
- }
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match ascend_node_border_tokens(db, self) {
- Some(InFile { file_id, value: (first, last) }) => {
- let original_file = file_id.original_file(db);
- let range = first.text_range().cover(last.text_range());
- if file_id != original_file.into() {
- tracing::error!("Failed mapping up more for {:?}", range);
- return None;
- }
- Some(FileRange { file_id: original_file, range })
- }
- _ if !self.file_id.is_macro() => Some(FileRange {
- file_id: self.file_id.original_file(db),
- range: self.value.text_range(),
- }),
- _ => None,
- }
- }
-
- pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
- // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- if !self.file_id.is_macro() {
- return Some(self.map(Clone::clone));
- } else if !self.file_id.is_attr_macro(db) {
- return None;
- }
-
- if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self)
- {
- if file_id.is_macro() {
- let range = first.text_range().cover(last.text_range());
- tracing::error!("Failed mapping out of macro file for {:?}", range);
- return None;
- }
- // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
- let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
- let kind = self.value.kind();
- let value = anc.ancestors().find(|it| it.kind() == kind)?;
- return Some(InFile::new(file_id, value));
- }
- None
- }
-}
-
-impl InFile<SyntaxToken> {
- pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
- let expansion = self.file_id.expansion_info(db)?;
- expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range(db)
- }
- }
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
- Some(FileRange { file_id, range: self.value.text_range() })
- }
- HirFileIdRepr::MacroFile(_) => {
- let expansion = self.file_id.expansion_info(db)?;
- let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?;
- let original_file = file_id.original_file(db);
- if file_id != original_file.into() {
- return None;
- }
- Some(FileRange { file_id: original_file, range: value.text_range() })
- }
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InMacroFile<T> {
- pub file_id: MacroFile,
- pub value: T,
-}
-
-impl<T> From<InMacroFile<T>> for InFile<T> {
- fn from(macro_file: InMacroFile<T>) -> Self {
- InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
- }
-}
-
-fn ascend_node_border_tokens(
- db: &dyn db::ExpandDatabase,
- InFile { file_id, value: node }: InFile<&SyntaxNode>,
-) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
- let expansion = file_id.expansion_info(db)?;
-
- let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
- let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
-
- // FIXME: Once the token map rewrite is done, this shouldnt need to rely on syntax nodes and tokens anymore
- let first = first_token(node)?;
- let last = last_token(node)?;
- let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
- let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
- (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
-}
-
-fn ascend_call_token(
- db: &dyn db::ExpandDatabase,
- expansion: &ExpansionInfo,
- token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
- let mut mapping = expansion.map_token_up(db, token.as_ref())?;
- while let (mapped, Origin::Call) = mapping {
- match mapped.file_id.expansion_info(db) {
- Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
- None => return Some(mapped),
- }
- }
- None
-}
-
-impl<N: AstNode> InFile<N> {
- pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
- self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
- }
-
- // FIXME: this should return `Option<InFileNotHirFile<N>>`
- pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
- // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- if !self.file_id.is_macro() {
- return Some(self);
- } else if !self.file_id.is_attr_macro(db) {
- return None;
- }
-
- if let Some(InFile { file_id, value: (first, last) }) =
- ascend_node_border_tokens(db, self.syntax())
- {
- if file_id.is_macro() {
- let range = first.text_range().cover(last.text_range());
- tracing::error!("Failed mapping out of macro file for {:?}", range);
- return None;
- }
- // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
- let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
- let value = anc.ancestors().find_map(N::cast)?;
- return Some(InFile::new(file_id, value));
- }
- None
- }
-
- pub fn syntax(&self) -> InFile<&SyntaxNode> {
- self.with_value(self.value.syntax())
- }
-}
-
/// In Rust, macros expand token trees to token trees. When we want to turn a
/// token tree into an AST node, we need to figure out what kind of AST node we
/// want: something like `foo` can be a type, an expression, or a pattern.
@@ -1199,9 +823,4 @@ impl ExpandTo {
}
}
-#[derive(Debug)]
-pub struct UnresolvedMacro {
- pub path: ModPath,
-}
-
intern::impl_internable!(ModPath, attrs::AttrInput);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 69aa09c4a..9534b5039 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -7,11 +7,11 @@ use std::{
use crate::{
db::ExpandDatabase,
- hygiene::Hygiene,
- name::{known, Name},
+ hygiene::{marks_rev, SyntaxContextExt, Transparency},
+ name::{known, AsName, Name},
+ span::SpanMapRef,
};
-use base_db::CrateId;
-use either::Either;
+use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};
@@ -38,6 +38,7 @@ pub enum PathKind {
Crate,
/// Absolute path (::foo)
Abs,
+ // FIXME: Remove this
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
@@ -46,9 +47,9 @@ impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
- convert_path(db, None, path, hygiene)
+ convert_path(db, None, path, span_map)
}
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@@ -193,33 +194,36 @@ fn convert_path(
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
- Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+ Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
None => prefix,
};
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
- match hygiene.name_ref_to_name(db, name_ref) {
- Either::Left(name) => {
- // no type args in use
- let mut res = prefix.unwrap_or_else(|| {
- ModPath::from_kind(
- segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
- )
- });
- res.segments.push(name);
- res
- }
- Either::Right(crate_id) => {
- return Some(ModPath::from_segments(
- PathKind::DollarCrate(crate_id),
- iter::empty(),
- ))
+ if name_ref.text() == "$crate" {
+ if prefix.is_some() {
+ return None;
}
+ ModPath::from_kind(
+ resolve_crate_root(
+ db,
+ span_map.span_for_range(name_ref.syntax().text_range()).ctx,
+ )
+ .map(PathKind::DollarCrate)
+ .unwrap_or(PathKind::Crate),
+ )
+ } else {
+ let mut res = prefix.unwrap_or_else(|| {
+ ModPath::from_kind(
+ segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+ )
+ });
+ res.segments.push(name_ref.as_name());
+ res
}
}
ast::PathSegmentKind::SelfTypeKw => {
@@ -261,8 +265,14 @@ fn convert_path(
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
- if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
- mod_path.kind = PathKind::DollarCrate(crate_id);
+ let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
+ if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
+ if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ mod_path.kind = match resolve_crate_root(db, syn_ctx) {
+ Some(crate_root) => PathKind::DollarCrate(crate_root),
+ None => PathKind::Crate,
+ }
+ }
}
}
}
@@ -270,6 +280,29 @@ fn convert_path(
Some(mod_path)
}
+pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
+ // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
+ // we don't want to pretend that the `macro_rules!` definition is in the `macro`
+ // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+ // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
+ // definitions actually produced by `macro` and `macro` definitions produced by
+ // `macro_rules!`, but at least such configurations are not stable yet.
+ ctxt = ctxt.normalize_to_macro_rules(db);
+ let mut iter = marks_rev(ctxt, db).peekable();
+ let mut result_mark = None;
+ // Find the last opaque mark from the end if it exists.
+ while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
+ result_mark = Some(mark);
+ iter.next();
+ }
+ // Then find the last semi-transparent mark from the end if it exists.
+ while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
+ result_mark = Some(mark);
+ }
+
+ result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
+}
+
pub use crate::name as __name;
#[macro_export]
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index a876f48bd..a321f94cd 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -470,6 +470,7 @@ pub mod known {
pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+ pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");
#[macro_export]
macro_rules! name {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index 41675c630..de5777968 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -1,6 +1,6 @@
//! Proc Macro Expander stub
-use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@@ -33,11 +33,15 @@ impl ProcMacroExpander {
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
- ProcMacroId(DUMMY_ID) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
- }
+ ProcMacroId(DUMMY_ID) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+ ExpandError::UnresolvedProcMacro(def_crate),
+ ),
ProcMacroId(id) => {
let proc_macros = db.proc_macros();
let proc_macros = match proc_macros.get(&def_crate) {
@@ -45,7 +49,7 @@ impl ProcMacroExpander {
Some(Err(_)) | None => {
never!("Non-dummy expander even though there are no proc macros");
return ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@@ -59,7 +63,7 @@ impl ProcMacroExpander {
id
);
return ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@@ -68,7 +72,8 @@ impl ProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
- match proc_macro.expander.expand(tt, attr_arg, env) {
+ match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
+ {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
@@ -78,9 +83,10 @@ impl ProcMacroExpander {
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
- | ProcMacroExpansionError::Panic(text) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
- }
+ | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+ ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())),
+ ),
},
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
index ab3809abc..acbde26c8 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -1,5 +1,7 @@
//! A simplified version of quote-crate like quasi quote macro
+use base_db::span::SpanData;
+
// A helper macro quote macro
// FIXME:
// 1. Not all puncts are handled
@@ -8,109 +10,109 @@
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
- () => {
+ ($span:ident) => {
Vec::<crate::tt::TokenTree>::new()
};
- ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+ ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
- let children = $crate::__quote!($($tt)*);
+ let children = $crate::__quote!($span $($tt)*);
crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
- open: crate::tt::TokenId::unspecified(),
- close: crate::tt::TokenId::unspecified(),
+ open: $span,
+ close: $span,
},
token_trees: $crate::quote::IntoTt::to_tokens(children),
}
}
};
- ( @PUNCT $first:literal ) => {
+ ( @PUNCT($span:ident) $first:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
]
}
};
- ( @PUNCT $first:literal, $sec:literal ) => {
+ ( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
]
}
};
// hash variable
- ( # $first:ident $($tail:tt)* ) => {
+ ($span:ident # $first:ident $($tail:tt)* ) => {
{
- let token = $crate::quote::ToTokenTree::to_token($first);
+ let token = $crate::quote::ToTokenTree::to_token($first, $span);
let mut tokens = vec![token.into()];
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
- ( ## $first:ident $($tail:tt)* ) => {
+ ($span:ident ## $first:ident $($tail:tt)* ) => {
{
- let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
// Brace
- ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+ ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
// Bracket
- ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+ ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
// Parenthesis
- ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+ ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
// Literal
- ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+ ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
// Ident
- ( $tt:ident ) => {
+ ($span:ident $tt:ident ) => {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
text: stringify!($tt).into(),
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
}]
};
// Puncts
// FIXME: Not all puncts are handled
- ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
- ( & ) => {$crate::__quote!(@PUNCT '&')};
- ( , ) => {$crate::__quote!(@PUNCT ',')};
- ( : ) => {$crate::__quote!(@PUNCT ':')};
- ( ; ) => {$crate::__quote!(@PUNCT ';')};
- ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
- ( . ) => {$crate::__quote!(@PUNCT '.')};
- ( < ) => {$crate::__quote!(@PUNCT '<')};
- ( > ) => {$crate::__quote!(@PUNCT '>')};
- ( ! ) => {$crate::__quote!(@PUNCT '!')};
-
- ( $first:tt $($tail:tt)+ ) => {
+ ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
+ ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
+ ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
+ ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
+ ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
+ ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
+ ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
+ ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
+ ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
+ ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};
+
+ ($span:ident $first:tt $($tail:tt)+ ) => {
{
- let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
@@ -122,19 +124,22 @@ macro_rules! __quote {
/// It probably should implement in proc-macro
#[macro_export]
macro_rules! quote {
- ( $($tt:tt)* ) => {
- $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+ ($span:ident=> $($tt:tt)* ) => {
+ $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
}
}
pub(crate) trait IntoTt {
- fn to_subtree(self) -> crate::tt::Subtree;
+ fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}
impl IntoTt for Vec<crate::tt::TokenTree> {
- fn to_subtree(self) -> crate::tt::Subtree {
- crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
+ fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
+ crate::tt::Subtree {
+ delimiter: crate::tt::Delimiter::invisible_spanned(span),
+ token_trees: self,
+ }
}
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
}
impl IntoTt for crate::tt::Subtree {
- fn to_subtree(self) -> crate::tt::Subtree {
+ fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
self
}
@@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
}
pub(crate) trait ToTokenTree {
- fn to_token(self) -> crate::tt::TokenTree;
+ fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
}
impl ToTokenTree for crate::tt::TokenTree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self
}
}
impl ToTokenTree for &crate::tt::TokenTree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.clone()
}
}
impl ToTokenTree for crate::tt::Subtree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.into()
}
}
macro_rules! impl_to_to_tokentrees {
- ($($ty:ty => $this:ident $im:block);*) => {
+ ($($span:ident: $ty:ty => $this:ident $im:block);*) => {
$(
impl ToTokenTree for $ty {
- fn to_token($this) -> crate::tt::TokenTree {
+ fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.into();
leaf.into()
}
}
impl ToTokenTree for &$ty {
- fn to_token($this) -> crate::tt::TokenTree {
+ fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.clone().into();
leaf.into()
}
@@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
- u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- crate::tt::Leaf => self { self };
- crate::tt::Literal => self { self };
- crate::tt::Ident => self { self };
- crate::tt::Punct => self { self };
- &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
- String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
+ span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
+ _span: crate::tt::Leaf => self { self };
+ _span: crate::tt::Literal => self { self };
+ _span: crate::tt::Ident => self { self };
+ _span: crate::tt::Punct => self { self };
+ span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
+ span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
}
#[cfg(test)]
mod tests {
+ use crate::tt;
+ use base_db::{
+ span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+ };
+ use expect_test::expect;
+ use syntax::{TextRange, TextSize};
+
+ const DUMMY: tt::SpanData = tt::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+
#[test]
fn test_quote_delimiters() {
- assert_eq!(quote!({}).to_string(), "{}");
- assert_eq!(quote!(()).to_string(), "()");
- assert_eq!(quote!([]).to_string(), "[]");
+ assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
+ assert_eq!(quote!(DUMMY =>()).to_string(), "()");
+ assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
}
#[test]
fn test_quote_idents() {
- assert_eq!(quote!(32).to_string(), "32");
- assert_eq!(quote!(struct).to_string(), "struct");
+ assert_eq!(quote!(DUMMY =>32).to_string(), "32");
+ assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
}
#[test]
fn test_quote_hash_simple_literal() {
let a = 20;
- assert_eq!(quote!(#a).to_string(), "20");
+ assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
let s: String = "hello".into();
- assert_eq!(quote!(#s).to_string(), "\"hello\"");
+ assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
}
fn mk_ident(name: &str) -> crate::tt::Ident {
- crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
+ crate::tt::Ident { text: name.into(), span: DUMMY }
}
#[test]
fn test_quote_hash_token_tree() {
let a = mk_ident("hello");
- let quoted = quote!(#a);
+ let quoted = quote!(DUMMY =>#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
- assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295");
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
+ IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
}
#[test]
fn test_quote_simple_derive_copy() {
let name = mk_ident("Foo");
- let quoted = quote! {
+ let quoted = quote! {DUMMY =>
impl Clone for #name {
fn clone(&self) -> Self {
Self {}
@@ -268,18 +289,19 @@ mod tests {
// }
let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")];
- let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
+ let fields =
+ fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);
let list = crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
- open: crate::tt::TokenId::unspecified(),
- close: crate::tt::TokenId::unspecified(),
+ open: DUMMY,
+ close: DUMMY,
},
token_trees: fields.collect(),
};
- let quoted = quote! {
+ let quoted = quote! {DUMMY =>
impl Clone for #struct_name {
fn clone(&self) -> Self {
Self #list
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs
new file mode 100644
index 000000000..fe476a40f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs
@@ -0,0 +1,124 @@
+//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
+//! as associating spans with text ranges in a particular file.
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+};
+use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
+use triomphe::Arc;
+
+use crate::db::ExpandDatabase;
+
+pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
+
+/// Spanmap for a macro file or a real file
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SpanMap {
+ /// Spanmap for a macro file
+ ExpansionSpanMap(Arc<ExpansionSpanMap>),
+ /// Spanmap for a real file
+ RealSpanMap(Arc<RealSpanMap>),
+}
+
+#[derive(Copy, Clone)]
+pub enum SpanMapRef<'a> {
+ /// Spanmap for a macro file
+ ExpansionSpanMap(&'a ExpansionSpanMap),
+ /// Spanmap for a real file
+ RealSpanMap(&'a RealSpanMap),
+}
+
+impl mbe::SpanMapper<SpanData> for SpanMap {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+impl mbe::SpanMapper<SpanData> for RealSpanMap {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+
+impl SpanMap {
+ pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ match self {
+ Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
+ Self::RealSpanMap(span_map) => span_map.span_for_range(range),
+ }
+ }
+
+ pub fn as_ref(&self) -> SpanMapRef<'_> {
+ match self {
+ Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
+ Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
+ }
+ }
+}
+
+impl SpanMapRef<'_> {
+ pub fn span_for_range(self, range: TextRange) -> SpanData {
+ match self {
+ Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
+ Self::RealSpanMap(span_map) => span_map.span_for_range(range),
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Debug)]
+pub struct RealSpanMap {
+ file_id: FileId,
+ /// Invariant: Sorted vec over TextSize
+ // FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
+ pairs: Box<[(TextSize, ErasedFileAstId)]>,
+ end: TextSize,
+}
+
+impl RealSpanMap {
+ /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
+ pub fn absolute(file_id: FileId) -> Self {
+ RealSpanMap {
+ file_id,
+ pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
+ end: TextSize::new(!0),
+ }
+ }
+
+ pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
+ let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
+ let ast_id_map = db.ast_id_map(file_id.into());
+ let tree = db.parse(file_id).tree();
+ pairs
+ .extend(tree.items().map(|item| {
+ (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
+ }));
+ RealSpanMap {
+ file_id,
+ pairs: pairs.into_boxed_slice(),
+ end: tree.syntax().text_range().end(),
+ }
+ }
+
+ pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ assert!(
+ range.end() <= self.end,
+ "range {range:?} goes beyond the end of the file {:?}",
+ self.end
+ );
+ let start = range.start();
+ let idx = self
+ .pairs
+ .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
+ .unwrap_err();
+ let (offset, ast_id) = self.pairs[idx - 1];
+ SpanData {
+ range: range - offset,
+ anchor: SpanAnchor { file_id: self.file_id, ast_id },
+ ctx: SyntaxContextId::ROOT,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index b95ae05cc..bbcb76a43 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -13,27 +13,27 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
+itertools.workspace = true
arrayvec = "0.7.2"
-bitflags = "2.1.0"
+bitflags.workspace = true
smallvec.workspace = true
ena = "0.14.0"
-either = "1.7.0"
+either.workspace = true
oorandom = "11.1.3"
-tracing = "0.1.35"
+tracing.workspace = true
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.92.0", default-features = false }
-chalk-ir = "0.92.0"
-chalk-recursive = { version = "0.92.0", default-features = false }
-chalk-derive = "0.92.0"
+chalk-solve = { version = "0.95.0", default-features = false }
+chalk-ir = "0.95.0"
+chalk-recursive = { version = "0.95.0", default-features = false }
+chalk-derive = "0.95.0"
la-arena.workspace = true
once_cell = "1.17.0"
triomphe.workspace = true
nohash-hasher.workspace = true
typed-arena = "2.0.1"
-rustc_index.workspace = true
+rustc-dependencies.workspace = true
# local deps
stdx.workspace = true
@@ -47,12 +47,13 @@ limit.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
-tracing = "0.1.35"
-tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "registry",
-] }
-tracing-tree = "0.2.1"
+tracing.workspace = true
+tracing-subscriber.workspace = true
+tracing-tree.workspace = true
project-model = { path = "../project-model" }
# local deps
test-utils.workspace = true
+
+[features]
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index c0b243ea2..c9ab35685 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -28,6 +28,7 @@ pub trait TyExt {
fn is_unknown(&self) -> bool;
fn contains_unknown(&self) -> bool;
fn is_ty_var(&self) -> bool;
+ fn is_union(&self) -> bool;
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
fn as_builtin(&self) -> Option<BuiltinType>;
@@ -96,6 +97,10 @@ impl TyExt for Ty {
matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
}
+ fn is_union(&self) -> bool {
+ matches!(self.adt_id(Interner), Some(AdtId(hir_def::AdtId::UnionId(_))))
+ }
+
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> {
match self.kind(Interner) {
TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 0348680e5..9792d945e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -1,9 +1,10 @@
//! Constant evaluation details
-use base_db::CrateId;
+use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
use hir_def::{
- hir::Expr,
+ body::Body,
+ hir::{Expr, ExprId},
path::Path,
resolver::{Resolver, ValueNs},
type_ref::LiteralConstRef,
@@ -136,7 +137,7 @@ pub fn intern_const_ref(
ty: Ty,
krate: CrateId,
) -> Const {
- let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
+ let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
let bytes = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
@@ -184,7 +185,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &GeneralConstId,
_: &Substitution,
_: &Option<Arc<TraitEnvironment>>,
@@ -194,7 +195,7 @@ pub(crate) fn const_eval_recover(
pub(crate) fn const_eval_static_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@@ -202,7 +203,7 @@ pub(crate) fn const_eval_static_recover(
pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &EnumVariantId,
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@@ -280,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant(
// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const(
- expr: Idx<Expr>,
+ expr: ExprId,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'_>,
args: impl FnOnce() -> Generics,
@@ -288,13 +289,24 @@ pub(crate) fn eval_to_const(
) -> Const {
let db = ctx.db;
let infer = ctx.clone().resolve_all();
+ fn has_closure(body: &Body, expr: ExprId) -> bool {
+ if matches!(body[expr], Expr::Closure { .. }) {
+ return true;
+ }
+ let mut r = false;
+ body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx));
+ r
+ }
+ if has_closure(&ctx.body, expr) {
+ // Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic.
+ return unknown_const(infer[expr].clone());
+ }
if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
return c;
}
}
- let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
return result;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 7ad3659a4..b395e7f4a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1159,6 +1159,20 @@ fn pattern_matching_slice() {
"#,
33213,
);
+ check_number(
+ r#"
+ //- minicore: slice, index, coerce_unsized, copy
+ const fn f(mut slice: &[u32]) -> usize {
+ slice = match slice {
+ [0, rest @ ..] | rest => rest,
+ };
+ slice.len()
+ }
+ const GOAL: usize = f(&[]) + f(&[10]) + f(&[0, 100])
+ + f(&[1000, 1000, 1000]) + f(&[0, 57, 34, 46, 10000, 10000]);
+ "#,
+ 10,
+ );
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 9c96b5ab8..410bcbf03 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -20,8 +20,8 @@ use crate::{
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError},
Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult,
- Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty,
- TyDefId, ValueTyDefId,
+ Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;
@@ -47,7 +47,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: DefWithBodyId,
subst: Substitution,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
@@ -55,7 +55,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: ClosureId,
subst: Substitution,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)]
@@ -81,7 +81,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: GeneralConstId,
subst: Substitution,
- trait_env: Option<Arc<crate::TraitEnvironment>>,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
@@ -104,16 +104,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: AdtId,
subst: Substitution,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
- fn layout_of_ty(
- &self,
- ty: Ty,
- env: Arc<crate::TraitEnvironment>,
- ) -> Result<Arc<Layout>, LayoutError>;
+ fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
@@ -121,7 +117,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method(
&self,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution);
@@ -149,10 +145,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
- fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<crate::TraitEnvironment>;
+ fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower::trait_environment_query)]
- fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
+ fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::cycle(crate::lower::generic_defaults_recover)]
@@ -249,7 +245,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn normalize_projection(
&self,
projection: crate::ProjectionTy,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Ty;
#[salsa::invoke(trait_solve_wait)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index ef43ed5c4..c1b361900 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -11,9 +11,3 @@ pub use crate::diagnostics::{
},
unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
};
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct IncoherentImpl {
- pub file_id: hir_expand::HirFileId,
- pub impl_: syntax::AstPtr<syntax::ast::Impl>,
-}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 36d69edf9..51a044d8e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -9,6 +9,7 @@
//! - constants (e.g. `const FOO: u8 = 10;`)
//! - static items (e.g. `static FOO: u8 = 10;`)
//! - match arm bindings (e.g. `foo @ Some(_)`)
+//! - modules (e.g. `mod foo { ... }` or `mod foo;`)
mod case_conv;
@@ -18,12 +19,12 @@ use hir_def::{
data::adt::VariantData,
hir::{Pat, PatId},
src::HasSource,
- AdtId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ItemContainerId,
- Lookup, ModuleDefId, StaticId, StructId,
+ AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ StaticId, StructId,
};
use hir_expand::{
name::{AsName, Name},
- HirFileId,
+ HirFileId, MacroFileIdExt,
};
use stdx::{always, never};
use syntax::{
@@ -83,6 +84,7 @@ pub enum IdentType {
Structure,
Variable,
Variant,
+ Module,
}
impl fmt::Display for IdentType {
@@ -97,6 +99,7 @@ impl fmt::Display for IdentType {
IdentType::Structure => "Structure",
IdentType::Variable => "Variable",
IdentType::Variant => "Variant",
+ IdentType::Module => "Module",
};
repr.fmt(f)
@@ -132,6 +135,7 @@ impl<'a> DeclValidator<'a> {
pub(super) fn validate_item(&mut self, item: ModuleDefId) {
match item {
+ ModuleDefId::ModuleId(module_id) => self.validate_module(module_id),
ModuleDefId::FunctionId(func) => self.validate_func(func),
ModuleDefId::AdtId(adt) => self.validate_adt(adt),
ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
@@ -192,7 +196,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
- file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())
+ matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()))
})
};
@@ -230,6 +234,55 @@ impl<'a> DeclValidator<'a> {
|| parent()
}
+ fn validate_module(&mut self, module_id: ModuleId) {
+ // Check whether non-snake case identifiers are allowed for this module.
+ if self.allowed(module_id.into(), allow::NON_SNAKE_CASE, false) {
+ return;
+ }
+
+ // Check the module name.
+ let Some(module_name) = module_id.name(self.db.upcast()) else { return };
+ let module_name_replacement =
+ module_name.as_str().and_then(to_lower_snake_case).map(|new_name| Replacement {
+ current_name: module_name,
+ suggested_text: new_name,
+ expected_case: CaseType::LowerSnakeCase,
+ });
+
+ if let Some(module_name_replacement) = module_name_replacement {
+ let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
+ let module_src = module_data.declaration_source(self.db.upcast());
+
+ if let Some(module_src) = module_src {
+ let ast_ptr = match module_src.value.name() {
+ Some(name) => name,
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a module without a name: {:?}",
+ module_name_replacement,
+ module_src
+ );
+ return;
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: module_src.file_id,
+ ident_type: IdentType::Module,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: module_name_replacement.expected_case,
+ ident_text: module_name_replacement
+ .current_name
+ .display(self.db.upcast())
+ .to_string(),
+ suggested_text: module_name_replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+ }
+ }
+
fn validate_func(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
@@ -237,8 +290,6 @@ impl<'a> DeclValidator<'a> {
return;
}
- self.validate_body_inner_items(func.into());
-
// Check whether non-snake case identifiers are allowed for this function.
if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
return;
@@ -336,48 +387,44 @@ impl<'a> DeclValidator<'a> {
for (id, replacement) in pats_replacements {
if let Ok(source_ptr) = source_map.pat_syntax(id) {
- if let Some(expr) = source_ptr.value.as_ref().left() {
+ if let Some(ptr) = source_ptr.value.clone().cast::<ast::IdentPat>() {
let root = source_ptr.file_syntax(self.db.upcast());
- if let ast::Pat::IdentPat(ident_pat) = expr.to_node(&root) {
- let parent = match ident_pat.syntax().parent() {
- Some(parent) => parent,
- None => continue,
- };
- let name_ast = match ident_pat.name() {
- Some(name_ast) => name_ast,
- None => continue,
- };
+ let ident_pat = ptr.to_node(&root);
+ let parent = match ident_pat.syntax().parent() {
+ Some(parent) => parent,
+ None => continue,
+ };
+ let name_ast = match ident_pat.name() {
+ Some(name_ast) => name_ast,
+ None => continue,
+ };
+
+ let is_param = ast::Param::can_cast(parent.kind());
+
+ // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
+ // because e.g. match arms are patterns as well.
+ // In other words, we check that it's a named variable binding.
+ let is_binding = ast::LetStmt::can_cast(parent.kind())
+ || (ast::MatchArm::can_cast(parent.kind())
+ && ident_pat.at_token().is_some());
+ if !(is_param || is_binding) {
+ // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
+ continue;
+ }
- let is_param = ast::Param::can_cast(parent.kind());
-
- // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
- // because e.g. match arms are patterns as well.
- // In other words, we check that it's a named variable binding.
- let is_binding = ast::LetStmt::can_cast(parent.kind())
- || (ast::MatchArm::can_cast(parent.kind())
- && ident_pat.at_token().is_some());
- if !(is_param || is_binding) {
- // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
- continue;
- }
+ let ident_type =
+ if is_param { IdentType::Parameter } else { IdentType::Variable };
- let ident_type =
- if is_param { IdentType::Parameter } else { IdentType::Variable };
-
- let diagnostic = IncorrectCase {
- file: source_ptr.file_id,
- ident_type,
- ident: AstPtr::new(&name_ast),
- expected_case: replacement.expected_case,
- ident_text: replacement
- .current_name
- .display(self.db.upcast())
- .to_string(),
- suggested_text: replacement.suggested_text,
- };
+ let diagnostic = IncorrectCase {
+ file: source_ptr.file_id,
+ ident_type,
+ ident: AstPtr::new(&name_ast),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
+ suggested_text: replacement.suggested_text,
+ };
- self.sink.push(diagnostic);
- }
+ self.sink.push(diagnostic);
}
}
}
@@ -519,11 +566,6 @@ impl<'a> DeclValidator<'a> {
fn validate_enum(&mut self, enum_id: EnumId) {
let data = self.db.enum_data(enum_id);
- for (local_id, _) in data.variants.iter() {
- let variant_id = EnumVariantId { parent: enum_id, local_id };
- self.validate_body_inner_items(variant_id.into());
- }
-
// Check whether non-camel case names are allowed for this enum.
if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
@@ -648,8 +690,6 @@ impl<'a> DeclValidator<'a> {
fn validate_const(&mut self, const_id: ConstId) {
let data = self.db.const_data(const_id);
- self.validate_body_inner_items(const_id.into());
-
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
@@ -698,8 +738,6 @@ impl<'a> DeclValidator<'a> {
return;
}
- self.validate_body_inner_items(static_id.into());
-
if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
@@ -737,17 +775,4 @@ impl<'a> DeclValidator<'a> {
self.sink.push(diagnostic);
}
-
- // FIXME: We don't currently validate names within `DefWithBodyId::InTypeConstId`.
- /// Recursively validates inner scope items, such as static variables and constants.
- fn validate_body_inner_items(&mut self, body_id: DefWithBodyId) {
- let body = self.db.body(body_id);
- for (_, block_def_map) in body.blocks(self.db.upcast()) {
- for (_, module) in block_def_map.modules() {
- for def_id in module.scope.declarations() {
- self.validate_item(def_id);
- }
- }
- }
- }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
index 2c1368962..cbe1af157 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -11,50 +11,7 @@ pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
return None;
}
- // Taken from rustc.
- let ret = ident
- .trim_matches('_')
- .split('_')
- .filter(|component| !component.is_empty())
- .map(|component| {
- let mut camel_cased_component = String::with_capacity(component.len());
-
- let mut new_word = true;
- let mut prev_is_lower_case = true;
-
- for c in component.chars() {
- // Preserve the case if an uppercase letter follows a lowercase letter, so that
- // `camelCase` is converted to `CamelCase`.
- if prev_is_lower_case && c.is_uppercase() {
- new_word = true;
- }
-
- if new_word {
- camel_cased_component.extend(c.to_uppercase());
- } else {
- camel_cased_component.extend(c.to_lowercase());
- }
-
- prev_is_lower_case = c.is_lowercase();
- new_word = false;
- }
-
- camel_cased_component
- })
- .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
- // separate two components with an underscore if their boundary cannot
- // be distinguished using an uppercase/lowercase case distinction
- let join = prev
- .and_then(|prev| {
- let f = next.chars().next()?;
- let l = prev.chars().last()?;
- Some(!char_has_case(l) && !char_has_case(f))
- })
- .unwrap_or(false);
- (acc + if join { "_" } else { "" } + &next, Some(next))
- })
- .0;
- Some(ret)
+ Some(stdx::to_camel_case(ident))
}
/// Converts an identifier to a lower_snake_case form.
@@ -97,7 +54,9 @@ fn is_camel_case(name: &str) -> bool {
&& !name.chars().any(|snd| {
let ret = match fst {
None => false,
- Some(fst) => char_has_case(fst) && snd == '_' || char_has_case(snd) && fst == '_',
+ Some(fst) => {
+ stdx::char_has_case(fst) && snd == '_' || stdx::char_has_case(snd) && fst == '_'
+ }
};
fst = Some(snd);
@@ -135,11 +94,6 @@ fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
})
}
-// Taken from rustc.
-fn char_has_case(c: char) -> bool {
- c.is_lowercase() || c.is_uppercase()
-}
-
#[cfg(test)]
mod tests {
use super::*;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
index f8cdeaa5e..2e04bbfee 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -147,7 +147,7 @@ impl<'a> PatCtxt<'a> {
}
hir_def::hir::Pat::Bind { id, subpat, .. } => {
- let bm = self.infer.binding_modes[id];
+ let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
let name = &self.body.bindings[id].name;
match (bm, ty.kind(Interner)) {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index f6d6b00d7..d81926f7c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -23,7 +23,7 @@ use hir_def::{
EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
TraitId,
};
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::name::Name;
use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
@@ -448,9 +448,8 @@ fn render_const_scalar(
) -> Result<(), HirDisplayError> {
// FIXME: We need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
- let trait_env = Arc::new(TraitEnvironment::empty(
- *f.db.crate_graph().crates_in_topological_order().last().unwrap(),
- ));
+ let trait_env =
+ TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap());
match ty.kind(Interner) {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
@@ -945,6 +944,7 @@ impl HirDisplay for Ty {
ItemInNs::Types((*def_id).into()),
module_id,
false,
+ true,
) {
write!(f, "{}", path.display(f.db.upcast()))?;
} else {
@@ -1731,13 +1731,13 @@ impl HirDisplay for TypeRef {
f.write_joined(bounds, " + ")?;
}
TypeRef::Macro(macro_call) => {
- let macro_call = macro_call.to_node(f.db.upcast());
- let ctx = hir_def::lower::LowerCtx::with_hygiene(
+ let ctx = hir_def::lower::LowerCtx::with_span_map(
f.db.upcast(),
- &Hygiene::new_unhygienic(),
+ f.db.span_map(macro_call.file_id),
);
+ let macro_call = macro_call.to_node(f.db.upcast());
match macro_call.path() {
- Some(path) => match Path::from_src(path, &ctx) {
+ Some(path) => match Path::from_src(&ctx, path) {
Some(path) => path.hir_fmt(f)?,
None => write!(f, "{{macro}}")?,
},
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 78d3c667a..6f724e458 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -113,7 +113,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
// FIXME(const-generic-body): We should not get the return type in this way.
ctx.return_ty = c
.lookup(db.upcast())
- .thing
+ .expected_ty
.box_any()
.downcast::<InTypeConstIdMetadata>()
.unwrap()
@@ -420,7 +420,19 @@ pub struct InferenceResult {
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
- pub binding_modes: ArenaMap<BindingId, BindingMode>,
+ /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
+ ///
+ /// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
+ /// or pattern can have multiple binding modes. For example:
+ /// ```
+ /// fn foo(mut slice: &[u32]) -> usize {
+ /// slice = match slice {
+ /// [0, rest @ ..] | rest => rest,
+ /// };
+ /// }
+ /// ```
+ /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
+ pub binding_modes: ArenaMap<PatId, BindingMode>,
pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
@@ -1140,20 +1152,15 @@ impl<'a> InferenceContext<'a> {
(ty, variant)
}
TypeNs::TypeAliasId(it) => {
- let container = it.lookup(self.db.upcast()).container;
- let parent_subst = match container {
- ItemContainerId::TraitId(id) => {
- let subst = TyBuilder::subst_for_def(self.db, id, None)
- .fill_with_inference_vars(&mut self.table)
- .build();
- Some(subst)
- }
- // Type aliases do not exist in impls.
- _ => None,
+ let resolved_seg = match unresolved {
+ None => path.segments().last().unwrap(),
+ Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(),
};
- let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst)
- .fill_with_inference_vars(&mut self.table)
- .build();
+ let substs =
+ ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
+ let ty = self.db.ty(it.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+
self.resolve_variant_on_alias(ty, unresolved, mod_path)
}
TypeNs::AdtSelfType(_) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index 13d6b5643..af74df103 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -679,7 +679,7 @@ impl InferenceContext<'_> {
| Pat::Range { .. } => {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
- Pat::Bind { id, .. } => match self.result.binding_modes[*id] {
+ Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
@@ -735,6 +735,32 @@ impl InferenceContext<'_> {
self.walk_expr(expr);
}
+ fn restrict_precision_for_unsafe(&mut self) {
+ for capture in &mut self.current_captures {
+ let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
+ if ty.as_raw_ptr().is_some() || ty.is_union() {
+ capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+ capture.place.projections.truncate(0);
+ continue;
+ }
+ for (i, p) in capture.place.projections.iter().enumerate() {
+ ty = p.projected_ty(
+ ty,
+ self.db,
+ |_, _, _| {
+ unreachable!("Closure field only happens in MIR");
+ },
+ self.owner.module(self.db.upcast()).krate(),
+ );
+ if ty.as_raw_ptr().is_some() || ty.is_union() {
+ capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+ capture.place.projections.truncate(i + 1);
+ break;
+ }
+ }
+ }
+ }
+
fn adjust_for_move_closure(&mut self) {
for capture in &mut self.current_captures {
if let Some(first_deref) =
@@ -838,8 +864,8 @@ impl InferenceContext<'_> {
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_) => self.consume_place(place, pat.into()),
- Pat::Bind { id, subpat: _ } => {
- let mode = self.result.binding_modes[*id];
+ Pat::Bind { id: _, subpat: _ } => {
+ let mode = self.result.binding_modes[pat];
let capture_kind = match mode {
BindingMode::Move => {
self.consume_place(place, pat.into());
@@ -924,6 +950,7 @@ impl InferenceContext<'_> {
self.result.mutated_bindings_in_closure.insert(item.place.local);
}
}
+ self.restrict_precision_for_unsafe();
// closure_kind should be done before adjust_for_move_closure
let closure_kind = self.closure_kind();
match capture_by {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 0c3c725a7..a5e77a12d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -18,7 +18,6 @@ use hir_def::{
use hir_expand::name::{name, Name};
use stdx::always;
use syntax::ast::RangeOp;
-use triomphe::Arc;
use crate::{
autoderef::{builtin_deref, deref_by_trait, Autoderef},
@@ -40,7 +39,8 @@ use crate::{
traits::FnTrait,
utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
- Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+ Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
+ TyKind,
};
use super::{
@@ -579,7 +579,7 @@ impl InferenceContext<'_> {
}
ty
}
- Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name),
+ Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name, expected),
Expr::Await { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
@@ -1291,7 +1291,7 @@ impl InferenceContext<'_> {
let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
let prev_env = block_id.map(|block_id| {
let prev_env = self.table.trait_env.clone();
- Arc::make_mut(&mut self.table.trait_env).block = Some(block_id);
+ TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
prev_env
});
@@ -1456,7 +1456,13 @@ impl InferenceContext<'_> {
})
}
- fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
+ fn infer_field_access(
+ &mut self,
+ tgt_expr: ExprId,
+ receiver: ExprId,
+ name: &Name,
+ expected: &Expectation,
+ ) -> Ty {
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
if name.is_missing() {
@@ -1482,28 +1488,42 @@ impl InferenceContext<'_> {
ty
}
None => {
- // no field found,
- let method_with_same_name_exists = {
- self.get_traits_in_scope();
-
- let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
- method_resolution::lookup_method(
- self.db,
- &canonicalized_receiver.value,
- self.table.trait_env.clone(),
- self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
- VisibleFromModule::Filter(self.resolver.module()),
- name,
- )
- .is_some()
- };
+ // no field found, lets attempt to resolve it like a function so that IDE things
+ // work out while people are typing
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+ let resolved = method_resolution::lookup_method(
+ self.db,
+ &canonicalized_receiver.value,
+ self.table.trait_env.clone(),
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
+ VisibleFromModule::Filter(self.resolver.module()),
+ name,
+ );
self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField {
expr: tgt_expr,
- receiver: receiver_ty,
+ receiver: receiver_ty.clone(),
name: name.clone(),
- method_with_same_name_exists,
+ method_with_same_name_exists: resolved.is_some(),
});
- self.err_ty()
+ match resolved {
+ Some((adjust, func, _)) => {
+ let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+ let generics = generics(self.db.upcast(), func.into());
+ let substs = self.substs_for_method_call(generics, None);
+ self.write_expr_adj(receiver, adjustments);
+ self.write_method_resolution(tgt_expr, func, substs.clone());
+
+ self.check_method_call(
+ tgt_expr,
+ &[],
+ self.db.value_ty(func.into()),
+ substs,
+ ty,
+ expected,
+ )
+ }
+ None => self.err_ty(),
+ }
}
}
}
@@ -1517,7 +1537,7 @@ impl InferenceContext<'_> {
generic_args: Option<&GenericArgs>,
expected: &Expectation,
) -> Ty {
- let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+ let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
let resolved = method_resolution::lookup_method(
@@ -1568,23 +1588,32 @@ impl InferenceContext<'_> {
)
}
};
+ self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected)
+ }
+
+ fn check_method_call(
+ &mut self,
+ tgt_expr: ExprId,
+ args: &[ExprId],
+ method_ty: Binders<Ty>,
+ substs: Substitution,
+ receiver_ty: Ty,
+ expected: &Expectation,
+ ) -> Ty {
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
- let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+ let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
match method_ty.callable_sig(self.db) {
- Some(sig) => {
+ Some(sig) => (
if !sig.params().is_empty() {
- (
- sig.params()[0].clone(),
- sig.params()[1..].to_vec(),
- sig.ret().clone(),
- sig.is_varargs,
- )
+ (sig.params()[0].clone(), sig.params()[1..].to_vec())
} else {
- (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
- }
- }
- None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+ (self.err_ty(), Vec::new())
+ },
+ sig.ret().clone(),
+ sig.is_varargs,
+ ),
+ None => ((self.err_ty(), Vec::new()), self.err_ty(), true),
};
self.unify(&formal_receiver_ty, &receiver_ty);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index 4e28ec060..acdb54028 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -262,7 +262,7 @@ impl InferenceContext<'_> {
fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
- if is_non_ref_pat(self.body, pat) {
+ if self.is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
pat_adjustments.push(expected.clone());
@@ -421,7 +421,7 @@ impl InferenceContext<'_> {
} else {
BindingMode::convert(mode)
};
- self.result.binding_modes.insert(binding, mode);
+ self.result.binding_modes.insert(pat, mode);
let inner_ty = match subpat {
Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
@@ -496,24 +496,28 @@ impl InferenceContext<'_> {
self.infer_expr(expr, &Expectation::has_type(expected.clone()))
}
-}
-fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
- match &body[pat] {
- Pat::Tuple { .. }
- | Pat::TupleStruct { .. }
- | Pat::Record { .. }
- | Pat::Range { .. }
- | Pat::Slice { .. } => true,
- Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
- // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
- Pat::Path(..) => true,
- Pat::ConstBlock(..) => true,
- Pat::Lit(expr) => !matches!(
- body[*expr],
- Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
- ),
- Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+ fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
+ Pat::Path(p) => {
+ let v = self.resolve_value_path_inner(p, pat.into());
+ v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+ }
+ Pat::ConstBlock(..) => false,
+ Pat::Lit(expr) => !matches!(
+ body[*expr],
+ Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
+ ),
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => {
+ false
+ }
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index c6bbf2f61..49fb78f67 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -40,33 +40,7 @@ impl InferenceContext<'_> {
}
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
- let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
- let last = path.segments().last()?;
-
- // Don't use `self.make_ty()` here as we need `orig_ns`.
- let ctx =
- crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
- let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
- let ty = self.table.insert_type_vars(ty);
- let ty = self.table.normalize_associated_types_in(ty);
-
- let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
- let ty = self.table.insert_type_vars(ty);
- let ty = self.table.normalize_associated_types_in(ty);
- self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
- } else {
- // FIXME: report error, unresolved first path segment
- let value_or_partial =
- self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
-
- match value_or_partial {
- ResolveValueResult::ValueNs(it, _) => (it, None),
- ResolveValueResult::Partial(def, remaining_index, _) => self
- .resolve_assoc_item(def, path, remaining_index, id)
- .map(|(it, substs)| (it, Some(substs)))?,
- }
- };
+ let (value, self_subst) = self.resolve_value_path_inner(path, id)?;
let value_def = match value {
ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
@@ -144,6 +118,41 @@ impl InferenceContext<'_> {
Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
}
+ pub(super) fn resolve_value_path_inner(
+ &mut self,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ let last = path.segments().last()?;
+
+ // Don't use `self.make_ty()` here as we need `orig_ns`.
+ let ctx =
+ crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
+ let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
+ let ty = self.table.insert_type_vars(ty);
+ let ty = self.table.normalize_associated_types_in(ty);
+
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
+ let ty = self.table.insert_type_vars(ty);
+ let ty = self.table.normalize_associated_types_in(ty);
+ self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
+ } else {
+ // FIXME: report error, unresolved first path segment
+ let value_or_partial =
+ self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it, _) => (it, None),
+ ResolveValueResult::Partial(def, remaining_index, _) => self
+ .resolve_assoc_item(def, path, remaining_index, id)
+ .map(|(it, substs)| (it, Some(substs)))?,
+ }
+ };
+ Some((value, self_subst))
+ }
+
fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
let predicates = self.db.generic_predicates(def);
for predicate in predicates.iter() {
@@ -390,6 +399,7 @@ impl InferenceContext<'_> {
}
}
+#[derive(Debug)]
enum ValuePathResolution {
// It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
// conversion between them + `unwrap()`.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index 0a68a9f3b..ac39bdf5b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -43,7 +43,7 @@ where
}
impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
- pub(super) fn apply_solution(
+ pub(crate) fn apply_solution(
&self,
ctx: &mut InferenceTable<'_>,
solution: Canonical<Substitution>,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index 1a6106c02..bfc4f1383 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -1,5 +1,8 @@
//! Compute the binary representation of a type
+use std::fmt;
+
+use base_db::salsa::Cycle;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
@@ -9,6 +12,10 @@ use hir_def::{
LocalEnumVariantId, LocalFieldId, StructId,
};
use la_arena::{Idx, RawIdx};
+use rustc_dependencies::{
+ abi::AddressSpace,
+ index::{IndexSlice, IndexVec},
+};
use stdx::never;
use triomphe::Arc;
@@ -22,19 +29,13 @@ pub use self::{
target::target_data_layout_query,
};
-macro_rules! user_error {
- ($it: expr) => {
- return Err(LayoutError::UserError(format!($it).into()))
- };
-}
-
mod adt;
mod target;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RustcEnumVariantIdx(pub LocalEnumVariantId);
-impl rustc_index::vec::Idx for RustcEnumVariantIdx {
+impl rustc_dependencies::index::Idx for RustcEnumVariantIdx {
fn new(idx: usize) -> Self {
RustcEnumVariantIdx(Idx::from_raw(RawIdx::from(idx as u32)))
}
@@ -44,19 +45,63 @@ impl rustc_index::vec::Idx for RustcEnumVariantIdx {
}
}
-pub type Layout = LayoutS<RustcEnumVariantIdx>;
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RustcFieldIdx(pub LocalFieldId);
+
+impl RustcFieldIdx {
+ pub fn new(idx: usize) -> Self {
+ RustcFieldIdx(Idx::from_raw(RawIdx::from(idx as u32)))
+ }
+}
+
+impl rustc_dependencies::index::Idx for RustcFieldIdx {
+ fn new(idx: usize) -> Self {
+ RustcFieldIdx(Idx::from_raw(RawIdx::from(idx as u32)))
+ }
+
+ fn index(self) -> usize {
+ u32::from(self.0.into_raw()) as usize
+ }
+}
+
+pub type Layout = LayoutS<RustcFieldIdx, RustcEnumVariantIdx>;
pub type TagEncoding = hir_def::layout::TagEncoding<RustcEnumVariantIdx>;
-pub type Variants = hir_def::layout::Variants<RustcEnumVariantIdx>;
+pub type Variants = hir_def::layout::Variants<RustcFieldIdx, RustcEnumVariantIdx>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError {
- UserError(Box<str>),
- SizeOverflow,
- TargetLayoutNotAvailable,
- HasPlaceholder,
+ HasErrorConst,
HasErrorType,
+ HasPlaceholder,
+ InvalidSimdType,
NotImplemented,
+ RecursiveTypeWithoutIndirection,
+ SizeOverflow,
+ TargetLayoutNotAvailable,
Unknown,
+ UserReprTooSmall,
+}
+
+impl std::error::Error for LayoutError {}
+impl fmt::Display for LayoutError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ LayoutError::HasErrorConst => write!(f, "type contains an unevaluatable const"),
+ LayoutError::HasErrorType => write!(f, "type contains an error"),
+ LayoutError::HasPlaceholder => write!(f, "type contains placeholders"),
+ LayoutError::InvalidSimdType => write!(f, "invalid simd type definition"),
+ LayoutError::NotImplemented => write!(f, "not implemented"),
+ LayoutError::RecursiveTypeWithoutIndirection => {
+ write!(f, "recursive type without indirection")
+ }
+ LayoutError::SizeOverflow => write!(f, "size overflow"),
+ LayoutError::TargetLayoutNotAvailable => write!(f, "target layout not available"),
+ LayoutError::Unknown => write!(f, "unknown"),
+ LayoutError::UserReprTooSmall => {
+ write!(f, "the `#[repr]` hint is too small to hold the discriminants of the enum")
+ }
+ }
+ }
}
struct LayoutCx<'a> {
@@ -66,7 +111,7 @@ struct LayoutCx<'a> {
impl<'a> LayoutCalculator for LayoutCx<'a> {
type TargetDataLayoutRef = &'a TargetDataLayout;
- fn delay_bug(&self, txt: &str) {
+ fn delayed_bug(&self, txt: String) {
never!("{}", txt);
}
@@ -95,9 +140,7 @@ fn layout_of_simd_ty(
let f0_ty = match fields.iter().next() {
Some(it) => it.1.clone().substitute(Interner, subst),
- None => {
- user_error!("simd type with zero fields");
- }
+ None => return Err(LayoutError::InvalidSimdType),
};
// The element type and number of elements of the SIMD vector
@@ -111,7 +154,7 @@ fn layout_of_simd_ty(
// Extract the number of elements from the layout of the array field:
let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields
else {
- user_error!("Array with non array layout");
+ return Err(LayoutError::Unknown);
};
(e_ty.clone(), count, true)
@@ -123,7 +166,7 @@ fn layout_of_simd_ty(
// Compute the ABI of the element type:
let e_ly = db.layout_of_ty(e_ty, env.clone())?;
let Abi::Scalar(e_abi) = e_ly.abi else {
- user_error!("simd type with inner non scalar type");
+ return Err(LayoutError::Unknown);
};
// Compute the size and alignment of the vector:
@@ -145,6 +188,8 @@ fn layout_of_simd_ty(
largest_niche: e_ly.largest_niche,
size,
align,
+ max_repr_align: None,
+ unadjusted_abi_align: align.abi,
}))
}
@@ -230,13 +275,11 @@ pub fn layout_of_ty_query(
.map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
- let fields = fields.iter().collect::<Vec<_>>();
+ let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
- let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(Box::from(
- "unevaluated or mistyped const generic parameter",
- )))? as u64;
+ let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
@@ -255,6 +298,8 @@ pub fn layout_of_ty_query(
largest_niche,
align: element.align,
size,
+ max_repr_align: None,
+ unadjusted_abi_align: element.align.abi,
}
}
TyKind::Slice(element) => {
@@ -266,11 +311,23 @@ pub fn layout_of_ty_query(
largest_niche: None,
align: element.align,
size: Size::ZERO,
+ max_repr_align: None,
+ unadjusted_abi_align: element.align.abi,
}
}
+ TyKind::Str => Layout {
+ variants: Variants::Single { index: struct_variant_idx() },
+ fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
+ abi: Abi::Aggregate { sized: false },
+ largest_niche: None,
+ align: dl.i8_align,
+ size: Size::ZERO,
+ max_repr_align: None,
+ unadjusted_abi_align: dl.i8_align.abi,
+ },
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
- let mut data_ptr = scalar_unit(dl, Primitive::Pointer);
+ let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
@@ -294,7 +351,7 @@ pub fn layout_of_ty_query(
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
- let mut vtable = scalar_unit(dl, Primitive::Pointer);
+ let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
vtable.valid_range_mut().start = 1;
vtable
}
@@ -308,32 +365,17 @@ pub fn layout_of_ty_query(
cx.scalar_pair(data_ptr, metadata)
}
TyKind::FnDef(_, _) => layout_of_unit(&cx, dl)?,
- TyKind::Str => Layout {
- variants: Variants::Single { index: struct_variant_idx() },
- fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
- abi: Abi::Aggregate { sized: false },
- largest_niche: None,
- align: dl.i8_align,
- size: Size::ZERO,
- },
- TyKind::Never => Layout {
- variants: Variants::Single { index: struct_variant_idx() },
- fields: FieldsShape::Primitive,
- abi: Abi::Uninhabited,
- largest_niche: None,
- align: dl.i8_align,
- size: Size::ZERO,
- },
+ TyKind::Never => cx.layout_of_never_type(),
TyKind::Dyn(_) | TyKind::Foreign(_) => {
let mut unit = layout_of_unit(&cx, dl)?;
match unit.abi {
Abi::Aggregate { ref mut sized } => *sized = false,
- _ => user_error!("bug"),
+ _ => return Err(LayoutError::Unknown),
}
unit
}
TyKind::Function(_) => {
- let mut ptr = scalar_unit(dl, Primitive::Pointer);
+ let mut ptr = scalar_unit(dl, Primitive::Pointer(dl.instruction_address_space));
ptr.valid_range_mut().start = 1;
Layout::scalar(dl, ptr)
}
@@ -363,7 +405,7 @@ pub fn layout_of_ty_query(
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
- let fields = fields.iter().collect::<Vec<_>>();
+ let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
}
@@ -390,17 +432,17 @@ pub fn layout_of_ty_query(
pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &Ty,
_: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- user_error!("infinite sized recursive type");
+ Err(LayoutError::RecursiveTypeWithoutIndirection)
}
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {
- cx.univariant::<RustcEnumVariantIdx, &&Layout>(
+ cx.univariant::<RustcFieldIdx, RustcEnumVariantIdx, &&Layout>(
dl,
- &[],
+ IndexSlice::empty(),
&ReprOptions::default(),
StructKind::AlwaysSized,
)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index 85ef649b8..39788a950 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -2,12 +2,14 @@
use std::{cmp, ops::Bound};
+use base_db::salsa::Cycle;
use hir_def::{
data::adt::VariantData,
layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
AdtId, EnumVariantId, LocalEnumVariantId, VariantId,
};
use la_arena::RawIdx;
+use rustc_dependencies::index::IndexVec;
use smallvec::SmallVec;
use triomphe::Arc;
@@ -20,8 +22,8 @@ use crate::{
use super::LayoutCx;
-pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx {
- RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0)))
+pub(crate) const fn struct_variant_idx() -> RustcEnumVariantIdx {
+ RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from_u32(0)))
}
pub fn layout_of_adt_query(
@@ -74,7 +76,7 @@ pub fn layout_of_adt_query(
.iter()
.map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
- let variants = variants.iter().map(|it| it.iter().collect()).collect();
+ let variants = variants.iter().map(|it| it.iter().collect()).collect::<IndexVec<_, _>>();
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
@@ -105,7 +107,7 @@ pub fn layout_of_adt_query(
&& variants
.iter()
.next()
- .and_then(|it| it.last().map(|it| !it.is_unsized()))
+ .and_then(|it| it.iter().last().map(|it| !it.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)?
@@ -119,7 +121,15 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
let attr = attrs.by_key(name).tt_values();
for tree in attr {
if let Some(it) = tree.token_trees.first() {
- if let Ok(it) = it.to_string().parse() {
+ let text = it.to_string().replace('_', "");
+ let (text, base) = match text.as_bytes() {
+ [b'0', b'x', ..] => (&text[2..], 16),
+ [b'0', b'o', ..] => (&text[2..], 8),
+ [b'0', b'b', ..] => (&text[2..], 2),
+ _ => (&*text, 10),
+ };
+
+ if let Ok(it) = u128::from_str_radix(text, base) {
return Bound::Included(it);
}
}
@@ -131,12 +141,12 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
pub fn layout_of_adt_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &AdtId,
_: &Substitution,
_: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- user_error!("infinite sized recursive type");
+ Err(LayoutError::RecursiveTypeWithoutIndirection)
}
/// Finds the appropriate Integer type and signedness for the given
@@ -160,11 +170,7 @@ fn repr_discr(
let discr = Integer::from_attr(dl, ity);
let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
if discr < fit {
- return Err(LayoutError::UserError(
- "Integer::repr_discr: `#[repr]` hint too small for \
- discriminant range of enum "
- .into(),
- ));
+ return Err(LayoutError::UserReprTooSmall);
}
return Ok((discr, ity.is_signed()));
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index ffdbb9de9..5e3a86c80 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -210,16 +210,13 @@ fn recursive() {
struct BoxLike<T: ?Sized>(*mut T);
struct Goal(BoxLike<Goal>);
}
- check_fail(
- r#"struct Goal(Goal);"#,
- LayoutError::UserError("infinite sized recursive type".into()),
- );
+ check_fail(r#"struct Goal(Goal);"#, LayoutError::RecursiveTypeWithoutIndirection);
check_fail(
r#"
struct Foo<T>(Foo<T>);
struct Goal(Foo<i32>);
"#,
- LayoutError::UserError("infinite sized recursive type".into()),
+ LayoutError::RecursiveTypeWithoutIndirection,
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs
index bbe855a14..939025461 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs
@@ -186,9 +186,9 @@ fn capture_specific_fields() {
fn match_pattern() {
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
- let y: X = X(2, 5, (7, 3));
+ let _y: X = X(2, 5, (7, 3));
move |x: i64| {
- match y {
+ match _y {
_ => x,
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 405bb001b..cf174feed 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -1,6 +1,7 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
macro_rules! eprintln {
@@ -72,14 +73,15 @@ pub use infer::{
};
pub use interner::Interner;
pub use lower::{
- associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
- TyLoweringContext, ValueTyDefId,
+ associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode,
+ TyDefId, TyLoweringContext, ValueTyDefId,
};
pub use mapping::{
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
to_placeholder_idx,
};
+pub use method_resolution::check_orphan_rules;
pub use traits::TraitEnvironment;
pub use utils::{all_super_traits, is_fn_unsafe_to_call};
@@ -120,7 +122,7 @@ pub type TyKind = chalk_ir::TyKind<Interner>;
pub type TypeFlags = chalk_ir::TypeFlags;
pub type DynTy = chalk_ir::DynTy<Interner>;
pub type FnPointer = chalk_ir::FnPointer<Interner>;
-// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>; // a re-export so we don't lose the tuple constructor
pub use chalk_ir::FnSubst;
pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
pub type AliasTy = chalk_ir::AliasTy<Interner>;
@@ -320,8 +322,7 @@ impl CallableSig {
pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
CallableSig {
// FIXME: what to do about lifetime params? -> return PolyFnSig
- // FIXME: use `Arc::from_iter` when it becomes available
- params_and_return: Arc::from(
+ params_and_return: Arc::from_iter(
fn_ptr
.substitution
.clone()
@@ -330,8 +331,7 @@ impl CallableSig {
.0
.as_slice(Interner)
.iter()
- .map(|arg| arg.assert_ty_ref(Interner).clone())
- .collect::<Vec<_>>(),
+ .map(|arg| arg.assert_ty_ref(Interner).clone()),
),
is_varargs: fn_ptr.sig.variadic,
safety: fn_ptr.sig.safety,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 9a61f1535..97c4a741f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -10,7 +10,7 @@ use std::{
iter,
};
-use base_db::CrateId;
+use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
@@ -113,7 +113,9 @@ pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
resolver: &'a Resolver,
in_binders: DebruijnIndex,
- owner: TypeOwnerId,
+ // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
+ // where expected
+ owner: Option<TypeOwnerId>,
/// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others
/// should be converted to variables. I think in practice, this isn't
@@ -127,6 +129,14 @@ pub struct TyLoweringContext<'a> {
impl<'a> TyLoweringContext<'a> {
pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self {
+ Self::new_maybe_unowned(db, resolver, Some(owner))
+ }
+
+ pub fn new_maybe_unowned(
+ db: &'a dyn HirDatabase,
+ resolver: &'a Resolver,
+ owner: Option<TypeOwnerId>,
+ ) -> Self {
let impl_trait_mode = ImplTraitLoweringState::Disallowed;
let type_param_mode = ParamLoweringMode::Placeholder;
let in_binders = DebruijnIndex::INNERMOST;
@@ -213,10 +223,11 @@ impl<'a> TyLoweringContext<'a> {
}
pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
+ let Some(owner) = self.owner else { return unknown_const(const_type) };
const_or_path_to_chalk(
self.db,
self.resolver,
- self.owner,
+ owner,
const_type,
const_ref,
self.type_param_mode,
@@ -407,11 +418,7 @@ impl<'a> TyLoweringContext<'a> {
drop(expander);
let ty = self.lower_ty(&type_ref);
- self.expander
- .borrow_mut()
- .as_mut()
- .unwrap()
- .exit(self.db.upcast(), mark);
+ self.expander.borrow_mut().as_mut().unwrap().exit(mark);
Some(ty)
}
_ => {
@@ -768,7 +775,7 @@ impl<'a> TyLoweringContext<'a> {
}
}
- fn substs_from_path_segment(
+ pub(super) fn substs_from_path_segment(
&self,
segment: PathSegment<'_>,
def: Option<GenericDefId>,
@@ -1097,10 +1104,25 @@ impl<'a> TyLoweringContext<'a> {
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
);
if let Some(type_ref) = &binding.type_ref {
- let ty = self.lower_ty(type_ref);
- let alias_eq =
- AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
- predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ if let (TypeRef::ImplTrait(bounds), ImplTraitLoweringState::Disallowed) =
+ (type_ref, &self.impl_trait_mode)
+ {
+ for bound in bounds {
+ predicates.extend(
+ self.lower_type_bound(
+ bound,
+ TyKind::Alias(AliasTy::Projection(projection_ty.clone()))
+ .intern(Interner),
+ false,
+ ),
+ );
+ }
+ } else {
+ let ty = self.lower_ty(type_ref);
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
}
for bound in binding.bounds.iter() {
predicates.extend(self.lower_type_bound(
@@ -1383,51 +1405,50 @@ pub(crate) fn generic_predicates_for_param_query(
let ctx = TyLoweringContext::new(db, &resolver, def.into())
.with_type_param_mode(ParamLoweringMode::Variable);
let generics = generics(db.upcast(), def);
- let mut predicates: Vec<_> = resolver
- .where_predicates_in_scope()
- // we have to filter out all other predicates *first*, before attempting to lower them
- .filter(|pred| match pred {
- WherePredicate::ForLifetime { target, bound, .. }
- | WherePredicate::TypeBound { target, bound, .. } => {
- match target {
- WherePredicateTypeTarget::TypeRef(type_ref) => {
- if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
- return false;
- }
- }
- &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
- let target_id = TypeOrConstParamId { parent: def, local_id };
- if target_id != param_id {
- return false;
- }
- }
- };
- match &**bound {
- TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
- // Only lower the bound if the trait could possibly define the associated
- // type we're looking for.
+ // we have to filter out all other predicates *first*, before attempting to lower them
+ let predicate = |pred: &&_| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound, .. } => {
+ let invalid_target = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => {
+ ctx.lower_ty_only_param(type_ref) != Some(param_id)
+ }
+ &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ let target_id = TypeOrConstParamId { parent: def, local_id };
+ target_id != param_id
+ }
+ };
+ if invalid_target {
+ return false;
+ }
+
+ match &**bound {
+ TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
+ // Only lower the bound if the trait could possibly define the associated
+ // type we're looking for.
- let assoc_name = match &assoc_name {
- Some(it) => it,
- None => return true,
- };
- let tr = match resolver.resolve_path_in_type_ns_fully(db.upcast(), path) {
- Some(TypeNs::TraitId(tr)) => tr,
- _ => return false,
- };
+ let Some(assoc_name) = &assoc_name else { return true };
+ let Some(TypeNs::TraitId(tr)) =
+ resolver.resolve_path_in_type_ns_fully(db.upcast(), path)
+ else {
+ return false;
+ };
- all_super_traits(db.upcast(), tr).iter().any(|tr| {
- db.trait_data(*tr).items.iter().any(|(name, item)| {
- matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
- })
+ all_super_traits(db.upcast(), tr).iter().any(|tr| {
+ db.trait_data(*tr).items.iter().any(|(name, item)| {
+ matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
- }
- TypeBound::Lifetime(_) | TypeBound::Error => false,
+ })
}
+ TypeBound::Lifetime(_) | TypeBound::Error => false,
}
- WherePredicate::Lifetime { .. } => false,
- })
+ }
+ WherePredicate::Lifetime { .. } => false,
+ };
+ let mut predicates: Vec<_> = resolver
+ .where_predicates_in_scope()
+ .filter(predicate)
.flat_map(|pred| {
ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
})
@@ -1444,13 +1465,12 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
_def: &GenericDefId,
_param_id: &TypeOrConstParamId,
_assoc_name: &Option<Name>,
) -> Arc<[Binders<QuantifiedWhereClause>]> {
- // FIXME: use `Arc::from_iter` when it becomes available
- Arc::from(vec![])
+ Arc::from_iter(None)
}
pub(crate) fn trait_environment_for_body_query(
@@ -1459,7 +1479,7 @@ pub(crate) fn trait_environment_for_body_query(
) -> Arc<TraitEnvironment> {
let Some(def) = def.as_generic_def_id() else {
let krate = def.module(db.upcast()).krate();
- return Arc::new(TraitEnvironment::empty(krate));
+ return TraitEnvironment::empty(krate);
};
db.trait_environment(def)
}
@@ -1519,7 +1539,7 @@ pub(crate) fn trait_environment_query(
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
- Arc::new(TraitEnvironment { krate, block: None, traits_from_clauses: traits_in_scope, env })
+ TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
}
/// Resolve the where clause(s) of an item with generics.
@@ -1588,69 +1608,54 @@ pub(crate) fn generic_defaults_query(
let generic_params = generics(db.upcast(), def);
let parent_start_idx = generic_params.len_self();
- let defaults = Arc::from(
- generic_params
- .iter()
- .enumerate()
- .map(|(idx, (id, p))| {
- match p {
- TypeOrConstParamData::TypeParamData(p) => {
- let mut ty = p
- .default
- .as_ref()
- .map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
- // Each default can only refer to previous parameters.
- // Type variable default referring to parameter coming
- // after it is forbidden (FIXME: report diagnostic)
- ty = fallback_bound_vars(ty, idx, parent_start_idx);
- crate::make_binders(db, &generic_params, ty.cast(Interner))
- }
- TypeOrConstParamData::ConstParamData(p) => {
- let mut val = p.default.as_ref().map_or_else(
- || {
- unknown_const_as_generic(
- db.const_param_ty(ConstParamId::from_unchecked(id)),
- )
- },
- |c| {
- let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
- c.cast(Interner)
- },
- );
- // Each default can only refer to previous parameters, see above.
- val = fallback_bound_vars(val, idx, parent_start_idx);
- make_binders(db, &generic_params, val)
- }
- }
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| {
+ match p {
+ TypeOrConstParamData::TypeParamData(p) => {
+ let mut ty =
+ p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+ // Each default can only refer to previous parameters.
+ // Type variable default referring to parameter coming
+ // after it is forbidden (FIXME: report diagnostic)
+ ty = fallback_bound_vars(ty, idx, parent_start_idx);
+ crate::make_binders(db, &generic_params, ty.cast(Interner))
+ }
+ TypeOrConstParamData::ConstParamData(p) => {
+ let mut val = p.default.as_ref().map_or_else(
+ || {
+ unknown_const_as_generic(
+ db.const_param_ty(ConstParamId::from_unchecked(id)),
+ )
+ },
+ |c| {
+ let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
+ c.cast(Interner)
+ },
+ );
+ // Each default can only refer to previous parameters, see above.
+ val = fallback_bound_vars(val, idx, parent_start_idx);
+ make_binders(db, &generic_params, val)
+ }
+ }
+ }));
defaults
}
pub(crate) fn generic_defaults_recover(
db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
def: &GenericDefId,
) -> Arc<[Binders<crate::GenericArg>]> {
let generic_params = generics(db.upcast(), *def);
// FIXME: this code is not covered in tests.
// we still need one default per parameter
- let defaults = Arc::from(
- generic_params
- .iter_id()
- .map(|id| {
- let val = match id {
- Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
- Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
- };
- crate::make_binders(db, &generic_params, val)
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ let defaults = Arc::from_iter(generic_params.iter_id().map(|id| {
+ let val = match id {
+ Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
+ Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ };
+ crate::make_binders(db, &generic_params, val)
+ }));
defaults
}
@@ -1774,10 +1779,11 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let resolver = t.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, t.into())
.with_type_param_mode(ParamLoweringMode::Variable);
- if db.type_alias_data(t).is_extern {
+ let type_alias_data = db.type_alias_data(t);
+ if type_alias_data.is_extern {
Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
} else {
- let type_ref = &db.type_alias_data(t).type_ref;
+ let type_ref = &type_alias_data.type_ref;
let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
make_binders(db, &generics, inner)
}
@@ -1866,7 +1872,7 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
}
}
-pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
let generics = match *def {
TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
@@ -1916,7 +1922,7 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T
pub(crate) fn impl_self_ty_recover(
db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
impl_id: &ImplId,
) -> Binders<Ty> {
let generics = generics(db.upcast(), (*impl_id).into());
@@ -2048,7 +2054,7 @@ pub(crate) fn const_or_path_to_chalk(
.intern_in_type_const(InTypeConstLoc {
id: it,
owner,
- thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
+ expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
})
.into();
intern_const_scalar(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index f3a5f69b2..041d61c1b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -27,8 +27,9 @@ use crate::{
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
- AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, InEnvironment,
- Interner, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, Goal, Guidance,
+ InEnvironment, Interner, Scalar, Solution, Substitution, TraitEnvironment, TraitRef,
+ TraitRefExt, Ty, TyBuilder, TyExt,
};
/// This is used as a key for indexing impls.
@@ -167,12 +168,9 @@ impl TraitImpls {
) -> Arc<[Arc<Self>]> {
let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
let crate_graph = db.crate_graph();
- // FIXME: use `Arc::from_iter` when it becomes available
- Arc::from(
- crate_graph
- .transitive_deps(krate)
- .map(|krate| db.trait_impls_in_crate(krate))
- .collect::<Vec<_>>(),
+
+ Arc::from_iter(
+ crate_graph.transitive_deps(krate).map(|krate| db.trait_impls_in_crate(krate)),
)
}
@@ -862,6 +860,62 @@ fn is_inherent_impl_coherent(
}
}
+/// Checks whether the impl satisfies the orphan rules.
+///
+/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl`` is valid only if at least one of the following is true:
+/// - Trait is a local trait
+/// - All of
+/// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
+/// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
+pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
+ let substs = TyBuilder::placeholder_subst(db, impl_);
+ let Some(impl_trait) = db.impl_trait(impl_) else {
+ // not a trait impl
+ return true;
+ };
+
+ let local_crate = impl_.lookup(db.upcast()).container.krate();
+ let is_local = |tgt_crate| tgt_crate == local_crate;
+
+ let trait_ref = impl_trait.substitute(Interner, &substs);
+ let trait_id = from_chalk_trait_id(trait_ref.trait_id);
+ if is_local(trait_id.module(db.upcast()).krate()) {
+ // trait to be implemented is local
+ return true;
+ }
+
+ let unwrap_fundamental = |ty: Ty| match ty.kind(Interner) {
+ TyKind::Ref(_, _, referenced) => referenced.clone(),
+ &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
+ let struct_data = db.struct_data(s);
+ if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) {
+ let next = subs.type_parameters(Interner).next();
+ match next {
+ Some(ty) => ty,
+ None => ty,
+ }
+ } else {
+ ty
+ }
+ }
+ _ => ty,
+ };
+ // - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
+ let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
+ match unwrap_fundamental(ty).kind(Interner) {
+ &TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
+ TyKind::Error => true,
+ TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
+ is_local(from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate())
+ }),
+ _ => false,
+ }
+ });
+ // FIXME: param coverage
+ // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
+ is_not_orphan
+}
+
pub fn iterate_path_candidates(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
@@ -1422,26 +1476,52 @@ fn is_valid_fn_candidate(
// We need to consider the bounds on the impl to distinguish functions of the same name
// for a type.
let predicates = db.generic_predicates(impl_id.into());
- let valid = predicates
- .iter()
- .map(|predicate| {
- let (p, b) = predicate
- .clone()
- .substitute(Interner, &impl_subst)
- // Skipping the inner binders is ok, as we don't handle quantified where
- // clauses yet.
- .into_value_and_skipped_binders();
- stdx::always!(b.len(Interner) == 0);
- p
- })
- // It's ok to get ambiguity here, as we may not have enough information to prove
- // obligations. We'll check if the user is calling the selected method properly
- // later anyway.
- .all(|p| table.try_obligation(p.cast(Interner)).is_some());
- match valid {
- true => IsValidCandidate::Yes,
- false => IsValidCandidate::No,
+ let goals = predicates.iter().map(|p| {
+ let (p, b) = p
+ .clone()
+ .substitute(Interner, &impl_subst)
+ // Skipping the inner binders is ok, as we don't handle quantified where
+ // clauses yet.
+ .into_value_and_skipped_binders();
+ stdx::always!(b.len(Interner) == 0);
+
+ p.cast::<Goal>(Interner)
+ });
+
+ for goal in goals.clone() {
+ let in_env = InEnvironment::new(&table.trait_env.env, goal);
+ let canonicalized = table.canonicalize(in_env);
+ let solution = table.db.trait_solve(
+ table.trait_env.krate,
+ table.trait_env.block,
+ canonicalized.value.clone(),
+ );
+
+ match solution {
+ Some(Solution::Unique(canonical_subst)) => {
+ canonicalized.apply_solution(
+ table,
+ Canonical {
+ binders: canonical_subst.binders,
+ value: canonical_subst.value.subst,
+ },
+ );
+ }
+ Some(Solution::Ambig(Guidance::Definite(substs))) => {
+ canonicalized.apply_solution(table, substs);
+ }
+ Some(_) => (),
+ None => return IsValidCandidate::No,
+ }
}
+
+ for goal in goals {
+ if table.try_obligation(goal).is_none() {
+ return IsValidCandidate::No;
+ }
+ }
+
+ IsValidCandidate::Yes
} else {
// For `ItemContainerId::TraitId`, we check if `self_ty` implements the trait in
// `iterate_trait_method_candidates()`.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index e953058cc..f1795e71d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -40,7 +40,6 @@ pub use monomorphization::{
use rustc_hash::FxHashMap;
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
-use triomphe::Arc;
use super::consteval::{intern_const_scalar, try_const_usize};
@@ -147,7 +146,7 @@ impl<V, T> ProjectionElem<V, T> {
base = normalize(
db,
// FIXME: we should get this from caller
- Arc::new(TraitEnvironment::empty(krate)),
+ TraitEnvironment::empty(krate),
base,
);
}
@@ -243,16 +242,16 @@ impl Default for ProjectionStore {
}
impl ProjectionStore {
- fn shrink_to_fit(&mut self) {
+ pub fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
- fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
+ pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
- fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
+ pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
@@ -267,20 +266,24 @@ impl ProjectionStore {
}
impl ProjectionId {
- const EMPTY: ProjectionId = ProjectionId(0);
+ pub const EMPTY: ProjectionId = ProjectionId(0);
+
+ pub fn is_empty(self) -> bool {
+ self == ProjectionId::EMPTY
+ }
- fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
+ pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
- fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
+ pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
}
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place {
pub local: LocalId,
pub projection: ProjectionId,
@@ -1007,7 +1010,7 @@ pub enum Rvalue {
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind {
Assign(Place, Rvalue),
- //FakeRead(Box<(FakeReadCause, Place)>),
+ FakeRead(Place),
//SetDiscriminant {
// place: Box<Place>,
// variant_index: VariantIdx,
@@ -1069,6 +1072,10 @@ pub struct MirBody {
}
impl MirBody {
+ pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
+ self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
+ }
+
fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(
op: &mut Operand,
@@ -1109,7 +1116,9 @@ impl MirBody {
}
}
}
- StatementKind::Deinit(p) => f(p, &mut self.projection_store),
+ StatementKind::FakeRead(p) | StatementKind::Deinit(p) => {
+ f(p, &mut self.projection_store)
+ }
StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@@ -1186,3 +1195,9 @@ pub enum MirSpan {
}
impl_from!(ExprId, PatId for MirSpan);
+
+impl From<&ExprId> for MirSpan {
+ fn from(value: &ExprId) -> Self {
+ (*value).into()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index 41fb12965..74c5efd6c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -24,6 +24,7 @@ use super::{
pub enum MutabilityReason {
Mut { spans: Vec<MirSpan> },
Not,
+ Unused,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -144,7 +145,8 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
}
}
},
- StatementKind::Deinit(_)
+ StatementKind::FakeRead(_)
+ | StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@@ -264,7 +266,10 @@ fn ever_initialized_map(
is_ever_initialized = false;
}
}
- StatementKind::Deinit(_) | StatementKind::Nop | StatementKind::StorageLive(_) => (),
+ StatementKind::Deinit(_)
+ | StatementKind::FakeRead(_)
+ | StatementKind::Nop
+ | StatementKind::StorageLive(_) => (),
}
}
let Some(terminator) = &block.terminator else {
@@ -331,16 +336,37 @@ fn ever_initialized_map(
result
}
+fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, MutabilityReason>) {
+ match &mut result[local] {
+ MutabilityReason::Mut { spans } => spans.push(span),
+ it @ (MutabilityReason::Not | MutabilityReason::Unused) => {
+ *it = MutabilityReason::Mut { spans: vec![span] }
+ }
+ };
+}
+
+fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
+ match &mut result[local] {
+ it @ MutabilityReason::Unused => {
+ *it = MutabilityReason::Not;
+ }
+ _ => (),
+ };
+}
+
+fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
+ if let Operand::Copy(p) | Operand::Move(p) = arg {
+ record_usage(p.local, result);
+ }
+}
+
fn mutability_of_locals(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
- body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
- let mut push_mut_span = |local, span| match &mut result[local] {
- MutabilityReason::Mut { spans } => spans.push(span),
- it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
- };
+ body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect();
+
let ever_init_maps = ever_initialized_map(db, body);
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
let block = &body.basic_blocks[block_id];
@@ -350,23 +376,51 @@ fn mutability_of_locals(
match place_case(db, body, place) {
ProjectionCase::Direct => {
if ever_init_map.get(place.local).copied().unwrap_or_default() {
- push_mut_span(place.local, statement.span);
+ push_mut_span(place.local, statement.span, &mut result);
} else {
ever_init_map.insert(place.local, true);
}
}
ProjectionCase::DirectPart => {
// Partial initialization is not supported, so it is definitely `mut`
- push_mut_span(place.local, statement.span);
+ push_mut_span(place.local, statement.span, &mut result);
+ }
+ ProjectionCase::Indirect => {
+ record_usage(place.local, &mut result);
}
- ProjectionCase::Indirect => (),
+ }
+ match value {
+ Rvalue::CopyForDeref(p)
+ | Rvalue::Discriminant(p)
+ | Rvalue::Len(p)
+ | Rvalue::Ref(_, p) => {
+ record_usage(p.local, &mut result);
+ }
+ Rvalue::Use(o)
+ | Rvalue::Repeat(o, _)
+ | Rvalue::Cast(_, o, _)
+ | Rvalue::UnaryOp(_, o) => record_usage_for_operand(o, &mut result),
+ Rvalue::CheckedBinaryOp(_, o1, o2) => {
+ for o in [o1, o2] {
+ record_usage_for_operand(o, &mut result);
+ }
+ }
+ Rvalue::Aggregate(_, args) => {
+ for arg in args.iter() {
+ record_usage_for_operand(arg, &mut result);
+ }
+ }
+ Rvalue::ShallowInitBox(_, _) | Rvalue::ShallowInitBoxWithAlloc(_) => (),
}
if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
if place_case(db, body, p) != ProjectionCase::Indirect {
- push_mut_span(p.local, statement.span);
+ push_mut_span(p.local, statement.span, &mut result);
}
}
}
+ StatementKind::FakeRead(p) => {
+ record_usage(p.local, &mut result);
+ }
StatementKind::StorageDead(p) => {
ever_init_map.insert(*p, false);
}
@@ -386,15 +440,21 @@ fn mutability_of_locals(
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::GeneratorDrop
- | TerminatorKind::SwitchInt { .. }
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. } => (),
- TerminatorKind::Call { destination, .. } => {
+ TerminatorKind::SwitchInt { discr, targets: _ } => {
+ record_usage_for_operand(discr, &mut result);
+ }
+ TerminatorKind::Call { destination, args, func, .. } => {
+ record_usage_for_operand(func, &mut result);
+ for arg in args.iter() {
+ record_usage_for_operand(arg, &mut result);
+ }
if destination.projection.lookup(&body.projection_store).len() == 0 {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
- push_mut_span(destination.local, MirSpan::Unknown);
+ push_mut_span(destination.local, MirSpan::Unknown, &mut result);
} else {
ever_init_map.insert(destination.local, true);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index 4364e0d32..fbfb6ff8c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -21,7 +21,7 @@ use hir_def::{
AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
StaticId, VariantId,
};
-use hir_expand::{mod_path::ModPath, InFile};
+use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -162,7 +162,7 @@ pub struct Evaluator<'a> {
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
/// Constantly dropping and creating `Locals` is very costly. We store
- /// old locals that we normaly want to drop here, to reuse their allocations
+ /// old locals that we normally want to drop here, to reuse their allocations
/// later.
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
cached_ptr_size: usize,
@@ -375,10 +375,7 @@ impl MirEvalError {
Err(_) => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.syntax_node_ptr()),
Err(_) => continue,
},
MirSpan::Unknown => continue,
@@ -842,6 +839,7 @@ impl Evaluator<'_> {
}
StatementKind::Deinit(_) => not_supported!("de-init statement"),
StatementKind::StorageLive(_)
+ | StatementKind::FakeRead(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
@@ -2301,7 +2299,7 @@ impl Evaluator<'_> {
match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
MirOrDynIndex::Dyn(self_ty_idx) => {
// In the layout of current possible receiver, which at the moment of writing this code is one of
- // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers,
+ // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible receivers,
// the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
// the type.
let first_arg = arg_bytes.clone().next().unwrap();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index 803ef631f..2de99e416 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -1045,7 +1045,7 @@ impl Evaluator<'_> {
}
"transmute" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("trasmute arg is not provided"));
+ return Err(MirEvalError::TypeError("transmute arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
@@ -1065,7 +1065,7 @@ impl Evaluator<'_> {
}
"ctlz" | "ctlz_nonzero" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("cttz arg is not provided"));
+ return Err(MirEvalError::TypeError("ctlz arg is not provided"));
};
let result =
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index dd2dba717..639fabc19 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -2,7 +2,7 @@
use std::{fmt::Write, iter, mem};
-use base_db::FileId;
+use base_db::{salsa::Cycle, FileId};
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
body::Body,
@@ -105,9 +105,14 @@ pub enum MirLowerError {
/// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves.
struct DropScopeToken;
impl DropScopeToken {
- fn pop_and_drop(self, ctx: &mut MirLowerCtx<'_>, current: BasicBlockId) -> BasicBlockId {
+ fn pop_and_drop(
+ self,
+ ctx: &mut MirLowerCtx<'_>,
+ current: BasicBlockId,
+ span: MirSpan,
+ ) -> BasicBlockId {
std::mem::forget(self);
- ctx.pop_drop_scope_internal(current)
+ ctx.pop_drop_scope_internal(current, span)
}
/// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop
@@ -529,6 +534,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
+ self.push_fake_read(current, cond_place, expr_id.into());
let (then_target, else_target) =
self.pattern_match(current, None, cond_place, *pat)?;
self.write_bytes_to_place(
@@ -581,7 +587,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
let scope = this.push_drop_scope();
if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
- current = scope.pop_and_drop(this, current);
+ current = scope.pop_and_drop(this, current, body.into());
this.set_goto(current, begin, expr_id.into());
} else {
scope.pop_assume_dropped(this);
@@ -668,6 +674,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
+ self.push_fake_read(current, cond_place, expr_id.into());
let mut end = None;
for MatchArm { pat, guard, expr } in arms.iter() {
let (then, mut otherwise) =
@@ -718,7 +725,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
.ok_or(MirLowerError::ContinueWithoutLoop)?,
};
let begin = loop_data.begin;
- current = self.drop_until_scope(loop_data.drop_scope_index, current);
+ current =
+ self.drop_until_scope(loop_data.drop_scope_index, current, expr_id.into());
self.set_goto(current, begin, expr_id.into());
Ok(None)
}
@@ -757,7 +765,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.current_loop_blocks.as_ref().unwrap().drop_scope_index,
),
};
- current = self.drop_until_scope(drop_scope, current);
+ current = self.drop_until_scope(drop_scope, current, expr_id.into());
self.set_goto(current, end, expr_id.into());
Ok(None)
}
@@ -771,7 +779,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
}
}
- current = self.drop_until_scope(0, current);
+ current = self.drop_until_scope(0, current, expr_id.into());
self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None)
}
@@ -1299,6 +1307,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
if matches!(&self.body.exprs[lhs], Expr::Underscore) {
+ self.push_fake_read_for_operand(current, rhs_op, span);
return Ok(Some(current));
}
if matches!(
@@ -1575,6 +1584,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.result.basic_blocks[block].statements.push(statement);
}
+ fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) {
+ self.push_statement(block, StatementKind::FakeRead(p).with_span(span));
+ }
+
+ fn push_fake_read_for_operand(&mut self, block: BasicBlockId, operand: Operand, span: MirSpan) {
+ if let Operand::Move(p) | Operand::Copy(p) = operand {
+ self.push_fake_read(block, p, span);
+ }
+ }
+
fn push_assignment(
&mut self,
block: BasicBlockId,
@@ -1733,6 +1752,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
current = c;
+ self.push_fake_read(current, init_place, span);
(current, else_block) =
self.pattern_match(current, None, init_place, *pat)?;
match (else_block, else_branch) {
@@ -1760,14 +1780,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
- hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
+ &hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
let scope2 = self.push_drop_scope();
- let Some((_, c)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((p, c)) = self.lower_expr_as_place(current, expr, true)? else {
scope2.pop_assume_dropped(self);
scope.pop_assume_dropped(self);
return Ok(None);
};
- current = scope2.pop_and_drop(self, c);
+ self.push_fake_read(c, p, expr.into());
+ current = scope2.pop_and_drop(self, c, expr.into());
}
}
}
@@ -1778,7 +1799,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
current = c;
}
- current = scope.pop_and_drop(self, current);
+ current = scope.pop_and_drop(self, current, span);
Ok(Some(current))
}
@@ -1858,9 +1879,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
- fn drop_until_scope(&mut self, scope_index: usize, mut current: BasicBlockId) -> BasicBlockId {
+ fn drop_until_scope(
+ &mut self,
+ scope_index: usize,
+ mut current: BasicBlockId,
+ span: MirSpan,
+ ) -> BasicBlockId {
for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() {
- self.emit_drop_and_storage_dead_for_scope(scope, &mut current);
+ self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span);
}
current
}
@@ -1876,17 +1902,22 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
/// Don't call directly
- fn pop_drop_scope_internal(&mut self, mut current: BasicBlockId) -> BasicBlockId {
+ fn pop_drop_scope_internal(
+ &mut self,
+ mut current: BasicBlockId,
+ span: MirSpan,
+ ) -> BasicBlockId {
let scope = self.drop_scopes.pop().unwrap();
- self.emit_drop_and_storage_dead_for_scope(&scope, &mut current);
+ self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span);
current
}
fn pop_drop_scope_assert_finished(
&mut self,
mut current: BasicBlockId,
+ span: MirSpan,
) -> Result<BasicBlockId> {
- current = self.pop_drop_scope_internal(current);
+ current = self.pop_drop_scope_internal(current, span);
if !self.drop_scopes.is_empty() {
implementation_error!("Mismatched count between drop scope push and pops");
}
@@ -1897,6 +1928,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
&mut self,
scope: &DropScope,
current: &mut Idx<BasicBlock>,
+ span: MirSpan,
) {
for &l in scope.locals.iter().rev() {
if !self.result.locals[l].ty.clone().is_copy(self.db, self.owner) {
@@ -1904,13 +1936,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_terminator(
prev,
TerminatorKind::Drop { place: l.into(), target: *current, unwind: None },
- MirSpan::Unknown,
+ span,
);
}
- self.push_statement(
- *current,
- StatementKind::StorageDead(l).with_span(MirSpan::Unknown),
- );
+ self.push_statement(*current, StatementKind::StorageDead(l).with_span(span));
}
}
}
@@ -1987,7 +2016,7 @@ pub fn mir_body_for_closure_query(
|_| true,
)?;
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
- let current = ctx.pop_drop_scope_assert_finished(current)?;
+ let current = ctx.pop_drop_scope_assert_finished(current, root.into())?;
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
@@ -2081,7 +2110,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
pub fn mir_body_recover(
_db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
_def: &DefWithBodyId,
) -> Result<Arc<MirBody>> {
Err(MirLowerError::Loop)
@@ -2131,7 +2160,7 @@ pub fn lower_to_mir(
ctx.lower_params_and_bindings([].into_iter(), binding_picker)?
};
if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
- let current = ctx.pop_drop_scope_assert_finished(current)?;
+ let current = ctx.pop_drop_scope_assert_finished(current, root_expr.into())?;
ctx.set_terminator(current, TerminatorKind::Return, root_expr.into());
}
Ok(ctx.result)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
index 270f75ad9..1120bb1c1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -284,6 +284,7 @@ impl MirLowerCtx<'_> {
);
(current, current_else) = self.pattern_match_binding(
id,
+ *slice,
next_place,
(*slice).into(),
current,
@@ -395,6 +396,7 @@ impl MirLowerCtx<'_> {
if mode == MatchingMode::Bind {
self.pattern_match_binding(
*id,
+ pattern,
cond_place,
pattern.into(),
current,
@@ -431,13 +433,14 @@ impl MirLowerCtx<'_> {
fn pattern_match_binding(
&mut self,
id: BindingId,
+ pat: PatId,
cond_place: Place,
span: MirSpan,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
- let mode = self.infer.binding_modes[id];
+ let mode = self.infer.binding_modes[pat];
self.push_storage_live(id, current)?;
self.push_assignment(
current,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index df16d0d82..8da03eef2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -9,6 +9,7 @@
use std::mem;
+use base_db::salsa::Cycle;
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
@@ -248,6 +249,7 @@ impl Filler<'_> {
| Rvalue::CopyForDeref(_) => (),
},
StatementKind::Deinit(_)
+ | StatementKind::FakeRead(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@@ -299,7 +301,7 @@ pub fn monomorphized_mir_body_query(
pub fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &DefWithBodyId,
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
index 0108859ff..a91f90bc2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
- local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
+ local_to_binding: body.local_to_binding_map(),
result,
indent,
..*self
@@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
}
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
- let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
+ let local_to_binding = body.local_to_binding_map();
MirPrettyCtx {
body,
db,
@@ -233,6 +233,11 @@ impl<'a> MirPrettyCtx<'a> {
this.place(p);
wln!(this, ");");
}
+ StatementKind::FakeRead(p) => {
+ w!(this, "FakeRead(");
+ this.place(p);
+ wln!(this, ");");
+ }
StatementKind::Nop => wln!(this, "Nop;"),
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
index 7d19e0a19..6f4aef22d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -30,6 +30,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index d22d0d85c..1446e83fa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -269,12 +269,7 @@ fn pat_node(
Some(match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
- sp.map(|ptr| {
- ptr.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- })
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => return None,
})
@@ -303,12 +298,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
- sp.map(|ptr| {
- ptr.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- })
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
index bb15ca8c4..28e84e480 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -9,11 +9,10 @@ use super::visit_module;
fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let (mut db, pos) = TestDB::with_position(
"
- //- /lib.rs
- fn foo() -> i32 {
- $01 + 1
- }
- ",
+//- /lib.rs
+fn foo() -> i32 {
+ $01 + 1
+}",
);
{
let events = db.log_executed(|| {
@@ -27,12 +26,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
}
let new_text = "
- fn foo() -> i32 {
- 1
- +
- 1
- }
- ";
+fn foo() -> i32 {
+ 1
+ +
+ 1
+}";
db.set_file_text(pos.file_id, Arc::from(new_text));
@@ -47,3 +45,55 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
}
}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_types_in_another() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> f32 {
+ 1.0 + 2.0
+}
+fn bar() -> i32 {
+ $01 + 1
+}
+fn baz() -> i32 {
+ 1 + 1
+}",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{events:?}").contains("infer"))
+ }
+
+ let new_text = "
+fn foo() -> f32 {
+ 1.0 + 2.0
+}
+fn bar() -> i32 {
+ 53
+}
+fn baz() -> i32 {
+ 1 + 1
+}
+";
+
+ db.set_file_text(pos.file_id, Arc::from(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
index 1e6e946a1..d16e0eb01 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -684,8 +684,7 @@ fn infer_builtin_macros_line() {
}
"#,
expect![[r#"
- !0..1 '0': i32
- !0..6 '0asu32': u32
+ !0..4 '0u32': u32
63..87 '{ ...!(); }': ()
73..74 'x': u32
"#]],
@@ -723,8 +722,7 @@ fn infer_builtin_macros_column() {
}
"#,
expect![[r#"
- !0..1 '0': i32
- !0..6 '0asu32': u32
+ !0..4 '0u32': u32
65..91 '{ ...!(); }': ()
75..76 'x': u32
"#]],
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
index 0f5a3e175..7234af2d6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -1129,3 +1129,65 @@ fn foo() {
"#,
);
}
+
+#[test]
+fn generic_alias() {
+ check_types(
+ r#"
+type Wrap<T> = T;
+
+enum X {
+ A { cool: u32, stuff: u32 },
+ B,
+}
+
+fn main() {
+ let wrapped = Wrap::<X>::A {
+ cool: 100,
+ stuff: 100,
+ };
+
+ if let Wrap::<X>::A { cool, ..} = &wrapped {}
+ //^^^^ &u32
+}
+"#,
+ );
+}
+
+#[test]
+fn type_mismatch_pat_const_reference() {
+ check_no_mismatches(
+ r#"
+const TEST_STR: &'static str = "abcd";
+
+fn main() {
+ let s = "abcd";
+ match s {
+ TEST_STR => (),
+ _ => (),
+ }
+}
+
+ "#,
+ );
+ check(
+ r#"
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ const TEST_I32_REF: &'static i32 = &3;
+ const TEST_I32: i32 = 3;
+}
+
+fn main() {
+ match &6 {
+ Foo::<i32>::TEST_I32_REF => (),
+ Foo::<i32>::TEST_I32 => (),
+ //^^^^^^^^^^^^^^^^^^^^ expected &i32, got i32
+ _ => (),
+ }
+}
+
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index 6ea059065..35079e709 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -2000,3 +2000,15 @@ fn test() {
"#,
);
}
+
+#[test]
+fn rustc_test_issue_52437() {
+ check_types(
+ r#"
+ fn main() {
+ let x = [(); &(&'static: loop { |x| {}; }) as *const _ as usize]
+ //^ [(); _]
+ }
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index d36b885ec..003ae60e8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -2598,6 +2598,34 @@ fn test<T: Trait>() {
}
#[test]
+fn associated_type_in_type_bound() {
+ check_types(
+ r#"
+//- minicore: deref
+fn fb(f: Foo<&u8>) {
+ f.foobar();
+ //^^^^^^^^^^ u8
+}
+trait Bar {
+ fn bar(&self) -> u8;
+}
+impl Bar for u8 {
+ fn bar(&self) -> u8 { *self }
+}
+
+struct Foo<F> {
+ foo: F,
+}
+impl<F: core::ops::Deref<Target = impl Bar>> Foo<F> {
+ fn foobar(&self) -> u8 {
+ self.foo.deref().bar()
+ }
+}
+"#,
+ )
+}
+
+#[test]
fn dyn_trait_through_chalk() {
check_types(
r#"
@@ -4439,42 +4467,42 @@ fn test(v: S<i32>) {
fn associated_type_in_argument() {
check(
r#"
- trait A {
- fn m(&self) -> i32;
- }
+trait A {
+ fn m(&self) -> i32;
+}
- fn x<T: B>(k: &<T as B>::Ty) {
- k.m();
- }
+fn x<T: B>(k: &<T as B>::Ty) {
+ k.m();
+}
- struct X;
- struct Y;
+struct X;
+struct Y;
- impl A for X {
- fn m(&self) -> i32 {
- 8
- }
+impl A for X {
+ fn m(&self) -> i32 {
+ 8
}
+}
- impl A for Y {
- fn m(&self) -> i32 {
- 32
- }
+impl A for Y {
+ fn m(&self) -> i32 {
+ 32
}
+}
- trait B {
- type Ty: A;
- }
+trait B {
+ type Ty: A;
+}
- impl B for u16 {
- type Ty = X;
- }
+impl B for u16 {
+ type Ty = X;
+}
- fn ttt() {
- let inp = Y;
- x::<u16>(&inp);
- //^^^^ expected &X, got &Y
- }
- "#,
+fn ttt() {
+ let inp = Y;
+ x::<u16>(&inp);
+ //^^^^ expected &X, got &Y
+}
+"#,
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index 3c7cfbaed..b6bc76bc9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -48,18 +48,32 @@ pub struct TraitEnvironment {
pub krate: CrateId,
pub block: Option<BlockId>,
// FIXME make this a BTreeMap
- pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>,
+ traits_from_clauses: Box<[(Ty, TraitId)]>,
pub env: chalk_ir::Environment<Interner>,
}
impl TraitEnvironment {
- pub fn empty(krate: CrateId) -> Self {
- TraitEnvironment {
+ pub fn empty(krate: CrateId) -> Arc<Self> {
+ Arc::new(TraitEnvironment {
krate,
block: None,
- traits_from_clauses: Vec::new(),
+ traits_from_clauses: Box::default(),
env: chalk_ir::Environment::new(Interner),
- }
+ })
+ }
+
+ pub fn new(
+ krate: CrateId,
+ block: Option<BlockId>,
+ traits_from_clauses: Box<[(Ty, TraitId)]>,
+ env: chalk_ir::Environment<Interner>,
+ ) -> Arc<Self> {
+ Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
+ }
+
+    /// Sets `block` on the environment behind `this`, cloning first if the `Arc` is shared.
+ pub fn with_block(this: &mut Arc<Self>, block: BlockId) {
+ Arc::make_mut(this).block = Some(block);
}
pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
index f860ee948..4c1dfbc29 100644
--- a/src/tools/rust-analyzer/crates/hir/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -13,9 +13,9 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
-either = "1.7.0"
+either.workspace = true
arrayvec = "0.7.2"
-itertools = "0.10.5"
+itertools.workspace = true
smallvec.workspace = true
triomphe.workspace = true
once_cell = "1.17.1"
@@ -30,3 +30,6 @@ profile.workspace = true
stdx.workspace = true
syntax.workspace = true
tt.workspace = true
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 796490abd..185853353 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -1,5 +1,6 @@
//! Attributes & documentation for hir types.
+use base_db::FileId;
use hir_def::{
attr::AttrsWithOwner,
item_scope::ItemInNs,
@@ -8,7 +9,10 @@ use hir_def::{
resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, ModuleDefId,
};
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::{
+ name::Name,
+ span::{RealSpanMap, SpanMapRef},
+};
use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
@@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
if ast_path.syntax().text() != link {
return None;
}
- ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
+ ModPath::from_src(
+ db.upcast(),
+ ast_path,
+ SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)),
+ )
};
let full = try_get_modpath(link);
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index 936581bfe..d98e3decd 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -3,10 +3,27 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
-pub use hir_def::db::*;
+pub use hir_def::db::{
+ AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
+ ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
+ CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
+ EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
+ FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
+ FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
+ ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
+ InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
+ InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
+ InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
+ InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
+ InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
+ Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
+ StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
+ TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
+ UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
+};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
- ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
- MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+ ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
+ ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 479138b67..1cb36f9b0 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -3,7 +3,7 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
-pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase};
+pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
@@ -12,7 +12,7 @@ use hir_def::path::ModPath;
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
-use crate::{AssocItem, Field, Local, MacroKind, Type};
+use crate::{AssocItem, Field, Local, MacroKind, Trait, Type};
macro_rules! diagnostics {
($($diag:ident,)*) => {
@@ -53,6 +53,10 @@ diagnostics![
PrivateAssocItem,
PrivateField,
ReplaceFilterMapNextWithFindMap,
+ TraitImplIncorrectSafety,
+ TraitImplMissingAssocItems,
+ TraitImplRedundantAssocItems,
+ TraitImplOrphan,
TypedHole,
TypeMismatch,
UndeclaredLabel,
@@ -66,6 +70,7 @@ diagnostics![
UnresolvedModule,
UnresolvedProcMacro,
UnusedMut,
+ UnusedVariable,
];
#[derive(Debug)]
@@ -173,20 +178,19 @@ pub struct MalformedDerive {
#[derive(Debug)]
pub struct NoSuchField {
- pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
+ pub field: InFile<AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>>,
pub private: bool,
}
#[derive(Debug)]
pub struct PrivateAssocItem {
- pub expr_or_pat:
- InFile<Either<AstPtr<ast::Expr>, Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
pub item: AssocItem,
}
#[derive(Debug)]
pub struct MismatchedTupleStructPatArgCount {
- pub expr_or_pat: InFile<Either<AstPtr<ast::Expr>, AstPtr<ast::Pat>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: usize,
pub found: usize,
}
@@ -227,7 +231,7 @@ pub struct MissingUnsafe {
#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
- pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
+ pub field_list_parent: AstPtr<Either<ast::RecordExpr, ast::RecordPat>>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
pub missed_fields: Vec<Name>,
}
@@ -254,7 +258,7 @@ pub struct MissingMatchArms {
#[derive(Debug)]
pub struct TypeMismatch {
- pub expr_or_pat: Either<InFile<AstPtr<ast::Expr>>, InFile<AstPtr<ast::Pat>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: Type,
pub actual: Type,
}
@@ -271,7 +275,47 @@ pub struct UnusedMut {
}
#[derive(Debug)]
+pub struct UnusedVariable {
+ pub local: Local,
+}
+
+#[derive(Debug)]
pub struct MovedOutOfRef {
pub ty: Type,
pub span: InFile<SyntaxNodePtr>,
}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct IncoherentImpl {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplOrphan {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+}
+
+// FIXME: Split this off into the corresponding 4 rustc errors
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplIncorrectSafety {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+ pub should_be_safe: bool,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplMissingAssocItems {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+ pub missing: Vec<(Name, AssocItem)>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplRedundantAssocItems {
+ pub file_id: HirFileId,
+ pub trait_: Trait,
+ pub impl_: AstPtr<ast::Impl>,
+ pub assoc_item: (Name, AssocItem),
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index ac171026d..5847c8a9f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -1,6 +1,6 @@
//! HirDisplay implementations for various hir types.
use hir_def::{
- data::adt::VariantData,
+ data::adt::{StructKind, VariantData},
generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
@@ -163,7 +163,40 @@ impl HirDisplay for Struct {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
write_generic_params(def_id, f)?;
+
+ let variant_data = self.variant_data(f.db);
+ if let StructKind::Tuple = variant_data.kind() {
+ f.write_char('(')?;
+ let mut it = variant_data.fields().iter().peekable();
+
+ while let Some((id, _)) = it.next() {
+ let field = Field { parent: (*self).into(), id };
+ field.ty(f.db).hir_fmt(f)?;
+ if it.peek().is_some() {
+ f.write_str(", ")?;
+ }
+ }
+
+ f.write_str(");")?;
+ }
+
write_where_clause(def_id, f)?;
+
+ if let StructKind::Record = variant_data.kind() {
+ let fields = self.fields(f.db);
+ if fields.is_empty() {
+ f.write_str(" {}")?;
+ } else {
+ f.write_str(" {\n")?;
+ for field in self.fields(f.db) {
+ f.write_str(" ")?;
+ field.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+ }
+
Ok(())
}
}
@@ -176,6 +209,18 @@ impl HirDisplay for Enum {
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
+
+ let variants = self.variants(f.db);
+ if !variants.is_empty() {
+ f.write_str(" {\n")?;
+ for variant in variants {
+ f.write_str(" ")?;
+ variant.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+
Ok(())
}
}
@@ -188,6 +233,18 @@ impl HirDisplay for Union {
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
+
+ let fields = self.fields(f.db);
+ if !fields.is_empty() {
+ f.write_str(" {\n")?;
+ for field in self.fields(f.db) {
+ f.write_str(" ")?;
+ field.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+
Ok(())
}
}
@@ -559,7 +616,7 @@ impl HirDisplay for TypeAlias {
write_where_clause(def_id, f)?;
if !data.bounds.is_empty() {
f.write_str(": ")?;
- f.write_joined(&data.bounds, " + ")?;
+ f.write_joined(data.bounds.iter(), " + ")?;
}
if let Some(ty) = &data.type_ref {
f.write_str(" = ")?;
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index b215ed38f..e0230fa37 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -17,7 +17,8 @@
//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "512"]
mod semantics;
@@ -33,7 +34,7 @@ pub mod symbols;
mod display;
-use std::{iter, ops::ControlFlow};
+use std::{iter, mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
@@ -53,20 +54,20 @@ use hir_def::{
resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
- EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId,
- InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup,
- MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, TypeParamId, UnionId,
+ EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
+ ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId,
+ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
-use hir_expand::{name::name, MacroCallKind};
+use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind};
use hir_ty::{
- all_super_traits, autoderef,
+ all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
- layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
+ layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
- mir::{self, interpret_mir},
+ mir::interpret_mir,
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
@@ -80,7 +81,7 @@ use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
- ast::{self, HasAttrs as _, HasDocComments, HasName},
+ ast::{self, HasAttrs as _, HasName},
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
};
use triomphe::Arc;
@@ -89,19 +90,11 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{resolve_doc_path_on, HasAttrs},
- diagnostics::{
- AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
- IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
- MacroExpansionParseError, MalformedDerive, MismatchedArgCount,
- MismatchedTupleStructPatArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
- MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
- ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
- UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
- UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
- UnresolvedProcMacro, UnusedMut,
- },
+ diagnostics::*,
has_source::HasSource,
- semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+ semantics::{
+ DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits,
+ },
};
// Be careful with these re-exports.
@@ -132,15 +125,18 @@ pub use {
},
hir_expand::{
attrs::{Attr, AttrId},
+ hygiene::{marks_rev, SyntaxContextExt},
name::{known, Name},
- ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
+ MacroFileIdExt,
},
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
- mir::MirEvalError,
PointerCast, Safety,
},
+ // FIXME: Properly encapsulate mir
+ hir_ty::{mir, Interner as ChalkTyInterner},
};
// These are negative re-exports: pub using these names is forbidden, they
@@ -148,7 +144,10 @@ pub use {
#[allow(unused)]
use {
hir_def::path::Path,
- hir_expand::{hygiene::Hygiene, name::AsName},
+ hir_expand::{
+ name::AsName,
+ span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
+ },
};
/// hir::Crate describes a single crate. It's the main interface with which
@@ -452,15 +451,7 @@ impl HasVisibility for ModuleDef {
impl Module {
/// Name of this module.
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
- let def_map = self.id.def_map(db.upcast());
- let parent = def_map[self.id.local_id].parent?;
- def_map[parent].children.iter().find_map(|(name, module_id)| {
- if *module_id == self.id.local_id {
- Some(name.clone())
- } else {
- None
- }
- })
+ self.id.name(db.upcast())
}
/// Returns the crate this module is part of.
@@ -571,6 +562,7 @@ impl Module {
if def_map[m.id.local_id].origin.is_inline() {
m.diagnostics(db, acc)
}
+ acc.extend(def.diagnostics(db))
}
ModuleDef::Trait(t) => {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
@@ -610,29 +602,141 @@ impl Module {
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
+ let mut impl_assoc_items_scratch = vec![];
for impl_def in self.impl_defs(db) {
let loc = impl_def.id.lookup(db.upcast());
let tree = loc.id.item_tree(db.upcast());
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
- if file_id.is_builtin_derive(db.upcast()) {
+ if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) {
// these expansion come from us, diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue;
}
+ let ast_id_map = db.ast_id_map(file_id);
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
- let ast_id_map = db.ast_id_map(file_id);
-
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
}
- for item in impl_def.items(db) {
- let def: DefWithBody = match item {
+ if !impl_def.check_orphan_rules(db) {
+ acc.push(TraitImplOrphan { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
+ }
+
+ let trait_ = impl_def.trait_(db);
+ let trait_is_unsafe = trait_.map_or(false, |t| t.is_unsafe(db));
+ let impl_is_negative = impl_def.is_negative(db);
+ let impl_is_unsafe = impl_def.is_unsafe(db);
+
+ let drop_maybe_dangle = (|| {
+ // FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper
+ let trait_ = trait_?;
+ let drop_trait = db.lang_item(self.krate().into(), LangItem::Drop)?.as_trait()?;
+ if drop_trait != trait_.into() {
+ return None;
+ }
+ let parent = impl_def.id.into();
+ let generic_params = db.generic_params(parent);
+ let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| {
+ GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
+ });
+ let type_params = generic_params
+ .iter()
+ .filter(|(_, it)| it.type_param().is_some())
+ .map(|(local_id, _)| {
+ GenericParamId::TypeParamId(TypeParamId::from_unchecked(
+ TypeOrConstParamId { parent, local_id },
+ ))
+ });
+ let res = type_params
+ .chain(lifetime_params)
+ .any(|p| db.attrs(AttrDefId::GenericParamId(p)).by_key("may_dangle").exists());
+ Some(res)
+ })()
+ .unwrap_or(false);
+
+ match (impl_is_unsafe, trait_is_unsafe, impl_is_negative, drop_maybe_dangle) {
+ // unsafe negative impl
+ (true, _, true, _) |
+ // unsafe impl for safe trait
+ (true, false, _, false) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: true }.into()),
+ // safe impl for unsafe trait
+ (false, true, false, _) |
+ // safe impl of dangling drop
+ (false, false, _, true) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: false }.into()),
+ _ => (),
+ };
+
+ // Negative impls can't have items, don't emit missing items diagnostic for them
+ if let (false, Some(trait_)) = (impl_is_negative, trait_) {
+ let items = &db.trait_data(trait_.into()).items;
+ let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
+ AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
+ AssocItemId::ConstId(id) => Const::from(id).value(db).is_none(),
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
+ });
+ impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
+ |&item| {
+ Some((
+ item,
+ match item {
+ AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
+ AssocItemId::ConstId(it) => {
+ db.const_data(it).name.as_ref()?.clone()
+ }
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
+ },
+ ))
+ },
+ ));
+
+ let redundant = impl_assoc_items_scratch
+ .iter()
+ .filter(|(id, name)| {
+ !items.iter().any(|(impl_name, impl_item)| {
+ discriminant(impl_item) == discriminant(id) && impl_name == name
+ })
+ })
+ .map(|(item, name)| (name.clone(), AssocItem::from(*item)));
+ for (name, assoc_item) in redundant {
+ acc.push(
+ TraitImplRedundantAssocItems {
+ trait_,
+ file_id,
+ impl_: ast_id_map.get(node.ast_id()),
+ assoc_item: (name, assoc_item),
+ }
+ .into(),
+ )
+ }
+
+ let missing: Vec<_> = required_items
+ .filter(|(name, id)| {
+ !impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
+ discriminant(impl_item) == discriminant(id) && impl_name == name
+ })
+ })
+ .map(|(name, item)| (name.clone(), AssocItem::from(*item)))
+ .collect();
+ if !missing.is_empty() {
+ acc.push(
+ TraitImplMissingAssocItems {
+ impl_: ast_id_map.get(node.ast_id()),
+ file_id,
+ missing,
+ }
+ .into(),
+ )
+ }
+ impl_assoc_items_scratch.clear();
+ }
+
+ for &item in &db.impl_data(impl_def.id).items {
+ let def: DefWithBody = match AssocItem::from(item) {
AssocItem::Function(it) => it.into(),
AssocItem::Const(it) => it.into(),
AssocItem::TypeAlias(_) => continue,
@@ -671,8 +775,15 @@ impl Module {
db: &dyn DefDatabase,
item: impl Into<ItemInNs>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
- hir_def::find_path::find_path(db, item.into().into(), self.into(), prefer_no_std)
+ hir_def::find_path::find_path(
+ db,
+ item.into().into(),
+ self.into(),
+ prefer_no_std,
+ prefer_prelude,
+ )
}
/// Finds a path that can be used to refer to the given item from within
@@ -683,6 +794,7 @@ impl Module {
item: impl Into<ItemInNs>,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
hir_def::find_path::find_path_prefixed(
db,
@@ -690,6 +802,7 @@ impl Module {
self.into(),
prefix_kind,
prefer_no_std,
+ prefer_prelude,
)
}
}
@@ -862,10 +975,9 @@ fn precise_macro_call_location(
// Compute the precise location of the macro name's token in the derive
// list.
let token = (|| {
- let derive_attr = node
- .doc_comments_and_attrs()
+ let derive_attr = collect_attrs(&node)
.nth(derive_attr_index.ast_index())
- .and_then(Either::left)?;
+ .and_then(|x| Either::left(x.1))?;
let token_tree = derive_attr.meta()?.token_tree()?;
let group_by = token_tree
.syntax()
@@ -890,10 +1002,9 @@ fn precise_macro_call_location(
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_node(db.upcast());
- let attr = node
- .doc_comments_and_attrs()
+ let attr = collect_attrs(&node)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.unwrap_or_else(|| {
panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
});
@@ -1453,9 +1564,7 @@ impl DefWithBody {
let (body, source_map) = db.body_with_source_map(self.into());
for (_, def_map) in body.blocks(db.upcast()) {
- for diag in def_map.diagnostics() {
- emit_def_diagnostic(db, acc, diag);
- }
+ Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc);
}
for diag in source_map.diagnostics() {
@@ -1509,10 +1618,10 @@ impl DefWithBody {
&hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => {
- source_map.field_syntax(expr).map(Either::Left)
+ source_map.field_syntax(expr).map(AstPtr::wrap_left)
}
ExprOrPatId::PatId(pat) => {
- source_map.pat_field_syntax(pat).map(Either::Right)
+ source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
}
};
acc.push(NoSuchField { field: expr_or_pat, private }.into())
@@ -1530,8 +1639,8 @@ impl DefWithBody {
}
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@@ -1609,12 +1718,17 @@ impl DefWithBody {
found,
} => {
let expr_or_pat = match pat {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => source_map
- .pat_syntax(pat)
- .expect("unexpected synthetic")
- .map(|it| it.unwrap_left())
- .map(Either::Right),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => {
+ let InFile { file_id, value } =
+ source_map.pat_syntax(pat).expect("unexpected synthetic");
+
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
+ continue;
+ };
+ InFile { file_id, value: ptr }
+ }
};
acc.push(
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
@@ -1628,11 +1742,15 @@ impl DefWithBody {
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
};
let expr_or_pat = match expr_or_pat {
- Ok(Either::Left(expr)) => Either::Left(expr),
- Ok(Either::Right(InFile { file_id, value: Either::Left(pat) })) => {
- Either::Right(InFile { file_id, value: pat })
+ Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left),
+ Ok(Either::Right(InFile { file_id, value: pat })) => {
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else {
+ continue;
+ };
+ InFile { file_id, value: ptr }
}
- Ok(Either::Right(_)) | Err(SyntheticSyntax) => continue,
+ Err(SyntheticSyntax) => continue,
};
acc.push(
@@ -1667,10 +1785,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@@ -1697,9 +1812,20 @@ impl DefWithBody {
// Skip synthetic bindings
continue;
}
- let need_mut = &mol[local];
+ let mut need_mut = &mol[local];
+ if body[binding_id].name.as_str() == Some("self")
+ && need_mut == &mir::MutabilityReason::Unused
+ {
+ need_mut = &mir::MutabilityReason::Not;
+ }
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
+ (mir::MutabilityReason::Unused, _) => {
+ let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
+ if !should_ignore {
+ acc.push(UnusedVariable { local }.into())
+ }
+ }
(mir::MutabilityReason::Mut { .. }, true)
| (mir::MutabilityReason::Not, false) => (),
(mir::MutabilityReason::Mut { spans }, false) => {
@@ -1710,10 +1836,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@@ -1752,18 +1875,18 @@ impl DefWithBody {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) =
- &source_ptr.value.to_node(&root)
+ source_ptr.value.to_node(&root)
{
if record_expr.record_expr_field_list().is_some() {
+ let field_list_parent_path =
+ record_expr.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
- field_list_parent: Either::Left(AstPtr::new(
+ field_list_parent: AstPtr::new(&Either::Left(
record_expr,
)),
- field_list_parent_path: record_expr
- .path()
- .map(|path| AstPtr::new(&path)),
+ field_list_parent_path,
missed_fields,
}
.into(),
@@ -1775,24 +1898,24 @@ impl DefWithBody {
},
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => {
- if let Some(expr) = source_ptr.value.as_ref().left() {
+ if let Some(ptr) = source_ptr.value.clone().cast::<ast::RecordPat>()
+ {
let root = source_ptr.file_syntax(db.upcast());
- if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
- if record_pat.record_pat_field_list().is_some() {
- acc.push(
- MissingFields {
- file: source_ptr.file_id,
- field_list_parent: Either::Right(AstPtr::new(
- &record_pat,
- )),
- field_list_parent_path: record_pat
- .path()
- .map(|path| AstPtr::new(&path)),
- missed_fields,
- }
- .into(),
- )
- }
+ let record_pat = ptr.to_node(&root);
+ if record_pat.record_pat_field_list().is_some() {
+ let field_list_parent_path =
+ record_pat.path().map(|path| AstPtr::new(&path));
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&Either::Right(
+ record_pat,
+ )),
+ field_list_parent_path,
+ missed_fields,
+ }
+ .into(),
+ )
}
}
}
@@ -1818,17 +1941,20 @@ impl DefWithBody {
if let ast::Expr::MatchExpr(match_expr) =
&source_ptr.value.to_node(&root)
{
- if let Some(scrut_expr) = match_expr.expr() {
- acc.push(
- MissingMatchArms {
- scrutinee_expr: InFile::new(
- source_ptr.file_id,
- AstPtr::new(&scrut_expr),
- ),
- uncovered_patterns,
- }
- .into(),
- );
+ match match_expr.expr() {
+ Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
+ acc.push(
+ MissingMatchArms {
+ scrutinee_expr: InFile::new(
+ source_ptr.file_id,
+ AstPtr::new(&scrut_expr),
+ ),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ _ => {}
}
}
}
@@ -1960,6 +2086,17 @@ impl Function {
db.function_data(self.id).attrs.is_test()
}
+ /// is this a `fn main` or a function with an `export_name` of `main`?
+ pub fn is_main(self, db: &dyn HirDatabase) -> bool {
+ if !self.module(db).is_crate_root() {
+ return false;
+ }
+ let data = db.function_data(self.id);
+
+ data.name.to_smol_str() == "main"
+ || data.attrs.export_name().map(core::ops::Deref::deref) == Some("main")
+ }
+
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).attrs.is_ignore()
@@ -2926,10 +3063,10 @@ impl Local {
.map(|&definition| {
let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
let root = src.file_syntax(db.upcast());
- src.map(|ast| match ast {
- // Suspicious unwrap
- Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
- Either::Right(it) => Either::Right(it.to_node(&root)),
+ src.map(|ast| match ast.to_node(&root) {
+ Either::Left(ast::Pat::IdentPat(it)) => Either::Left(it),
+ Either::Left(_) => unreachable!("local with non ident-pattern"),
+ Either::Right(it) => Either::Right(it),
})
})
.map(move |source| LocalSource { local: self, source })
@@ -3371,13 +3508,46 @@ impl Impl {
db.impl_data(self.id).is_negative
}
+ pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
+ db.impl_data(self.id).is_unsafe
+ }
+
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.lookup(db.upcast()).container.into()
}
- pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
let src = self.source(db)?;
- src.file_id.as_builtin_derive_attr_node(db.upcast())
+
+ let macro_file = src.file_id.macro_file()?;
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let (derive_attr, derive_index) = match loc.kind {
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+ let module_id = self.id.lookup(db.upcast()).container;
+ (
+ db.crate_def_map(module_id.krate())[module_id.local_id]
+ .scope
+ .derive_macro_invoc(ast_id, derive_attr_index)?,
+ derive_index,
+ )
+ }
+ _ => return None,
+ };
+ let file_id = MacroFileId { macro_call_id: derive_attr };
+ let path = db
+ .parse_macro_expansion(file_id)
+ .value
+ .0
+ .syntax_node()
+ .children()
+ .nth(derive_index as usize)
+ .and_then(<ast::Attr as AstNode>::cast)
+ .and_then(|it| it.path())?;
+ Some(InMacroFile { file_id, value: path })
+ }
+
+ pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
+ check_orphan_rules(db, self.id)
}
}
@@ -3393,10 +3563,9 @@ impl TraitRef {
resolver: &Resolver,
trait_ref: hir_ty::TraitRef,
) -> TraitRef {
- let env = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let env = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
TraitRef { env, trait_ref }
}
@@ -3536,15 +3705,14 @@ impl Type {
resolver: &Resolver,
ty: Ty,
) -> Type {
- let environment = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
- Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ Type { env: TraitEnvironment::empty(krate), ty }
}
pub fn reference(inner: &Type, m: Mutability) -> Type {
@@ -3560,10 +3728,9 @@ impl Type {
fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db.upcast());
- let environment = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
@@ -4133,10 +4300,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
- let environment = scope.resolver().generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(krate.id)),
- |d| db.trait_environment(d),
- );
+ let environment = scope
+ .resolver()
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_method_candidates_dyn(
&canonical,
@@ -4190,10 +4357,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
- let environment = scope.resolver().generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(krate.id)),
- |d| db.trait_environment(d),
- );
+ let environment = scope
+ .resolver()
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_path_candidates(
&canonical,
@@ -4515,15 +4682,31 @@ impl Layout {
Some(self.0.largest_niche?.available(&*self.1))
}
- pub fn field_offset(&self, idx: usize) -> Option<u64> {
+ pub fn field_offset(&self, field: Field) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => Some(0),
layout::FieldsShape::Array { stride, count } => {
- let i = u64::try_from(idx).ok()?;
+ let i = u64::try_from(field.index()).ok()?;
(i < count).then_some((stride * i).bytes())
}
- layout::FieldsShape::Arbitrary { ref offsets, .. } => Some(offsets.get(idx)?.bytes()),
+ layout::FieldsShape::Arbitrary { ref offsets, .. } => {
+ Some(offsets.get(RustcFieldIdx(field.id))?.bytes())
+ }
+ }
+ }
+
+ pub fn tuple_field_offset(&self, field: usize) -> Option<u64> {
+ match self.0.fields {
+ layout::FieldsShape::Primitive => None,
+ layout::FieldsShape::Union(_) => Some(0),
+ layout::FieldsShape::Array { stride, count } => {
+ let i = u64::try_from(field).ok()?;
+ (i < count).then_some((stride * i).bytes())
+ }
+ layout::FieldsShape::Arbitrary { ref offsets, .. } => {
+ Some(offsets.get(RustcFieldIdx::new(field))?.bytes())
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index a42e0978b..a03ff2207 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -2,7 +2,11 @@
mod source_to_def;
-use std::{cell::RefCell, fmt, iter, mem, ops};
+use std::{
+ cell::RefCell,
+ fmt, iter, mem,
+ ops::{self, ControlFlow, Not},
+};
use base_db::{FileId, FileRange};
use either::Either;
@@ -13,16 +17,21 @@ use hir_def::{
nameres::MacroSubNs,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
- AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
+ AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+ InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
-use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
+use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
- ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
- match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
+ match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+ TextRange, TextSize,
};
use crate::{
@@ -35,7 +44,13 @@ use crate::{
TypeAlias, TypeParam, VariantDef,
};
-#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum DescendPreference {
+ SameText,
+ SameKind,
+ None,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
Def(ModuleDef),
@@ -114,11 +129,12 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
- expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
- // Rootnode to HirFileId cache
+    /// Root node to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
- // MacroCall to its expansion's HirFileId cache
- macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+ // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
+ /// MacroCall to its expansion's MacroFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -182,20 +198,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
- pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
- self.imp.resolve_method_call(call).map(Function::from)
- }
-
- /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
- pub fn resolve_method_call_field_fallback(
- &self,
- call: &ast::MethodCallExpr,
- ) -> Option<Either<Function, Field>> {
- self.imp
- .resolve_method_call_fallback(call)
- .map(|it| it.map_left(Function::from).map_right(Field::from))
- }
-
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
}
@@ -255,7 +257,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
- let node = self.parse_or_expand(file_id);
+ let node = self.parse_or_expand(file_id.into());
Some(node)
}
@@ -388,11 +390,72 @@ impl<'db> SemanticsImpl<'db> {
)
}
+ pub fn as_format_args_parts(
+ &self,
+ string: &ast::String,
+ ) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
+ if let Some(quote) = string.open_quote_text_range() {
+ return self
+ .descend_into_macros(DescendPreference::SameText, string.syntax().clone())
+ .into_iter()
+ .find_map(|token| {
+ let string = ast::String::cast(token)?;
+ let literal =
+ string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
+ let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
+ let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
+ let format_args = self.wrap_node_infile(format_args);
+ let res = source_analyzer
+ .as_format_args_parts(self.db, format_args.as_ref())?
+ .map(|(range, res)| (range + quote.end(), res))
+ .collect();
+ Some(res)
+ });
+ }
+ None
+ }
+
+ pub fn check_for_format_args_template(
+ &self,
+ original_token: SyntaxToken,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ if let Some(original_string) = ast::String::cast(original_token.clone()) {
+ if let Some(quote) = original_string.open_quote_text_range() {
+ return self
+ .descend_into_macros(DescendPreference::SameText, original_token.clone())
+ .into_iter()
+ .find_map(|token| {
+ self.resolve_offset_in_format_args(
+ ast::String::cast(token)?,
+ offset - quote.end(),
+ )
+ })
+ .map(|(range, res)| (range + quote.end(), res));
+ }
+ }
+ None
+ }
+
+ fn resolve_offset_in_format_args(
+ &self,
+ string: ast::String,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ debug_assert!(offset <= string.syntax().text_range().len());
+ let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
+ let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
+ let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
+ let format_args = self.wrap_node_infile(format_args);
+ source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
+ }
+
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
let mut res = smallvec![];
let tokens = (|| {
+ // FIXME: the trivia skipping should not be necessary
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
Some((first, last))
@@ -403,24 +466,28 @@ impl<'db> SemanticsImpl<'db> {
};
if first == last {
- self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
- if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ // node is just the token, so descend the token
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value
+ .parent_ancestors()
+ .take_while(|it| it.text_range() == value.text_range())
+ .find_map(N::cast)
+ {
res.push(node)
}
- false
+ ControlFlow::Continue(())
});
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(first, 0.into(), &mut |token| {
+ self.descend_into_macros_impl(first, &mut |token| {
scratch.push(token);
- false
+ ControlFlow::Continue(())
});
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
- 0.into(),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@@ -437,7 +504,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
- false
+ ControlFlow::Continue(())
},
);
}
@@ -449,32 +516,42 @@ impl<'db> SemanticsImpl<'db> {
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
- offset: TextSize,
- ) -> SmallVec<[SyntaxToken; 1]> {
- let mut res = smallvec![];
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
- res.push(value);
- false
- });
- res
- }
-
- /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
- ///
- /// Returns the original non descended token if none of the mapped counterparts have the same text.
- pub fn descend_into_macros_with_same_text(
- &self,
- token: SyntaxToken,
- offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
- let text = token.text();
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = smallvec![];
- self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if value.text() == text {
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
+ if is_a_match {
res.push(value);
}
- false
+ ControlFlow::Continue(())
});
if res.is_empty() {
res.push(token);
@@ -482,44 +559,46 @@ impl<'db> SemanticsImpl<'db> {
res
}
- pub fn descend_into_macros_with_kind_preference(
+ pub fn descend_into_macros_single(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
- offset: TextSize,
) -> SyntaxToken {
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
- kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
- node.parent().map_or(kind, |it| it.kind())
- }
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
- let preferred_kind = fetch_kind(&token);
- let mut res = None;
- self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if fetch_kind(&value) == preferred_kind {
- res = Some(value);
- true
- } else {
- if let None = res {
- res = Some(value)
- }
- false
- }
- });
- res.unwrap_or(token)
- }
-
- /// Descend the token into its macro call if it is part of one, returning the token in the
- /// expansion that it is associated with. If `offset` points into the token's range, it will
- /// be considered for the mapping in case of inline format args.
- pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = token.clone();
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
res = value;
- true
+ if is_a_match {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
});
res
}
@@ -527,177 +606,204 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
- // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
- // mapping, specifically for node downmapping
- offset: TextSize,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = profile::span("descend_into_macros");
- let relative_token_offset = token.text_range().start().checked_sub(offset);
- let parent = match token.parent() {
+ let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(it) => it,
None => return,
};
- let sa = match self.analyze_no_infer(&parent) {
- Some(it) => it,
- None => return,
+
+ let span = match sa.file_id.file_id() {
+ Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ None => {
+ stdx::never!();
+ return;
+ }
};
- let def_map = sa.resolver.def_map();
- let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
+ let def_map = sa.resolver.def_map();
- let mut process_expansion_for_token =
- |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
- let expansion_info = cache
- .entry(macro_file)
- .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
- .as_ref()?;
+ let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- {
- let InFile { file_id, value } = expansion_info.expanded();
- self.cache(value, file_id);
- }
+ {
+ let InMacroFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id.into());
+ }
- let mapped_tokens = expansion_info.map_token_down(
- self.db.upcast(),
- item,
- token,
- relative_token_offset,
- )?;
- let len = stack.len();
-
- // requeue the tokens we got from mapping our current token down
- stack.extend(mapped_tokens);
- // if the length changed we have found a mapping for the token
- (stack.len() != len).then_some(())
- };
+ let InMacroFile { file_id, value: mapped_tokens } =
+ expansion_info.map_range_down(span)?;
+ let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
- // Remap the next token in the queue into a macro call its in, if it is not being remapped
- // either due to not being in a macro-call or because its unused push it into the result vec,
- // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
- while let Some(token) = stack.pop() {
- self.db.unwind_if_cancelled();
- let was_not_remapped = (|| {
- // First expand into attribute invocations
- let containing_attribute_macro_call = self.with_ctx(|ctx| {
- token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
- if item.attrs().next().is_none() {
- // Don't force populate the dyn cache for items that don't have an attribute anyways
- return None;
- }
- Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
- })
- });
- if let Some((call_id, item)) = containing_attribute_macro_call {
- let file_id = call_id.as_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- Some(item),
- token.as_ref(),
- );
- }
+ // if the length changed we have found a mapping for the token
+ let res = mapped_tokens.is_empty().not().then_some(());
+ // requeue the tokens we got from mapping our current token down
+ stack.push((HirFileId::from(file_id), mapped_tokens));
+ res
+ };
- // Then check for token trees, that means we are either in a function-like macro or
- // secondary attribute inputs
- let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
- let parent = tt.syntax().parent()?;
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
+
+ while let Some((file_id, mut tokens)) = stack.pop() {
+ while let Some(token) = tokens.pop() {
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ return None;
+ }
+ Some((
+ ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
+ item,
+ ))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_macro_file();
+ let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
+ hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
+ invoc_attr_index.ast_index()
+ }
+ _ => 0,
+ };
+ // FIXME: here, the attribute's text range is used to strip away all
+                        // entries from the start of the attribute "list" up to the invoking
+ // attribute. But in
+ // ```
+ // mod foo {
+ // #![inner]
+ // }
+ // ```
+                        // we don't want to strip away stuff in the `mod foo {` range; that is,
+                        // here, if the id corresponds to an inner attribute, we have to strip all
+                        // text ranges of the outer ones, and then all of the inner ones up
+                        // to the invoking attribute, so that the text in between is ignored.
+ let text_range = item.syntax().text_range();
+ let start = collect_attrs(&item)
+ .nth(attr_id)
+ .map(|attr| match attr.1 {
+ Either::Left(it) => it.syntax().text_range().start(),
+ Either::Right(it) => it.syntax().text_range().start(),
+ })
+ .unwrap_or_else(|| text_range.start());
+ let text_range = TextRange::new(start, text_range.end());
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ return process_expansion_for_token(&mut stack, file_id);
+ }
- if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
- }
- if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
- }
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
- if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
- let mcall = token.with_value(macro_call);
- let file_id = match mcache.get(&mcall) {
- Some(&it) => it,
- None => {
- let it = sa.expand(self.db, mcall.as_ref())?;
- mcache.insert(mcall, it);
- it
- }
- };
- process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
- } else if let Some(meta) = ast::Meta::cast(parent) {
- // attribute we failed expansion for earlier, this might be a derive invocation
- // or derive helper attribute
- let attr = meta.parent_attr()?;
-
- let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
- // this might be a derive, or a derive helper on an ADT
- let derive_call = self.with_ctx(|ctx| {
- // so try downmapping the token into the pseudo derive expansion
- // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
- ctx.attr_to_derive_macro_call(
- token.with_value(&adt),
- token.with_value(attr.clone()),
- )
- .map(|(_, call_id, _)| call_id)
- });
-
- match derive_call {
- Some(call_id) => {
- // resolved to a derive
- let file_id = call_id.as_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- Some(adt.into()),
- token.as_ref(),
- );
+ if tt.left_delimiter_token().map_or(false, |it| it == token) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
+ InFile::new(file_id, macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
}
- None => Some(adt),
- }
- } else {
- // Otherwise this could be a derive helper on a variant or field
- if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ };
+ let text_range = tt.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ process_expansion_for_token(&mut stack, file_id)
+ } else if let Some(meta) = ast::Meta::cast(parent) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
{
- field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
- } else if let Some(field) =
- attr.syntax().parent().and_then(ast::TupleField::cast)
- {
- field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
- } else if let Some(variant) =
- attr.syntax().parent().and_then(ast::Variant::cast)
- {
- variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ InFile::new(file_id, &adt),
+ InFile::new(file_id, attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_macro_file();
+ let text_range = attr.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ return process_expansion_for_token(&mut stack, file_id);
+ }
+ None => Some(adt),
+ }
} else {
- None
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
+ return None;
}
- }?;
- if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
- return None;
- }
- // Not an attribute, nor a derive, so it's either a builtin or a derive helper
- // Try to resolve to a derive helper and downmap
- let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
- let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
- let helpers =
- def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
- let item = Some(adt.into());
- let mut res = None;
- for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
- res = res.or(process_expansion_for_token(
- &mut stack,
- derive.as_file(),
- item.clone(),
- token.as_ref(),
- ));
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name =
+ attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(file_id).ast_id(&adt);
+ let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
+ let mut res = None;
+ for (.., derive) in
+ helpers.iter().filter(|(helper, ..)| *helper == attr_name)
+ {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_macro_file(),
+ ));
+ }
+ res
+ } else {
+ None
}
- res
- } else {
- None
- }
- })()
- .is_none();
+ })()
+ .is_none();
- if was_not_remapped && f(token) {
- break;
+ if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
+ break;
+ }
}
}
}
@@ -712,7 +818,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
- .map(move |token| self.descend_into_macros(token, offset))
+ .map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
@@ -737,14 +843,16 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
+ .filter(|(_, ctx)| ctx.is_root())
+ .map(TupleExt::head)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
- |InFile { file_id, value }| {
- self.cache(find_root(value.syntax()), file_id);
+ |InRealFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id.into());
value
},
)
@@ -755,8 +863,8 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
- |InFile { file_id, value }| {
- self.cache(find_root(&value), file_id);
+ |InRealFile { file_id, value }| {
+ self.cache(find_root(&value), file_id.into());
value
},
)
@@ -787,7 +895,7 @@ impl<'db> SemanticsImpl<'db> {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
self.cache(value.clone(), file_id);
- file_id.call_node(db)
+ Some(file_id.macro_file()?.call_node(db))
}
}
})
@@ -840,10 +948,10 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
- let ty = hir_ty::TyLoweringContext::new(
+ let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
self.db,
&analyze.resolver,
- analyze.resolver.module().into(),
+ analyze.resolver.type_owner(),
)
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
@@ -851,9 +959,9 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?;
- let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
- let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
- let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let span_map = self.db.span_map(analyze.file_id);
+ let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
+ let hir_path = Path::from_src(&ctx, path.clone())?;
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
TypeNs::TraitId(id) => Some(Trait { id }),
_ => None,
@@ -937,14 +1045,15 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
- fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
- fn resolve_method_call_fallback(
+ /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
+ pub fn resolve_method_call_fallback(
&self,
call: &ast::MethodCallExpr,
- ) -> Option<Either<FunctionId, FieldId>> {
+ ) -> Option<Either<Function, Field>> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
@@ -976,6 +1085,13 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_field(self.db, field)
}
+ pub fn resolve_field_fallback(
+ &self,
+ field: &ast::FieldExpr,
+ ) -> Option<Either<Field, Function>> {
+ self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
+ }
+
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
@@ -1037,7 +1153,7 @@ impl<'db> SemanticsImpl<'db> {
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
- let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
+ let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
f(&mut ctx)
}
@@ -1187,7 +1303,7 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
- let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let func = self.resolve_method_call(method_call_expr)?;
let res = match func.self_param(self.db)?.access(self.db) {
Access::Shared | Access::Exclusive => true,
Access::Owned => false,
@@ -1451,7 +1567,7 @@ impl SemanticsScope<'_> {
/// necessary a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
- let path = Path::from_src(path.clone(), &ctx)?;
+ let path = Path::from_src(&ctx, path.clone())?;
resolve_hir_path(self.db, &self.resolver, &path)
}
@@ -1478,6 +1594,10 @@ impl SemanticsScope<'_> {
pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
self.resolver.extern_crate_decls_in_scope(self.db.upcast())
}
+
+ pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
+ self.resolver.impl_def() == other.resolver.impl_def()
+ }
}
#[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index aabda3655..df8c1e904 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -97,7 +97,7 @@ use hir_def::{
FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::{impl_from, never};
@@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap
pub(super) struct SourceToDefCtx<'a, 'b> {
pub(super) db: &'b dyn HirDatabase,
- pub(super) cache: &'a mut SourceToDefCache,
+ pub(super) dynmap_cache: &'a mut SourceToDefCache,
}
impl SourceToDefCtx<'_, '_> {
@@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db;
- self.cache
+ self.dynmap_cache
.entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id))
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index f29fb1edf..d05118bbc 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -26,11 +26,10 @@ use hir_def::{
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
- hygiene::Hygiene,
mod_path::path,
name,
name::{AsName, Name},
- HirFileId, InFile,
+ HirFileId, InFile, MacroFileId, MacroFileIdExt,
};
use hir_ty::{
diagnostics::{
@@ -236,9 +235,9 @@ impl SourceAnalyzer {
_db: &dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<BindingMode> {
- let binding_id = self.binding_id_of_pat(pat)?;
+ let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer.as_ref()?;
- infer.binding_modes.get(binding_id).map(|bm| match bm {
+ infer.binding_modes.get(id).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -281,25 +280,49 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
- Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
+ Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
}
pub(crate) fn resolve_method_call_fallback(
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<Either<FunctionId, FieldId>> {
+ ) -> Option<Either<Function, Field>> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let inference_result = self.infer.as_ref()?;
match inference_result.method_resolution(expr_id) {
- Some((f_in_trait, substs)) => {
- Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
- }
- None => inference_result.field_resolution(expr_id).map(Either::Right),
+ Some((f_in_trait, substs)) => Some(Either::Left(
+ self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(),
+ )),
+ None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right),
+ }
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_field_fallback(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Either<Field, Function>> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ let inference_result = self.infer.as_ref()?;
+ match inference_result.field_resolution(expr_id) {
+ Some(field) => Some(Either::Left(field.into())),
+ None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
+ Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into())
+ }),
}
}
@@ -418,15 +441,6 @@ impl SourceAnalyzer {
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
- pub(crate) fn resolve_field(
- &self,
- db: &dyn HirDatabase,
- field: &ast::FieldExpr,
- ) -> Option<Field> {
- let expr_id = self.expr_id(db, &field.clone().into())?;
- self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
- }
-
pub(crate) fn resolve_record_field(
&self,
db: &dyn HirDatabase,
@@ -484,7 +498,7 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
- let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|(it, _)| it.into())
@@ -596,9 +610,8 @@ impl SourceAnalyzer {
}
// This must be a normal source file rather than macro file.
- let hygiene = Hygiene::new(db.upcast(), self.file_id);
- let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
- let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
+ let hir_path = Path::from_src(&ctx, path.clone())?;
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@@ -755,14 +768,15 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
- ) -> Option<HirFileId> {
+ ) -> Option<MacroFileId> {
let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?;
- Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ // why the 64?
+ Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
}
pub(crate) fn resolve_variant(
@@ -821,6 +835,52 @@ impl SourceAnalyzer {
false
}
+ pub(crate) fn resolve_offset_in_format_args(
+ &self,
+ db: &dyn HirDatabase,
+ format_args: InFile<&ast::FormatArgsExpr>,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ let implicits = self.body_source_map()?.implicit_format_args(format_args)?;
+ implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
+ (
+ *range,
+ resolve_hir_value_path(
+ db,
+ &self.resolver,
+ self.resolver.body_owner(),
+ &Path::from_known_path_with_no_generic(ModPath::from_segments(
+ PathKind::Plain,
+ Some(name.clone()),
+ )),
+ ),
+ )
+ })
+ }
+
+ pub(crate) fn as_format_args_parts<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ format_args: InFile<&ast::FormatArgsExpr>,
+ ) -> Option<impl Iterator<Item = (TextRange, Option<PathResolution>)> + 'a> {
+ Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map(
+ move |(range, name)| {
+ (
+ *range,
+ resolve_hir_value_path(
+ db,
+ &self.resolver,
+ self.resolver.body_owner(),
+ &Path::from_known_path_with_no_generic(ModPath::from_segments(
+ PathKind::Plain,
+ Some(name.clone()),
+ )),
+ ),
+ )
+ },
+ ))
+ }
+
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
@@ -888,17 +948,18 @@ fn scope_for_offset(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ let InFile { file_id, value } = source_map.expr_syntax(id).ok()?;
if from_file == file_id {
return Some((value.text_range(), scope));
}
// FIXME handle attribute expansion
- let source = iter::successors(file_id.call_node(db.upcast()), |it| {
- it.file_id.call_node(db.upcast())
- })
- .find(|it| it.file_id == from_file)
- .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ let source =
+ iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
+ Some(it.file_id.macro_file()?.call_node(db.upcast()))
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
Some((source.value.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
@@ -923,7 +984,7 @@ fn adjust(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let source = source_map.expr_syntax(*id).ok()?;
+ let source = source_map.expr_syntax(id).ok()?;
// FIXME: correctly handle macro expansion
if source.file_id != from_file {
return None;
@@ -979,8 +1040,9 @@ fn resolve_hir_path_(
let types = || {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => {
- let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
- .lower_ty_ext(type_ref);
+ let (_, res) =
+ TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+ .lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
@@ -1039,24 +1101,7 @@ fn resolve_hir_path_(
};
let body_owner = resolver.body_owner();
- let values = || {
- resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
- let res = match val {
- ValueNs::LocalBinding(binding_id) => {
- let var = Local { parent: body_owner?, binding_id };
- PathResolution::Local(var)
- }
- ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
- ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
- ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
- ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
- ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
- ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
- ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
- };
- Some(res)
- })
- };
+ let values = || resolve_hir_value_path(db, resolver, body_owner, path);
let items = || {
resolver
@@ -1076,6 +1121,30 @@ fn resolve_hir_path_(
.or_else(macros)
}
+fn resolve_hir_value_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ body_owner: Option<DefWithBodyId>,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(binding_id) => {
+ let var = Local { parent: body_owner?, binding_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+}
+
/// Resolves a path where we know it is a qualifier of another path.
///
/// For example, if we have:
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index ca7874c36..a2a30edeb 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -9,7 +9,7 @@ use hir_def::{
};
use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase;
-use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr};
use crate::{Module, ModuleDef, Semantics};
@@ -23,6 +23,7 @@ pub struct FileSymbol {
pub loc: DeclarationLocation,
pub container_name: Option<SmolStr>,
pub is_alias: bool,
+ pub is_assoc: bool,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -32,7 +33,7 @@ pub struct DeclarationLocation {
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
- pub name_ptr: SyntaxNodePtr,
+ pub name_ptr: AstPtr<syntax::ast::Name>,
}
impl DeclarationLocation {
@@ -49,15 +50,6 @@ impl DeclarationLocation {
let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range(db.upcast())
}
-
- pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
- if let Some(file_id) = self.hir_file_id.file_id() {
- // fast path to prevent parsing
- return Some(FileRange { file_id, range: self.name_ptr.text_range() });
- }
- let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
- node.as_ref().original_file_range_opt(db.upcast())
- }
}
fn resolve_node(
@@ -130,34 +122,34 @@ impl<'a> SymbolCollector<'a> {
match module_def_id {
ModuleDefId::ModuleId(id) => self.push_module(id),
ModuleDefId::FunctionId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
- ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id),
+ ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
ModuleDefId::ConstId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::StaticId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::TraitId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_trait(id);
}
ModuleDefId::TraitAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::TypeAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::MacroId(id) => match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
},
// Don't index these.
ModuleDefId::BuiltinType(_) => {}
@@ -190,7 +182,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
- name_ptr: SyntaxNodePtr::new(name.syntax()),
+ name_ptr: AstPtr::new(&name),
};
self.symbols.push(FileSymbol {
@@ -199,6 +191,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
});
}
@@ -211,9 +204,9 @@ impl<'a> SymbolCollector<'a> {
for &id in id {
if id.module(self.db.upcast()) == module_id {
match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
}
}
}
@@ -275,13 +268,13 @@ impl<'a> SymbolCollector<'a> {
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
match assoc_item_id {
- AssocItemId::FunctionId(id) => self.push_decl(id),
- AssocItemId::ConstId(id) => self.push_decl(id),
- AssocItemId::TypeAliasId(id) => self.push_decl(id),
+ AssocItemId::FunctionId(id) => self.push_decl(id, true),
+ AssocItemId::ConstId(id) => self.push_decl(id, true),
+ AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
}
}
- fn push_decl<L>(&mut self, id: L)
+ fn push_decl<L>(&mut self, id: L, is_assoc: bool)
where
L: Lookup + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
@@ -294,7 +287,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ name_ptr: AstPtr::new(&name_node),
};
if let Some(attrs) = def.attrs(self.db) {
@@ -305,6 +298,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc,
});
}
}
@@ -315,6 +309,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc,
});
}
@@ -327,7 +322,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ name_ptr: AstPtr::new(&name_node),
};
let def = ModuleDef::Module(module_id.into());
@@ -340,6 +335,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc: false,
});
}
}
@@ -350,6 +346,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
index 447e38f91..a622ec1a9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -14,8 +14,8 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
-either = "1.7.0"
+itertools.workspace = true
+either.workspace = true
smallvec.workspace = true
# local deps
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
index b273ebc85..fbe17dbfd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -14,5 +14,6 @@ pub struct AssistConfig {
pub allowed: Option<Vec<AssistKind>>,
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+ pub prefer_prelude: bool,
pub assist_emit_must_use: bool,
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index c0e5429a2..410c62310 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -2249,4 +2249,35 @@ impl b::LocalTrait for B {
"#,
)
}
+
+ #[test]
+ fn doc_hidden_nondefault_member() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+//- /lib.rs crate:b new_source_root:local
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_non_default() -> Option<()>;
+
+ #[doc(hidden)]
+ fn skip_default() -> Option<()> {
+ todo!()
+ }
+}
+
+//- /main.rs crate:a deps:b
+struct B;
+impl b::Loc$0alTrait for B {}
+ "#,
+ r#"
+struct B;
+impl b::LocalTrait for B {
+ fn no_skip_non_default() -> Option<()> {
+ ${0:todo!()}
+ }
+}
+ "#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 3b162d7c4..2374da9a3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -88,7 +88,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter()
.filter_map(|variant| {
Some((
- build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)?,
+ build_pat(
+ ctx.db(),
+ module,
+ variant,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?,
variant.should_be_hidden(ctx.db(), module.krate()),
))
})
@@ -140,7 +146,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| {
- build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)
+ build_pat(
+ ctx.db(),
+ module,
+ variant,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
});
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
@@ -173,7 +185,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| {
- build_pat(ctx.db(), module, variant.clone(), ctx.config.prefer_no_std)
+ build_pat(
+ ctx.db(),
+ module,
+ variant.clone(),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
});
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
})
@@ -273,9 +291,10 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
syntax::SyntaxElement::Token(it) => {
// Don't have a way to make tokens mut, so instead make the parent mut
// and find the token again
- let parent = edit.make_syntax_mut(it.parent().unwrap());
+ let parent =
+ edit.make_syntax_mut(it.parent().expect("Token must have a parent."));
let mut_token =
- parent.covering_element(it.text_range()).into_token().unwrap();
+ parent.covering_element(it.text_range()).into_token().expect("Covering element cannot be found. Range may be beyond the current node's range");
syntax::SyntaxElement::from(mut_token)
}
@@ -439,28 +458,35 @@ fn build_pat(
module: hir::Module,
var: ExtendedVariant,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ast::Pat> {
match var {
ExtendedVariant::Variant(var) => {
- let path =
- mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var), prefer_no_std)?);
+ let path = mod_path_to_ast(&module.find_use_path(
+ db,
+ ModuleDef::from(var),
+ prefer_no_std,
+ prefer_prelude,
+ )?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
- let pat: ast::Pat = match var.source(db)?.value.kind() {
+ Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => {
let pats =
iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count());
make::tuple_struct_pat(path, pats).into()
}
ast::StructKind::Record(field_list) => {
- let pats = field_list
- .fields()
- .map(|f| make::ext::simple_ident_pat(f.name().unwrap()).into());
+ let pats = field_list.fields().map(|f| {
+ make::ext::simple_ident_pat(
+ f.name().expect("Record field must have a name"),
+ )
+ .into()
+ });
make::record_pat(path, pats).into()
}
ast::StructKind::Unit => make::path_pat(path),
- };
- Some(pat)
+ })
}
ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
@@ -1941,4 +1967,35 @@ fn main() {
"#,
);
}
+
+ /// See [`discussion`](https://github.com/rust-lang/rust-analyzer/pull/15594#discussion_r1322960614)
+ #[test]
+ fn missing_field_name() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A {
+ A,
+ Missing { a: u32, : u32, c: u32 }
+}
+
+fn a() {
+ let b = A::A;
+ match b$0 {}
+}"#,
+ r#"
+enum A {
+ A,
+ Missing { a: u32, : u32, c: u32 }
+}
+
+fn a() {
+ let b = A::A;
+ match b {
+ $0A::A => todo!(),
+ A::Missing { a, u32, c } => todo!(),
+ }
+}"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
index 36f68d176..88fd0b1b7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -1,6 +1,9 @@
+use either::Either;
use ide_db::defs::{Definition, NameRefClass};
-use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxKind, T};
+use syntax::{
+ ast::{self, make, HasArgList},
+ ted, AstNode,
+};
use crate::{
assist_context::{AssistContext, Assists},
@@ -25,21 +28,45 @@ use crate::{
// }
// ```
pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
- let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
- if arg_list.args().next().is_some() {
- return None;
- }
- cov_mark::hit!(add_turbo_fish_after_call);
- cov_mark::hit!(add_type_ascription_after_call);
- arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
- })?;
- let next_token = ident.next_token()?;
- if next_token.kind() == T![::] {
+ let turbofish_target =
+ ctx.find_node_at_offset::<ast::PathSegment>().map(Either::Left).or_else(|| {
+ let callable_expr = ctx.find_node_at_offset::<ast::CallableExpr>()?;
+
+ if callable_expr.arg_list()?.args().next().is_some() {
+ return None;
+ }
+
+ cov_mark::hit!(add_turbo_fish_after_call);
+ cov_mark::hit!(add_type_ascription_after_call);
+
+ match callable_expr {
+ ast::CallableExpr::Call(it) => {
+ let ast::Expr::PathExpr(path) = it.expr()? else {
+ return None;
+ };
+
+ Some(Either::Left(path.path()?.segment()?))
+ }
+ ast::CallableExpr::MethodCall(it) => Some(Either::Right(it)),
+ }
+ })?;
+
+ let already_has_turbofish = match &turbofish_target {
+ Either::Left(path_segment) => path_segment.generic_arg_list().is_some(),
+ Either::Right(method_call) => method_call.generic_arg_list().is_some(),
+ };
+
+ if already_has_turbofish {
cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
return None;
}
- let name_ref = ast::NameRef::cast(ident.parent()?)?;
+
+ let name_ref = match &turbofish_target {
+ Either::Left(path_segment) => path_segment.name_ref()?,
+ Either::Right(method_call) => method_call.name_ref()?,
+ };
+ let ident = name_ref.ident_token()?;
+
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
NameRefClass::Definition(def) => def,
NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
@@ -58,20 +85,27 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
if let_stmt.colon_token().is_none() {
- let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end();
- let semi_pos = let_stmt.syntax().last_token()?.text_range().end();
+ if let_stmt.pat().is_none() {
+ return None;
+ }
acc.add(
AssistId("add_type_ascription", AssistKind::RefactorRewrite),
"Add `: _` before assignment operator",
ident.text_range(),
- |builder| {
+ |edit| {
+ let let_stmt = edit.make_mut(let_stmt);
+
if let_stmt.semicolon_token().is_none() {
- builder.insert(semi_pos, ";");
+ ted::append_child(let_stmt.syntax(), make::tokens::semicolon());
}
- match ctx.config.snippet_cap {
- Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"),
- None => builder.insert(type_pos, ": _"),
+
+ let placeholder_ty = make::ty_placeholder().clone_for_update();
+
+ let_stmt.set_ty(Some(placeholder_ty.clone()));
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ edit.add_placeholder_snippet(cap, placeholder_ty);
}
},
)?
@@ -91,38 +125,46 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
"Add `::<>`",
ident.text_range(),
- |builder| {
- builder.trigger_signature_help();
- match ctx.config.snippet_cap {
- Some(cap) => {
- let fish_head = get_snippet_fish_head(number_of_arguments);
- let snip = format!("::<{fish_head}>");
- builder.insert_snippet(cap, ident.text_range().end(), snip)
+ |edit| {
+ edit.trigger_signature_help();
+
+ let new_arg_list = match turbofish_target {
+ Either::Left(path_segment) => {
+ edit.make_mut(path_segment).get_or_create_generic_arg_list()
+ }
+ Either::Right(method_call) => {
+ edit.make_mut(method_call).get_or_create_generic_arg_list()
}
- None => {
- let fish_head = std::iter::repeat("_").take(number_of_arguments).format(", ");
- let snip = format!("::<{fish_head}>");
- builder.insert(ident.text_range().end(), snip);
+ };
+
+ let fish_head = get_fish_head(number_of_arguments).clone_for_update();
+
+ // Note: we need to replace the `new_arg_list` instead of being able to use something like
+ // `GenericArgList::add_generic_arg` as `PathSegment::get_or_create_generic_arg_list`
+ // always creates a non-turbofish form generic arg list.
+ ted::replace(new_arg_list.syntax(), fish_head.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ for arg in fish_head.generic_args() {
+ edit.add_placeholder_snippet(cap, arg)
}
}
},
)
}
-/// This will create a snippet string with tabstops marked
-fn get_snippet_fish_head(number_of_arguments: usize) -> String {
- let mut fish_head = (1..number_of_arguments)
- .format_with("", |i, f| f(&format_args!("${{{i}:_}}, ")))
- .to_string();
-
- // tabstop 0 is a special case and always the last one
- fish_head.push_str("${0:_}");
- fish_head
+/// This will create a turbofish generic arg list corresponding to the number of arguments
+fn get_fish_head(number_of_arguments: usize) -> ast::GenericArgList {
+ let args = (0..number_of_arguments).map(|_| make::type_arg(make::ty_placeholder()).into());
+ make::turbofish_generic_arg_list(args)
}
#[cfg(test)]
mod tests {
- use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
+ use crate::tests::{
+ check_assist, check_assist_by_label, check_assist_not_applicable,
+ check_assist_not_applicable_by_label,
+ };
use super::*;
@@ -364,6 +406,20 @@ fn main() {
}
#[test]
+ fn add_type_ascription_missing_pattern() {
+ check_assist_not_applicable_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let = make$0()
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
fn add_turbo_fish_function_lifetime_parameter() {
check_assist(
add_turbo_fish,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
index 66bc2f6da..2d41243c2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -1,7 +1,13 @@
use std::collections::VecDeque;
+use ide_db::{
+ assists::GroupLabel,
+ famous_defs::FamousDefs,
+ source_change::SourceChangeBuilder,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+};
use syntax::{
- ast::{self, AstNode, Expr::BinExpr},
+ ast::{self, make, AstNode, Expr::BinExpr, HasArgList},
ted::{self, Position},
SyntaxKind,
};
@@ -89,7 +95,8 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let dm_lhs = demorganed.lhs()?;
- acc.add(
+ acc.add_group(
+ &GroupLabel("Apply De Morgan's law".to_string()),
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
@@ -143,6 +150,127 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
)
}
+// Assist: apply_demorgan_iterator
+//
+// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law] to
+// `Iterator::all` and `Iterator::any`.
+//
+// This transforms expressions of the form `!iter.any(|x| predicate(x))` into
+// `iter.all(|x| !predicate(x))` and vice versa. This also works the other way for
+// `Iterator::all` into `Iterator::any`.
+//
+// ```
+// # //- minicore: iterator
+// fn main() {
+// let arr = [1, 2, 3];
+// if !arr.into_iter().$0any(|num| num == 4) {
+// println!("foo");
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let arr = [1, 2, 3];
+// if arr.into_iter().all(|num| num != 4) {
+// println!("foo");
+// }
+// }
+// ```
+pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let method_call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ let (name, arg_expr) = validate_method_call_expr(ctx, &method_call)?;
+
+ let ast::Expr::ClosureExpr(closure_expr) = arg_expr else { return None };
+ let closure_body = closure_expr.body()?;
+
+ let op_range = method_call.syntax().text_range();
+ let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());
+ acc.add_group(
+ &GroupLabel("Apply De Morgan's law".to_string()),
+ AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
+ label,
+ op_range,
+ |edit| {
+ // replace the method name
+ let new_name = match name.text().as_str() {
+ "all" => make::name_ref("any"),
+ "any" => make::name_ref("all"),
+ _ => unreachable!(),
+ }
+ .clone_for_update();
+ edit.replace_ast(name, new_name);
+
+ // negate all tail expressions in the closure body
+ let tail_cb = &mut |e: &_| tail_cb_impl(edit, e);
+ walk_expr(&closure_body, &mut |expr| {
+ if let ast::Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&closure_body, tail_cb);
+
+ // negate the whole method call
+ if let Some(prefix_expr) = method_call
+ .syntax()
+ .parent()
+ .and_then(ast::PrefixExpr::cast)
+ .filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
+ {
+ edit.delete(
+ prefix_expr
+ .op_token()
+ .expect("prefix expression always has an operator")
+ .text_range(),
+ );
+ } else {
+ edit.insert(method_call.syntax().text_range().start(), "!");
+ }
+ },
+ )
+}
+
+/// Ensures that the method call is to `Iterator::all` or `Iterator::any`.
+fn validate_method_call_expr(
+ ctx: &AssistContext<'_>,
+ method_call: &ast::MethodCallExpr,
+) -> Option<(ast::NameRef, ast::Expr)> {
+ let name_ref = method_call.name_ref()?;
+ if name_ref.text() != "all" && name_ref.text() != "any" {
+ return None;
+ }
+ let arg_expr = method_call.arg_list()?.args().next()?;
+
+ let sema = &ctx.sema;
+
+ let receiver = method_call.receiver()?;
+ let it_type = sema.type_of_expr(&receiver)?.adjusted();
+ let module = sema.scope(receiver.syntax())?.module();
+ let krate = module.krate();
+
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+ it_type.impls_trait(sema.db, iter_trait, &[]).then_some((name_ref, arg_expr))
+}
+
+fn tail_cb_impl(edit: &mut SourceChangeBuilder, e: &ast::Expr) {
+ match e {
+ ast::Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(edit, e))
+ }
+ }
+ ast::Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
+ }
+ e => {
+ let inverted_body = invert_boolean_expression(e.clone());
+ edit.replace(e.syntax().text_range(), inverted_body.syntax().text());
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -255,4 +383,206 @@ fn f() { !(S <= S || S < S) }
"fn() { let x = a && b && c; }",
)
}
+
+    // `all(!=)` becomes `!any(==)`: the call is negated and the predicate inverted.
+    #[test]
+    fn demorgan_iterator_any_all_reverse() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if arr.into_iter().all(|num| num $0!= 4) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr.into_iter().any(|num| num == 4) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // An already-negated `!all(>)` loses its `!` and becomes `any(<=)`.
+    #[test]
+    fn demorgan_iterator_all_any() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr.into_iter().$0all(|num| num > 3) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [1, 2, 3];
+    if arr.into_iter().any(|num| num <= 3) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // A multi-term `&&` predicate is wrapped in `!(...)` rather than distributed.
+    #[test]
+    fn demorgan_iterator_multiple_terms() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr.into_iter().$0any(|num| num > 3 && num == 23 && num <= 30) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [1, 2, 3];
+    if arr.into_iter().all(|num| !(num > 3 && num == 23 && num <= 30)) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // An inner `!(...)` cancels against the inversion: no double negation remains.
+    #[test]
+    fn demorgan_iterator_double_negation() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr.into_iter().$0all(|num| !(num > 3)) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [1, 2, 3];
+    if arr.into_iter().any(|num| num > 3) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // An already-parenthesized predicate is not wrapped in a second pair of parens.
+    #[test]
+    fn demorgan_iterator_double_parens() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr.into_iter().$0any(|num| (num > 3 && (num == 1 || num == 2))) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [1, 2, 3];
+    if arr.into_iter().all(|num| !(num > 3 && (num == 1 || num == 2))) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // Multi-line receiver chain: the `!` is inserted before the whole chain.
+    #[test]
+    fn demorgan_iterator_multiline() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if arr
+        .into_iter()
+        .all$0(|num| !num.is_negative())
+    {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr
+        .into_iter()
+        .any(|num| num.is_negative())
+    {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // Block-bodied closure: every tail expression of each branch is inverted.
+    #[test]
+    fn demorgan_iterator_block_closure() {
+        check_assist(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [-1, 1, 2, 3];
+    if arr.into_iter().all(|num: i32| {
+        $0if num.is_positive() {
+            num <= 3
+        } else {
+            num >= -1
+        }
+    }) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+fn main() {
+    let arr = [-1, 1, 2, 3];
+    if !arr.into_iter().any(|num: i32| {
+        if num.is_positive() {
+            num > 3
+        } else {
+            num < -1
+        }
+    }) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
+
+    // Methods other than `all`/`any` (here `map`) must not trigger the assist.
+    #[test]
+    fn demorgan_iterator_wrong_method() {
+        check_assist_not_applicable(
+            apply_demorgan_iterator,
+            r#"
+//- minicore: iterator
+fn main() {
+    let arr = [1, 2, 3];
+    if !arr.into_iter().$0map(|num| num > 3) {
+        println!("foo");
+    }
+}
+"#,
+        );
+    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
index 7acf2ea0a..f508c42c5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
@@ -5,7 +5,7 @@ use ide_db::{
helpers::mod_path_to_ast,
imports::{
import_assets::{ImportAssets, ImportCandidate, LocatedImport},
- insert_use::{insert_use, ImportScope},
+ insert_use::{insert_use, insert_use_as_alias, ImportScope},
},
};
use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
@@ -93,6 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_no_std,
);
if proposed_imports.is_empty() {
return None;
@@ -129,10 +130,12 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
for import in proposed_imports {
let import_path = import.import_path;
+ let (assist_id, import_name) =
+ (AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db()));
acc.add_group(
&group_label,
- AssistId("auto_import", AssistKind::QuickFix),
- format!("Import `{}`", import_path.display(ctx.db())),
+ assist_id,
+ format!("Import `{}`", import_name),
range,
|builder| {
let scope = match scope.clone() {
@@ -143,6 +146,38 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use);
},
);
+
+ match import_assets.import_candidate() {
+ ImportCandidate::TraitAssocItem(name) | ImportCandidate::TraitMethod(name) => {
+ let is_method =
+ matches!(import_assets.import_candidate(), ImportCandidate::TraitMethod(_));
+ let type_ = if is_method { "method" } else { "item" };
+ let group_label = GroupLabel(format!(
+ "Import a trait for {} {} by alias",
+ type_,
+ name.assoc_item_name.text()
+ ));
+ acc.add_group(
+ &group_label,
+ assist_id,
+ format!("Import `{} as _`", import_name),
+ range,
+ |builder| {
+ let scope = match scope.clone() {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+ insert_use_as_alias(
+ &scope,
+ mod_path_to_ast(&import_path),
+ &ctx.config.insert_use,
+ );
+ },
+ );
+ }
+ _ => {}
+ }
}
Some(())
}
@@ -253,7 +288,8 @@ mod tests {
};
use crate::tests::{
- check_assist, check_assist_not_applicable, check_assist_target, TEST_CONFIG,
+ check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target,
+ TEST_CONFIG,
};
fn check_auto_import_order(before: &str, order: &[&str]) {
@@ -705,7 +741,7 @@ fn main() {
#[test]
fn associated_trait_function() {
- check_assist(
+ check_assist_by_label(
auto_import,
r"
mod test_mod {
@@ -739,6 +775,44 @@ fn main() {
test_mod::TestStruct::test_function
}
",
+ "Import `test_mod::TestTrait`",
+ );
+
+ check_assist_by_label(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait as _;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function
+ }
+ ",
+ "Import `test_mod::TestTrait as _`",
);
}
@@ -776,7 +850,44 @@ fn main() {
#[test]
fn associated_trait_const() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait as _;
+
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST
+ }
+ ",
+ "Import `test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
mod test_mod {
@@ -810,6 +921,7 @@ fn main() {
test_mod::TestStruct::TEST_CONST
}
",
+ "Import `test_mod::TestTrait`",
);
}
@@ -847,7 +959,46 @@ fn main() {
#[test]
fn trait_method() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ ",
+ r"
+ use test_mod::TestTrait as _;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ "Import `test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
mod test_mod {
@@ -883,12 +1034,43 @@ fn main() {
test_struct.test_method()
}
",
+ "Import `test_mod::TestTrait`",
);
}
#[test]
fn trait_method_cross_crate() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait as _;
+
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ "Import `dep::test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@@ -915,12 +1097,41 @@ fn main() {
test_struct.test_method()
}
",
+ "Import `dep::test_mod::TestTrait`",
);
}
#[test]
fn assoc_fn_cross_crate() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::test_func$0tion
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait as _;
+
+ fn main() {
+ dep::test_mod::TestStruct::test_function
+ }
+ ",
+ "Import `dep::test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@@ -945,12 +1156,41 @@ fn main() {
dep::test_mod::TestStruct::test_function
}
",
+ "Import `dep::test_mod::TestTrait`",
);
}
#[test]
fn assoc_const_cross_crate() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::CONST$0
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const CONST: bool;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const CONST: bool = true;
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait as _;
+
+ fn main() {
+ dep::test_mod::TestStruct::CONST
+ }
+ ",
+ "Import `dep::test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@@ -975,6 +1215,7 @@ fn main() {
dep::test_mod::TestStruct::CONST
}
",
+ "Import `dep::test_mod::TestTrait`",
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs
new file mode 100644
index 000000000..0f2d1057c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs
@@ -0,0 +1,1675 @@
+use hir::ModuleDef;
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, UsageSearchResult},
+ source_change::SourceChangeBuilder,
+ FxHashSet,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{
+ self,
+ edit::IndentLevel,
+ edit_in_place::{AttrsOwnerEdit, Indent},
+ make, HasName,
+ },
+ ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
+};
+use text_edit::TextRange;
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: bool_to_enum
+//
+// This converts boolean local variables, fields, constants, and statics into a new
+// enum with two variants `Bool::True` and `Bool::False`, as well as replacing
+// all assignments with the variants and replacing all usages with `== Bool::True` or
+// `== Bool::False`.
+//
+// ```
+// fn main() {
+// let $0bool = true;
+//
+// if bool {
+// println!("foo");
+// }
+// }
+// ```
+// ->
+// ```
+// #[derive(PartialEq, Eq)]
+// enum Bool { True, False }
+//
+// fn main() {
+// let bool = Bool::True;
+//
+// if bool == Bool::True {
+// println!("foo");
+// }
+// }
+// ```
+pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Locate the bool declaration (local, const, static, or field) under the cursor.
+    let BoolNodeData { target_node, name, ty_annotation, initializer, definition } =
+        find_bool_node(ctx)?;
+    // Anchor the new enum at the nearest *named* module, not a block-expression module.
+    let target_module = ctx.sema.scope(&target_node)?.module().nearest_non_block_module(ctx.db());
+
+    let target = name.syntax().text_range();
+    acc.add(
+        AssistId("bool_to_enum", AssistKind::RefactorRewrite),
+        "Convert boolean to enum",
+        target,
+        |edit| {
+            // An explicit `: bool` annotation becomes `: Bool`.
+            if let Some(ty) = &ty_annotation {
+                cov_mark::hit!(replaces_ty_annotation);
+                edit.replace(ty.syntax().text_range(), "Bool");
+            }
+
+            // Rewrite the initializer to `Bool::True`/`Bool::False` (or an `if` expression).
+            if let Some(initializer) = initializer {
+                replace_bool_expr(edit, initializer);
+            }
+
+            // Emit the enum definition, then patch every usage site of the definition.
+            let usages = definition.usages(&ctx.sema).all();
+            add_enum_def(edit, ctx, &usages, target_node, &target_module);
+            replace_usages(edit, ctx, &usages, definition, &target_module);
+        },
+    )
+}
+
+/// Everything the assist needs to know about the bool being converted.
+struct BoolNodeData {
+    // Node the new enum definition is inserted before (let stmt, const, static, or ADT).
+    target_node: SyntaxNode,
+    name: ast::Name,
+    // Explicit `: bool` annotation, if any, to be rewritten to `Bool`.
+    ty_annotation: Option<ast::Type>,
+    // Initializer/body expression, if any, to be rewritten to enum variants.
+    initializer: Option<ast::Expr>,
+    definition: Definition,
+}
+
+/// Attempts to find an appropriate node to apply the action to.
+///
+/// Accepts, in order: a `let` binding with an identifier pattern, a `const`,
+/// a `static`, or a record field — each only when its type is exactly `bool`.
+fn find_bool_node(ctx: &AssistContext<'_>) -> Option<BoolNodeData> {
+    let name: ast::Name = ctx.find_node_at_offset()?;
+
+    if let Some(let_stmt) = name.syntax().ancestors().find_map(ast::LetStmt::cast) {
+        // Only plain `let ident = ...;` is supported, not tuple/struct patterns.
+        let bind_pat = match let_stmt.pat()? {
+            ast::Pat::IdentPat(pat) => pat,
+            _ => {
+                cov_mark::hit!(not_applicable_in_non_ident_pat);
+                return None;
+            }
+        };
+        let def = ctx.sema.to_def(&bind_pat)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_local);
+            return None;
+        }
+
+        Some(BoolNodeData {
+            target_node: let_stmt.syntax().clone(),
+            name,
+            ty_annotation: let_stmt.ty(),
+            initializer: let_stmt.initializer(),
+            definition: Definition::Local(def),
+        })
+    } else if let Some(const_) = name.syntax().parent().and_then(ast::Const::cast) {
+        let def = ctx.sema.to_def(&const_)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_const);
+            return None;
+        }
+
+        Some(BoolNodeData {
+            target_node: const_.syntax().clone(),
+            name,
+            ty_annotation: const_.ty(),
+            initializer: const_.body(),
+            definition: Definition::Const(def),
+        })
+    } else if let Some(static_) = name.syntax().parent().and_then(ast::Static::cast) {
+        let def = ctx.sema.to_def(&static_)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_static);
+            return None;
+        }
+
+        Some(BoolNodeData {
+            target_node: static_.syntax().clone(),
+            name,
+            ty_annotation: static_.ty(),
+            initializer: static_.body(),
+            definition: Definition::Static(def),
+        })
+    } else {
+        // Fall back to a record field; make sure the cursor is on the field
+        // *name*, not e.g. on its type.
+        let field = name.syntax().parent().and_then(ast::RecordField::cast)?;
+        if field.name()? != name {
+            return None;
+        }
+
+        // The enum is inserted before the whole enclosing ADT, not the field.
+        let adt = field.syntax().ancestors().find_map(ast::Adt::cast)?;
+        let def = ctx.sema.to_def(&field)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_field);
+            return None;
+        }
+        Some(BoolNodeData {
+            target_node: adt.syntax().clone(),
+            name,
+            ty_annotation: field.ty(),
+            initializer: None,
+            definition: Definition::Field(def),
+        })
+    }
+}
+
+/// Replaces a `bool`-typed expression in the edited file with its enum equivalent.
+fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr) {
+    let expr_range = expr.syntax().text_range();
+    let enum_expr = bool_expr_to_enum_expr(expr);
+    edit.replace(expr_range, enum_expr.syntax().text())
+}
+
+/// Converts an expression of type `bool` to one of the new enum type.
+///
+/// `true`/`false` literals map directly to `Bool::True`/`Bool::False`; any
+/// other expression is wrapped as `if <expr> { Bool::True } else { Bool::False }`.
+fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
+    let true_expr = make::expr_path(make::path_from_text("Bool::True")).clone_for_update();
+    let false_expr = make::expr_path(make::path_from_text("Bool::False")).clone_for_update();
+
+    if let ast::Expr::Literal(literal) = &expr {
+        match literal.kind() {
+            ast::LiteralKind::Bool(true) => true_expr,
+            ast::LiteralKind::Bool(false) => false_expr,
+            // Non-bool literals are left unchanged (shouldn't occur for bool-typed exprs).
+            _ => expr,
+        }
+    } else {
+        make::expr_if(
+            expr,
+            make::tail_only_block_expr(true_expr),
+            Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))),
+        )
+        .clone_for_update()
+    }
+}
+
+/// Replaces all usages of the target identifier, both when read and written to.
+///
+/// Works file by file; within a file, references are processed in reverse
+/// source order so earlier text edits don't invalidate later ranges. The
+/// else-if chain below is ordered from most to least specific usage shape.
+fn replace_usages(
+    edit: &mut SourceChangeBuilder,
+    ctx: &AssistContext<'_>,
+    usages: &UsageSearchResult,
+    target_definition: Definition,
+    target_module: &hir::Module,
+) {
+    for (file_id, references) in usages.iter() {
+        edit.edit_file(*file_id);
+
+        let refs_with_imports =
+            augment_references_with_imports(edit, ctx, references, target_module);
+
+        refs_with_imports.into_iter().rev().for_each(
+            |FileReferenceWithImport { range, old_name, new_name, import_data }| {
+                // replace the usages in patterns and expressions
+                if let Some(ident_pat) = old_name.syntax().ancestors().find_map(ast::IdentPat::cast)
+                {
+                    cov_mark::hit!(replaces_record_pat_shorthand);
+
+                    // A shorthand record pattern introduces a new bool local;
+                    // recurse to convert that binding's usages as well.
+                    let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local);
+                    if let Some(def) = definition {
+                        replace_usages(
+                            edit,
+                            ctx,
+                            &def.usages(&ctx.sema).all(),
+                            target_definition,
+                            target_module,
+                        )
+                    }
+                } else if let Some(initializer) = find_assignment_usage(&new_name) {
+                    cov_mark::hit!(replaces_assignment);
+
+                    // `x = <bool expr>;` — rewrite only the right-hand side.
+                    replace_bool_expr(edit, initializer);
+                } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&new_name) {
+                    cov_mark::hit!(replaces_negation);
+
+                    // `!x` reads become an equality check against the false variant.
+                    edit.replace(
+                        prefix_expr.syntax().text_range(),
+                        format!("{} == Bool::False", inner_expr),
+                    );
+                } else if let Some((record_field, initializer)) = old_name
+                    .as_name_ref()
+                    .and_then(ast::RecordExprField::for_field_name)
+                    .and_then(|record_field| ctx.sema.resolve_record_field(&record_field))
+                    .and_then(|(got_field, _, _)| {
+                        find_record_expr_usage(&new_name, got_field, target_definition)
+                    })
+                {
+                    cov_mark::hit!(replaces_record_expr);
+
+                    // `Struct { field: <bool expr> }` — rewrite the field initializer.
+                    let record_field = edit.make_mut(record_field);
+                    let enum_expr = bool_expr_to_enum_expr(initializer);
+                    record_field.replace_expr(enum_expr);
+                } else if let Some(pat) = find_record_pat_field_usage(&old_name) {
+                    match pat {
+                        ast::Pat::IdentPat(ident_pat) => {
+                            cov_mark::hit!(replaces_record_pat);
+
+                            // `Struct { field: binding }` — the binding is a new
+                            // bool local; recurse into its usages.
+                            let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local);
+                            if let Some(def) = definition {
+                                replace_usages(
+                                    edit,
+                                    ctx,
+                                    &def.usages(&ctx.sema).all(),
+                                    target_definition,
+                                    target_module,
+                                )
+                            }
+                        }
+                        ast::Pat::LiteralPat(literal_pat) => {
+                            cov_mark::hit!(replaces_literal_pat);
+
+                            // `Struct { field: true }` — replace the literal pattern.
+                            if let Some(expr) = literal_pat.literal().and_then(|literal| {
+                                literal.syntax().ancestors().find_map(ast::Expr::cast)
+                            }) {
+                                replace_bool_expr(edit, expr);
+                            }
+                        }
+                        // Wildcard patterns (`field: _`) need no rewriting.
+                        _ => (),
+                    }
+                } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&new_name)
+                {
+                    // Associated const in an impl: fix both its type and its body.
+                    edit.replace(ty_annotation.syntax().text_range(), "Bool");
+                    replace_bool_expr(edit, initializer);
+                } else if let Some(receiver) = find_method_call_expr_usage(&new_name) {
+                    // Method call on the bool (e.g. `x.then(..)`): parenthesize the
+                    // comparison so the call still parses.
+                    edit.replace(
+                        receiver.syntax().text_range(),
+                        format!("({} == Bool::True)", receiver),
+                    );
+                } else if new_name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
+                    // for any other usage in an expression, replace it with a check that it is the true variant
+                    if let Some((record_field, expr)) = new_name
+                        .as_name_ref()
+                        .and_then(ast::RecordExprField::for_field_name)
+                        .and_then(|record_field| {
+                            record_field.expr().map(|expr| (record_field, expr))
+                        })
+                    {
+                        record_field.replace_expr(
+                            make::expr_bin_op(
+                                expr,
+                                ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }),
+                                make::expr_path(make::path_from_text("Bool::True")),
+                            )
+                            .clone_for_update(),
+                        );
+                    } else {
+                        edit.replace(range, format!("{} == Bool::True", new_name.text()));
+                    }
+                }
+
+                // add imports across modules where needed
+                if let Some((import_scope, path)) = import_data {
+                    insert_use(&import_scope, path, &ctx.config.insert_use);
+                }
+            },
+        )
+    }
+}
+
+/// A reference to the target definition, paired with an optional import to add
+/// in the referencing module.
+struct FileReferenceWithImport {
+    range: TextRange,
+    // Name node from the original (immutable) tree, used for read-only inspection.
+    old_name: ast::NameLike,
+    // Mutable clone of the same node, used when the edit mutates the tree in place.
+    new_name: ast::NameLike,
+    // Scope and path for a `use` to insert, when the usage is in another module.
+    import_data: Option<(ImportScope, ast::Path)>,
+}
+
+/// Pairs every reference with the import (if any) its module will need in
+/// order to name the new `Bool` enum.
+fn augment_references_with_imports(
+    edit: &mut SourceChangeBuilder,
+    ctx: &AssistContext<'_>,
+    references: &[FileReference],
+    target_module: &hir::Module,
+) -> Vec<FileReferenceWithImport> {
+    // Track modules already handled so each gets at most one import.
+    let mut visited_modules = FxHashSet::default();
+
+    references
+        .iter()
+        .filter_map(|FileReference { range, name, .. }| {
+            let name = name.clone().into_name_like()?;
+            ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module()))
+        })
+        .map(|(range, name, ref_module)| {
+            let old_name = name.clone();
+            let new_name = edit.make_mut(name.clone());
+
+            // if the referenced module is not the same as the target one and has not been seen before, add an import
+            let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
+                && !visited_modules.contains(&ref_module)
+            {
+                visited_modules.insert(ref_module);
+
+                let import_scope =
+                    ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
+                // Build a path to the target module and append `Bool` to it.
+                let path = ref_module
+                    .find_use_path_prefixed(
+                        ctx.sema.db,
+                        ModuleDef::Module(*target_module),
+                        ctx.config.insert_use.prefix_kind,
+                        ctx.config.prefer_no_std,
+                        ctx.config.prefer_prelude,
+                    )
+                    .map(|mod_path| {
+                        make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))
+                    });
+
+                // Only propose an import when both scope and path were found.
+                import_scope.zip(path)
+            } else {
+                None
+            };
+
+            FileReferenceWithImport { range, old_name, new_name, import_data }
+        })
+        .collect()
+}
+
+/// If `name` is the left-hand side of a plain `=` assignment, returns the
+/// right-hand side expression to be rewritten.
+fn find_assignment_usage(name: &ast::NameLike) -> Option<ast::Expr> {
+    let bin_expr = name.syntax().ancestors().find_map(ast::BinExpr::cast)?;
+
+    // The reference must sit on the LHS; a reference inside the RHS is not an
+    // assignment *to* the target.
+    if !bin_expr.lhs()?.syntax().descendants().contains(name.syntax()) {
+        cov_mark::hit!(dont_assign_incorrect_ref);
+        return None;
+    }
+
+    // Only plain `=` qualifies; compound ops (`+=` etc.) are `op: Some(_)`.
+    if let Some(ast::BinaryOp::Assignment { op: None }) = bin_expr.op_kind() {
+        bin_expr.rhs()
+    } else {
+        None
+    }
+}
+
+/// If `name` is read under a `!` as a bare path or field access, returns the
+/// prefix expression and its operand so `!x` can become `x == Bool::False`.
+fn find_negated_usage(name: &ast::NameLike) -> Option<(ast::PrefixExpr, ast::Expr)> {
+    let prefix_expr = name.syntax().ancestors().find_map(ast::PrefixExpr::cast)?;
+
+    // Only simple operands are rewritten; replacing a larger expression under
+    // the `!` would clobber unrelated code.
+    if !matches!(prefix_expr.expr()?, ast::Expr::PathExpr(_) | ast::Expr::FieldExpr(_)) {
+        cov_mark::hit!(dont_overwrite_expression_inside_negation);
+        return None;
+    }
+
+    if let Some(ast::UnaryOp::Not) = prefix_expr.op_kind() {
+        let inner_expr = prefix_expr.expr()?;
+        Some((prefix_expr, inner_expr))
+    } else {
+        None
+    }
+}
+
+/// If `name` initializes a record field that resolves to the *target* field,
+/// returns that record field together with its initializer expression.
+fn find_record_expr_usage(
+    name: &ast::NameLike,
+    got_field: hir::Field,
+    target_definition: Definition,
+) -> Option<(ast::RecordExprField, ast::Expr)> {
+    let name_ref = name.as_name_ref()?;
+    let record_field = ast::RecordExprField::for_field_name(name_ref)?;
+    let initializer = record_field.expr()?;
+
+    // Guard against same-named fields of other types resolving here.
+    if let Definition::Field(expected_field) = target_definition {
+        if got_field != expected_field {
+            return None;
+        }
+    }
+
+    Some((record_field, initializer))
+}
+
+/// If `name` is a record-pattern field, returns its sub-pattern — but only for
+/// the pattern kinds the caller knows how to handle (ident, literal, wildcard).
+fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> {
+    let record_pat_field = name.syntax().parent().and_then(ast::RecordPatField::cast)?;
+    let pat = record_pat_field.pat()?;
+
+    match pat {
+        ast::Pat::IdentPat(_) | ast::Pat::LiteralPat(_) | ast::Pat::WildcardPat(_) => Some(pat),
+        _ => None,
+    }
+}
+
+/// If `name` names an associated const (a `const` directly inside an
+/// `impl`/`trait` item list), returns its type annotation and body.
+fn find_assoc_const_usage(name: &ast::NameLike) -> Option<(ast::Type, ast::Expr)> {
+    let const_ = name.syntax().parent().and_then(ast::Const::cast)?;
+    // Free consts are handled elsewhere; only assoc items qualify here.
+    if const_.syntax().parent().and_then(ast::AssocItemList::cast).is_none() {
+        return None;
+    }
+
+    Some((const_.ty()?, const_.body()?))
+}
+
+/// If `name` is read inside the *receiver* of a method call, returns that
+/// receiver so it can be wrapped in a parenthesized comparison.
+fn find_method_call_expr_usage(name: &ast::NameLike) -> Option<ast::Expr> {
+    let method_call = name.syntax().ancestors().find_map(ast::MethodCallExpr::cast)?;
+    let receiver = method_call.receiver()?;
+
+    // References appearing in the argument list don't count.
+    if !receiver.syntax().descendants().contains(name.syntax()) {
+        return None;
+    }
+
+    Some(receiver)
+}
+
+/// Adds the definition of the new enum before the target node.
+fn add_enum_def(
+    edit: &mut SourceChangeBuilder,
+    ctx: &AssistContext<'_>,
+    usages: &UsageSearchResult,
+    target_node: SyntaxNode,
+    target_module: &hir::Module,
+) {
+    // The enum must be `pub` if any usage lives outside the defining module.
+    let make_enum_pub = usages
+        .iter()
+        .flat_map(|(_, refs)| refs)
+        .filter_map(|FileReference { name, .. }| {
+            let name = name.clone().into_name_like()?;
+            ctx.sema.scope(name.syntax()).map(|scope| scope.module())
+        })
+        .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
+    let enum_def = make_bool_enum(make_enum_pub);
+
+    // Hoist the definition to item level (out of any function body) and match
+    // the indentation at the insertion point.
+    let insert_before = node_to_insert_before(target_node);
+    let indent = IndentLevel::from_node(&insert_before);
+    enum_def.reindent_to(indent);
+
+    ted::insert_all(
+        ted::Position::before(&edit.make_syntax_mut(insert_before)),
+        vec![
+            enum_def.syntax().clone().into(),
+            // Blank line between the enum and the following item.
+            make::tokens::whitespace(&format!("\n\n{indent}")).into(),
+        ],
+    );
+}
+
+/// Finds where to put the new enum definition.
+/// Tries to find the ast node at the nearest module or at top-level, otherwise just
+/// returns the input node.
+fn node_to_insert_before(target_node: SyntaxNode) -> SyntaxNode {
+    target_node
+        .ancestors()
+        // Stop climbing at the enclosing module / file boundary.
+        .take_while(|it| !matches!(it.kind(), SyntaxKind::MODULE | SyntaxKind::SOURCE_FILE))
+        // Keep only ancestors that are themselves items; the last one is the
+        // outermost item containing the target.
+        .filter(|it| ast::Item::can_cast(it.kind()))
+        .last()
+        .unwrap_or(target_node)
+}
+
+/// Builds `#[derive(PartialEq, Eq)] enum Bool { True, False }`, optionally `pub`.
+fn make_bool_enum(make_pub: bool) -> ast::Enum {
+    let enum_def = make::enum_(
+        if make_pub { Some(make::visibility_pub()) } else { None },
+        make::name("Bool"),
+        make::variant_list(vec![
+            make::variant(make::name("True"), None),
+            make::variant(make::name("False"), None),
+        ]),
+    )
+    .clone_for_update();
+
+    // The derives are needed so the generated `== Bool::True` comparisons compile.
+    let derive_eq = make::attr_outer(make::meta_token_tree(
+        make::ext::ident_path("derive"),
+        make::token_tree(
+            T!['('],
+            vec![
+                NodeOrToken::Token(make::tokens::ident("PartialEq")),
+                NodeOrToken::Token(make::token(T![,])),
+                NodeOrToken::Token(make::tokens::single_space()),
+                NodeOrToken::Token(make::tokens::ident("Eq")),
+            ],
+        ),
+    ))
+    .clone_for_update();
+    enum_def.add_attr(derive_eq);
+
+    enum_def
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+    // Reads of the local become `foo == Bool::True`.
+    #[test]
+    fn local_variable_with_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = true;
+
+    if foo {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = Bool::True;
+
+    if foo == Bool::True {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    // `!foo` becomes `foo == Bool::False`.
+    #[test]
+    fn local_variable_with_usage_negated() {
+        cov_mark::check!(replaces_negation);
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = true;
+
+    if !foo {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = Bool::True;
+
+    if foo == Bool::False {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    // An explicit `: bool` annotation is rewritten to `: Bool`.
+    #[test]
+    fn local_variable_with_type_annotation() {
+        cov_mark::check!(replaces_ty_annotation);
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo: bool = false;
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo: Bool = Bool::False;
+}
+"#,
+        )
+    }
+
+    // Non-literal initializers are wrapped in an `if`/`else` over the variants.
+    #[test]
+    fn local_variable_with_non_literal_initializer() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = 1 == 2;
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = if 1 == 2 { Bool::True } else { Bool::False };
+}
+"#,
+        )
+    }
+
+    // Only the converted operand of a binary expression is rewritten; `bar` stays bool.
+    #[test]
+    fn local_variable_binexpr_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = false;
+    let bar = true;
+
+    if !foo && bar {
+        println!("foobar");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = Bool::False;
+    let bar = true;
+
+    if foo == Bool::False && bar {
+        println!("foobar");
+    }
+}
+"#,
+        )
+    }
+
+    // A deref/ref chain around the read is kept; the comparison is appended.
+    #[test]
+    fn local_variable_unop_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = true;
+
+    if *&foo {
+        println!("foobar");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = Bool::True;
+
+    if *&foo == Bool::True {
+        println!("foobar");
+    }
+}
+"#,
+        )
+    }
+
+    // Plain assignments after the declaration get enum values on the RHS.
+    #[test]
+    fn local_variable_assigned_later() {
+        cov_mark::check!(replaces_assignment);
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo: bool;
+    foo = true;
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo: Bool;
+    foo = Bool::True;
+}
+"#,
+        )
+    }
+
+    // A bool derived *from* the target (`bar`) is not converted itself.
+    #[test]
+    fn local_variable_does_not_apply_recursively() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = true;
+    let bar = !foo;
+
+    if bar {
+        println!("bar");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = Bool::True;
+    let bar = foo == Bool::False;
+
+    if bar {
+        println!("bar");
+    }
+}
+"#,
+        )
+    }
+
+    // The outer `!` belongs to an unrelated expression and must stay untouched.
+    #[test]
+    fn local_variable_nested_in_negation() {
+        cov_mark::check!(dont_overwrite_expression_inside_negation);
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    if !"foo".chars().any(|c| {
+        let $0foo = true;
+        foo
+    }) {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    if !"foo".chars().any(|c| {
+        let foo = Bool::True;
+        foo == Bool::True
+    }) {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    // Non-bool locals are rejected.
+    #[test]
+    fn local_variable_non_bool() {
+        cov_mark::check!(not_applicable_non_bool_local);
+        check_assist_not_applicable(
+            bool_to_enum,
+            r#"
+fn main() {
+    let $0foo = 1;
+}
+"#,
+        )
+    }
+
+    // The cursor must be on the binding name, not the initializer.
+    #[test]
+    fn local_variable_cursor_not_on_ident() {
+        check_assist_not_applicable(
+            bool_to_enum,
+            r#"
+fn main() {
+    let foo = $0true;
+}
+"#,
+        )
+    }
+
+    // Tuple (non-ident) patterns are rejected.
+    #[test]
+    fn local_variable_non_ident_pat() {
+        cov_mark::check!(not_applicable_in_non_ident_pat);
+        check_assist_not_applicable(
+            bool_to_enum,
+            r#"
+fn main() {
+    let ($0foo, bar) = (true, false);
+}
+"#,
+        )
+    }
+
+    // Struct field: declaration, record-expr initializer, and reads all rewritten;
+    // the sibling bool field `baz` is untouched.
+    #[test]
+    fn field_struct_basic() {
+        cov_mark::check!(replaces_record_expr);
+        check_assist(
+            bool_to_enum,
+            r#"
+struct Foo {
+    $0bar: bool,
+    baz: bool,
+}
+
+fn main() {
+    let foo = Foo { bar: true, baz: false };
+
+    if foo.bar {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+    bar: Bool,
+    baz: bool,
+}
+
+fn main() {
+    let foo = Foo { bar: Bool::True, baz: false };
+
+    if foo.bar == Bool::True {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    // Enum-variant field: the name bound in the record pattern is also converted.
+    #[test]
+    fn field_enum_basic() {
+        cov_mark::check!(replaces_record_pat);
+        check_assist(
+            bool_to_enum,
+            r#"
+enum Foo {
+    Foo,
+    Bar { $0bar: bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: true };
+
+    if let Foo::Bar { bar: baz } = foo {
+        if baz {
+            println!("foo");
+        }
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+    Foo,
+    Bar { bar: Bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: Bool::True };
+
+    if let Foo::Bar { bar: baz } = foo {
+        if baz == Bool::True {
+            println!("foo");
+        }
+    }
+}
+"#,
+        )
+    }
+
+    // Cross-file usage: the enum becomes `pub` and `Bool` is added to the
+    // existing `use foo::Foo;` in the other file.
+    #[test]
+    fn field_enum_cross_file() {
+        check_assist(
+            bool_to_enum,
+            r#"
+//- /foo.rs
+pub enum Foo {
+    Foo,
+    Bar { $0bar: bool },
+}
+
+fn foo() {
+    let foo = Foo::Bar { bar: true };
+}
+
+//- /main.rs
+use foo::Foo;
+
+mod foo;
+
+fn main() {
+    let foo = Foo::Bar { bar: false };
+}
+"#,
+            r#"
+//- /foo.rs
+#[derive(PartialEq, Eq)]
+pub enum Bool { True, False }
+
+pub enum Foo {
+    Foo,
+    Bar { bar: Bool },
+}
+
+fn foo() {
+    let foo = Foo::Bar { bar: Bool::True };
+}
+
+//- /main.rs
+use foo::{Foo, Bool};
+
+mod foo;
+
+fn main() {
+    let foo = Foo::Bar { bar: Bool::False };
+}
+"#,
+        )
+    }
+
+    // Shorthand record pattern: usages of the bound `bar` are converted too.
+    #[test]
+    fn field_enum_shorthand() {
+        cov_mark::check!(replaces_record_pat_shorthand);
+        check_assist(
+            bool_to_enum,
+            r#"
+enum Foo {
+    Foo,
+    Bar { $0bar: bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: true };
+
+    match foo {
+        Foo::Bar { bar } => {
+            if bar {
+                println!("foo");
+            }
+        }
+        _ => (),
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+    Foo,
+    Bar { bar: Bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: Bool::True };
+
+    match foo {
+        Foo::Bar { bar } => {
+            if bar == Bool::True {
+                println!("foo");
+            }
+        }
+        _ => (),
+    }
+}
+"#,
+        )
+    }
+
+    // Literal field patterns (`bar: true`) are replaced with variant patterns.
+    #[test]
+    fn field_enum_replaces_literal_patterns() {
+        cov_mark::check!(replaces_literal_pat);
+        check_assist(
+            bool_to_enum,
+            r#"
+enum Foo {
+    Foo,
+    Bar { $0bar: bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: true };
+
+    if let Foo::Bar { bar: true } = foo {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+    Foo,
+    Bar { bar: Bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: Bool::True };
+
+    if let Foo::Bar { bar: Bool::True } = foo {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    // Wildcard field patterns (`bar: _`) need no change and are preserved.
+    #[test]
+    fn field_enum_keeps_wildcard_patterns() {
+        check_assist(
+            bool_to_enum,
+            r#"
+enum Foo {
+    Foo,
+    Bar { $0bar: bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: true };
+
+    if let Foo::Bar { bar: _ } = foo {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+    Foo,
+    Bar { bar: Bool },
+}
+
+fn main() {
+    let foo = Foo::Bar { bar: Bool::True };
+
+    if let Foo::Bar { bar: _ } = foo {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    // Union field: the read inside `unsafe { .. }` gets the comparison appended.
+    #[test]
+    fn field_union_basic() {
+        check_assist(
+            bool_to_enum,
+            r#"
+union Foo {
+    $0foo: bool,
+    bar: usize,
+}
+
+fn main() {
+    let foo = Foo { foo: true };
+
+    if unsafe { foo.foo } {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+union Foo {
+    foo: Bool,
+    bar: usize,
+}
+
+fn main() {
+    let foo = Foo { foo: Bool::True };
+
+    if unsafe { foo.foo == Bool::True } {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+ #[test]
+ fn field_negated() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: bool,
+}
+
+fn main() {
+ let foo = Foo { bar: false };
+
+ if !foo.bar {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bar: Bool,
+}
+
+fn main() {
+ let foo = Foo { bar: Bool::False };
+
+ if foo.bar == Bool::False {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_in_mod_properly_indented() {
+ check_assist(
+ bool_to_enum,
+ r#"
+mod foo {
+ struct Bar {
+ $0baz: bool,
+ }
+
+ impl Bar {
+ fn new(baz: bool) -> Self {
+ Self { baz }
+ }
+ }
+}
+"#,
+ r#"
+mod foo {
+ #[derive(PartialEq, Eq)]
+ enum Bool { True, False }
+
+ struct Bar {
+ baz: Bool,
+ }
+
+ impl Bar {
+ fn new(baz: bool) -> Self {
+ Self { baz: if baz { Bool::True } else { Bool::False } }
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_multiple_initializations() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: bool,
+ baz: bool,
+}
+
+fn main() {
+ let foo1 = Foo { bar: true, baz: false };
+ let foo2 = Foo { bar: false, baz: false };
+
+ if foo1.bar && foo2.bar {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bar: Bool,
+ baz: bool,
+}
+
+fn main() {
+ let foo1 = Foo { bar: Bool::True, baz: false };
+ let foo2 = Foo { bar: Bool::False, baz: false };
+
+ if foo1.bar == Bool::True && foo2.bar == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_assigned_to_another() {
+ cov_mark::check!(dont_assign_incorrect_ref);
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0foo: bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: true };
+ let mut bar = Bar { bar: true };
+
+ bar.bar = foo.foo;
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ foo: Bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: Bool::True };
+ let mut bar = Bar { bar: true };
+
+ bar.bar = foo.foo == Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_initialized_with_other() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0foo: bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: true };
+ let bar = Bar { bar: foo.foo };
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ foo: Bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: Bool::True };
+ let bar = Bar { bar: foo.foo == Bool::True };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_method_chain_usage() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bool: bool,
+}
+
+fn main() {
+ let foo = Foo { bool: true };
+
+ foo.bool.then(|| 2);
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bool: Bool,
+}
+
+fn main() {
+ let foo = Foo { bool: Bool::True };
+
+ (foo.bool == Bool::True).then(|| 2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_field);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: usize,
+}
+
+fn main() {
+ let foo = Foo { bar: 1 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_basic() {
+ check_assist(
+ bool_to_enum,
+ r#"
+const $0FOO: bool = false;
+
+fn main() {
+ if FOO {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+const FOO: Bool = Bool::False;
+
+fn main() {
+ if FOO == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_module() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ if foo::FOO {
+ println!("foo");
+ }
+}
+
+mod foo {
+ pub const $0FOO: bool = true;
+}
+"#,
+ r#"
+use foo::Bool;
+
+fn main() {
+ if foo::FOO == Bool::True {
+ println!("foo");
+ }
+}
+
+mod foo {
+ #[derive(PartialEq, Eq)]
+ pub enum Bool { True, False }
+
+ pub const FOO: Bool = Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_module_with_import() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ use foo::FOO;
+
+ if FOO {
+ println!("foo");
+ }
+}
+
+mod foo {
+ pub const $0FOO: bool = true;
+}
+"#,
+ r#"
+use crate::foo::Bool;
+
+fn main() {
+ use foo::FOO;
+
+ if FOO == Bool::True {
+ println!("foo");
+ }
+}
+
+mod foo {
+ #[derive(PartialEq, Eq)]
+ pub enum Bool { True, False }
+
+ pub const FOO: Bool = Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_cross_file() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ if foo::FOO {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+pub const $0FOO: bool = true;
+"#,
+ r#"
+//- /main.rs
+use foo::Bool;
+
+mod foo;
+
+fn main() {
+ if foo::FOO == Bool::True {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+#[derive(PartialEq, Eq)]
+pub enum Bool { True, False }
+
+pub const FOO: Bool = Bool::True;
+"#,
+ )
+ }
+
+ #[test]
+ fn const_cross_file_and_module() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ if bar::BAR {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+pub mod bar {
+ pub const $0BAR: bool = false;
+}
+"#,
+ r#"
+//- /main.rs
+use crate::foo::bar::Bool;
+
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ if bar::BAR == Bool::True {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+pub mod bar {
+ #[derive(PartialEq, Eq)]
+ pub enum Bool { True, False }
+
+ pub const BAR: Bool = Bool::False;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_impl_cross_file() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /main.rs
+mod foo;
+
+struct Foo;
+
+impl Foo {
+ pub const $0BOOL: bool = true;
+}
+
+//- /foo.rs
+use crate::Foo;
+
+fn foo() -> bool {
+ Foo::BOOL
+}
+"#,
+ r#"
+//- /main.rs
+mod foo;
+
+struct Foo;
+
+#[derive(PartialEq, Eq)]
+pub enum Bool { True, False }
+
+impl Foo {
+ pub const BOOL: Bool = Bool::True;
+}
+
+//- /foo.rs
+use crate::{Foo, Bool};
+
+fn foo() -> bool {
+ Foo::BOOL == Bool::True
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_trait() {
+ check_assist(
+ bool_to_enum,
+ r#"
+trait Foo {
+ const $0BOOL: bool;
+}
+
+impl Foo for usize {
+ const BOOL: bool = true;
+}
+
+fn main() {
+ if <usize as Foo>::BOOL {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+trait Foo {
+ const BOOL: Bool;
+}
+
+impl Foo for usize {
+ const BOOL: Bool = Bool::True;
+}
+
+fn main() {
+ if <usize as Foo>::BOOL == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_const);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+const $0FOO: &str = "foo";
+
+fn main() {
+ println!("{FOO}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn static_basic() {
+ check_assist(
+ bool_to_enum,
+ r#"
+static mut $0BOOL: bool = true;
+
+fn main() {
+ unsafe { BOOL = false };
+ if unsafe { BOOL } {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+static mut BOOL: Bool = Bool::True;
+
+fn main() {
+ unsafe { BOOL = Bool::False };
+ if unsafe { BOOL == Bool::True } {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn static_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_static);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+static mut $0FOO: usize = 0;
+
+fn main() {
+ if unsafe { FOO } == 0 {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_to_other_names() {
+ check_assist_not_applicable(bool_to_enum, "fn $0main() {}")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
index 1acd5ee97..3f478ee7d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
@@ -25,9 +25,7 @@ pub(crate) fn convert_comment_block(acc: &mut Assists, ctx: &AssistContext<'_>)
let comment = ctx.find_token_at_offset::<ast::Comment>()?;
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
- if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
- return None;
- }
+ Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
match comment.kind().shape {
@@ -78,7 +76,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// Establish the target of our edit based on the comments we found
let target = TextRange::new(
comments[0].syntax().text_range().start(),
- comments.last().unwrap().syntax().text_range().end(),
+ comments.last()?.syntax().text_range().end(),
);
acc.add(
@@ -91,8 +89,12 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// contents of each line comment when they're put into the block comment.
let indentation = IndentLevel::from_token(comment.syntax());
- let block_comment_body =
- comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
+ let block_comment_body = comments
+ .into_iter()
+ .map(|c| line_comment_text(indentation, c))
+ .collect::<Vec<String>>()
+ .into_iter()
+ .join("\n");
let block_prefix =
CommentKind { shape: CommentShape::Block, ..comment.kind() }.prefix();
@@ -160,7 +162,8 @@ pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
//
// But since such comments aren't idiomatic we're okay with this.
pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
- let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
+ let text = comm.text();
+ let contents_without_prefix = text.strip_prefix(comm.prefix()).unwrap_or(text);
let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
// Don't add the indentation if the line is empty
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
index 872b52c98..d649f13d6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
@@ -50,7 +50,12 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
_ => return None,
};
- mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def, ctx.config.prefer_no_std)?)
+ mod_path_to_ast(&module.find_use_path(
+ ctx.db(),
+ src_type_def,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?)
};
let dest_type = match &ast_trait {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index 7d0e42476..73ba3f5c4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -51,22 +51,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
// Check if there is an IfLet that we can handle.
let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
let let_ = single_let(cond)?;
- match let_.pat() {
- Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
- let path = pat.path()?;
- if path.qualifier().is_some() {
- return None;
- }
-
- let bound_ident = pat.fields().next()?;
- if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
- return None;
- }
-
- (Some((path, bound_ident)), let_.expr()?)
- }
- _ => return None, // Unsupported IfLet.
- }
+ (Some(let_.pat()?), let_.expr()?)
} else {
(None, cond)
};
@@ -136,11 +121,10 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
};
new_expr.syntax().clone_for_update()
}
- Some((path, bound_ident)) => {
+ Some(pat) => {
// If-let.
- let pat = make::tuple_struct_pat(path, once(bound_ident));
let let_else_stmt = make::let_else_stmt(
- pat.into(),
+ pat,
None,
cond_expr,
ast::make::tail_only_block_expr(early_expression),
@@ -443,6 +427,60 @@ fn main() {
}
#[test]
+ fn convert_arbitrary_if_let_patterns() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ $0if let None = Some(92) {
+ foo();
+ }
+}
+"#,
+ r#"
+fn main() {
+ let None = Some(92) else { return };
+ foo();
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ $0if let [1, x] = [1, 92] {
+ foo(x);
+ }
+}
+"#,
+ r#"
+fn main() {
+ let [1, x] = [1, 92] else { return };
+ foo(x);
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ $0if let (Some(x), None) = (Some(92), None) {
+ foo(x);
+ }
+}
+"#,
+ r#"
+fn main() {
+ let (Some(x), None) = (Some(92), None) else { return };
+ foo(x);
+}
+"#,
+ );
+ }
+
+ #[test]
fn ignore_already_converted_if() {
check_assist_not_applicable(
convert_to_guarded_return,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
new file mode 100644
index 000000000..79b46d661
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
@@ -0,0 +1,889 @@
+use either::Either;
+use hir::ModuleDef;
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, UsageSearchResult},
+ source_change::SourceChangeBuilder,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+ FxHashSet,
+};
+use syntax::{
+ ast::{self, edit::IndentLevel, edit_in_place::Indent, make, HasName},
+ match_ast, ted, AstNode, SyntaxNode,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: convert_tuple_return_type_to_struct
+//
+// This converts the return type of a function from a tuple type
+// into a tuple struct and updates the body accordingly.
+//
+// ```
+// fn bar() {
+// let (a, b, c) = foo();
+// }
+//
+// fn foo() -> ($0u32, u32, u32) {
+// (1, 2, 3)
+// }
+// ```
+// ->
+// ```
+// fn bar() {
+// let FooResult(a, b, c) = foo();
+// }
+//
+// struct FooResult(u32, u32, u32);
+//
+// fn foo() -> FooResult {
+// FooResult(1, 2, 3)
+// }
+// ```
+pub(crate) fn convert_tuple_return_type_to_struct(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
+ let type_ref = ret_type.ty()?;
+
+ let ast::Type::TupleType(tuple_ty) = &type_ref else { return None };
+ if tuple_ty.fields().any(|field| matches!(field, ast::Type::ImplTraitType(_))) {
+ return None;
+ }
+
+ let fn_ = ret_type.syntax().parent().and_then(ast::Fn::cast)?;
+ let fn_def = ctx.sema.to_def(&fn_)?;
+ let fn_name = fn_.name()?;
+ let target_module = ctx.sema.scope(fn_.syntax())?.module().nearest_non_block_module(ctx.db());
+
+ let target = type_ref.syntax().text_range();
+ acc.add(
+ AssistId("convert_tuple_return_type_to_struct", AssistKind::RefactorRewrite),
+ "Convert tuple return type to tuple struct",
+ target,
+ move |edit| {
+ let ret_type = edit.make_mut(ret_type);
+ let fn_ = edit.make_mut(fn_);
+
+ let usages = Definition::Function(fn_def).usages(&ctx.sema).all();
+ let struct_name = format!("{}Result", stdx::to_camel_case(&fn_name.to_string()));
+ let parent = fn_.syntax().ancestors().find_map(<Either<ast::Impl, ast::Trait>>::cast);
+ add_tuple_struct_def(
+ edit,
+ ctx,
+ &usages,
+ parent.as_ref().map(|it| it.syntax()).unwrap_or(fn_.syntax()),
+ tuple_ty,
+ &struct_name,
+ &target_module,
+ );
+
+ ted::replace(
+ ret_type.syntax(),
+ make::ret_type(make::ty(&struct_name)).syntax().clone_for_update(),
+ );
+
+ if let Some(fn_body) = fn_.body() {
+ replace_body_return_values(ast::Expr::BlockExpr(fn_body), &struct_name);
+ }
+
+ replace_usages(edit, ctx, &usages, &struct_name, &target_module);
+ },
+ )
+}
+
+/// Replaces tuple usages with the corresponding tuple struct pattern.
+fn replace_usages(
+ edit: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ usages: &UsageSearchResult,
+ struct_name: &str,
+ target_module: &hir::Module,
+) {
+ for (file_id, references) in usages.iter() {
+ edit.edit_file(*file_id);
+
+ let refs_with_imports =
+ augment_references_with_imports(edit, ctx, references, struct_name, target_module);
+
+ refs_with_imports.into_iter().rev().for_each(|(name, import_data)| {
+ if let Some(fn_) = name.syntax().parent().and_then(ast::Fn::cast) {
+ cov_mark::hit!(replace_trait_impl_fns);
+
+ if let Some(ret_type) = fn_.ret_type() {
+ ted::replace(
+ ret_type.syntax(),
+ make::ret_type(make::ty(struct_name)).syntax().clone_for_update(),
+ );
+ }
+
+ if let Some(fn_body) = fn_.body() {
+ replace_body_return_values(ast::Expr::BlockExpr(fn_body), struct_name);
+ }
+ } else {
+ // replace tuple patterns
+ let pats = name
+ .syntax()
+ .ancestors()
+ .find(|node| {
+ ast::CallExpr::can_cast(node.kind())
+ || ast::MethodCallExpr::can_cast(node.kind())
+ })
+ .and_then(|node| node.parent())
+ .and_then(node_to_pats)
+ .unwrap_or(Vec::new());
+
+ let tuple_pats = pats.iter().filter_map(|pat| match pat {
+ ast::Pat::TuplePat(tuple_pat) => Some(tuple_pat),
+ _ => None,
+ });
+ for tuple_pat in tuple_pats {
+ ted::replace(
+ tuple_pat.syntax(),
+ make::tuple_struct_pat(
+ make::path_from_text(struct_name),
+ tuple_pat.fields(),
+ )
+ .clone_for_update()
+ .syntax(),
+ );
+ }
+ }
+ // add imports across modules where needed
+ if let Some((import_scope, path)) = import_data {
+ insert_use(&import_scope, path, &ctx.config.insert_use);
+ }
+ })
+ }
+}
+
+fn node_to_pats(node: SyntaxNode) -> Option<Vec<ast::Pat>> {
+ match_ast! {
+ match node {
+ ast::LetStmt(it) => it.pat().map(|pat| vec![pat]),
+ ast::LetExpr(it) => it.pat().map(|pat| vec![pat]),
+ ast::MatchExpr(it) => it.match_arm_list().map(|arm_list| {
+ arm_list.arms().filter_map(|arm| arm.pat()).collect()
+ }),
+ _ => None,
+ }
+ }
+}
+
+fn augment_references_with_imports(
+ edit: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ references: &[FileReference],
+ struct_name: &str,
+ target_module: &hir::Module,
+) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> {
+ let mut visited_modules = FxHashSet::default();
+
+ references
+ .iter()
+ .filter_map(|FileReference { name, .. }| {
+ let name = name.clone().into_name_like()?;
+ ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
+ })
+ .map(|(name, ref_module)| {
+ let new_name = edit.make_mut(name.clone());
+
+ // if the referenced module is not the same as the target one and has not been seen before, add an import
+ let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
+ && !visited_modules.contains(&ref_module)
+ {
+ visited_modules.insert(ref_module);
+
+ let import_scope =
+ ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
+ let path = ref_module
+ .find_use_path_prefixed(
+ ctx.sema.db,
+ ModuleDef::Module(*target_module),
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
+ .map(|mod_path| {
+ make::path_concat(
+ mod_path_to_ast(&mod_path),
+ make::path_from_text(struct_name),
+ )
+ });
+
+ import_scope.zip(path)
+ } else {
+ None
+ };
+
+ (new_name, import_data)
+ })
+ .collect()
+}
+
+// Adds the definition of the tuple struct before the parent function.
+fn add_tuple_struct_def(
+ edit: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ usages: &UsageSearchResult,
+ parent: &SyntaxNode,
+ tuple_ty: &ast::TupleType,
+ struct_name: &str,
+ target_module: &hir::Module,
+) {
+ let make_struct_pub = usages
+ .iter()
+ .flat_map(|(_, refs)| refs)
+ .filter_map(|FileReference { name, .. }| {
+ let name = name.clone().into_name_like()?;
+ ctx.sema.scope(name.syntax()).map(|scope| scope.module())
+ })
+ .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
+ let visibility = if make_struct_pub { Some(make::visibility_pub()) } else { None };
+
+ let field_list = ast::FieldList::TupleFieldList(make::tuple_field_list(
+ tuple_ty.fields().map(|ty| make::tuple_field(visibility.clone(), ty)),
+ ));
+ let struct_name = make::name(struct_name);
+ let struct_def = make::struct_(visibility, struct_name, None, field_list).clone_for_update();
+
+ let indent = IndentLevel::from_node(parent);
+ struct_def.reindent_to(indent);
+
+ edit.insert(parent.text_range().start(), format!("{struct_def}\n\n{indent}"));
+}
+
+/// Replaces each returned tuple in `body` with the constructor of the tuple struct named `struct_name`.
+fn replace_body_return_values(body: ast::Expr, struct_name: &str) {
+ let mut exprs_to_wrap = Vec::new();
+
+ let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
+ walk_expr(&body, &mut |expr| {
+ if let ast::Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&body, tail_cb);
+
+ for ret_expr in exprs_to_wrap {
+ if let ast::Expr::TupleExpr(tuple_expr) = &ret_expr {
+ let struct_constructor = make::expr_call(
+ make::expr_path(make::ext::ident_path(struct_name)),
+ make::arg_list(tuple_expr.fields()),
+ )
+ .clone_for_update();
+ ted::replace(ret_expr.syntax(), struct_constructor.syntax());
+ }
+ }
+}
+
+fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
+ match e {
+ ast::Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
+ }
+ }
+ ast::Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
+ }
+ e => acc.push(e.clone()),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn function_basic() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(&'static str, bool) {
+ ("bar", true)
+}
+"#,
+ r#"
+struct BarResult(&'static str, bool);
+
+fn bar() -> BarResult {
+ BarResult("bar", true)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn struct_and_usages_indented() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+mod foo {
+ pub(crate) fn foo() {
+ let (bar, baz) = bar();
+ println!("{bar} {baz}");
+ }
+
+ pub(crate) fn bar() -> $0(usize, bool) {
+ (42, true)
+ }
+}
+"#,
+ r#"
+mod foo {
+ pub(crate) fn foo() {
+ let BarResult(bar, baz) = bar();
+ println!("{bar} {baz}");
+ }
+
+ struct BarResult(usize, bool);
+
+ pub(crate) fn bar() -> BarResult {
+ BarResult(42, true)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_usage() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ let bar_result = bar();
+ println!("{} {}", bar_result.1, bar().0);
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ let bar_result = bar();
+ println!("{} {}", bar_result.1, bar().0);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn method_usage() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+struct Foo;
+
+impl Foo {
+ fn foo(&self, x: usize) -> $0(usize, usize) {
+ (x, x)
+ }
+}
+
+fn main() {
+ let foo = Foo {};
+ let (x, y) = foo.foo(2);
+}
+"#,
+ r#"
+struct Foo;
+
+struct FooResult(usize, usize);
+
+impl Foo {
+ fn foo(&self, x: usize) -> FooResult {
+ FooResult(x, x)
+ }
+}
+
+fn main() {
+ let foo = Foo {};
+ let FooResult(x, y) = foo.foo(2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn method_usage_within_same_impl() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+struct Foo;
+
+impl Foo {
+ fn new() -> $0(usize, usize) {
+ (0, 0)
+ }
+
+ fn foo() {
+ let (mut foo1, mut foo2) = Self::new();
+ }
+}
+"#,
+ r#"
+struct Foo;
+
+struct NewResult(usize, usize);
+
+impl Foo {
+ fn new() -> NewResult {
+ NewResult(0, 0)
+ }
+
+ fn foo() {
+ let NewResult(mut foo1, mut foo2) = Self::new();
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn multiple_usages() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, usize) {
+ (42, 24)
+}
+
+fn main() {
+ let bar_result = bar();
+ let (foo, b) = bar();
+ let (b, baz) = bar();
+
+ if foo == b && b == baz {
+ println!("{} {}", bar_result.1, bar().0);
+ }
+}
+"#,
+ r#"
+struct BarResult(usize, usize);
+
+fn bar() -> BarResult {
+ BarResult(42, 24)
+}
+
+fn main() {
+ let bar_result = bar();
+ let BarResult(foo, b) = bar();
+ let BarResult(b, baz) = bar();
+
+ if foo == b && b == baz {
+ println!("{} {}", bar_result.1, bar().0);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn usage_match_tuple_pat() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ match bar() {
+ x if x.0 == 0 => println!("0"),
+ (x, false) => println!("{x}"),
+ (42, true) => println!("bar"),
+ _ => println!("foo"),
+ }
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ match bar() {
+ x if x.0 == 0 => println!("0"),
+ BarResult(x, false) => println!("{x}"),
+ BarResult(42, true) => println!("bar"),
+ _ => println!("foo"),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn usage_if_let_tuple_pat() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ if let (42, true) = bar() {
+ println!("bar")
+ }
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ if let BarResult(42, true) = bar() {
+ println!("bar")
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_nested_outer() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ fn foo() -> (usize, bool) {
+ (42, true)
+ }
+
+ foo()
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ fn foo() -> (usize, bool) {
+ (42, true)
+ }
+
+ foo()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_nested_inner() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> (usize, bool) {
+ fn foo() -> $0(usize, bool) {
+ (42, true)
+ }
+
+ foo()
+}
+"#,
+ r#"
+fn bar() -> (usize, bool) {
+ struct FooResult(usize, bool);
+
+ fn foo() -> FooResult {
+ FooResult(42, true)
+ }
+
+ foo()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn trait_impl_and_usage() {
+ cov_mark::check!(replace_trait_impl_fns);
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+struct Struct;
+
+trait Foo {
+ fn foo(&self) -> $0(usize, bool);
+}
+
+impl Foo for Struct {
+ fn foo(&self) -> (usize, bool) {
+ (0, true)
+ }
+}
+
+fn main() {
+ let s = Struct {};
+ let (foo, bar) = s.foo();
+ let (foo, bar) = Struct::foo(&s);
+ println!("{foo} {bar}");
+}
+"#,
+ r#"
+struct Struct;
+
+struct FooResult(usize, bool);
+
+trait Foo {
+ fn foo(&self) -> FooResult;
+}
+
+impl Foo for Struct {
+ fn foo(&self) -> FooResult {
+ FooResult(0, true)
+ }
+}
+
+fn main() {
+ let s = Struct {};
+ let FooResult(foo, bar) = s.foo();
+ let FooResult(foo, bar) = Struct::foo(&s);
+ println!("{foo} {bar}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_wraps_nested() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn foo() -> $0(u8, usize, u32) {
+ if true {
+ match 3 {
+ 0 => (1, 2, 3),
+ _ => return (4, 5, 6),
+ }
+ } else {
+ (2, 1, 3)
+ }
+}
+"#,
+ r#"
+struct FooResult(u8, usize, u32);
+
+fn foo() -> FooResult {
+ if true {
+ match 3 {
+ 0 => FooResult(1, 2, 3),
+ _ => return FooResult(4, 5, 6),
+ }
+ } else {
+ FooResult(2, 1, 3)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_wraps_break_and_return() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn foo(mut i: isize) -> (usize, $0u32, u8) {
+ if i < 0 {
+ return (0, 0, 0);
+ }
+
+ loop {
+ if i == 2 {
+ println!("foo");
+ break (1, 2, 3);
+ }
+ i += 1;
+ }
+}
+"#,
+ r#"
+struct FooResult(usize, u32, u8);
+
+fn foo(mut i: isize) -> FooResult {
+ if i < 0 {
+ return FooResult(0, 0, 0);
+ }
+
+ loop {
+ if i == 2 {
+ println!("foo");
+ break FooResult(1, 2, 3);
+ }
+ i += 1;
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_doesnt_wrap_identifier() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn foo() -> $0(u8, usize, u32) {
+ let tuple = (1, 2, 3);
+ tuple
+}
+"#,
+ r#"
+struct FooResult(u8, usize, u32);
+
+fn foo() -> FooResult {
+ let tuple = (1, 2, 3);
+ tuple
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_doesnt_wrap_other_exprs() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar(num: usize) -> (u8, usize, u32) {
+ (1, num, 3)
+}
+
+fn foo() -> $0(u8, usize, u32) {
+ bar(2)
+}
+"#,
+ r#"
+fn bar(num: usize) -> (u8, usize, u32) {
+ (1, num, 3)
+}
+
+struct FooResult(u8, usize, u32);
+
+fn foo() -> FooResult {
+ bar(2)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn cross_file_and_module() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ let (bar, baz) = bar::bar();
+ println!("{}", bar == baz);
+}
+
+//- /foo.rs
+pub mod bar {
+ pub fn bar() -> $0(usize, usize) {
+ (1, 3)
+ }
+}
+"#,
+ r#"
+//- /main.rs
+use crate::foo::bar::BarResult;
+
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ let BarResult(bar, baz) = bar::bar();
+ println!("{}", bar == baz);
+}
+
+//- /foo.rs
+pub mod bar {
+ pub struct BarResult(pub usize, pub usize);
+
+ pub fn bar() -> BarResult {
+ BarResult(1, 3)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn does_not_replace_nested_usage() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ let ((bar1, bar2), foo) = (bar(), 3);
+ println!("{bar1} {bar2} {foo}");
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ let ((bar1, bar2), foo) = (bar(), 3);
+ println!("{bar1} {bar2} {foo}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_with_non_tuple_return_type() {
+ check_assist_not_applicable(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0usize {
+ 0
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_with_impl_type() {
+ check_assist_not_applicable(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(impl Clone, usize) {
+ ("bar", 0)
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 017853a4a..435d7c4a5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -216,7 +216,7 @@ fn edit_field_references(
edit.edit_file(file_id);
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
- edit.replace(name_ref.syntax().text_range(), name.text());
+ edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());
}
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index f30ca2552..65b497e83 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -3,10 +3,12 @@ use ide_db::{
defs::Definition,
search::{FileReference, SearchScope, UsageSearchResult},
};
+use itertools::Itertools;
use syntax::{
- ast::{self, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
- TextRange,
+ ast::{self, make, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
+ ted, T,
};
+use text_edit::TextRange;
use crate::assist_context::{AssistContext, Assists, SourceChangeBuilder};
@@ -61,27 +63,36 @@ pub(crate) fn destructure_tuple_binding_impl(
acc.add(
AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite),
"Destructure tuple in sub-pattern",
- data.range,
- |builder| {
- edit_tuple_assignment(ctx, builder, &data, true);
- edit_tuple_usages(&data, builder, ctx, true);
- },
+ data.ident_pat.syntax().text_range(),
+ |edit| destructure_tuple_edit_impl(ctx, edit, &data, true),
);
}
acc.add(
AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite),
if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" },
- data.range,
- |builder| {
- edit_tuple_assignment(ctx, builder, &data, false);
- edit_tuple_usages(&data, builder, ctx, false);
- },
+ data.ident_pat.syntax().text_range(),
+ |edit| destructure_tuple_edit_impl(ctx, edit, &data, false),
);
Some(())
}
+fn destructure_tuple_edit_impl(
+ ctx: &AssistContext<'_>,
+ edit: &mut SourceChangeBuilder,
+ data: &TupleData,
+ in_sub_pattern: bool,
+) {
+ let assignment_edit = edit_tuple_assignment(ctx, edit, &data, in_sub_pattern);
+ let current_file_usages_edit = edit_tuple_usages(&data, edit, ctx, in_sub_pattern);
+
+ assignment_edit.apply();
+ if let Some(usages_edit) = current_file_usages_edit {
+ usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit))
+ }
+}
+
fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleData> {
if ident_pat.at_token().is_some() {
// Cannot destructure pattern with sub-pattern:
@@ -109,7 +120,6 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
}
let name = ident_pat.name()?.to_string();
- let range = ident_pat.syntax().text_range();
let usages = ctx.sema.to_def(&ident_pat).map(|def| {
Definition::Local(def)
@@ -122,7 +132,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
.map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
.collect::<Vec<_>>();
- Some(TupleData { ident_pat, range, ref_type, field_names, usages })
+ Some(TupleData { ident_pat, ref_type, field_names, usages })
}
fn generate_name(
@@ -142,72 +152,100 @@ enum RefType {
}
struct TupleData {
ident_pat: IdentPat,
- // name: String,
- range: TextRange,
ref_type: Option<RefType>,
field_names: Vec<String>,
- // field_types: Vec<Type>,
usages: Option<UsageSearchResult>,
}
fn edit_tuple_assignment(
ctx: &AssistContext<'_>,
- builder: &mut SourceChangeBuilder,
+ edit: &mut SourceChangeBuilder,
data: &TupleData,
in_sub_pattern: bool,
-) {
+) -> AssignmentEdit {
+ let ident_pat = edit.make_mut(data.ident_pat.clone());
+
let tuple_pat = {
let original = &data.ident_pat;
let is_ref = original.ref_token().is_some();
let is_mut = original.mut_token().is_some();
- let fields = data.field_names.iter().map(|name| {
- ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, ast::make::name(name)))
- });
- ast::make::tuple_pat(fields)
+ let fields = data
+ .field_names
+ .iter()
+ .map(|name| ast::Pat::from(make::ident_pat(is_ref, is_mut, make::name(name))));
+ make::tuple_pat(fields).clone_for_update()
};
- let add_cursor = |text: &str| {
- // place cursor on first tuple item
- let first_tuple = &data.field_names[0];
- text.replacen(first_tuple, &format!("$0{first_tuple}"), 1)
- };
+ if let Some(cap) = ctx.config.snippet_cap {
+ // place cursor on first tuple name
+ if let Some(ast::Pat::IdentPat(first_pat)) = tuple_pat.fields().next() {
+ edit.add_tabstop_before(
+ cap,
+ first_pat.name().expect("first ident pattern should have a name"),
+ )
+ }
+ }
- // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
- if in_sub_pattern {
- let text = format!(" @ {tuple_pat}");
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snip = add_cursor(&text);
- builder.insert_snippet(cap, data.range.end(), snip);
- }
- None => builder.insert(data.range.end(), text),
- };
- } else {
- let text = tuple_pat.to_string();
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snip = add_cursor(&text);
- builder.replace_snippet(cap, data.range, snip);
- }
- None => builder.replace(data.range, text),
- };
+ AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern }
+}
+struct AssignmentEdit {
+ ident_pat: ast::IdentPat,
+ tuple_pat: ast::TuplePat,
+ in_sub_pattern: bool,
+}
+
+impl AssignmentEdit {
+ fn apply(self) {
+ // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
+ if self.in_sub_pattern {
+ self.ident_pat.set_pat(Some(self.tuple_pat.into()))
+ } else {
+ ted::replace(self.ident_pat.syntax(), self.tuple_pat.syntax())
+ }
}
}
fn edit_tuple_usages(
data: &TupleData,
- builder: &mut SourceChangeBuilder,
+ edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
in_sub_pattern: bool,
-) {
+) -> Option<Vec<EditTupleUsage>> {
+ let mut current_file_usages = None;
+
if let Some(usages) = data.usages.as_ref() {
- for (file_id, refs) in usages.iter() {
- builder.edit_file(*file_id);
+ // We need to collect edits first before actually applying them
+ // as mapping nodes to their mutable node versions requires an
+ // unmodified syntax tree.
+ //
+ // We also defer editing usages in the current file first since
+ // tree mutation in the same file breaks when `builder.edit_file`
+ // is called
+
+ if let Some((_, refs)) = usages.iter().find(|(file_id, _)| **file_id == ctx.file_id()) {
+ current_file_usages = Some(
+ refs.iter()
+ .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+ .collect_vec(),
+ );
+ }
- for r in refs {
- edit_tuple_usage(ctx, builder, r, data, in_sub_pattern);
+ for (file_id, refs) in usages.iter() {
+ if *file_id == ctx.file_id() {
+ continue;
}
+
+ edit.edit_file(*file_id);
+
+ let tuple_edits = refs
+ .iter()
+ .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+ .collect_vec();
+
+ tuple_edits.into_iter().for_each(|tuple_edit| tuple_edit.apply(edit))
}
}
+
+ current_file_usages
}
fn edit_tuple_usage(
ctx: &AssistContext<'_>,
@@ -215,25 +253,14 @@ fn edit_tuple_usage(
usage: &FileReference,
data: &TupleData,
in_sub_pattern: bool,
-) {
+) -> Option<EditTupleUsage> {
match detect_tuple_index(usage, data) {
- Some(index) => edit_tuple_field_usage(ctx, builder, data, index),
- None => {
- if in_sub_pattern {
- cov_mark::hit!(destructure_tuple_call_with_subpattern);
- return;
- }
-
- // no index access -> make invalid -> requires handling by user
- // -> put usage in block comment
- //
- // Note: For macro invocations this might result in still valid code:
- // When a macro accepts the tuple as argument, as well as no arguments at all,
- // uncommenting the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
- // But this is an unlikely case. Usually the resulting macro call will become erroneous.
- builder.insert(usage.range.start(), "/*");
- builder.insert(usage.range.end(), "*/");
+ Some(index) => Some(edit_tuple_field_usage(ctx, builder, data, index)),
+ None if in_sub_pattern => {
+ cov_mark::hit!(destructure_tuple_call_with_subpattern);
+ return None;
}
+ None => Some(EditTupleUsage::NoIndex(usage.range)),
}
}
@@ -242,19 +269,47 @@ fn edit_tuple_field_usage(
builder: &mut SourceChangeBuilder,
data: &TupleData,
index: TupleIndex,
-) {
+) -> EditTupleUsage {
let field_name = &data.field_names[index.index];
+ let field_name = make::expr_path(make::ext::ident_path(field_name));
if data.ref_type.is_some() {
- let ref_data = handle_ref_field_usage(ctx, &index.field_expr);
- builder.replace(ref_data.range, ref_data.format(field_name));
+ let (replace_expr, ref_data) = handle_ref_field_usage(ctx, &index.field_expr);
+ let replace_expr = builder.make_mut(replace_expr);
+ EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr(field_name))
} else {
- builder.replace(index.range, field_name);
+ let field_expr = builder.make_mut(index.field_expr);
+ EditTupleUsage::ReplaceExpr(field_expr.into(), field_name)
+ }
+}
+enum EditTupleUsage {
+ /// no index access -> make invalid -> requires handling by user
+ /// -> put usage in block comment
+ ///
+ /// Note: For macro invocations this might result in still valid code:
+ /// When a macro accepts the tuple as argument, as well as no arguments at all,
+ /// uncommenting the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
+ /// But this is an unlikely case. Usually the resulting macro call will become erroneous.
+ NoIndex(TextRange),
+ ReplaceExpr(ast::Expr, ast::Expr),
+}
+
+impl EditTupleUsage {
+ fn apply(self, edit: &mut SourceChangeBuilder) {
+ match self {
+ EditTupleUsage::NoIndex(range) => {
+ edit.insert(range.start(), "/*");
+ edit.insert(range.end(), "*/");
+ }
+ EditTupleUsage::ReplaceExpr(target_expr, replace_with) => {
+ ted::replace(target_expr.syntax(), replace_with.clone_for_update().syntax())
+ }
+ }
}
}
+
struct TupleIndex {
index: usize,
- range: TextRange,
field_expr: FieldExpr,
}
fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIndex> {
@@ -296,7 +351,7 @@ fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIn
return None;
}
- Some(TupleIndex { index: idx, range: field_expr.syntax().text_range(), field_expr })
+ Some(TupleIndex { index: idx, field_expr })
} else {
// tuple index out of range
None
@@ -307,32 +362,34 @@ fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIn
}
struct RefData {
- range: TextRange,
needs_deref: bool,
needs_parentheses: bool,
}
impl RefData {
- fn format(&self, field_name: &str) -> String {
- match (self.needs_deref, self.needs_parentheses) {
- (true, true) => format!("(*{field_name})"),
- (true, false) => format!("*{field_name}"),
- (false, true) => format!("({field_name})"),
- (false, false) => field_name.to_string(),
+ fn wrap_expr(&self, mut expr: ast::Expr) -> ast::Expr {
+ if self.needs_deref {
+ expr = make::expr_prefix(T![*], expr);
}
+
+ if self.needs_parentheses {
+ expr = make::expr_paren(expr);
+ }
+
+ return expr;
}
}
-fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> RefData {
+fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> (ast::Expr, RefData) {
let s = field_expr.syntax();
- let mut ref_data =
- RefData { range: s.text_range(), needs_deref: true, needs_parentheses: true };
+ let mut ref_data = RefData { needs_deref: true, needs_parentheses: true };
+ let mut target_node = field_expr.clone().into();
let parent = match s.parent().map(ast::Expr::cast) {
Some(Some(parent)) => parent,
Some(None) => {
ref_data.needs_parentheses = false;
- return ref_data;
+ return (target_node, ref_data);
}
- None => return ref_data,
+ None => return (target_node, ref_data),
};
match parent {
@@ -342,7 +399,7 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
// there might be a ref outside: `&(t.0)` -> can be removed
if let Some(it) = it.syntax().parent().and_then(ast::RefExpr::cast) {
ref_data.needs_deref = false;
- ref_data.range = it.syntax().text_range();
+ target_node = it.into();
}
}
ast::Expr::RefExpr(it) => {
@@ -351,8 +408,8 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
ref_data.needs_parentheses = false;
// might be surrounded by parens -> can be removed too
match it.syntax().parent().and_then(ast::ParenExpr::cast) {
- Some(parent) => ref_data.range = parent.syntax().text_range(),
- None => ref_data.range = it.syntax().text_range(),
+ Some(parent) => target_node = parent.into(),
+ None => target_node = it.into(),
};
}
// higher precedence than deref `*`
@@ -414,7 +471,7 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
}
};
- ref_data
+ (target_node, ref_data)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
index ddc8a50ed..c859e9852 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -33,9 +33,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
- if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
- return None;
- }
+ Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
let indentation = IndentLevel::from_token(comment.syntax()).to_string();
@@ -50,7 +48,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
(
TextRange::new(
comments[0].syntax().text_range().start(),
- comments.last().unwrap().syntax().text_range().end(),
+ comments.last()?.syntax().text_range().end(),
),
Either::Right(comments),
)
@@ -71,9 +69,11 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.map(|l| l.strip_prefix(&indentation).unwrap_or(l))
.join("\n")
}
- Either::Right(comments) => {
- comments.into_iter().map(|c| line_comment_text(IndentLevel(0), c)).join("\n")
- }
+ Either::Right(comments) => comments
+ .into_iter()
+ .map(|cm| line_comment_text(IndentLevel(0), cm))
+ .collect::<Vec<_>>()
+ .join("\n"),
};
let hashes = "#".repeat(required_hashes(&text));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index 31a1ff496..9d72d3af0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -1,4 +1,5 @@
use crate::{AssistContext, Assists};
+use hir::DescendPreference;
use ide_db::{
assists::{AssistId, AssistKind},
syntax_helpers::{
@@ -35,7 +36,8 @@ pub(crate) fn extract_expressions_from_format_string(
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let expanded_t = ast::String::cast(
- ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
+ ctx.sema
+ .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
)?;
if !is_format_string(&expanded_t) {
return None;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index de591cfde..347a3e9ba 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -3,8 +3,8 @@ use std::iter;
use ast::make;
use either::Either;
use hir::{
- HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
- TypeInfo, TypeParam,
+ DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
+ PathResolution, Semantics, TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@@ -147,7 +147,12 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
_ => format_function(ctx, module, &fun, old_indent, new_indent),
};
- if fn_def.contains("ControlFlow") {
+ // There are external control flows
+ if fun
+ .control_flow
+ .kind
+ .is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_)))
+ {
let scope = match scope {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
@@ -163,6 +168,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
ModuleDef::from(control_flow_enum),
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
);
if let Some(mod_path) = mod_path {
@@ -750,7 +756,7 @@ impl FunctionBody {
.descendants_with_tokens()
.filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
- .flat_map(|t| sema.descend_into_macros(t, 0.into()))
+ .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
}
}
@@ -4970,6 +4976,27 @@ fn $0fun_name(arg: &mut Foo) {
"#,
);
}
+ #[test]
+ fn does_not_import_control_flow() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn func() {
+ $0let cf = "I'm ControlFlow";$0
+}
+"#,
+ r#"
+fn func() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ let cf = "I'm ControlFlow";
+}
+"#,
+ );
+ }
#[test]
fn extract_function_copies_comment_at_start() {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index 6839c5820..4b9fedc7e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -3,7 +3,7 @@ use std::{
iter,
};
-use hir::{HasSource, ModuleSource};
+use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index e4f64ccc7..37db27a8f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -384,6 +384,7 @@ fn process_references(
*enum_module_def,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
);
if let Some(mut mod_path) = mod_path {
mod_path.pop_segment();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
index 014c23197..e7c884dcb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -29,22 +29,31 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- if ctx.has_empty_selection() {
- return None;
- }
-
- let node = match ctx.covering_element() {
- NodeOrToken::Node(it) => it,
- NodeOrToken::Token(it) if it.kind() == COMMENT => {
- cov_mark::hit!(extract_var_in_comment_is_not_applicable);
+ let node = if ctx.has_empty_selection() {
+ if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
+ expr_stmt.syntax().clone()
+ } else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
+ expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
+ } else {
return None;
}
- NodeOrToken::Token(it) => it.parent()?,
+ } else {
+ match ctx.covering_element() {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) if it.kind() == COMMENT => {
+ cov_mark::hit!(extract_var_in_comment_is_not_applicable);
+ return None;
+ }
+ NodeOrToken::Token(it) => it.parent()?,
+ }
};
+
let node = node.ancestors().take_while(|anc| anc.text_range() == node.text_range()).last()?;
+ let range = node.text_range();
+
let to_extract = node
.descendants()
- .take_while(|it| ctx.selection_trimmed().contains_range(it.text_range()))
+ .take_while(|it| range.contains_range(it.text_range()))
.find_map(valid_target_expr)?;
let ty = ctx.sema.type_of_expr(&to_extract).map(TypeInfo::adjusted);
@@ -236,6 +245,138 @@ mod tests {
use super::*;
#[test]
+ fn test_extract_var_simple_without_select() {
+ check_assist(
+ extract_variable,
+ r#"
+fn main() -> i32 {
+ if true {
+ 1
+ } else {
+ 2
+ }$0
+}
+"#,
+ r#"
+fn main() -> i32 {
+ let $0var_name = if true {
+ 1
+ } else {
+ 2
+ };
+ var_name
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() -> i32 { 1 }
+fn main() {
+ foo();$0
+}
+"#,
+ r#"
+fn foo() -> i32 { 1 }
+fn main() {
+ let $0foo = foo();
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let a = Some(2);
+ a.is_some();$0
+}
+"#,
+ r#"
+fn main() {
+ let a = Some(2);
+ let $0is_some = a.is_some();
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ "hello"$0;
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = "hello";
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ 1 + 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = 1 + 2;
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ match () {
+ () if true => 1,
+ _ => 2,
+ };$0
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = match () {
+ () if true => 1,
+ _ => 2,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_unit_expr_without_select_not_applicable() {
+ check_assist_not_applicable(
+ extract_variable,
+ r#"
+fn foo() {}
+fn main() {
+ foo()$0;
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ extract_variable,
+ r#"
+fn foo() {
+ let mut i = 3;
+ if i >= 0 {
+ i += 1;
+ } else {
+ i -= 1;
+ }$0
+}"#,
+ );
+ }
+
+ #[test]
fn test_extract_var_simple() {
check_assist(
extract_variable,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
index c9f272474..204e796fa 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -1,4 +1,6 @@
-use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
+use hir::{
+ db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
+};
use ide_db::base_db::FileId;
use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
index 2ea6f58fa..8b46a23f9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -19,8 +19,19 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let expr = ctx.find_node_at_offset::<BinExpr>()?;
- let lhs = expr.lhs()?.syntax().clone();
let rhs = expr.rhs()?.syntax().clone();
+ let lhs = expr.lhs()?.syntax().clone();
+
+ let lhs = if let Some(bin_expr) = BinExpr::cast(lhs.clone()) {
+ if bin_expr.op_kind() == expr.op_kind() {
+ bin_expr.rhs()?.syntax().clone()
+ } else {
+ lhs
+ }
+ } else {
+ lhs
+ };
+
let op_range = expr.op_token()?.text_range();
// The assist should be applied only if the cursor is on the operator
let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
@@ -115,6 +126,24 @@ mod tests {
}
#[test]
+ fn flip_binexpr_works_for_lhs_arith() {
+ check_assist(
+ flip_binexpr,
+ r"fn f() { let res = 1 + (2 - 3) +$0 4 + 5; }",
+ r"fn f() { let res = 1 + 4 + (2 - 3) + 5; }",
+ )
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_lhs_cmp() {
+ check_assist(
+ flip_binexpr,
+ r"fn f() { let res = 1 + (2 - 3) >$0 4 + 5; }",
+ r"fn f() { let res = 4 + 5 < 1 + (2 - 3); }",
+ )
+ }
+
+ #[test]
fn flip_binexpr_works_inside_match() {
check_assist(
flip_binexpr,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
index eccd7675f..a4e8e7388 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
@@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists};
-use hir::{HasVisibility, HirDisplay, Module};
+use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::{FileId, Upcast},
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index bbac0a26e..db1e0ceae 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,6 +1,7 @@
use std::collections::HashSet;
-use hir::{self, HasCrate, HasSource, HasVisibility};
+use hir::{self, HasCrate, HasVisibility};
+use ide_db::path_transform::PathTransform;
use syntax::{
ast::{
self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
@@ -105,7 +106,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
target,
|edit| {
// Create the function
- let method_source = match method.source(ctx.db()) {
+ let method_source = match ctx.sema.source(method) {
Some(source) => source.value,
None => return,
};
@@ -130,7 +131,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
vis,
fn_name,
type_params,
- None,
+ method_source.where_clause(),
params,
body,
ret_type,
@@ -183,6 +184,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let assoc_items = impl_def.get_or_create_assoc_item_list();
assoc_items.add_item(f.clone().into());
+ if let Some((target, source)) =
+ ctx.sema.scope(strukt.syntax()).zip(ctx.sema.scope(method_source.syntax()))
+ {
+ PathTransform::generic_transformation(&target, &source).apply(f.syntax());
+ }
+
if let Some(cap) = ctx.config.snippet_cap {
edit.add_tabstop_before(cap, f)
}
@@ -455,6 +462,209 @@ impl Person {
}
#[test]
+ fn test_preserve_where_clause() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Inner<T>(T);
+impl<T> Inner<T> {
+ fn get(&self) -> T
+ where
+ T: Copy,
+ T: PartialEq,
+ {
+ self.0
+ }
+}
+
+struct Struct<T> {
+ $0field: Inner<T>,
+}
+"#,
+ r#"
+struct Inner<T>(T);
+impl<T> Inner<T> {
+ fn get(&self) -> T
+ where
+ T: Copy,
+ T: PartialEq,
+ {
+ self.0
+ }
+}
+
+struct Struct<T> {
+ field: Inner<T>,
+}
+
+impl<T> Struct<T> {
+ $0fn get(&self) -> T where
+ T: Copy,
+ T: PartialEq, {
+ self.field.get()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_basic_self_references() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Foo {
+ field: $0Bar,
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&self, other: Self) -> Self {
+ other
+ }
+}
+"#,
+ r#"
+struct Foo {
+ field: Bar,
+}
+
+impl Foo {
+ $0fn bar(&self, other: Bar) -> Bar {
+ self.field.bar(other)
+ }
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&self, other: Self) -> Self {
+ other
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_nested_self_references() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Foo {
+ field: $0Bar,
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
+}
+"#,
+ r#"
+struct Foo {
+ field: Bar,
+}
+
+impl Foo {
+ $0fn bar(&mut self, a: (Bar, [Bar; 4]), b: Vec<Bar>) {
+ self.field.bar(a, b)
+ }
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_self_references_with_lifetimes_and_generics() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Foo<'a, T> {
+ $0field: Bar<'a, T>,
+}
+
+struct Bar<'a, T>(&'a T);
+
+impl<'a, T> Bar<'a, T> {
+ fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
+ b.pop().unwrap()
+ }
+}
+"#,
+ r#"
+struct Foo<'a, T> {
+ field: Bar<'a, T>,
+}
+
+impl<'a, T> Foo<'a, T> {
+ $0fn bar(self, mut b: Vec<&'a Bar<'_, T>>) -> &'a Bar<'_, T> {
+ self.field.bar(b)
+ }
+}
+
+struct Bar<'a, T>(&'a T);
+
+impl<'a, T> Bar<'a, T> {
+ fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
+ b.pop().unwrap()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_self_references_across_macros() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+//- /bar.rs
+macro_rules! test_method {
+ () => {
+ pub fn test(self, b: Bar) -> Self {
+ self
+ }
+ };
+}
+
+pub struct Bar;
+
+impl Bar {
+ test_method!();
+}
+
+//- /main.rs
+mod bar;
+
+struct Foo {
+ $0bar: bar::Bar,
+}
+"#,
+ r#"
+mod bar;
+
+struct Foo {
+ bar: bar::Bar,
+}
+
+impl Foo {
+ $0pub fn test(self,b:bar::Bar) ->bar::Bar {
+ self.bar.test(b)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_generate_delegate_visibility() {
check_assist_not_applicable(
generate_delegate_methods,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
index 815453961..473c699b5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
@@ -58,8 +58,12 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
- let trait_path =
- module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?;
+ let trait_path = module.find_use_path(
+ ctx.db(),
+ ModuleDef::Trait(trait_),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?;
let field_type = field.ty()?;
let field_name = field.name()?;
@@ -99,8 +103,12 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
- let trait_path =
- module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?;
+ let trait_path = module.find_use_path(
+ ctx.db(),
+ ModuleDef::Trait(trait_),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?;
let field_type = field.ty()?;
let target = field.syntax().text_range();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
index 184f523e0..1a1e992e2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, HirDisplay, InFile};
+use hir::{HasSource, HirDisplay, InRealFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
ast::{self, make, HasArgList},
@@ -114,14 +114,14 @@ fn add_variant_to_accumulator(
parent: PathParent,
) -> Option<()> {
let db = ctx.db();
- let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+ let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
acc.add(
AssistId("generate_enum_variant", AssistKind::Generate),
"Generate variant",
target,
|builder| {
- builder.edit_file(file_id.original_file(db));
+ builder.edit_file(file_id);
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index 5b13e01b1..a113c817f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -1,5 +1,6 @@
use hir::{
- Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
+ Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type,
+ TypeInfo,
};
use ide_db::{
base_db::FileId,
@@ -404,7 +405,11 @@ impl FunctionBuilder {
leading_ws,
ret_type: fn_def.ret_type(),
// PANIC: we guarantee we always create a function body with a tail expr
- tail_expr: fn_def.body().unwrap().tail_expr().unwrap(),
+ tail_expr: fn_def
+ .body()
+ .expect("generated function should have a body")
+ .tail_expr()
+ .expect("function body should have a tail expression"),
should_focus_return_type: self.should_focus_return_type,
fn_def,
trailing_ws,
@@ -506,7 +511,7 @@ fn assoc_fn_target_info(
}
fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
- match &target {
+ match target {
GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
}
@@ -683,7 +688,7 @@ where
{
// This function should be only called with `Impl`, `Trait`, or `Function`, for which it's
// infallible to get source ast.
- let node = ctx.sema.source(def).unwrap().value;
+ let node = ctx.sema.source(def).expect("definition's source couldn't be found").value;
let generic_params = node.generic_param_list().into_iter().flat_map(|it| it.generic_params());
let where_clauses = node.where_clause().into_iter().flat_map(|it| it.predicates());
(generic_params, where_clauses)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
new file mode 100644
index 000000000..cb8ef3956
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -0,0 +1,202 @@
+use ide_db::famous_defs::FamousDefs;
+use syntax::{
+ ast::{self, make},
+ ted, AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredicable case [#15581].
+// Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need.
+
+// Assist: generate_mut_trait_impl
+//
+// Adds a IndexMut impl from the `Index` trait.
+//
+// ```
+// # //- minicore: index
+// pub enum Axis { X = 0, Y = 1, Z = 2 }
+//
+// impl<T> core::ops::Index$0<Axis> for [T; 3] {
+// type Output = T;
+//
+// fn index(&self, index: Axis) -> &Self::Output {
+// &self[index as usize]
+// }
+// }
+// ```
+// ->
+// ```
+// pub enum Axis { X = 0, Y = 1, Z = 2 }
+//
+// $0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+// fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+// &self[index as usize]
+// }
+// }
+//
+// impl<T> core::ops::Index<Axis> for [T; 3] {
+// type Output = T;
+//
+// fn index(&self, index: Axis) -> &Self::Output {
+// &self[index as usize]
+// }
+// }
+// ```
+pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
+
+ let trait_ = impl_def.trait_()?;
+ if let ast::Type::PathType(trait_path) = trait_.clone() {
+ let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
+ let scope = ctx.sema.scope(trait_path.syntax())?;
+ if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
+ return None;
+ }
+ }
+
+ // Index -> IndexMut
+ let index_trait = impl_def
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .find(|it| it.text() == "Index")?;
+ ted::replace(
+ index_trait.syntax(),
+ make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(),
+ );
+
+ // index -> index_mut
+ let trait_method_name = impl_def
+ .syntax()
+ .descendants()
+ .filter_map(ast::Name::cast)
+ .find(|it| it.text() == "index")?;
+ ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax());
+
+ let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?;
+ ted::remove(type_alias.syntax());
+
+ // &self -> &mut self
+ let mut_self_param = make::mut_self_param();
+ let self_param: ast::SelfParam =
+ impl_def.syntax().descendants().find_map(ast::SelfParam::cast)?;
+ ted::replace(self_param.syntax(), mut_self_param.clone_for_update().syntax());
+
+ // &Self::Output -> &mut Self::Output
+ let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
+ ted::replace(
+ ret_type.syntax(),
+ make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(),
+ );
+
+ let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
+ ast::AssocItem::Fn(f) => Some(f),
+ _ => None,
+ })?;
+
+ let assoc_list = make::assoc_item_list().clone_for_update();
+ assoc_list.add_item(syntax::ast::AssocItem::Fn(fn_));
+ ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
+
+ let target = impl_def.syntax().text_range();
+ acc.add(
+ AssistId("generate_mut_trait_impl", AssistKind::Generate),
+ "Generate `IndexMut` impl from this `Index` trait",
+ target,
+ |edit| {
+ edit.insert(target.start(), format!("$0{}\n\n", impl_def.to_string()));
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_mut_trait_impl() {
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: index
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index$0<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#,
+ r#"
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ &self[index as usize]
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#,
+ );
+
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: index
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+}
+"#,
+ r#"
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+$0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_mut_trait_impl_not_applicable() {
+ check_assist_not_applicable(
+ generate_mut_trait_impl,
+ r#"
+pub trait Index<Idx: ?Sized> {}
+
+impl<T> Index$0<i32> for [T; 3] {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
index 824255e4f..7bfd59966 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -67,6 +67,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
let expr = use_trivial_constructor(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index ffab58509..5b9cc5f66 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -8,7 +8,7 @@ use ide_db::{
defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
- search::{FileReference, SearchScope},
+ search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase,
@@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
) -> (Vec<T>, Vec<ast::Path>) {
iter.into_iter()
.filter_map(|file_ref| match file_ref.name {
- ast::NameLike::NameRef(name_ref) => Some(name_ref),
+ FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None,
})
.filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
@@ -224,7 +224,6 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
syntax.text_range(),
|builder| {
let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
-
builder.replace_ast(
match call_info.node {
ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
@@ -347,7 +346,7 @@ fn inline(
match param.as_local(sema.db) {
Some(l) => usages_for_locals(l)
.map(|FileReference { name, range, .. }| match name {
- ast::NameLike::NameRef(_) => body
+ FileReferenceNode::NameRef(_) => body
.syntax()
.covering_element(range)
.ancestors()
@@ -363,16 +362,22 @@ fn inline(
.collect();
if function.self_param(sema.db).is_some() {
- let this = || make::name_ref("this").syntax().clone_for_update().first_token().unwrap();
+ let this = || {
+ make::name_ref("this")
+ .syntax()
+ .clone_for_update()
+ .first_token()
+ .expect("NameRef should have had a token.")
+ };
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
.filter_map(|FileReference { name, range, .. }| match name {
- ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
+ FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
- .for_each(|it| {
- ted::replace(it, &this());
- })
+ .for_each(|usage| {
+ ted::replace(usage, &this());
+ });
}
}
@@ -470,7 +475,9 @@ fn inline(
}
} else if let Some(stmt_list) = body.stmt_list() {
ted::insert_all(
- ted::Position::after(stmt_list.l_curly_token().unwrap()),
+ ted::Position::after(
+ stmt_list.l_curly_token().expect("L_CURLY for StatementList is missing."),
+ ),
let_stmts.into_iter().map(|stmt| stmt.syntax().clone().into()).collect(),
);
}
@@ -481,8 +488,12 @@ fn inline(
};
body.reindent_to(original_indentation);
+ let no_stmts = body.statements().next().is_none();
match body.tail_expr() {
- Some(expr) if !is_async_fn && body.statements().next().is_none() => expr,
+ Some(expr) if matches!(expr, ast::Expr::ClosureExpr(_)) && no_stmts => {
+ make::expr_paren(expr).clone_for_update()
+ }
+ Some(expr) if !is_async_fn && no_stmts => expr,
_ => match node
.syntax()
.parent()
@@ -1474,4 +1485,29 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn inline_call_closure_body() {
+ check_assist(
+ inline_call,
+ r#"
+fn f() -> impl Fn() -> i32 {
+ || 2
+}
+
+fn main() {
+ let _ = $0f()();
+}
+"#,
+ r#"
+fn f() -> impl Fn() -> i32 {
+ || 2
+}
+
+fn main() {
+ let _ = (|| 2)();
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
index e69d1a296..5d8ba43ec 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::Definition,
- search::{FileReference, UsageSearchResult},
+ search::{FileReference, FileReferenceNode, UsageSearchResult},
RootDatabase,
};
use syntax::{
@@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
let wrap_in_parens = references
.into_iter()
.filter_map(|FileReference { range, name, .. }| match name {
- ast::NameLike::NameRef(name) => Some((range, name)),
+ FileReferenceNode::NameRef(name) => Some((range, name)),
_ => None,
})
.map(|(range, name_ref)| {
@@ -96,8 +96,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
);
let parent = matches!(
usage_parent,
- ast::Expr::CallExpr(_)
- | ast::Expr::TupleExpr(_)
+ ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::ForExpr(_)
@@ -952,4 +951,22 @@ fn f() {
"#,
);
}
+
+ #[test]
+ fn test_inline_closure() {
+ check_assist(
+ inline_local_variable,
+ r#"
+fn main() {
+ let $0f = || 2;
+ let _ = f();
+}
+"#,
+ r#"
+fn main() {
+ let _ = (|| 2)();
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
index 663df266b..965e4aa78 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
@@ -52,9 +52,13 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
== FamousDefs(sema, scope.krate()).core_convert_Into()?
{
let type_call = sema.type_of_expr(&method_call.clone().into())?;
- let type_call_disp =
- type_call.adjusted().display_source_code(db, scope.module().into(), true).ok()?;
+ let adjusted_tc = type_call.adjusted();
+ if adjusted_tc.contains_unknown() {
+ return None;
+ }
+
+ let sc = adjusted_tc.display_source_code(db, scope.module().into(), true).ok()?;
acc.add(
AssistId("into_to_qualified_from", AssistKind::Generate),
"Convert `into` to fully qualified `from`",
@@ -62,7 +66,11 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
|edit| {
edit.replace(
method_call.syntax().text_range(),
- format!("{}::from({})", type_call_disp, receiver),
+ if sc.chars().all(|c| c.is_alphanumeric() || c == ':') {
+ format!("{}::from({})", sc, receiver)
+ } else {
+ format!("<{}>::from({})", sc, receiver)
+ },
);
},
);
@@ -202,4 +210,64 @@ fn main() -> () {
}"#,
)
}
+
+ #[test]
+ fn preceding_type_qualifier() {
+ check_assist(
+ into_to_qualified_from,
+ r#"
+//- minicore: from
+impl From<(i32,i32)> for [i32;2] {
+ fn from(value: (i32,i32)) -> Self {
+ [value.0, value.1]
+ }
+}
+
+fn tuple_to_array() -> [i32; 2] {
+ (0,1).in$0to()
+}"#,
+ r#"
+impl From<(i32,i32)> for [i32;2] {
+ fn from(value: (i32,i32)) -> Self {
+ [value.0, value.1]
+ }
+}
+
+fn tuple_to_array() -> [i32; 2] {
+ <[i32; 2]>::from((0,1))
+}"#,
+ )
+ }
+
+ #[test]
+ fn type_with_gens() {
+ check_assist(
+ into_to_qualified_from,
+ r#"
+//- minicore: from
+struct StructA<Gen>(Gen);
+
+impl From<i32> for StructA<i32> {
+ fn from(value: i32) -> Self {
+ StructA(value + 1)
+ }
+}
+
+fn main() -> () {
+ let a: StructA<i32> = 3.in$0to();
+}"#,
+ r#"
+struct StructA<Gen>(Gen);
+
+impl From<i32> for StructA<i32> {
+ fn from(value: i32) -> Self {
+ StructA(value + 1)
+ }
+}
+
+fn main() -> () {
+ let a: StructA<i32> = <StructA<i32>>::from(3);
+}"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
index 4bf974a56..ff65aac82 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -48,6 +48,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
index 239149dc4..fde46db30 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
@@ -37,8 +37,11 @@ use crate::{
// ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
- let mut proposed_imports =
- import_assets.search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std);
+ let mut proposed_imports = import_assets.search_for_relative_paths(
+ &ctx.sema,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ );
if proposed_imports.is_empty() {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
index ffc32f804..0281b29cd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -1,4 +1,4 @@
-use syntax::{ast, AstNode};
+use syntax::{ast, AstNode, SyntaxKind, T};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -39,7 +39,19 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
AssistId("remove_parentheses", AssistKind::Refactor),
"Remove redundant parentheses",
target,
- |builder| builder.replace_ast(parens.into(), expr),
+ |builder| {
+ let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token());
+ let need_to_add_ws = match prev_token {
+ Some(it) => {
+ let tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']];
+ it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind())
+ }
+ None => false,
+ };
+ let expr = if need_to_add_ws { format!(" {}", expr) } else { expr.to_string() };
+
+ builder.replace(parens.syntax().text_range(), expr)
+ },
)
}
@@ -50,6 +62,15 @@ mod tests {
use super::*;
#[test]
+ fn remove_parens_space() {
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { match$0(true) {} }"#,
+ r#"fn f() { match true {} }"#,
+ );
+ }
+
+ #[test]
fn remove_parens_simple() {
check_assist(remove_parentheses, r#"fn f() { $0(2) + 2; }"#, r#"fn f() { 2 + 2; }"#);
check_assist(remove_parentheses, r#"fn f() { ($02) + 2; }"#, r#"fn f() { 2 + 2; }"#);
@@ -94,8 +115,8 @@ mod tests {
check_assist(remove_parentheses, r#"fn f() { f(($02 + 2)); }"#, r#"fn f() { f(2 + 2); }"#);
check_assist(
remove_parentheses,
- r#"fn f() { (1<2)&&$0(3>4); }"#,
- r#"fn f() { (1<2)&&3>4; }"#,
+ r#"fn f() { (1<2) &&$0(3>4); }"#,
+ r#"fn f() { (1<2) && 3>4; }"#,
);
}
@@ -164,8 +185,8 @@ mod tests {
fn remove_parens_weird_places() {
check_assist(
remove_parentheses,
- r#"fn f() { match () { _=>$0(()) } }"#,
- r#"fn f() { match () { _=>() } }"#,
+ r#"fn f() { match () { _ =>$0(()) } }"#,
+ r#"fn f() { match () { _ => () } }"#,
);
check_assist(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 5fcab8c02..ee44064e7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -1,6 +1,6 @@
use std::collections::{hash_map::Entry, HashMap};
-use hir::{InFile, Module, ModuleSource};
+use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use ide_db::{
base_db::FileRange,
defs::Definition,
@@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
- if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) {
(file_id, Some(call_source.text_range()))
} else {
(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index ac45581b7..b54e4204e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1,4 +1,4 @@
-use hir::{InFile, ModuleDef};
+use hir::{InFile, MacroFileIdExt, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
@@ -43,12 +43,12 @@ pub(crate) fn replace_derive_with_manual_impl(
) -> Option<()> {
let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
let path = attr.path()?;
- let hir_file = ctx.sema.hir_file_for(attr.syntax());
- if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
+ let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?;
+ if !macro_file.is_derive_attr_pseudo_expansion(ctx.db()) {
return None;
}
- let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
+ let InFile { file_id, value } = macro_file.call_node(ctx.db());
if file_id.is_macro() {
// FIXME: make this work in macro files
return None;
@@ -56,7 +56,7 @@ pub(crate) fn replace_derive_with_manual_impl(
// collect the derive paths from the #[derive] expansion
let current_derives = ctx
.sema
- .parse_or_expand(hir_file)
+ .parse_or_expand(macro_file.into())
.descendants()
.filter_map(ast::Attr::cast)
.filter_map(|attr| attr.path())
@@ -82,7 +82,12 @@ pub(crate) fn replace_derive_with_manual_impl(
})
.flat_map(|trait_| {
current_module
- .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.prefer_no_std)
+ .find_use_path(
+ ctx.sema.db,
+ hir::ModuleDef::Trait(trait_),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.as_ref()
.map(mod_path_to_ast)
.zip(Some(trait_))
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
new file mode 100644
index 000000000..b1daaea1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -0,0 +1,172 @@
+use syntax::ast::{self, AstNode};
+
+use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_is_some_with_if_let_some
+//
+// Replace `if x.is_some()` with `if let Some(_tmp) = x` or `if x.is_ok()` with `if let Ok(_tmp) = x`.
+//
+// ```
+// fn main() {
+// let x = Some(1);
+// if x.is_som$0e() {}
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let x = Some(1);
+// if let Some(${0:x}) = x {}
+// }
+// ```
+pub(crate) fn replace_is_method_with_if_let_method(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let if_expr = ctx.find_node_at_offset::<ast::IfExpr>()?;
+
+ let cond = if_expr.condition()?;
+ let call_expr = match cond {
+ ast::Expr::MethodCallExpr(call) => call,
+ _ => return None,
+ };
+
+ let name_ref = call_expr.name_ref()?;
+ match name_ref.text().as_str() {
+ "is_some" | "is_ok" => {
+ let receiver = call_expr.receiver()?;
+
+ let var_name = if let ast::Expr::PathExpr(path_expr) = receiver.clone() {
+ path_expr.path()?.to_string()
+ } else {
+ suggest_name::for_variable(&receiver, &ctx.sema)
+ };
+
+ let target = call_expr.syntax().text_range();
+
+ let (assist_id, message, text) = if name_ref.text() == "is_some" {
+ ("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some")
+ } else {
+ ("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok")
+ };
+
+ acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| {
+ let var_name = format!("${{0:{}}}", var_name);
+ let replacement = format!("let {}({}) = {}", text, var_name, receiver);
+ edit.replace(target, replacement);
+ })
+ }
+ _ => return None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::replace_is_method_with_if_let_method;
+
+ #[test]
+ fn replace_is_some_with_if_let_some_works() {
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Some(1);
+ if x.is_som$0e() {}
+}
+"#,
+ r#"
+fn main() {
+ let x = Some(1);
+ if let Some(${0:x}) = x {}
+}
+"#,
+ );
+
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn test() -> Option<i32> {
+ Some(1)
+}
+fn main() {
+ if test().is_som$0e() {}
+}
+"#,
+ r#"
+fn test() -> Option<i32> {
+ Some(1)
+}
+fn main() {
+ if let Some(${0:test}) = test() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_is_some_with_if_let_some_not_applicable() {
+ check_assist_not_applicable(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Some(1);
+ if x.is_non$0e() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_is_ok_with_if_let_ok_works() {
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Ok(1);
+ if x.is_o$0k() {}
+}
+"#,
+ r#"
+fn main() {
+ let x = Ok(1);
+ if let Ok(${0:x}) = x {}
+}
+"#,
+ );
+
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn test() -> Result<i32> {
+ Ok(1)
+}
+fn main() {
+ if test().is_o$0k() {}
+}
+"#,
+ r#"
+fn test() -> Result<i32> {
+ Ok(1)
+}
+fn main() {
+ if let Ok(${0:test}) = test() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_is_ok_with_if_let_ok_not_applicable() {
+ check_assist_not_applicable(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Ok(1);
+ if x.is_e$0rr() {}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
index c7c0be4c7..e61ce4817 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
@@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl(
let mut path_types_to_replace = Vec::new();
for (_a, refs) in usage_refs.iter() {
for usage_ref in refs {
- let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?;
+ let Some(name_like) = usage_ref.name.clone().into_name_like() else {
+ continue;
+ };
+ let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?;
path_types_to_replace.push(param_node);
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index dbbc56958..f03eb6118 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -68,6 +68,7 @@ pub(crate) fn replace_qualified_name_with_use(
module,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)
})
.flatten();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
index b7d57f02b..f864ee50c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -55,7 +55,7 @@ pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
}
fn has_ignore_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
- fn_def.attrs().find(|attr| attr.path().map(|it| it.syntax().text() == "ignore") == Some(true))
+ fn_def.attrs().find(|attr| attr.path().is_some_and(|it| it.syntax().text() == "ignore"))
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
index dac216b69..52df30d96 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
@@ -36,29 +36,25 @@ pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let old_parent_range = use_.syntax().parent()?.text_range();
let new_parent = use_.syntax().parent()?;
+ // If possible, explain what is going to be done.
+ let label = match tree.path().and_then(|path| path.first_segment()) {
+ Some(name) => format!("Unmerge use of `{name}`"),
+ None => "Unmerge use".into(),
+ };
+
let target = tree.syntax().text_range();
- acc.add(
- AssistId("unmerge_use", AssistKind::RefactorRewrite),
- "Unmerge use",
- target,
- |builder| {
- let new_use = make::use_(
- use_.visibility(),
- make::use_tree(
- path,
- tree.use_tree_list(),
- tree.rename(),
- tree.star_token().is_some(),
- ),
- )
- .clone_for_update();
-
- tree.remove();
- ted::insert(Position::after(use_.syntax()), new_use.syntax());
-
- builder.replace(old_parent_range, new_parent.to_string());
- },
- )
+ acc.add(AssistId("unmerge_use", AssistKind::RefactorRewrite), label, target, |builder| {
+ let new_use = make::use_(
+ use_.visibility(),
+ make::use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()),
+ )
+ .clone_for_update();
+
+ tree.remove();
+ ted::insert(Position::after(use_.syntax()), new_use.syntax());
+
+ builder.replace(old_parent_range, new_parent.to_string());
+ })
}
fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
index 7f612c2a1..1cfa291a2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
@@ -2,11 +2,11 @@ use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
defs::Definition,
- search::FileReference,
+ search::{FileReference, FileReferenceNode},
syntax_helpers::node_ext::full_path_of_name_ref,
};
use syntax::{
- ast::{self, NameLike, NameRef},
+ ast::{self, NameRef},
AstNode, SyntaxKind, TextRange,
};
@@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
for await_expr in find_all_references(ctx, &Definition::Function(fn_def))
// Keep only references that correspond NameRefs.
.filter_map(|(_, reference)| match reference.name {
- NameLike::NameRef(nameref) => Some(nameref),
+ FileReferenceNode::NameRef(nameref) => Some(nameref),
_ => None,
})
// Keep only references that correspond to await expressions
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
index e9d4e270c..0876246e9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
@@ -1,3 +1,4 @@
+use ide_db::imports::insert_use::ImportScope;
use syntax::{
ast::{self, make, AstNode, HasArgList},
TextRange,
@@ -17,6 +18,8 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
// ->
// ```
+// use std::ops::Add;
+//
// fn main() {
// 1.add(2);
// }
@@ -38,7 +41,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
let first_arg = args_iter.next()?;
let second_arg = args_iter.next();
- _ = path.qualifier()?;
+ let qualifier = path.qualifier()?;
let method_name = path.segment()?.name_ref()?;
let res = ctx.sema.resolve_path(&path)?;
@@ -76,10 +79,51 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
edit.insert(close, ")");
}
edit.replace(replace_comma, format!(".{method_name}("));
+ add_import(qualifier, ctx, edit);
},
)
}
+fn add_import(
+ qualifier: ast::Path,
+ ctx: &AssistContext<'_>,
+ edit: &mut ide_db::source_change::SourceChangeBuilder,
+) {
+ if let Some(path_segment) = qualifier.segment() {
+ // for `<i32 as std::ops::Add>`
+ let path_type = path_segment.qualifying_trait();
+ let import = match path_type {
+ Some(it) => {
+ if let Some(path) = it.path() {
+ path
+ } else {
+ return;
+ }
+ }
+ None => qualifier,
+ };
+
+ // in the case of `<_>`
+ if import.coloncolon_token().is_none() {
+ return;
+ }
+
+ let scope = ide_db::imports::insert_use::ImportScope::find_insert_use_container(
+ import.syntax(),
+ &ctx.sema,
+ );
+
+ if let Some(scope) = scope {
+ let scope = match scope {
+ ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
+ };
+ ide_db::imports::insert_use::insert_use(&scope, import, &ctx.config.insert_use);
+ }
+ }
+}
+
fn needs_parens_as_receiver(expr: &ast::Expr) -> bool {
// Make `(expr).dummy()`
let dummy_call = make::expr_method_call(
@@ -127,6 +171,8 @@ fn f() { S.f(S); }"#,
//- minicore: add
fn f() { <u32 as core::ops::Add>::$0add(2, 2); }"#,
r#"
+use core::ops::Add;
+
fn f() { 2.add(2); }"#,
);
@@ -136,6 +182,8 @@ fn f() { 2.add(2); }"#,
//- minicore: add
fn f() { core::ops::Add::$0add(2, 2); }"#,
r#"
+use core::ops::Add;
+
fn f() { 2.add(2); }"#,
);
@@ -179,6 +227,8 @@ impl core::ops::Deref for S {
}
fn f() { core::ops::Deref::$0deref(&S); }"#,
r#"
+use core::ops::Deref;
+
struct S;
impl core::ops::Deref for S {
type Target = S;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
index f235b554e..03e6dfebe 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -123,10 +123,8 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
- Expr::ReturnExpr(ret_expr) => {
- if let Some(ret_expr_arg) = &ret_expr.expr() {
- for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
- }
+ Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
@@ -801,6 +799,24 @@ fn foo() -> i32 {
}
#[test]
+ fn wrap_return_in_tail_position() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(num: i32) -> $0Result<i32, String> {
+ return Ok(num)
+}
+"#,
+ r#"
+fn foo(num: i32) -> i32 {
+ return num
+}
+"#,
+ );
+ }
+
+ #[test]
fn unwrap_result_return_type_simple_with_closure() {
check_assist(
unwrap_result_return_type,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
index 61e9bcdcc..b68ed00f7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
@@ -98,10 +98,8 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
- Expr::ReturnExpr(ret_expr) => {
- if let Some(ret_expr_arg) = &ret_expr.expr() {
- for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
- }
+ Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
@@ -733,6 +731,24 @@ fn foo() -> Result<i32, ${0:_}> {
}
#[test]
+ fn wrap_return_in_tail_position() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(num: i32) -> $0i32 {
+ return num
+}
+"#,
+ r#"
+fn foo(num: i32) -> Result<i32, ${0:_}> {
+ return Ok(num)
+}
+"#,
+ );
+ }
+
+ #[test]
fn wrap_return_type_in_result_simple_with_closure() {
check_assist(
wrap_return_type_in_result,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 6f973ab53..1e4d1c94f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -58,7 +58,7 @@
//! See also this post:
//! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[allow(unused)]
macro_rules! eprintln {
@@ -115,6 +115,7 @@ mod handlers {
mod apply_demorgan;
mod auto_import;
mod bind_unused_param;
+ mod bool_to_enum;
mod change_visibility;
mod convert_bool_then;
mod convert_comment_block;
@@ -124,6 +125,7 @@ mod handlers {
mod convert_let_else_to_match;
mod convert_match_to_let_else;
mod convert_nested_function_to_closure;
+ mod convert_tuple_return_type_to_struct;
mod convert_tuple_struct_to_named_struct;
mod convert_named_struct_to_tuple_struct;
mod convert_to_guarded_return;
@@ -158,6 +160,7 @@ mod handlers {
mod generate_getter_or_setter;
mod generate_impl;
mod generate_is_empty_from_len;
+ mod generate_mut_trait_impl;
mod generate_new;
mod generate_delegate_methods;
mod generate_trait_from_impl;
@@ -193,6 +196,7 @@ mod handlers {
mod replace_try_expr_with_match;
mod replace_derive_with_manual_impl;
mod replace_if_let_with_match;
+ mod replace_is_method_with_if_let_method;
mod replace_method_eager_lazy;
mod replace_arith_op;
mod introduce_named_generic;
@@ -225,8 +229,10 @@ mod handlers {
add_return_type::add_return_type,
add_turbo_fish::add_turbo_fish,
apply_demorgan::apply_demorgan,
+ apply_demorgan::apply_demorgan_iterator,
auto_import::auto_import,
bind_unused_param::bind_unused_param,
+ bool_to_enum::bool_to_enum,
change_visibility::change_visibility,
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
@@ -237,6 +243,7 @@ mod handlers {
convert_iter_for_each_to_for::convert_for_loop_with_for_each,
convert_let_else_to_match::convert_let_else_to_match,
convert_match_to_let_else::convert_match_to_let_else,
+ convert_tuple_return_type_to_struct::convert_tuple_return_type_to_struct,
convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct,
convert_nested_function_to_closure::convert_nested_function_to_closure,
convert_to_guarded_return::convert_to_guarded_return,
@@ -268,6 +275,7 @@ mod handlers {
generate_function::generate_function,
generate_impl::generate_impl,
generate_impl::generate_trait_impl,
+ generate_mut_trait_impl::generate_mut_trait_impl,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_new::generate_new,
generate_trait_from_impl::generate_trait_from_impl,
@@ -308,6 +316,7 @@ mod handlers {
replace_derive_with_manual_impl::replace_derive_with_manual_impl,
replace_if_let_with_match::replace_if_let_with_match,
replace_if_let_with_match::replace_match_with_if_let,
+ replace_is_method_with_if_let_method::replace_is_method_with_if_let_method,
replace_let_with_if_let::replace_let_with_if_let,
replace_method_eager_lazy::replace_with_eager_method,
replace_method_eager_lazy::replace_with_lazy_method,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index cc3e251a8..25b3d6d9d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -30,6 +30,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
skip_glob_imports: true,
},
prefer_no_std: false,
+ prefer_prelude: true,
assist_emit_must_use: false,
};
@@ -44,6 +45,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
skip_glob_imports: true,
},
prefer_no_std: false,
+ prefer_prelude: true,
assist_emit_must_use: false,
};
@@ -98,6 +100,11 @@ pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
check(assist, ra_fixture, ExpectedResult::NotApplicable, None);
}
+#[track_caller]
+pub(crate) fn check_assist_not_applicable_by_label(assist: Handler, ra_fixture: &str, label: &str) {
+ check(assist, ra_fixture, ExpectedResult::NotApplicable, Some(label));
+}
+
/// Check assist in unresolved state. Useful to check assists for lazy computation.
#[track_caller]
pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index dfaa53449..da5822bba 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -245,6 +245,30 @@ fn main() {
}
#[test]
+fn doctest_apply_demorgan_iterator() {
+ check_doc_test(
+ "apply_demorgan_iterator",
+ r#####"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0any(|num| num == 4) {
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().all(|num| num != 4) {
+ println!("foo");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_auto_import() {
check_doc_test(
"auto_import",
@@ -281,6 +305,34 @@ fn some_function(x: i32) {
}
#[test]
+fn doctest_bool_to_enum() {
+ check_doc_test(
+ "bool_to_enum",
+ r#####"
+fn main() {
+ let $0bool = true;
+
+ if bool {
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let bool = Bool::True;
+
+ if bool == Bool::True {
+ println!("foo");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_change_visibility() {
check_doc_test(
"change_visibility",
@@ -559,6 +611,33 @@ fn main() {
}
#[test]
+fn doctest_convert_tuple_return_type_to_struct() {
+ check_doc_test(
+ "convert_tuple_return_type_to_struct",
+ r#####"
+fn bar() {
+ let (a, b, c) = foo();
+}
+
+fn foo() -> ($0u32, u32, u32) {
+ (1, 2, 3)
+}
+"#####,
+ r#####"
+fn bar() {
+ let FooResult(a, b, c) = foo();
+}
+
+struct FooResult(u32, u32, u32);
+
+fn foo() -> FooResult {
+ FooResult(1, 2, 3)
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_convert_tuple_struct_to_named_struct() {
check_doc_test(
"convert_tuple_struct_to_named_struct",
@@ -1460,6 +1539,42 @@ impl MyStruct {
}
#[test]
+fn doctest_generate_mut_trait_impl() {
+ check_doc_test(
+ "generate_mut_trait_impl",
+ r#####"
+//- minicore: index
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index$0<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#####,
+ r#####"
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ &self[index as usize]
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_new() {
check_doc_test(
"generate_new",
@@ -2480,6 +2595,25 @@ fn handle(action: Action) {
}
#[test]
+fn doctest_replace_is_some_with_if_let_some() {
+ check_doc_test(
+ "replace_is_some_with_if_let_some",
+ r#####"
+fn main() {
+ let x = Some(1);
+ if x.is_som$0e() {}
+}
+"#####,
+ r#####"
+fn main() {
+ let x = Some(1);
+ if let Some(${0:x}) = x {}
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_replace_let_with_if_let() {
check_doc_test(
"replace_let_with_if_let",
@@ -2850,6 +2984,8 @@ fn main() {
mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } }
"#####,
r#####"
+use std::ops::Add;
+
fn main() {
1.add(2);
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index a262570d9..f51e99a91 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -106,8 +106,18 @@ pub fn filter_assoc_items(
.iter()
.copied()
.filter(|assoc_item| {
- !(ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
- && assoc_item.attrs(sema.db).has_doc_hidden())
+ if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
+ && assoc_item.attrs(sema.db).has_doc_hidden()
+ {
+ if let hir::AssocItem::Function(f) = assoc_item {
+ if !f.has_body(sema.db) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ return true;
})
// Note: This throws away items with no source.
.filter_map(|assoc_item| {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
index 092fb3036..60f90a41b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
+itertools.workspace = true
once_cell = "1.17.0"
smallvec.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
index f60ac1501..7d38c638a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -626,6 +626,7 @@ fn enum_variants_with_paths(
ctx.db,
hir::ModuleDef::from(variant),
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
) {
// Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
index c5bbb7f8d..613a35dcb 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -26,17 +26,17 @@ pub(crate) fn complete_dot(
item.add_to(acc, ctx.db);
}
- if let DotAccessKind::Method { .. } = dot_access.kind {
- cov_mark::hit!(test_no_struct_field_completion_for_method_call);
- } else {
- complete_fields(
- acc,
- ctx,
- receiver_ty,
- |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
- |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
- );
- }
+ let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
+
+ complete_fields(
+ acc,
+ ctx,
+ receiver_ty,
+ |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
+ |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
+ is_field_access,
+ );
+
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
}
@@ -82,6 +82,7 @@ pub(crate) fn complete_undotted_self(
)
},
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
+ true,
);
complete_methods(ctx, &ty, |func| {
acc.add_method(
@@ -104,18 +105,23 @@ fn complete_fields(
receiver: &hir::Type,
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
+ is_field_access: bool,
) {
let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
- if seen_names.insert(field.name(ctx.db)) {
+ if seen_names.insert(field.name(ctx.db))
+ && (is_field_access || ty.is_fn() || ty.is_closure())
+ {
named_field(acc, field, ty);
}
}
for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
// Tuples are always the last type in a deref chain, so just check if the name is
// already seen without inserting into the hashset.
- if !seen_names.contains(&hir::Name::new_tuple_field(i)) {
+ if !seen_names.contains(&hir::Name::new_tuple_field(i))
+ && (is_field_access || ty.is_fn() || ty.is_closure())
+ {
// Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty);
}
@@ -250,7 +256,6 @@ impl A {
#[test]
fn test_no_struct_field_completion_for_method_call() {
- cov_mark::check!(test_no_struct_field_completion_for_method_call);
check(
r#"
struct A { the_field: u32 }
@@ -935,9 +940,9 @@ impl Foo { fn foo(&self) { $0 } }"#,
expect![[r#"
fd self.field i32
lc self &Foo
- sp Self
- st Foo
- bt u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
me self.foo() fn(&self)
"#]],
);
@@ -949,9 +954,9 @@ impl Foo { fn foo(&mut self) { $0 } }"#,
expect![[r#"
fd self.0 i32
lc self &mut Foo
- sp Self
- st Foo
- bt u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
me self.foo() fn(&mut self)
"#]],
);
@@ -1095,4 +1100,140 @@ fn test(s: S<Unknown>) {
"#]],
);
}
+
+ #[test]
+ fn assoc_impl_1() {
+ check(
+ r#"
+//- minicore: deref
+fn main() {
+ let foo: Foo<&u8> = Foo::new(&42_u8);
+ foo.$0
+}
+
+trait Bar {
+ fn bar(&self);
+}
+
+impl Bar for u8 {
+ fn bar(&self) {}
+}
+
+struct Foo<F> {
+ foo: F,
+}
+
+impl<F> Foo<F> {
+ fn new(foo: F) -> Foo<F> {
+ Foo { foo }
+ }
+}
+
+impl<F: core::ops::Deref<Target = impl Bar>> Foo<F> {
+ fn foobar(&self) {
+ self.foo.deref().bar()
+ }
+}
+"#,
+ expect![[r#"
+ fd foo &u8
+ me foobar() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn assoc_impl_2() {
+ check(
+ r#"
+//- minicore: deref
+fn main() {
+ let foo: Foo<&u8> = Foo::new(&42_u8);
+ foo.$0
+}
+
+trait Bar {
+ fn bar(&self);
+}
+
+struct Foo<F> {
+ foo: F,
+}
+
+impl<F> Foo<F> {
+ fn new(foo: F) -> Foo<F> {
+ Foo { foo }
+ }
+}
+
+impl<B: Bar, F: core::ops::Deref<Target = B>> Foo<F> {
+ fn foobar(&self) {
+ self.foo.deref().bar()
+ }
+}
+"#,
+ expect![[r#"
+ fd foo &u8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_function_field_completion() {
+ check(
+ r#"
+struct S { va_field: u32, fn_field: fn() }
+fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
+"#,
+ expect![[r#"
+ fd fn_field fn()
+ "#]],
+ );
+
+ check_edit(
+ "fn_field",
+ r#"
+struct S { va_field: u32, fn_field: fn() }
+fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
+"#,
+ r#"
+struct S { va_field: u32, fn_field: fn() }
+fn foo() { (S { va_field: 0, fn_field: || {} }.fn_field)() }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_tuple_function_field_completion() {
+ check(
+ r#"
+struct B(u32, fn())
+fn foo() {
+ let b = B(0, || {});
+ b.$0()
+}
+"#,
+ expect![[r#"
+ fd 1 fn()
+ "#]],
+ );
+
+ check_edit(
+ "1",
+ r#"
+struct B(u32, fn())
+fn foo() {
+ let b = B(0, || {});
+ b.$0()
+}
+"#,
+ r#"
+struct B(u32, fn())
+fn foo() {
+ let b = B(0, || {});
+ (b.1)()
+}
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
index 9daa6984c..d3c817d4b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -175,6 +175,7 @@ pub(crate) fn complete_expr_path(
ctx.db,
hir::ModuleDef::from(strukt),
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)
.filter(|it| it.len() > 1);
@@ -197,6 +198,7 @@ pub(crate) fn complete_expr_path(
ctx.db,
hir::ModuleDef::from(un),
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)
.filter(|it| it.len() > 1);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index 39c1b7f7b..d74d3b264 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -13,10 +13,9 @@ use crate::{
TypeLocation,
},
render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext},
+ Completions,
};
-use super::Completions;
-
// Feature: Completion With Autoimport
//
// When completing names in the current scope, proposes additional imports from other modules or crates,
@@ -258,7 +257,12 @@ fn import_on_the_fly(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.into_iter()
.filter(ns_filter)
.filter(|import| {
@@ -300,7 +304,12 @@ fn import_on_the_fly_pat_(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.into_iter()
.filter(ns_filter)
.filter(|import| {
@@ -337,7 +346,12 @@ fn import_on_the_fly_method(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.into_iter()
.filter(|import| {
!ctx.is_item_hidden(&import.item_to_import)
@@ -377,9 +391,12 @@ fn import_assets_for_path(
&ctx.sema,
ctx.token.parent()?,
)?;
- if fuzzy_name_length < 3 {
- cov_mark::hit!(flyimport_exact_on_short_path);
- assets_for_path.path_fuzzy_name_to_exact(false);
+ if fuzzy_name_length == 0 {
+ // nothing matches the empty string exactly, but we still compute assoc items in this case
+ assets_for_path.path_fuzzy_name_to_exact();
+ } else if fuzzy_name_length < 3 {
+ cov_mark::hit!(flyimport_prefix_on_short_path);
+ assets_for_path.path_fuzzy_name_to_prefix();
}
Some(assets_for_path)
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 42dfbfc7d..b0e4d8a5a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -417,10 +417,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -526,10 +526,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -543,10 +543,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -562,10 +562,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -610,10 +610,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index 1e0989405..5d138eea4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -2,7 +2,7 @@
use std::iter;
-use hir::{Module, ModuleSource};
+use hir::{HirFileIdExt, Module, ModuleSource};
use ide_db::{
base_db::{SourceDatabaseExt, VfsPath},
FxHashSet, RootDatabase, SymbolKind,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
index 945c3945b..46213deb0 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -430,4 +430,29 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn callable_field_struct_init() {
+ check_edit(
+ "field",
+ r#"
+struct S {
+ field: fn(),
+}
+
+fn main() {
+ S {fi$0
+}
+"#,
+ r#"
+struct S {
+ field: fn(),
+}
+
+fn main() {
+ S {field
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
index 7a60030e9..81107c1f4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
@@ -71,9 +71,9 @@ pub(crate) fn complete_use_path(
if add_resolution {
let mut builder = Builder::from_resolution(ctx, path_ctx, name, def);
- builder.set_relevance(CompletionRelevance {
+ builder.with_relevance(|r| CompletionRelevance {
is_name_already_imported,
- ..Default::default()
+ ..r
});
acc.add(builder.build(ctx.db));
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
index 8f6a97e1e..ed5ddde8f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -14,10 +14,12 @@ pub struct CompletionConfig {
pub enable_imports_on_the_fly: bool,
pub enable_self_on_the_fly: bool,
pub enable_private_editable: bool,
+ pub full_function_signatures: bool,
pub callable: Option<CallableSnippets>,
pub snippet_cap: Option<SnippetCap>,
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+ pub prefer_prelude: bool,
pub snippets: Vec<Snippet>,
pub limit: Option<usize>,
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index c45cc8d7b..de41a5bd7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -1,6 +1,6 @@
//! See `CompletionItem` structure.
-use std::fmt;
+use std::{fmt, mem};
use hir::Mutability;
use ide_db::{
@@ -26,6 +26,10 @@ use crate::{
pub struct CompletionItem {
/// Label in the completion pop up which identifies completion.
pub label: SmolStr,
+ /// Additional label details in the completion pop up that are
+ /// displayed and aligned on the right side after the label.
+ pub label_detail: Option<SmolStr>,
+
/// Range of identifier that is being completed.
///
/// It should be used primarily for UI, but we also use this to convert
@@ -89,7 +93,7 @@ impl fmt::Debug for CompletionItem {
let mut s = f.debug_struct("CompletionItem");
s.field("label", &self.label).field("source_range", &self.source_range);
if self.text_edit.len() == 1 {
- let atom = &self.text_edit.iter().next().unwrap();
+ let atom = self.text_edit.iter().next().unwrap();
s.field("delete", &atom.delete);
s.field("insert", &atom.insert);
} else {
@@ -425,13 +429,14 @@ impl Builder {
pub(crate) fn build(self, db: &RootDatabase) -> CompletionItem {
let _p = profile::span("item::Builder::build");
- let mut label = self.label;
+ let label = self.label;
+ let mut label_detail = None;
let mut lookup = self.lookup.unwrap_or_else(|| label.clone());
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
if !self.doc_aliases.is_empty() {
let doc_aliases = self.doc_aliases.iter().join(", ");
- label = SmolStr::from(format!("{label} (alias {doc_aliases})"));
+ label_detail.replace(SmolStr::from(format!(" (alias {doc_aliases})")));
let lookup_doc_aliases = self
.doc_aliases
.iter()
@@ -453,11 +458,16 @@ impl Builder {
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
- if let Some(original_path) = import_edit.original_path.as_ref() {
- label = SmolStr::from(format!("{label} (use {})", original_path.display(db)));
- }
+ label_detail.replace(SmolStr::from(format!(
+ "{} (use {})",
+ label_detail.as_deref().unwrap_or_default(),
+ import_edit.import_path.display(db)
+ )));
} else if let Some(trait_name) = self.trait_name {
- label = SmolStr::from(format!("{label} (as {trait_name})"));
+ label_detail.replace(SmolStr::from(format!(
+ "{} (as {trait_name})",
+ label_detail.as_deref().unwrap_or_default(),
+ )));
}
let text_edit = match self.text_edit {
@@ -479,6 +489,7 @@ impl Builder {
CompletionItem {
source_range: self.source_range,
label,
+ label_detail,
text_edit,
is_snippet: self.is_snippet,
detail: self.detail,
@@ -557,6 +568,13 @@ impl Builder {
self.relevance = relevance;
self
}
+ pub(crate) fn with_relevance(
+ &mut self,
+ relevance: impl FnOnce(CompletionRelevance) -> CompletionRelevance,
+ ) -> &mut Builder {
+ self.relevance = relevance(mem::take(&mut self.relevance));
+ self
+ }
pub(crate) fn trigger_call_info(&mut self) -> &mut Builder {
self.trigger_call_info = true;
self
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index 2eaa42040..37a2828e8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -1,6 +1,6 @@
//! `completions` crate provides utilities for generating completions of user input.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod completions;
mod config;
@@ -169,6 +169,28 @@ pub fn completions(
return Some(completions.into());
}
+ // when the user types a bare `_` (that is, one that does not belong to an identifier)
+ // the user might have just wanted to type a `_` for type inference or pattern discarding,
+ // so try to suppress completions in those cases
+ if trigger_character == Some('_') && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE
+ {
+ if let CompletionAnalysis::NameRef(NameRefContext {
+ kind:
+ NameRefKind::Path(
+ path_ctx @ PathCompletionCtx {
+ kind: PathKind::Type { .. } | PathKind::Pat { .. },
+ ..
+ },
+ ),
+ ..
+ }) = analysis
+ {
+ if path_ctx.is_trivial_path() {
+ return None;
+ }
+ }
+ }
+
{
let acc = &mut completions;
@@ -241,6 +263,7 @@ pub fn resolve_completion_edits(
candidate,
config.insert_use.prefix_kind,
config.prefer_no_std,
+ config.prefer_prelude,
)
})
.find(|mod_path| mod_path.display(db).to_string() == full_import_path);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index dfe8fe7e2..2ea3f74d1 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@ pub(crate) mod variant;
pub(crate) mod union_literal;
pub(crate) mod literal;
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use ide_db::{
documentation::{Documentation, HasDocs},
helpers::item_name,
@@ -18,9 +18,10 @@ use ide_db::{
RootDatabase, SnippetCap, SymbolKind,
};
use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
+use text_edit::TextEdit;
use crate::{
- context::{DotAccess, PathCompletionCtx, PathKind, PatternContext},
+ context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext},
item::{Builder, CompletionRelevanceTypeMatch},
render::{
function::render_fn,
@@ -147,7 +148,42 @@ pub(crate) fn render_field(
.set_documentation(field.docs(db))
.set_deprecated(is_deprecated)
.lookup_by(name);
- item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name));
+
+ let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
+ if !is_field_access || ty.is_fn() || ty.is_closure() {
+ let mut builder = TextEdit::builder();
+ // Using TextEdit, wrap the receiver in parens: insert '(' before the
+ // receiver expression and ')' before the dot access, then emit the field
+ // name and, optionally, function-call parens.
+
+ builder.replace(
+ ctx.source_range(),
+ field_with_receiver(db, receiver.as_ref(), &escaped_name).into(),
+ );
+
+ let expected_fn_type =
+ ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure());
+
+ if !expected_fn_type {
+ if let Some(receiver) = &dot_access.receiver {
+ if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) {
+ builder.insert(receiver.syntax().text_range().start(), "(".to_string());
+ builder.insert(ctx.source_range().end(), ")".to_string());
+
+ let is_parens_needed =
+ !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
+
+ if is_parens_needed {
+ builder.insert(ctx.source_range().end(), "()".to_string());
+ }
+ }
+ }
+ }
+
+ item.text_edit(builder.finish());
+ } else {
+ item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name));
+ }
if let Some(receiver) = &dot_access.receiver {
if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
@@ -304,6 +340,7 @@ fn render_resolution_path(
let cap = ctx.snippet_cap();
let db = completion.db;
let config = completion.config;
+ let requires_import = import_to_add.is_some();
let name = local_name.to_smol_str();
let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
@@ -334,8 +371,8 @@ fn render_resolution_path(
}
}
}
- if let ScopeDef::Local(local) = resolution {
- let ty = local.ty(db);
+
+ let mut set_item_relevance = |ty: Type| {
if !ty.is_unknown() {
item.detail(ty.display(db).to_string());
}
@@ -343,12 +380,38 @@ fn render_resolution_path(
item.set_relevance(CompletionRelevance {
type_match: compute_type_match(completion, &ty),
exact_name_match: compute_exact_name_match(completion, &name),
- is_local: true,
+ is_local: matches!(resolution, ScopeDef::Local(_)),
+ requires_import,
..CompletionRelevance::default()
});
path_ref_match(completion, path_ctx, &ty, &mut item);
};
+
+ match resolution {
+ ScopeDef::Local(local) => set_item_relevance(local.ty(db)),
+ ScopeDef::ModuleDef(ModuleDef::Adt(adt)) | ScopeDef::AdtSelfType(adt) => {
+ set_item_relevance(adt.ty(db))
+ }
+ // Filtered out above
+ ScopeDef::ModuleDef(
+ ModuleDef::Function(_) | ModuleDef::Variant(_) | ModuleDef::Macro(_),
+ ) => (),
+ ScopeDef::ModuleDef(ModuleDef::Const(konst)) => set_item_relevance(konst.ty(db)),
+ ScopeDef::ModuleDef(ModuleDef::Static(stat)) => set_item_relevance(stat.ty(db)),
+ ScopeDef::ModuleDef(ModuleDef::BuiltinType(bt)) => set_item_relevance(bt.ty(db)),
+ ScopeDef::ImplSelfType(imp) => set_item_relevance(imp.self_ty(db)),
+ ScopeDef::GenericParam(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown
+ | ScopeDef::ModuleDef(
+ ModuleDef::Trait(_)
+ | ModuleDef::TraitAlias(_)
+ | ModuleDef::Module(_)
+ | ModuleDef::TypeAlias(_),
+ ) => (),
+ };
+
item
}
@@ -435,6 +498,21 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
}
}
+// FIXME: This compares types without applying possible coercions, which some completions might want to do
+fn match_types(
+ ctx: &CompletionContext<'_>,
+ ty1: &hir::Type,
+ ty2: &hir::Type,
+) -> Option<CompletionRelevanceTypeMatch> {
+ if ty1 == ty2 {
+ Some(CompletionRelevanceTypeMatch::Exact)
+ } else if ty1.could_unify_with(ctx.db, ty2) {
+ Some(CompletionRelevanceTypeMatch::CouldUnify)
+ } else {
+ None
+ }
+}
+
fn compute_type_match(
ctx: &CompletionContext<'_>,
completion_ty: &hir::Type,
@@ -447,13 +525,7 @@ fn compute_type_match(
return None;
}
- if completion_ty == expected_type {
- Some(CompletionRelevanceTypeMatch::Exact)
- } else if expected_type.could_unify_with(ctx.db, completion_ty) {
- Some(CompletionRelevanceTypeMatch::CouldUnify)
- } else {
- None
- }
+ match_types(ctx, expected_type, completion_ty)
}
fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
@@ -557,7 +629,11 @@ mod tests {
let tag = it.kind.tag();
let relevance = display_relevance(it.relevance);
- items.push(format!("{tag} {} {relevance}\n", it.label));
+ items.push(format!(
+ "{tag} {}{} {relevance}\n",
+ it.label,
+ it.label_detail.clone().unwrap_or_default(),
+ ));
if let Some((label, _indel, relevance)) = it.ref_match() {
let relevance = display_relevance(relevance);
@@ -596,6 +672,330 @@ mod tests {
}
#[test]
+ fn set_struct_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub struct Struct {}
+}
+
+pub mod test_mod_a {
+ pub struct Struct {}
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Struct) { }
+
+fn main() {
+ test(Struct$0);
+}
+"#,
+ expect![[r#"
+ st dep::test_mod_b::Struct {…} [type_could_unify]
+ st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ st Struct (use dep::test_mod_a::Struct) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_union_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub union Union {
+ a: i32,
+ b: i32
+ }
+}
+
+pub mod test_mod_a {
+ pub enum Union {
+ a: i32,
+ b: i32
+ }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Union) { }
+
+fn main() {
+ test(Union$0);
+}
+"#,
+ expect![[r#"
+ un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ en Union (use dep::test_mod_a::Union) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_enum_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub enum Enum {
+ variant
+ }
+}
+
+pub mod test_mod_a {
+ pub enum Enum {
+ variant
+ }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Enum) { }
+
+fn main() {
+ test(Enum$0);
+}
+"#,
+ expect![[r#"
+ ev dep::test_mod_b::Enum::variant [type_could_unify]
+ en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ en Enum (use dep::test_mod_a::Enum) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_enum_variant_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub enum Enum {
+ Variant
+ }
+}
+
+pub mod test_mod_a {
+ pub enum Enum {
+ Variant
+ }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Enum) { }
+
+fn main() {
+ test(Variant$0);
+}
+"#,
+ expect![[r#"
+ ev dep::test_mod_b::Enum::Variant [type_could_unify]
+ fn main() []
+ fn test(…) []
+ md dep []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_fn_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub fn function(j: isize) -> i32 {}
+}
+
+pub mod test_mod_a {
+ pub fn function(i: usize) -> i32 {}
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: fn(usize) -> i32) { }
+
+fn main() {
+ test(function$0);
+}
+"#,
+ expect![[r#"
+ fn main []
+ fn test []
+ md dep []
+ fn function (use dep::test_mod_a::function) [requires_import]
+ fn function (use dep::test_mod_b::function) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_const_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub const CONST: i32 = 1;
+}
+
+pub mod test_mod_a {
+ pub const CONST: i64 = 2;
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+ test(CONST$0);
+}
+"#,
+ expect![[r#"
+ ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ ct CONST (use dep::test_mod_a::CONST) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_static_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub static STATIC: i32 = 5;
+}
+
+pub mod test_mod_a {
+ pub static STATIC: i64 = 5;
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+ test(STATIC$0);
+}
+"#,
+ expect![[r#"
+ sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ sc STATIC (use dep::test_mod_a::STATIC) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_self_type_completion_info_with_params() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+impl Struct {
+ pub fn Function(&self, input: i32) -> bool {
+ false
+ }
+}
+
+
+//- /main.rs crate:main deps:dep
+
+use dep::Struct;
+
+
+fn test(input: fn(&dep::Struct, i32) -> bool) { }
+
+fn main() {
+ test(Struct::Function$0);
+}
+
+"#,
+ expect![[r#"
+ me Function []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_self_type_completion_info() {
+ check_relevance(
+ r#"
+//- /main.rs crate:main
+
+struct Struct;
+
+impl Struct {
+fn test(&self) {
+ func(Self$0);
+ }
+}
+
+fn func(input: Struct) { }
+
+"#,
+ expect![[r#"
+ st Struct [type]
+ st Self [type]
+ sp Self [type]
+ st Struct [type]
+ lc self [local]
+ fn func(…) []
+ me self.test() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_builtin_type_completion_info() {
+ check_relevance(
+ r#"
+//- /main.rs crate:main
+
+fn test(input: bool) { }
+ pub Input: bool = false;
+
+fn main() {
+ let input = false;
+ let inputbad = 3;
+ test(inp$0);
+}
+"#,
+ expect![[r#"
+ lc input [type+name+local]
+ lc inputbad [local]
+ fn main() []
+ fn test(…) []
+ "#]],
+ );
+ }
+
+ #[test]
fn enum_detail_includes_record_fields() {
check(
r#"
@@ -986,6 +1386,7 @@ use self::E::*;
kind: SymbolKind(
Enum,
),
+ detail: "E",
documentation: Documentation(
"enum docs",
),
@@ -1230,6 +1631,7 @@ fn go(world: &WorldSnapshot) { go(w$0) }
st WorldSnapshot {…} []
st &WorldSnapshot {…} [type]
st WorldSnapshot []
+ st &WorldSnapshot [type]
fn go(…) []
"#]],
);
@@ -1329,6 +1731,7 @@ fn main() {
st S []
st &mut S [type]
st S []
+ st &mut S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1345,7 +1748,7 @@ fn main() {
expect![[r#"
lc s [type+name+local]
st S [type]
- st S []
+ st S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1362,7 +1765,7 @@ fn main() {
expect![[r#"
lc ssss [type+local]
st S [type]
- st S []
+ st S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1401,7 +1804,9 @@ fn main() {
st S []
st &S [type]
st S []
+ st &S [type]
st T []
+ st &T [type]
fn foo(…) []
fn main() []
md core []
@@ -1447,7 +1852,9 @@ fn main() {
st S []
st &mut S [type]
st S []
+ st &mut S [type]
st T []
+ st &mut T [type]
fn foo(…) []
fn main() []
md core []
@@ -1486,7 +1893,7 @@ fn bar(t: Foo) {}
expect![[r#"
ev Foo::A [type]
ev Foo::B [type]
- en Foo []
+ en Foo [type]
fn bar(…) []
fn foo() []
"#]],
@@ -1509,6 +1916,7 @@ fn bar(t: &Foo) {}
ev Foo::B []
ev &Foo::B [type]
en Foo []
+ en &Foo [type]
fn bar(…) []
fn foo() []
"#]],
@@ -1542,7 +1950,9 @@ fn main() {
st S []
st &S [type]
st S []
+ st &S [type]
st T []
+ st &T [type]
fn bar() []
fn &bar() [type]
fn foo(…) []
@@ -1596,7 +2006,7 @@ fn main() {
fn struct_field_method_ref() {
check_kinds(
r#"
-struct Foo { bar: u32 }
+struct Foo { bar: u32, qux: fn() }
impl Foo { fn baz(&self) -> u32 { 0 } }
fn foo(f: Foo) { let _: &u32 = f.b$0 }
@@ -1606,24 +2016,44 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
[
CompletionItem {
label: "baz()",
- source_range: 98..99,
- delete: 98..99,
+ source_range: 109..110,
+ delete: 109..110,
insert: "baz()$0",
kind: Method,
lookup: "baz",
detail: "fn(&self) -> u32",
- ref_match: "&@96",
+ ref_match: "&@107",
},
CompletionItem {
label: "bar",
- source_range: 98..99,
- delete: 98..99,
+ source_range: 109..110,
+ delete: 109..110,
insert: "bar",
kind: SymbolKind(
Field,
),
detail: "u32",
- ref_match: "&@96",
+ ref_match: "&@107",
+ },
+ CompletionItem {
+ label: "qux",
+ source_range: 109..110,
+ text_edit: TextEdit {
+ indels: [
+ Indel {
+ insert: "(",
+ delete: 107..107,
+ },
+ Indel {
+ insert: "qux)()",
+ delete: 109..110,
+ },
+ ],
+ },
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "fn()",
},
]
"#]],
@@ -1631,6 +2061,48 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
}
#[test]
+ fn expected_fn_type_ref() {
+ check_kinds(
+ r#"
+struct S { field: fn() }
+
+fn foo() {
+ let foo: fn() = S { fields: || {}}.fi$0;
+}
+"#,
+ &[CompletionItemKind::SymbolKind(SymbolKind::Field)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "field",
+ source_range: 76..78,
+ delete: 76..78,
+ insert: "field",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "fn()",
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ Exact,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
fn qualified_path_ref() {
check_kinds(
r#"
@@ -1689,8 +2161,8 @@ fn foo() {
lc foo [type+local]
ev Foo::A(…) [type_could_unify]
ev Foo::B [type_could_unify]
+ en Foo [type_could_unify]
fn foo() []
- en Foo []
fn bar() []
fn baz() []
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
index 8afce8db5..d23ed71fd 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -62,6 +62,7 @@ fn render(
),
_ => (name.unescaped().to_smol_str(), name.to_smol_str()),
};
+
let mut item = CompletionItem::new(
if func.self_param(db).is_some() {
CompletionItemKind::Method
@@ -77,8 +78,31 @@ fn render(
.as_assoc_item(ctx.db())
.and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
.map_or(false, |trait_| completion.is_ops_trait(trait_));
+
+ let (has_dot_receiver, has_call_parens, cap) = match func_kind {
+ FuncKind::Function(&PathCompletionCtx {
+ kind: PathKind::Expr { .. },
+ has_call_parens,
+ ..
+ }) => (false, has_call_parens, ctx.completion.config.snippet_cap),
+ FuncKind::Method(&DotAccess { kind: DotAccessKind::Method { has_parens }, .. }, _) => {
+ (true, has_parens, ctx.completion.config.snippet_cap)
+ }
+ FuncKind::Method(DotAccess { kind: DotAccessKind::Field { .. }, .. }, _) => {
+ (true, false, ctx.completion.config.snippet_cap)
+ }
+ _ => (false, false, None),
+ };
+ let complete_call_parens = cap
+ .filter(|_| !has_call_parens)
+ .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?)));
+
item.set_relevance(CompletionRelevance {
- type_match: compute_type_match(completion, &ret_type),
+ type_match: if has_call_parens || complete_call_parens.is_some() {
+ compute_type_match(completion, &ret_type)
+ } else {
+ compute_type_match(completion, &func.ty(db))
+ },
exact_name_match: compute_exact_name_match(completion, &call),
is_op_method,
..ctx.completion_relevance()
@@ -98,47 +122,19 @@ fn render(
_ => (),
}
+ let detail = if ctx.completion.config.full_function_signatures {
+ detail_full(db, func)
+ } else {
+ detail(db, func)
+ };
item.set_documentation(ctx.docs(func))
.set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
- .detail(detail(db, func))
+ .detail(detail)
.lookup_by(name.unescaped().to_smol_str());
- match ctx.completion.config.snippet_cap {
- Some(cap) => {
- let complete_params = match func_kind {
- FuncKind::Function(PathCompletionCtx {
- kind: PathKind::Expr { .. },
- has_call_parens: false,
- ..
- }) => Some(false),
- FuncKind::Method(
- DotAccess {
- kind:
- DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. },
- ..
- },
- _,
- ) => Some(true),
- _ => None,
- };
- if let Some(has_dot_receiver) = complete_params {
- if let Some((self_param, params)) =
- params(ctx.completion, func, &func_kind, has_dot_receiver)
- {
- add_call_parens(
- &mut item,
- completion,
- cap,
- call,
- escaped_call,
- self_param,
- params,
- );
- }
- }
- }
- _ => (),
- };
+ if let Some((cap, (self_param, params))) = complete_call_parens {
+ add_call_parens(&mut item, completion, cap, call, escaped_call, self_param, params);
+ }
match ctx.import_to_add {
Some(import_to_add) => {
@@ -263,6 +259,21 @@ fn detail(db: &dyn HirDatabase, func: hir::Function) -> String {
detail
}
+fn detail_full(db: &dyn HirDatabase, func: hir::Function) -> String {
+ let signature = format!("{}", func.display(db));
+ let mut detail = String::with_capacity(signature.len());
+
+ for segment in signature.split_whitespace() {
+ if !detail.is_empty() {
+ detail.push(' ');
+ }
+
+ detail.push_str(segment);
+ }
+
+ detail
+}
+
fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String {
if let Some(self_param) = func.self_param(db) {
let assoc_fn_params = func.assoc_fn_params(db);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
index 343719c53..e667e2e01 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
@@ -179,8 +179,9 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V
item,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
- Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
+ Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
};
let mut res = Vec::with_capacity(requires.len());
for import in requires {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 2464e8d5f..f28afacc5 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -64,9 +64,11 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: false,
+ full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
prefer_no_std: false,
+ prefer_prelude: true,
insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: PrefixKind::Plain,
@@ -148,16 +150,29 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String {
fn monospace_width(s: &str) -> usize {
s.chars().count()
}
- let label_width =
- completions.iter().map(|it| monospace_width(&it.label)).max().unwrap_or_default().min(22);
+ let label_width = completions
+ .iter()
+ .map(|it| {
+ monospace_width(&it.label)
+ + monospace_width(it.label_detail.as_deref().unwrap_or_default())
+ })
+ .max()
+ .unwrap_or_default()
+ .min(22);
completions
.into_iter()
.map(|it| {
let tag = it.kind.tag();
let var_name = format!("{tag} {}", it.label);
let mut buf = var_name;
+ if let Some(ref label_detail) = it.label_detail {
+ format_to!(buf, "{label_detail}");
+ }
if let Some(detail) = it.detail {
- let width = label_width.saturating_sub(monospace_width(&it.label));
+ let width = label_width.saturating_sub(
+ monospace_width(&it.label)
+ + monospace_width(&it.label_detail.unwrap_or_default()),
+ );
format_to!(buf, "{:width$} {}", "", detail, width = width);
}
if it.deprecated {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index be5b7f8a3..b4f936b35 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -26,22 +26,22 @@ fn baz() {
"#,
// This should not contain `FooDesc {…}`.
expect![[r#"
- ct CONST
- en Enum
+ ct CONST Unit
+ en Enum Enum
fn baz() fn()
fn create_foo(…) fn(&FooDesc)
fn function() fn()
ma makro!(…) macro_rules! makro
md _69latrick
md module
- sc STATIC
- st FooDesc
- st Record
- st Tuple
- st Unit
- un Union
+ sc STATIC Unit
+ st FooDesc FooDesc
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ un Union Union
ev TupleV(…) TupleV(u32)
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -83,7 +83,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
lc param0 (i32, i32)
lc param1 i32
lc param2 i32
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -117,24 +117,24 @@ impl Unit {
"#,
// `self` is in here twice, once as the module, once as the local
expect![[r#"
- ct CONST
+ ct CONST Unit
cp CONST_PARAM
- en Enum
+ en Enum Enum
fn function() fn()
fn local_func() fn()
lc self Unit
ma makro!(…) macro_rules! makro
md module
md qualified
- sp Self
- sc STATIC
- st Record
- st Tuple
- st Unit
+ sp Self Unit
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tp TypeParam
- un Union
+ un Union Union
ev TupleV(…) TupleV(u32)
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -181,18 +181,18 @@ impl Unit {
}
"#,
expect![[r#"
- ct CONST
- en Enum
+ ct CONST Unit
+ en Enum Enum
fn function() fn()
ma makro!(…) macro_rules! makro
md module
md qualified
- sc STATIC
- st Record
- st Tuple
- st Unit
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
+ un Union Union
ev TupleV(…) TupleV(u32)
?? Unresolved
"#]],
@@ -211,7 +211,7 @@ fn complete_in_block() {
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -256,7 +256,7 @@ fn complete_after_if_expr() {
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -304,7 +304,7 @@ fn complete_in_match_arm() {
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -328,7 +328,7 @@ fn completes_in_loop_ctx() {
r"fn my() { loop { $0 } }",
expect![[r#"
fn my() fn()
- bt u32
+ bt u32 u32
kw break
kw const
kw continue
@@ -370,7 +370,7 @@ fn completes_in_let_initializer() {
r#"fn main() { let _ = $0 }"#,
expect![[r#"
fn main() fn()
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -403,8 +403,8 @@ fn foo() {
"#,
expect![[r#"
fn foo() fn()
- st Foo
- bt u32
+ st Foo Foo
+ bt u32 u32
kw crate::
kw false
kw for
@@ -439,7 +439,7 @@ fn foo() {
expect![[r#"
fn foo() fn()
lc bar i32
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -470,7 +470,7 @@ fn quux(x: i32) {
fn quux(…) fn(i32)
lc x i32
ma m!(…) macro_rules! m
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -497,7 +497,7 @@ fn quux(x: i32) {
fn quux(…) fn(i32)
lc x i32
ma m!(…) macro_rules! m
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -683,11 +683,11 @@ fn brr() {
}
"#,
expect![[r#"
- en HH
+ en HH HH
fn brr() fn()
- st YoloVariant
+ st YoloVariant YoloVariant
st YoloVariant {…} YoloVariant { f: usize }
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -749,7 +749,7 @@ fn foo() { if foo {} $0 }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -789,7 +789,7 @@ fn foo() { if foo {} el$0 }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -829,7 +829,7 @@ fn foo() { bar(if foo {} $0) }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw crate::
kw else
kw else if
@@ -853,7 +853,7 @@ fn foo() { bar(if foo {} el$0) }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw crate::
kw else
kw else if
@@ -877,7 +877,7 @@ fn foo() { if foo {} $0 let x = 92; }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -917,7 +917,7 @@ fn foo() { if foo {} el$0 let x = 92; }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -957,7 +957,7 @@ fn foo() { if foo {} el$0 { let x = 92; } }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -1009,7 +1009,7 @@ pub struct UnstableThisShouldNotBeListed;
expect![[r#"
fn main() fn()
md std
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1060,8 +1060,8 @@ pub struct UnstableButWeAreOnNightlyAnyway;
expect![[r#"
fn main() fn()
md std
- st UnstableButWeAreOnNightlyAnyway
- bt u32
+ st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1094,3 +1094,157 @@ pub struct UnstableButWeAreOnNightlyAnyway;
"#]],
);
}
+
+#[test]
+fn inside_format_args_completions_work() {
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{}", Foo.$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{}", Foo.f$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+}
+
+#[test]
+fn inside_faulty_format_args_completions_work() {
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("", Foo.$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("", Foo.f$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{} {named} {captured} {named} {}", a, named = c, Foo.f$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{", Foo.f$0);
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ sn while while expr {}
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
index 4cdfd546f..c58374f2e 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -116,19 +116,47 @@ fn main() {
}
#[test]
-fn short_paths_are_ignored() {
- cov_mark::check!(flyimport_exact_on_short_path);
+fn short_paths_are_prefix_matched() {
+ cov_mark::check!(flyimport_prefix_on_short_path);
check(
r#"
//- /lib.rs crate:dep
-pub struct Bar;
+pub struct Barc;
pub struct Rcar;
pub struct Rc;
+pub const RC: () = ();
pub mod some_module {
pub struct Bar;
pub struct Rcar;
pub struct Rc;
+ pub const RC: () = ();
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Rc$0
+}
+"#,
+ expect![[r#"
+ st Rc (use dep::Rc) Rc
+ st Rcar (use dep::Rcar) Rcar
+ st Rc (use dep::some_module::Rc) Rc
+ st Rcar (use dep::some_module::Rcar) Rcar
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct Barc;
+pub struct Rcar;
+pub struct Rc;
+pub const RC: () = ();
+pub mod some_module {
+ pub struct Bar;
+ pub struct Rcar;
+ pub struct Rc;
+ pub const RC: () = ();
}
//- /main.rs crate:main deps:dep
@@ -137,8 +165,36 @@ fn main() {
}
"#,
expect![[r#"
- st Rc (use dep::Rc)
- st Rc (use dep::some_module::Rc)
+ ct RC (use dep::RC) ()
+ st Rc (use dep::Rc) Rc
+ st Rcar (use dep::Rcar) Rcar
+ ct RC (use dep::some_module::RC) ()
+ st Rc (use dep::some_module::Rc) Rc
+ st Rcar (use dep::some_module::Rcar) Rcar
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct Barc;
+pub struct Rcar;
+pub struct Rc;
+pub const RC: () = ();
+pub mod some_module {
+ pub struct Bar;
+ pub struct Rcar;
+ pub struct Rc;
+ pub const RC: () = ();
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ RC$0
+}
+"#,
+ expect![[r#"
+ ct RC (use dep::RC) ()
+ ct RC (use dep::some_module::RC) ()
"#]],
);
}
@@ -171,10 +227,10 @@ fn main() {
}
"#,
expect![[r#"
- st ThirdStruct (use dep::some_module::ThirdStruct)
- st AfterThirdStruct (use dep::some_module::AfterThirdStruct)
- st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct)
- "#]],
+ st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct
+ st AfterThirdStruct (use dep::some_module::AfterThirdStruct) AfterThirdStruct
+ st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) ThiiiiiirdStruct
+ "#]],
);
}
@@ -253,7 +309,7 @@ fn trait_const_fuzzy_completion() {
check(
fixture,
expect![[r#"
- ct SPECIAL_CONST (use dep::test_mod::TestTrait)
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8
"#]],
);
@@ -541,8 +597,8 @@ fn main() {
}
"#,
expect![[r#"
- ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
"#]],
);
}
@@ -661,7 +717,7 @@ fn main() {
check(
fixture,
expect![[r#"
- st Item (use foo::bar::baz::Item)
+ st Item (use foo::bar) Item
"#]],
);
@@ -669,19 +725,19 @@ fn main() {
"Item",
fixture,
r#"
- use foo::bar;
+use foo::bar;
- mod foo {
- pub mod bar {
- pub mod baz {
- pub struct Item;
- }
- }
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
}
+ }
+}
- fn main() {
- bar::baz::Item
- }"#,
+fn main() {
+ bar::baz::Item
+}"#,
);
}
@@ -703,7 +759,7 @@ fn main() {
check(
fixture,
expect![[r#"
- ct TEST_ASSOC (use foo::Item)
+ ct TEST_ASSOC (use foo::Item) usize
"#]],
);
@@ -747,8 +803,8 @@ fn main() {
check(
fixture,
expect![[r#"
- ct TEST_ASSOC (use foo::bar::Item)
- "#]],
+ ct TEST_ASSOC (use foo::bar) usize
+ "#]],
);
check_edit(
@@ -841,8 +897,8 @@ fn main() {
TES$0
}"#,
expect![[r#"
- ct TEST_CONST (use foo::TEST_CONST)
- "#]],
+ ct TEST_CONST (use foo::TEST_CONST) usize
+ "#]],
);
check(
@@ -858,9 +914,9 @@ fn main() {
tes$0
}"#,
expect![[r#"
- ct TEST_CONST (use foo::TEST_CONST)
- fn test_function() (use foo::test_function) fn() -> i32
- "#]],
+ ct TEST_CONST (use foo::TEST_CONST) usize
+ fn test_function() (use foo::test_function) fn() -> i32
+ "#]],
);
check(
@@ -873,9 +929,9 @@ mod foo {
}
fn main() {
- Te$0
+ Tes$0
}"#,
- expect![[]],
+ expect![""],
);
}
@@ -1082,8 +1138,8 @@ mod mud {
}
"#,
expect![[r#"
- st Struct (use crate::Struct)
- "#]],
+ st Struct (use crate::Struct) Struct
+ "#]],
);
}
@@ -1194,7 +1250,7 @@ enum Foo {
}
}"#,
expect![[r#"
- st Barbara (use foo::Barbara)
+ st Barbara (use foo::Barbara) Barbara
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
index 3ef2a7c94..de3fd0518 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
@@ -18,15 +18,15 @@ fn target_type_or_trait_in_impl_block() {
impl Tra$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -40,15 +40,15 @@ fn target_type_in_trait_impl_block() {
impl Trait for Str$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
index 8af6cce98..67cf551fc 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
@@ -355,6 +355,35 @@ fn outer(Foo { bar$0 }: Foo) {}
}
#[test]
+fn completes_in_record_field_pat_with_generic_type_alias() {
+ check_empty(
+ r#"
+type Wrap<T> = T;
+
+enum X {
+ A { cool: u32, stuff: u32 },
+ B,
+}
+
+fn main() {
+ let wrapped = Wrap::<X>::A {
+ cool: 100,
+ stuff: 100,
+ };
+
+ if let Wrap::<X>::A { $0 } = &wrapped {};
+}
+"#,
+ expect![[r#"
+ fd cool u32
+ fd stuff u32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
fn completes_in_fn_param() {
check_empty(
r#"
@@ -406,7 +435,7 @@ fn foo() {
}
"#,
expect![[r#"
- st Bar
+ st Bar Bar
kw crate::
kw self::
"#]],
@@ -421,7 +450,7 @@ fn foo() {
}
"#,
expect![[r#"
- st Foo
+ st Foo Foo
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
index 789ad6634..46a3e97d3 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
@@ -16,16 +16,16 @@ fn predicate_start() {
struct Foo<'lt, T, const C: usize> where $0 {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -89,16 +89,16 @@ fn param_list_for_for_pred() {
struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -114,16 +114,16 @@ impl Record {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Record
- st Tuple
- st Unit
+ sp Self Record
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
index 65cefdb08..18afde1b7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -186,10 +186,10 @@ fn main() {
lc foo Foo
lc thing i32
md core
- st Foo
+ st Foo Foo
st Foo {…} Foo { foo1: u32, foo2: u32 }
tt Default
- bt u32
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index e80a28904..f96fb71f2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -2,10 +2,15 @@
use expect_test::{expect, Expect};
-use crate::tests::{
- check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
+use crate::{
+ tests::{
+ check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
+ },
+ CompletionItemKind,
};
+use super::{do_completion_with_config, TEST_CONFIG};
+
fn check_no_kw(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual)
@@ -79,10 +84,10 @@ pub mod prelude {
}
"#,
expect![[r#"
- md std
- st Option
- bt u32
- "#]],
+ md std
+ st Option Option
+ bt u32 u32
+ "#]],
);
}
@@ -107,11 +112,11 @@ mod macros {
}
"#,
expect![[r#"
- fn f() fn()
- ma concat!(…) macro_rules! concat
- md std
- bt u32
- "#]],
+ fn f() fn()
+ ma concat!(…) macro_rules! concat
+ md std
+ bt u32 u32
+ "#]],
);
}
@@ -137,11 +142,11 @@ pub mod prelude {
}
"#,
expect![[r#"
- md core
- md std
- st String
- bt u32
- "#]],
+ md core
+ md std
+ st String String
+ bt u32 u32
+ "#]],
);
}
@@ -166,10 +171,10 @@ pub mod prelude {
}
"#,
expect![[r#"
- fn f() fn()
- md std
- bt u32
- "#]],
+ fn f() fn()
+ md std
+ bt u32 u32
+ "#]],
);
}
@@ -441,10 +446,10 @@ mod p {
}
"#,
expect![[r#"
- ct RIGHT_CONST
- fn right_fn() fn()
- st RightType
- "#]],
+ ct RIGHT_CONST u32
+ fn right_fn() fn()
+ st RightType WrongType
+ "#]],
);
check_edit(
@@ -876,7 +881,7 @@ fn main() {
fn main() fn()
lc foobar i32
ma x!(…) macro_rules! x
- bt u32
+ bt u32 u32
"#]],
)
}
@@ -1003,8 +1008,8 @@ fn here_we_go() {
"#,
expect![[r#"
fn here_we_go() fn()
- st Foo (alias Bar)
- bt u32
+ st Foo (alias Bar) Foo
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1052,8 +1057,8 @@ fn here_we_go() {
"#,
expect![[r#"
fn here_we_go() fn()
- st Foo (alias Bar, Qux, Baz)
- bt u32
+ st Foo (alias Bar, Qux, Baz) Foo
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1173,7 +1178,7 @@ fn bar() { qu$0 }
expect![[r#"
fn bar() fn()
fn foo() (alias qux) fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1222,7 +1227,7 @@ fn here_we_go() {
}
"#,
expect![[r#"
- st Bar (alias Qux)
+ st Bar (alias Qux) Bar
"#]],
);
}
@@ -1241,7 +1246,7 @@ fn here_we_go() {
}
"#,
expect![[r#"
- st Bar (alias Qux)
+ st Bar (alias Qux) Bar
"#]],
);
}
@@ -1262,8 +1267,8 @@ fn here_we_go() {
expect![[r#"
fn here_we_go() fn()
md foo
- st Bar (alias Qux) (use foo::Bar)
- bt u32
+ st Bar (alias Qux) (use foo::Bar) Bar
+ bt u32 u32
kw crate::
kw false
kw for
@@ -1282,6 +1287,30 @@ fn here_we_go() {
}
#[test]
+fn completes_only_public() {
+ check(
+ r#"
+//- /e.rs
+pub(self) fn i_should_be_hidden() {}
+pub(in crate::e) fn i_should_also_be_hidden() {}
+pub fn i_am_public () {}
+
+//- /lib.rs crate:krate
+pub mod e;
+
+//- /main.rs deps:krate crate:main
+use krate::e;
+fn main() {
+ e::$0
+}"#,
+ expect![
+ "fn i_am_public() fn()
+"
+ ],
+ )
+}
+
+#[test]
fn completion_filtering_excludes_non_identifier_doc_aliases() {
check_edit(
"PartialOrdcmporder",
@@ -1303,3 +1332,176 @@ struct Foo<T: PartialOrd
"#,
);
}
+
+fn check_signatures(src: &str, kind: CompletionItemKind, reduced: Expect, full: Expect) {
+ const FULL_SIGNATURES_CONFIG: crate::CompletionConfig = {
+ let mut x = TEST_CONFIG;
+ x.full_function_signatures = true;
+ x
+ };
+
+ // reduced signature
+ let completion = do_completion_with_config(TEST_CONFIG, src, kind);
+ assert!(completion[0].detail.is_some());
+ reduced.assert_eq(completion[0].detail.as_ref().unwrap());
+
+ // full signature
+ let completion = do_completion_with_config(FULL_SIGNATURES_CONFIG, src, kind);
+ assert!(completion[0].detail.is_some());
+ full.assert_eq(completion[0].detail.as_ref().unwrap());
+}
+
+#[test]
+fn respects_full_function_signatures() {
+ check_signatures(
+ r#"
+pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone, { 0u8 }
+fn main() { fo$0 }
+"#,
+ CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function),
+ expect!("fn(&mut T) -> u8"),
+ expect!("pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone,"),
+ );
+
+ check_signatures(
+ r#"
+struct Foo;
+struct Bar;
+impl Bar {
+ pub const fn baz(x: Foo) -> ! { loop {} };
+}
+
+fn main() { Bar::b$0 }
+"#,
+ CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function),
+ expect!("const fn(Foo) -> !"),
+ expect!("pub const fn baz(x: Foo) -> !"),
+ );
+
+ check_signatures(
+ r#"
+struct Foo;
+struct Bar;
+impl Bar {
+ pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> ! { loop {} };
+}
+
+fn main() {
+ let mut bar = Bar;
+ bar.b$0
+}
+"#,
+ CompletionItemKind::Method,
+ expect!("const fn(&'foo mut self, &Foo) -> !"),
+ expect!("pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> !"),
+ );
+}
+
+#[test]
+fn skips_underscore() {
+ check_with_trigger_character(
+ r#"
+fn foo(_$0) { }
+"#,
+ Some('_'),
+ expect![[r#""#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo(_: _$0) { }
+"#,
+ Some('_'),
+ expect![[r#""#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo<T>() {
+ foo::<_$0>();
+}
+"#,
+ Some('_'),
+ expect![[r#""#]],
+ );
+ // underscore expressions are fine, they are invalid so the user definitely meant to type an
+ // underscored name here
+ check_with_trigger_character(
+ r#"
+fn foo() {
+ _$0
+}
+"#,
+ Some('_'),
+ expect![[r#"
+ fn foo() fn()
+ bt u32 u32
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ );
+}
+
+#[test]
+fn no_skip_underscore_ident() {
+ check_with_trigger_character(
+ r#"
+fn foo(a_$0) { }
+"#,
+ Some('_'),
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo(_: a_$0) { }
+"#,
+ Some('_'),
+ expect![[r#"
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo<T>() {
+ foo::<a_$0>();
+}
+"#,
+ Some('_'),
+ expect![[r#"
+ tp T
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
index d518dd764..c7161f82c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
@@ -17,18 +17,18 @@ struct Foo<'lt, T, const C: usize> {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ sp Self Foo<'_, {unknown}, _>
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -42,18 +42,18 @@ fn tuple_struct_field() {
struct Foo<'lt, T, const C: usize>(f$0);
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ sp Self Foo<'_, {unknown}, _>
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw pub
kw pub(crate)
@@ -70,16 +70,16 @@ fn fn_return_type() {
fn x<'lt, T, const C: usize>() -> $0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -100,19 +100,19 @@ fn foo() -> B$0 {
}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Record
- st Tuple
- st Unit
- tt Trait
- un Union
- bt u32
- it ()
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ it ()
+ kw crate::
+ kw self::
+ "#]],
)
}
@@ -124,16 +124,16 @@ struct Foo<T>(T);
const FOO: $0 = Foo(2);
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it Foo<i32>
kw crate::
kw self::
@@ -151,15 +151,15 @@ fn f2() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it i32
kw crate::
kw self::
@@ -179,15 +179,15 @@ fn f2() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it u64
kw crate::
kw self::
@@ -204,15 +204,15 @@ fn f2(x: u64) -> $0 {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it u64
kw crate::
kw self::
@@ -230,15 +230,15 @@ fn f2(x: $0) {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it i32
kw crate::
kw self::
@@ -262,17 +262,17 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md a
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it a::Foo<a::Foo<i32>>
kw crate::
kw self::
@@ -291,17 +291,17 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it Foo<i32>
kw crate::
kw self::
@@ -319,16 +319,16 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -341,14 +341,14 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
+ un Union Union
"#]],
);
}
@@ -384,18 +384,18 @@ trait Trait2<T>: Trait1 {
fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tt Trait1
tt Trait2
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -409,15 +409,15 @@ trait Trait2<T> {
fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tt Trait2
- un Union
+ un Union Union
"#]],
);
}
@@ -434,18 +434,18 @@ trait Tr<T> {
impl Tr<$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Record
- st S
- st Tuple
- st Unit
+ sp Self dyn Tr<{unknown}>
+ st Record Record
+ st S S
+ st Tuple Tuple
+ st Unit Unit
tt Tr
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -481,16 +481,16 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -506,16 +506,16 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u8, u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -549,16 +549,16 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -574,18 +574,18 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u8, u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
ta Item1 = (as MyTrait) type Item1
ta Item2 = (as MyTrait) type Item2
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -619,16 +619,16 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = $0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -644,16 +644,16 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -668,7 +668,7 @@ trait MyTrait {
fn f(t: impl MyTrait<C = $0
"#,
expect![[r#"
- ct CONST
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -691,9 +691,9 @@ pub struct S;
"#,
expect![[r#"
md std
- sp Self
- st Foo
- bt u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -716,10 +716,10 @@ pub struct S;
"#,
expect![[r#"
md std
- sp Self
- st Foo
- st S
- bt u32
+ sp Self Foo
+ st Foo Foo
+ st S S
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -739,19 +739,19 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Foo
- st Record
- st Tuple
- st Unit
- tt Trait
- un Union
- bt u32
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
);
// FIXME: This should probably also suggest completions for types, at least those that have
// associated constants usable in this position. For example, a user could be typing
@@ -766,12 +766,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Method generic params
@@ -785,19 +785,19 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Foo
- st Record
- st Tuple
- st Unit
- tt Trait
- un Union
- bt u32
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
);
check(
r#"
@@ -809,12 +809,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Associated type generic params
@@ -828,20 +828,20 @@ fn completes_const_and_type_generics_separately() {
fn foo(_: impl Bar<Baz<F$0, 0> = ()>) {}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Foo
- st Record
- st Tuple
- st Unit
- tt Bar
- tt Trait
- un Union
- bt u32
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Bar
+ tt Trait
+ un Union Union
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
);
check(
r#"
@@ -853,12 +853,12 @@ fn completes_const_and_type_generics_separately() {
fn foo<T: Bar<Baz<(), $0> = ()>>() {}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Type generic params
@@ -871,12 +871,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Type alias generic params
@@ -890,12 +890,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Enum variant params
@@ -908,12 +908,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Trait params
@@ -924,12 +924,12 @@ fn completes_const_and_type_generics_separately() {
impl Foo<(), $0> for () {}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Trait alias params
@@ -942,12 +942,12 @@ fn completes_const_and_type_generics_separately() {
fn foo<T: Bar<X$0, ()>>() {}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Omitted lifetime params
@@ -957,7 +957,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<F$0, _>; }
"#,
expect![[r#"
- ct CONST
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -970,7 +970,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<'static, 'static, F$0, _>; }
"#,
expect![[r#"
- ct CONST
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
index 4c74dba52..167bdec54 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
@@ -65,7 +65,7 @@ use self::{foo::*, bar$0};
"#,
expect![[r#"
md foo
- st S
+ st S S
"#]],
);
}
@@ -82,7 +82,7 @@ mod foo {
use foo::{bar::$0}
"#,
expect![[r#"
- st FooBar
+ st FooBar FooBar
"#]],
);
check(
@@ -115,7 +115,7 @@ mod foo {
use foo::{bar::{baz::$0}}
"#,
expect![[r#"
- st FooBarBaz
+ st FooBarBaz FooBarBaz
"#]],
);
check(
@@ -152,7 +152,7 @@ struct Bar;
"#,
expect![[r#"
ma foo macro_rules! foo_
- st Foo
+ st Foo Foo
"#]],
);
}
@@ -193,7 +193,7 @@ struct Bar;
"#,
expect![[r#"
md foo
- st Bar
+ st Bar Bar
"#]],
);
}
@@ -212,7 +212,7 @@ struct Bar;
expect![[r#"
md bar
md foo
- st Bar
+ st Bar Bar
"#]],
);
}
@@ -230,7 +230,7 @@ mod a {
}
"#,
expect![[r#"
- ct A
+ ct A usize
md b
kw super::
"#]],
@@ -248,7 +248,7 @@ struct Bar;
"#,
expect![[r#"
md foo
- st Bar
+ st Bar Bar
"#]],
);
}
@@ -265,7 +265,7 @@ pub mod foo {}
"#,
expect![[r#"
md foo
- st Foo
+ st Foo Foo
"#]],
);
}
@@ -425,7 +425,7 @@ marco_rules! m { () => {} }
expect![[r#"
fn foo fn()
md simd
- st S
+ st S S
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index faec74206..4a2e770f1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -13,16 +13,16 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-tracing = "0.1.35"
-rayon = "1.6.1"
+tracing.workspace = true
+rayon.workspace = true
fst = { version = "0.4.7", default-features = false }
rustc-hash = "1.1.0"
once_cell = "1.17.0"
-either = "1.7.0"
-itertools = "0.10.5"
+either.workspace = true
+itertools.workspace = true
arrayvec = "0.7.2"
-indexmap = "2.0.0"
-memchr = "2.5.0"
+indexmap.workspace = true
+memchr = "2.6.4"
triomphe.workspace = true
nohash-hasher.workspace = true
@@ -43,7 +43,7 @@ line-index.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
oorandom = "11.1.3"
-xshell = "0.2.2"
+xshell.workspace = true
# local deps
test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index a0b05c87a..343be870c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -97,13 +97,13 @@ impl RootDatabase {
// ExpandDatabase
hir::db::AstIdMapQuery
- hir::db::ParseMacroExpansionQuery
- hir::db::InternMacroCallQuery
- hir::db::MacroArgNodeQuery
hir::db::DeclMacroExpanderQuery
- hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery
- hir::db::HygieneFrameQuery
+ hir::db::InternMacroCallQuery
+ hir::db::InternSyntaxContextQuery
+ hir::db::MacroArgQuery
+ hir::db::ParseMacroExpansionQuery
+ hir::db::RealSpanMapQuery
// DefDatabase
hir::db::FileItemTreeQuery
@@ -143,6 +143,13 @@ impl RootDatabase {
hir::db::FunctionVisibilityQuery
hir::db::ConstVisibilityQuery
hir::db::CrateSupportsNoStdQuery
+ hir::db::BlockItemTreeQueryQuery
+ hir::db::ExternCrateDeclDataQuery
+ hir::db::LangAttrQuery
+ hir::db::InternAnonymousConstQuery
+ hir::db::InternExternCrateQuery
+ hir::db::InternInTypeConstQuery
+ hir::db::InternUseQuery
// HirDatabase
hir::db::InferQueryQuery
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 4ce80532e..ded5d4e3d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -161,8 +161,8 @@ impl IdentClass {
ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
- ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
- ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
+ ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema, &prefix_expr).map(IdentClass::Operator),
+ ast::TryExpr(try_expr) => OperatorClass::classify_try(sema, &try_expr).map(IdentClass::Operator),
_ => None,
}
}
@@ -492,7 +492,7 @@ impl NameRefClass {
match_ast! {
match parent {
ast::MethodCallExpr(method_call) => {
- sema.resolve_method_call_field_fallback(&method_call)
+ sema.resolve_method_call_fallback(&method_call)
.map(|it| {
it.map_left(Definition::Function)
.map_right(Definition::Field)
@@ -500,9 +500,12 @@ impl NameRefClass {
})
},
ast::FieldExpr(field_expr) => {
- sema.resolve_field(&field_expr)
- .map(Definition::Field)
- .map(NameRefClass::Definition)
+ sema.resolve_field_fallback(&field_expr)
+ .map(|it| {
+ it.map_left(Definition::Field)
+ .map_right(Definition::Function)
+ .either(NameRefClass::Definition, NameRefClass::Definition)
+ })
},
ast::RecordPatField(record_pat_field) => {
sema.resolve_record_pat_field(&record_pat_field)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
index b63dde2c2..722517a76 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -54,6 +54,10 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:convert:Into")
}
+ pub fn core_convert_Index(&self) -> Option<Trait> {
+ self.find_trait("core:ops:Index")
+ }
+
pub fn core_option_Option(&self) -> Option<Enum> {
self.find_enum("core:option:Option")
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
index 57563a174..1cb6ff862 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -9,12 +9,18 @@ pub struct LintGroup {
pub lint: Lint,
pub children: &'static [&'static str],
}
+
pub const DEFAULT_LINTS: &[Lint] = &[
Lint {
label: "absolute_paths_not_starting_with_crate",
description: r##"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"##,
},
Lint { label: "ambiguous_associated_items", description: r##"ambiguous associated items"## },
+ Lint {
+ label: "ambiguous_glob_imports",
+ description: r##"detects certain glob imports that require reporting an ambiguity error"##,
+ },
+ Lint { label: "ambiguous_glob_reexports", description: r##"ambiguous glob re-exports"## },
Lint { label: "anonymous_parameters", description: r##"detects anonymous parameters"## },
Lint { label: "arithmetic_overflow", description: r##"arithmetic operation overflows"## },
Lint {
@@ -25,6 +31,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "asm_sub_register",
description: r##"using only a subset of a register for inline asm inputs"##,
},
+ Lint {
+ label: "async_fn_in_trait",
+ description: r##"use of `async fn` in definition of a publicly-reachable trait"##,
+ },
Lint { label: "bad_asm_style", description: r##"incorrect use of inline assembly"## },
Lint {
label: "bare_trait_objects",
@@ -40,6 +50,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"`break` expression with label and unlabeled loop as value expression"##,
},
Lint {
+ label: "byte_slice_in_packed_struct_with_derive",
+ description: r##"`[u8]` or `str` used in a packed struct with `derive`"##,
+ },
+ Lint {
label: "cenum_impl_drop_cast",
description: r##"a C-like enum implementing Drop is cast"##,
},
@@ -52,6 +66,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"distinct impls distinguished only by the leak-check code"##,
},
Lint {
+ label: "coinductive_overlap_in_coherence",
+ description: r##"impls that are not considered to overlap may be considered to overlap in the future"##,
+ },
+ Lint {
label: "conflicting_repr_hints",
description: r##"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"##,
},
@@ -60,10 +78,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects visually confusable pairs between identifiers"##,
},
Lint {
- label: "const_err",
- description: r##"constant evaluation encountered erroneous expression"##,
- },
- Lint {
label: "const_evaluatable_unchecked",
description: r##"detects a generic constant is used in a type without a emitting a warning"##,
},
@@ -71,13 +85,25 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "const_item_mutation",
description: r##"detects attempts to mutate a `const` item"##,
},
+ Lint {
+ label: "const_patterns_without_partial_eq",
+ description: r##"constant in pattern does not implement `PartialEq`"##,
+ },
Lint { label: "dead_code", description: r##"detect unused, unexported items"## },
Lint { label: "deprecated", description: r##"detects use of deprecated items"## },
Lint {
+ label: "deprecated_cfg_attr_crate_type_name",
+ description: r##"detects usage of `#![cfg_attr(..., crate_type/crate_name = "...")]`"##,
+ },
+ Lint {
label: "deprecated_in_future",
description: r##"detects use of items that will be deprecated in a future version"##,
},
Lint {
+ label: "deprecated_where_clause_location",
+ description: r##"deprecated where clause location"##,
+ },
+ Lint {
label: "deref_into_dyn_supertrait",
description: r##"`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future"##,
},
@@ -90,10 +116,23 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"bounds of the form `T: Drop` are most likely incorrect"##,
},
Lint {
+ label: "dropping_copy_types",
+ description: r##"calls to `std::mem::drop` with a value that implements Copy"##,
+ },
+ Lint {
+ label: "dropping_references",
+ description: r##"calls to `std::mem::drop` with a reference instead of an owned value"##,
+ },
+ Lint { label: "duplicate_macro_attributes", description: r##"duplicated attribute"## },
+ Lint {
label: "dyn_drop",
description: r##"trait objects of the form `dyn Drop` are useless"##,
},
Lint {
+ label: "elided_lifetimes_in_associated_constant",
+ description: r##"elided lifetimes cannot be used in associated constants in impls"##,
+ },
+ Lint {
label: "elided_lifetimes_in_paths",
description: r##"hidden lifetime parameters in types are deprecated"##,
},
@@ -113,14 +152,38 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "exported_private_dependencies",
description: r##"public interface leaks type from a private dependency"##,
},
+ Lint {
+ label: "ffi_unwind_calls",
+ description: r##"call to foreign functions or function pointers with FFI-unwind ABI"##,
+ },
+ Lint {
+ label: "for_loops_over_fallibles",
+ description: r##"for-looping over an `Option` or a `Result`, which is more clearly expressed as an `if let`"##,
+ },
Lint { label: "forbidden_lint_groups", description: r##"applying forbid to lint-groups"## },
Lint {
+ label: "forgetting_copy_types",
+ description: r##"calls to `std::mem::forget` with a value that implements Copy"##,
+ },
+ Lint {
+ label: "forgetting_references",
+ description: r##"calls to `std::mem::forget` with a reference instead of an owned value"##,
+ },
+ Lint {
label: "function_item_references",
description: r##"suggest casting to a function pointer when attempting to take references to function items"##,
},
Lint {
label: "future_incompatible",
- description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-alignment, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##,
+ },
+ Lint {
+ label: "fuzzy_provenance_casts",
+ description: r##"a fuzzy integer to pointer cast is used"##,
+ },
+ Lint {
+ label: "hidden_glob_reexports",
+ description: r##"name introduced by a private item shadows a name introduced by a public glob re-export"##,
},
Lint {
label: "ill_formed_attribute_input",
@@ -131,6 +194,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"floating-point literals cannot be used in patterns"##,
},
Lint {
+ label: "implied_bounds_entailment",
+ description: r##"impl method assumes more implied bounds than its corresponding trait method"##,
+ },
+ Lint {
label: "improper_ctypes",
description: r##"proper use of libc types in foreign modules"##,
},
@@ -156,6 +223,14 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"##,
},
Lint {
+ label: "internal_features",
+ description: r##"internal features are not supposed to be used"##,
+ },
+ Lint {
+ label: "invalid_alignment",
+ description: r##"raw pointers must be aligned before dereferencing"##,
+ },
+ Lint {
label: "invalid_atomic_ordering",
description: r##"usage of invalid atomic ordering in atomic operations and memory fences"##,
},
@@ -164,6 +239,26 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects invalid `#[doc(...)]` attributes"##,
},
Lint {
+ label: "invalid_from_utf8",
+ description: r##"using a non UTF-8 literal in `std::str::from_utf8`"##,
+ },
+ Lint {
+ label: "invalid_from_utf8_unchecked",
+ description: r##"using a non UTF-8 literal in `std::str::from_utf8_unchecked`"##,
+ },
+ Lint {
+ label: "invalid_macro_export_arguments",
+ description: r##""invalid_parameter" isn't a valid argument for `#[macro_export]`"##,
+ },
+ Lint {
+ label: "invalid_nan_comparisons",
+ description: r##"detects invalid floating point NaN comparisons"##,
+ },
+ Lint {
+ label: "invalid_reference_casting",
+ description: r##"casts of `&T` to `&mut T` without interior mutability"##,
+ },
+ Lint {
label: "invalid_type_param_default",
description: r##"type parameter default erroneously allowed in invalid location"##,
},
@@ -189,6 +284,26 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects derive helper attributes that are used before they are introduced"##,
},
Lint {
+ label: "let_underscore",
+ description: r##"lint group for: let-underscore-drop, let-underscore-lock"##,
+ },
+ Lint {
+ label: "let_underscore_drop",
+ description: r##"non-binding let on a type that implements `Drop`"##,
+ },
+ Lint {
+ label: "let_underscore_lock",
+ description: r##"non-binding let on a synchronization lock"##,
+ },
+ Lint {
+ label: "long_running_const_eval",
+ description: r##"detects long const eval operations"##,
+ },
+ Lint {
+ label: "lossy_provenance_casts",
+ description: r##"a lossy pointer to integer cast is used"##,
+ },
+ Lint {
label: "macro_expanded_macro_exports_accessed_by_absolute_paths",
description: r##"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"##,
},
@@ -197,6 +312,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"##,
},
Lint {
+ label: "map_unit_fn",
+ description: r##"`Iterator::map` call that discard the iterator's values"##,
+ },
+ Lint {
label: "meta_variable_misuse",
description: r##"possible meta-variable misuse at macro definition"##,
},
@@ -222,16 +341,20 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects Unicode scripts whose mixed script confusables codepoints are solely used"##,
},
Lint {
+ label: "multiple_supertrait_upcastable",
+ description: r##"detect when an object-safe trait has multiple supertraits"##,
+ },
+ Lint {
label: "must_not_suspend",
description: r##"use of a `#[must_not_suspend]` value across a yield point"##,
},
Lint {
- label: "mutable_borrow_reservation_conflict",
- description: r##"reservation of a two-phased borrow conflicts with other shared borrows"##,
+ label: "mutable_transmutes",
+ description: r##"transmuting &T to &mut T is undefined behavior, even if the reference is unused"##,
},
Lint {
- label: "mutable_transmutes",
- description: r##"mutating transmuted &mut T from &T may cause undefined behavior"##,
+ label: "named_arguments_used_positionally",
+ description: r##"named arguments in format used positionally"##,
},
Lint { label: "named_asm_labels", description: r##"named labels in inline assembly"## },
Lint {
@@ -277,6 +400,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects the use of well-known noop methods"##,
},
Lint {
+ label: "opaque_hidden_inferred_bound",
+ description: r##"detects the use of nested `impl Trait` types in associated type bounds that are not general enough"##,
+ },
+ Lint {
label: "order_dependent_trait_objects",
description: r##"trait-object types were treated as different depending on marker-trait order"##,
},
@@ -295,8 +422,12 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"pointers are not structural-match"##,
},
Lint {
- label: "private_in_public",
- description: r##"detect private items in public interfaces not caught by the old implementation"##,
+ label: "private_bounds",
+ description: r##"private type in secondary interface of an item"##,
+ },
+ Lint {
+ label: "private_interfaces",
+ description: r##"private type in primary interface of an item"##,
},
Lint {
label: "proc_macro_back_compat",
@@ -315,12 +446,20 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects unnecessary trailing semicolons"##,
},
Lint {
+ label: "refining_impl_trait",
+ description: r##"impl trait in impl method signature does not match trait method signature"##,
+ },
+ Lint {
label: "renamed_and_removed_lints",
description: r##"lints that have been renamed or removed"##,
},
Lint {
+ label: "repr_transparent_external_private_fields",
+ description: r##"transparent type contains an external ZST that is marked #[non_exhaustive] or contains private fields"##,
+ },
+ Lint {
label: "rust_2018_compatibility",
- description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ description: r##"lint group for: keyword-idents, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##,
},
Lint {
label: "rust_2018_idioms",
@@ -328,7 +467,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
},
Lint {
label: "rust_2021_compatibility",
- description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prefixes-incompatible-syntax, rust-2021-prelude-collisions, array-into-iter, non-fmt-panics"##,
},
Lint {
label: "rust_2021_incompatible_closure_captures",
@@ -359,14 +498,30 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"a feature gate that doesn't break dependent crates"##,
},
Lint {
+ label: "special_module_name",
+ description: r##"module declarations for files with a special meaning"##,
+ },
+ Lint {
label: "stable_features",
description: r##"stable features found in `#[feature]` directive"##,
},
Lint {
+ label: "suspicious_auto_trait_impls",
+ description: r##"the rules governing auto traits have recently changed resulting in potential breakage"##,
+ },
+ Lint {
+ label: "suspicious_double_ref_op",
+ description: r##"suspicious call of trait method on `&&T`"##,
+ },
+ Lint {
label: "temporary_cstring_as_ptr",
description: r##"detects getting the inner pointer of a temporary `CString`"##,
},
Lint {
+ label: "test_unstable_lint",
+ description: r##"this unstable lint is only for testing"##,
+ },
+ Lint {
label: "text_direction_codepoint_in_comment",
description: r##"invisible directionality-changing codepoints in comment"##,
},
@@ -395,10 +550,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"raw pointer to an inference variable"##,
},
Lint {
- label: "unaligned_references",
- description: r##"detects unaligned references to fields of packed structs"##,
- },
- Lint {
label: "uncommon_codepoints",
description: r##"detects uncommon Unicode codepoints in identifiers"##,
},
@@ -410,6 +561,26 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "unconditional_recursion",
description: r##"functions that cannot return without calling themselves"##,
},
+ Lint {
+ label: "undefined_naked_function_abi",
+ description: r##"undefined naked function ABI"##,
+ },
+ Lint {
+ label: "undropped_manually_drops",
+ description: r##"calls to `std::mem::drop` with `std::mem::ManuallyDrop` instead of it's inner value"##,
+ },
+ Lint {
+ label: "unexpected_cfgs",
+ description: r##"detects unexpected names and values in `#[cfg]` conditions"##,
+ },
+ Lint {
+ label: "unfulfilled_lint_expectations",
+ description: r##"unfulfilled lint expectation"##,
+ },
+ Lint {
+ label: "ungated_async_fn_track_caller",
+ description: r##"enabling track_caller on an async fn is a no-op unless the async_fn_track_caller feature is enabled"##,
+ },
Lint { label: "uninhabited_static", description: r##"uninhabited static"## },
Lint {
label: "unknown_crate_types",
@@ -417,16 +588,27 @@ pub const DEFAULT_LINTS: &[Lint] = &[
},
Lint { label: "unknown_lints", description: r##"unrecognized lint attribute"## },
Lint {
+ label: "unknown_or_malformed_diagnostic_attributes",
+ description: r##"unrecognized or malformed diagnostic attribute"##,
+ },
+ Lint {
label: "unnameable_test_items",
description: r##"detects an item that cannot be named being marked as `#[test_case]`"##,
},
+ Lint {
+ label: "unnameable_types",
+ description: r##"effective visibility of a type is larger than the area in which it can be named"##,
+ },
Lint { label: "unreachable_code", description: r##"detects unreachable code paths"## },
Lint { label: "unreachable_patterns", description: r##"detects unreachable patterns"## },
Lint {
label: "unreachable_pub",
description: r##"`pub` items not reachable from crate root"##,
},
- Lint { label: "unsafe_code", description: r##"usage of `unsafe` code"## },
+ Lint {
+ label: "unsafe_code",
+ description: r##"usage of `unsafe` code and other potentially unsound constructs"##,
+ },
Lint {
label: "unsafe_op_in_unsafe_fn",
description: r##"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"##,
@@ -440,16 +622,16 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects name collision with an existing but unstable method"##,
},
Lint {
- label: "unsupported_calling_conventions",
- description: r##"use of unsupported calling convention"##,
+ label: "unstable_syntax_pre_expansion",
+ description: r##"unstable syntax can change at any point in the future, causing a hard error!"##,
},
Lint {
- label: "unsupported_naked_functions",
- description: r##"unsupported naked function definitions"##,
+ label: "unsupported_calling_conventions",
+ description: r##"use of unsupported calling convention"##,
},
Lint {
label: "unused",
- description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-macro-rules, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons, map-unit-fn"##,
},
Lint {
label: "unused_allocation",
@@ -460,6 +642,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detect assignments that will never be read"##,
},
Lint {
+ label: "unused_associated_type_bounds",
+ description: r##"detects unused `Foo = Bar` bounds in `dyn Trait<Foo = Bar>`"##,
+ },
+ Lint {
label: "unused_attributes",
description: r##"detects attributes that were not used by the compiler"##,
},
@@ -491,6 +677,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "unused_lifetimes",
description: r##"detects lifetime parameters that are never used"##,
},
+ Lint {
+ label: "unused_macro_rules",
+ description: r##"detects macro rules that were not used"##,
+ },
Lint { label: "unused_macros", description: r##"detects macros that were not used"## },
Lint {
label: "unused_must_use",
@@ -512,6 +702,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "unused_results",
description: r##"unused result of an expression in a statement"##,
},
+ Lint {
+ label: "unused_tuple_struct_fields",
+ description: r##"detects tuple struct fields that are never read"##,
+ },
Lint { label: "unused_unsafe", description: r##"unnecessary use of an `unsafe` block"## },
Lint {
label: "unused_variables",
@@ -522,6 +716,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects deprecation attributes with no effect"##,
},
Lint {
+ label: "useless_ptr_null_checks",
+ description: r##"useless checking of non-null-typed pointer"##,
+ },
+ Lint {
label: "variant_size_differences",
description: r##"detects enums with widely varying variant sizes"##,
},
@@ -542,51 +740,65 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"suggest using `loop { }` instead of `while true { }`"##,
},
];
+
pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "future_incompatible",
- description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-alignment, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##,
},
children: &[
+ "deref_into_dyn_supertrait",
+ "ambiguous_associated_items",
+ "ambiguous_glob_imports",
+ "byte_slice_in_packed_struct_with_derive",
+ "cenum_impl_drop_cast",
+ "coherence_leak_check",
+ "coinductive_overlap_in_coherence",
+ "conflicting_repr_hints",
+ "const_evaluatable_unchecked",
+ "const_patterns_without_partial_eq",
+ "deprecated_cfg_attr_crate_type_name",
+ "elided_lifetimes_in_associated_constant",
"forbidden_lint_groups",
+ "ill_formed_attribute_input",
"illegal_floating_point_literal_pattern",
- "private_in_public",
- "pub_use_of_private_extern_crate",
+ "implied_bounds_entailment",
+ "indirect_structural_match",
+ "invalid_alignment",
+ "invalid_doc_attributes",
"invalid_type_param_default",
- "const_err",
- "unaligned_references",
- "patterns_in_fns_without_body",
- "missing_fragment_specifier",
"late_bound_lifetime_arguments",
- "order_dependent_trait_objects",
- "coherence_leak_check",
- "unstable_name_collisions",
- "where_clauses_object_safety",
- "proc_macro_derive_resolution_fallback",
+ "legacy_derive_helpers",
"macro_expanded_macro_exports_accessed_by_absolute_paths",
- "ill_formed_attribute_input",
- "conflicting_repr_hints",
- "ambiguous_associated_items",
- "mutable_borrow_reservation_conflict",
- "indirect_structural_match",
- "pointer_structural_match",
+ "missing_fragment_specifier",
"nontrivial_structural_match",
+ "order_dependent_trait_objects",
+ "patterns_in_fns_without_body",
+ "pointer_structural_match",
+ "proc_macro_back_compat",
+ "proc_macro_derive_resolution_fallback",
+ "pub_use_of_private_extern_crate",
+ "repr_transparent_external_private_fields",
+ "semicolon_in_expressions_from_macros",
"soft_unstable",
- "cenum_impl_drop_cast",
- "const_evaluatable_unchecked",
+ "suspicious_auto_trait_impls",
"uninhabited_static",
- "unsupported_naked_functions",
- "invalid_doc_attributes",
- "semicolon_in_expressions_from_macros",
- "legacy_derive_helpers",
- "proc_macro_back_compat",
+ "unstable_name_collisions",
+ "unstable_syntax_pre_expansion",
"unsupported_calling_conventions",
- "deref_into_dyn_supertrait",
+ "where_clauses_object_safety",
],
},
LintGroup {
lint: Lint {
+ label: "let_underscore",
+ description: r##"lint group for: let-underscore-drop, let-underscore-lock"##,
+ },
+ children: &["let_underscore_drop", "let_underscore_lock"],
+ },
+ LintGroup {
+ lint: Lint {
label: "nonstandard_style",
description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
},
@@ -595,13 +807,13 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "rust_2018_compatibility",
- description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ description: r##"lint group for: keyword-idents, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##,
},
children: &[
"keyword_idents",
"anonymous_parameters",
- "tyvar_behind_raw_pointer",
"absolute_paths_not_starting_with_crate",
+ "tyvar_behind_raw_pointer",
],
},
LintGroup {
@@ -620,15 +832,15 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "rust_2021_compatibility",
- description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prefixes-incompatible-syntax, rust-2021-prelude-collisions, array-into-iter, non-fmt-panics"##,
},
children: &[
"ellipsis_inclusive_range_patterns",
"bare_trait_objects",
"rust_2021_incompatible_closure_captures",
"rust_2021_incompatible_or_patterns",
- "rust_2021_prelude_collisions",
"rust_2021_prefixes_incompatible_syntax",
+ "rust_2021_prelude_collisions",
"array_into_iter",
"non_fmt_panics",
],
@@ -636,7 +848,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "unused",
- description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-macro-rules, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons, map-unit-fn"##,
},
children: &[
"unused_imports",
@@ -651,6 +863,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
"path_statements",
"unused_attributes",
"unused_macros",
+ "unused_macro_rules",
"unused_allocation",
"unused_doc_comments",
"unused_extern_crates",
@@ -659,6 +872,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
"unused_parens",
"unused_braces",
"redundant_semicolons",
+ "map_unit_fn",
],
},
LintGroup {
@@ -673,7 +887,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
pub const RUSTDOC_LINTS: &[Lint] = &[
Lint {
label: "rustdoc::all",
- description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links"##,
},
Lint { label: "rustdoc::bare_urls", description: r##"detects URLs that are not hyperlinks"## },
Lint {
@@ -708,27 +922,70 @@ pub const RUSTDOC_LINTS: &[Lint] = &[
label: "rustdoc::private_intra_doc_links",
description: r##"linking from a public item to a private one"##,
},
+ Lint {
+ label: "rustdoc::redundant_explicit_links",
+ description: r##"detects redundant explicit links in doc comments"##,
+ },
+ Lint {
+ label: "rustdoc::unescaped_backticks",
+ description: r##"detects unescaped backticks in doc comments"##,
+ },
];
+
pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &[LintGroup {
lint: Lint {
label: "rustdoc::all",
- description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links"##,
},
children: &[
"rustdoc::broken_intra_doc_links",
"rustdoc::private_intra_doc_links",
- "rustdoc::missing_doc_code_examples",
"rustdoc::private_doc_tests",
"rustdoc::invalid_codeblock_attributes",
"rustdoc::invalid_rust_codeblocks",
"rustdoc::invalid_html_tags",
"rustdoc::bare_urls",
"rustdoc::missing_crate_level_docs",
+ "rustdoc::unescaped_backticks",
+ "rustdoc::redundant_explicit_links",
],
}];
pub const FEATURES: &[Lint] = &[
Lint {
+ label: "aarch64_ver_target_feature",
+ description: r##"# `aarch64_ver_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_amdgpu_kernel",
+ description: r##"# `abi_amdgpu_kernel`
+
+The tracking issue for this feature is: [#51575]
+
+[#51575]: https://github.com/rust-lang/rust/issues/51575
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_avr_interrupt",
+ description: r##"# `abi_avr_interrupt`
+
+The tracking issue for this feature is: [#69664]
+
+[#69664]: https://github.com/rust-lang/rust/issues/69664
+
+------------------------
+"##,
+ },
+ Lint {
label: "abi_c_cmse_nonsecure_call",
description: r##"# `abi_c_cmse_nonsecure_call`
@@ -931,6 +1188,121 @@ $ cat $(find -name '*.s')
"##,
},
Lint {
+ label: "abi_riscv_interrupt",
+ description: r##"# `abi_riscv_interrupt`
+
+The tracking issue for this feature is: [#111889]
+
+[#111889]: https://github.com/rust-lang/rust/issues/111889
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_unadjusted",
+ description: r##"# `abi_unadjusted`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_vectorcall",
+ description: r##"# `abi_vectorcall`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_x86_interrupt",
+ description: r##"# `abi_x86_interrupt`
+
+The tracking issue for this feature is: [#40180]
+
+[#40180]: https://github.com/rust-lang/rust/issues/40180
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "absolute_path",
+ description: r##"# `absolute_path`
+
+The tracking issue for this feature is: [#92750]
+
+[#92750]: https://github.com/rust-lang/rust/issues/92750
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "addr_parse_ascii",
+ description: r##"# `addr_parse_ascii`
+
+The tracking issue for this feature is: [#101035]
+
+[#101035]: https://github.com/rust-lang/rust/issues/101035
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "adt_const_params",
+ description: r##"# `adt_const_params`
+
+The tracking issue for this feature is: [#95174]
+
+[#95174]: https://github.com/rust-lang/rust/issues/95174
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_error_handler",
+ description: r##"# `alloc_error_handler`
+
+The tracking issue for this feature is: [#51540]
+
+[#51540]: https://github.com/rust-lang/rust/issues/51540
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_error_hook",
+ description: r##"# `alloc_error_hook`
+
+The tracking issue for this feature is: [#51245]
+
+[#51245]: https://github.com/rust-lang/rust/issues/51245
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_internals",
+ description: r##"# `alloc_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_layout_extra",
+ description: r##"# `alloc_layout_extra`
+
+The tracking issue for this feature is: [#55724]
+
+[#55724]: https://github.com/rust-lang/rust/issues/55724
+
+------------------------
+"##,
+ },
+ Lint {
label: "allocator_api",
description: r##"# `allocator_api`
@@ -961,53 +1333,171 @@ compiler.
"##,
},
Lint {
- label: "arbitrary_enum_discriminant",
- description: r##"# `arbitrary_enum_discriminant`
+ label: "allow_internal_unsafe",
+ description: r##"# `allow_internal_unsafe`
-The tracking issue for this feature is: [#60553]
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
-[#60553]: https://github.com/rust-lang/rust/issues/60553
+------------------------
+"##,
+ },
+ Lint {
+ label: "allow_internal_unstable",
+ description: r##"# `allow_internal_unstable`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
+"##,
+ },
+ Lint {
+ label: "anonymous_lifetime_in_impl_trait",
+ description: r##"# `anonymous_lifetime_in_impl_trait`
-The `arbitrary_enum_discriminant` feature permits tuple-like and
-struct-like enum variants with `#[repr(<int-type>)]` to have explicit discriminants.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
-## Examples
+------------------------
+"##,
+ },
+ Lint {
+ label: "arbitrary_self_types",
+ description: r##"# `arbitrary_self_types`
-```rust
-#![feature(arbitrary_enum_discriminant)]
-
-#[allow(dead_code)]
-#[repr(u8)]
-enum Enum {
- Unit = 3,
- Tuple(u16) = 2,
- Struct {
- a: u8,
- b: u16,
- } = 1,
-}
+The tracking issue for this feature is: [#44874]
-impl Enum {
- fn tag(&self) -> u8 {
- unsafe { *(self as *const Self as *const u8) }
- }
-}
+[#44874]: https://github.com/rust-lang/rust/issues/44874
-assert_eq!(3, Enum::Unit.tag());
-assert_eq!(2, Enum::Tuple(5).tag());
-assert_eq!(1, Enum::Struct{a: 7, b: 11}.tag());
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "arc_unwrap_or_clone",
+ description: r##"# `arc_unwrap_or_clone`
+
+The tracking issue for this feature is: [#93610]
+
+[#93610]: https://github.com/rust-lang/rust/issues/93610
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "arm_target_feature",
+ description: r##"# `arm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_chunks",
+ description: r##"# `array_chunks`
+
+The tracking issue for this feature is: [#74985]
+
+[#74985]: https://github.com/rust-lang/rust/issues/74985
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_into_iter_constructors",
+ description: r##"# `array_into_iter_constructors`
+
+The tracking issue for this feature is: [#91583]
+
+[#91583]: https://github.com/rust-lang/rust/issues/91583
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_methods",
+ description: r##"# `array_methods`
+
+The tracking issue for this feature is: [#76118]
+
+[#76118]: https://github.com/rust-lang/rust/issues/76118
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_try_from_fn",
+ description: r##"# `array_try_from_fn`
+
+The tracking issue for this feature is: [#89379]
+
+[#89379]: https://github.com/rust-lang/rust/issues/89379
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_try_map",
+ description: r##"# `array_try_map`
+
+The tracking issue for this feature is: [#79711]
+
+[#79711]: https://github.com/rust-lang/rust/issues/79711
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_windows",
+ description: r##"# `array_windows`
+
+The tracking issue for this feature is: [#75027]
+
+[#75027]: https://github.com/rust-lang/rust/issues/75027
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "as_array_of_cells",
+ description: r##"# `as_array_of_cells`
+
+The tracking issue for this feature is: [#88248]
+
+[#88248]: https://github.com/rust-lang/rust/issues/88248
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ascii_char",
+ description: r##"# `ascii_char`
+
+The tracking issue for this feature is: [#110998]
+
+[#110998]: https://github.com/rust-lang/rust/issues/110998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ascii_char_variants",
+ description: r##"# `ascii_char_variants`
+
+The tracking issue for this feature is: [#110998]
+
+[#110998]: https://github.com/rust-lang/rust/issues/110998
+
+------------------------
"##,
},
Lint {
label: "asm_const",
description: r##"# `asm_const`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#93332]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#93332]: https://github.com/rust-lang/rust/issues/93332
------------------------
@@ -1020,9 +1510,9 @@ This feature adds a `const <expr>` operand type to `asm!` and `global_asm!`.
label: "asm_experimental_arch",
description: r##"# `asm_experimental_arch`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#93335]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#93335]: https://github.com/rust-lang/rust/issues/93335
------------------------
@@ -1035,6 +1525,10 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
- BPF
- SPIR-V
- AVR
+- MSP430
+- M68k
+- CSKY
+- s390x
## Register classes
@@ -1059,6 +1553,14 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| AVR | `reg_pair` | `r3r2` .. `r25r24`, `X`, `Z` | `r` |
| AVR | `reg_iw` | `r25r24`, `X`, `Z` | `w` |
| AVR | `reg_ptr` | `X`, `Z` | `e` |
+| MSP430 | `reg` | `r[0-15]` | `r` |
+| M68k | `reg` | `d[0-7]`, `a[0-7]` | `r` |
+| M68k | `reg_data` | `d[0-7]` | `d` |
+| M68k | `reg_addr` | `a[0-3]` | `a` |
+| CSKY | `reg` | `r[0-31]` | `r` |
+| CSKY | `freg` | `f[0-31]` | `f` |
+| s390x | `reg` | `r[0-10]`, `r[12-14]` | `r` |
+| s390x | `freg` | `f[0-15]` | `f` |
> **Notes**:
> - NVPTX doesn't have a fixed register set, so named registers are not supported.
@@ -1087,6 +1589,13 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| BPF | `wreg` | `alu32` | `i8` `i16` `i32` |
| AVR | `reg`, `reg_upper` | None | `i8` |
| AVR | `reg_pair`, `reg_iw`, `reg_ptr` | None | `i16` |
+| MSP430 | `reg` | None | `i8`, `i16` |
+| M68k | `reg`, `reg_addr` | None | `i16`, `i32` |
+| M68k | `reg_data` | None | `i8`, `i16`, `i32` |
+| CSKY | `reg` | None | `i8`, `i16`, `i32` |
+| CSKY | `freg` | None | `f32` |
+| s390x | `reg` | None | `i8`, `i16`, `i32`, `i64` |
+| s390x | `freg` | None | `f32`, `f64` |
## Register aliases
@@ -1100,13 +1609,36 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| AVR | `XL` | `r26` |
| AVR | `ZH` | `r31` |
| AVR | `ZL` | `r30` |
+| MSP430 | `r0` | `pc` |
+| MSP430 | `r1` | `sp` |
+| MSP430 | `r2` | `sr` |
+| MSP430 | `r3` | `cg` |
+| MSP430 | `r4` | `fp` |
+| M68k | `a5` | `bp` |
+| M68k | `a6` | `fp` |
+| M68k | `a7` | `sp`, `usp`, `ssp`, `isp` |
+| CSKY | `r[0-3]` | `a[0-3]` |
+| CSKY | `r[4-11]` | `l[0-7]` |
+| CSKY | `r[12-13]` | `t[0-1]` |
+| CSKY | `r14` | `sp` |
+| CSKY | `r15` | `lr` |
+| CSKY | `r[16-17]` | `l[8-9]` |
+| CSKY | `r[18-25]` | `t[2-9]` |
+| CSKY | `r28` | `rgb` |
+| CSKY | `r29` | `rtb` |
+| CSKY | `r30` | `svbr` |
+| CSKY | `r31` | `tls` |
+
+> **Notes**:
+> - TI does not mandate a frame pointer for MSP430, but toolchains are allowed
+ to use one; LLVM uses `r4`.
## Unsupported registers
| Architecture | Unsupported register | Reason |
| ------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| All | `sp` | The stack pointer must be restored to its original value at the end of an asm code block. |
-| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR) | The frame pointer cannot be used as an input or output. |
+| All | `sp`, `r15` (s390x) | The stack pointer must be restored to its original value at the end of an asm code block. |
+| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR), `r4` (MSP430), `a6` (M68k), `r11` (s390x) | The frame pointer cannot be used as an input or output. |
| All | `r19` (Hexagon) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. |
| MIPS | `$0` or `$zero` | This is a constant zero register which can't be modified. |
| MIPS | `$1` or `$at` | Reserved for assembler. |
@@ -1115,6 +1647,15 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| MIPS | `$ra` | Return address cannot be used as inputs or outputs. |
| Hexagon | `lr` | This is the link register which cannot be used as an input or output. |
| AVR | `r0`, `r1`, `r1r0` | Due to an issue in LLVM, the `r0` and `r1` registers cannot be used as inputs or outputs. If modified, they must be restored to their original values before the end of the block. |
+| MSP430 | `r0`, `r2`, `r3` | These are the program counter, status register, and constant generator respectively. Neither the status register nor constant generator can be written to. |
+| M68k | `a4`, `a5` | Used internally by LLVM for the base pointer and global base pointer. |
+| CSKY | `r7`, `r28` | Used internally by LLVM for the base pointer and global base pointer. |
+| CSKY | `r8` | Used internally by LLVM for the frame pointer. |
+| CSKY | `r14` | Used internally by LLVM for the stack pointer. |
+| CSKY | `r15` | This is the link register. |
+| CSKY | `r[26-30]` | Reserved by its ABI. |
+| CSKY | `r31` | This is the TLS register. |
+
## Template modifiers
@@ -1129,42 +1670,167 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| PowerPC | `reg` | None | `0` | None |
| PowerPC | `reg_nonzero` | None | `3` | `b` |
| PowerPC | `freg` | None | `0` | None |
+| s390x | `reg` | None | `%r0` | None |
+| s390x | `freg` | None | `%f0` | None |
+| CSKY | `reg` | None | `r0` | None |
+| CSKY | `freg` | None | `f0` | None |
# Flags covered by `preserves_flags`
These flags registers must be restored upon exiting the asm block if the `preserves_flags` option is set:
- AVR
- The status register `SREG`.
+- MSP430
+ - The status register `r2`.
+- M68k
+ - The condition code register `ccr`.
+- s390x
+ - The condition code register `cc`.
"##,
},
Lint {
- label: "asm_sym",
- description: r##"# `asm_sym`
+ label: "asm_unwind",
+ description: r##"# `asm_unwind`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#93334]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#93334]: https://github.com/rust-lang/rust/issues/93334
------------------------
-This feature adds a `sym <path>` operand type to `asm!` and `global_asm!`.
-- `<path>` must refer to a `fn` or `static`.
-- A mangled symbol name referring to the item is substituted into the asm template string.
-- The substituted string does not include any modifiers (e.g. GOT, PLT, relocations, etc).
-- `<path>` is allowed to point to a `#[thread_local]` static, in which case the asm code can combine the symbol with relocations (e.g. `@plt`, `@TPOFF`) to read from thread-local data.
+This feature adds a `may_unwind` option to `asm!` which allows an `asm` block to unwind stack and be part of the stack unwinding process. This option is only supported by the LLVM backend right now.
"##,
},
Lint {
- label: "asm_unwind",
- description: r##"# `asm_unwind`
+ label: "assert_matches",
+ description: r##"# `assert_matches`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#82775]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#82775]: https://github.com/rust-lang/rust/issues/82775
------------------------
+"##,
+ },
+ Lint {
+ label: "associated_const_equality",
+ description: r##"# `associated_const_equality`
-This feature adds a `may_unwind` option to `asm!` which allows an `asm` block to unwind stack and be part of the stack unwinding process. This option is only supported by the LLVM backend right now.
+The tracking issue for this feature is: [#92827]
+
+[#92827]: https://github.com/rust-lang/rust/issues/92827
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "associated_type_bounds",
+ description: r##"# `associated_type_bounds`
+
+The tracking issue for this feature is: [#52662]
+
+[#52662]: https://github.com/rust-lang/rust/issues/52662
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "associated_type_defaults",
+ description: r##"# `associated_type_defaults`
+
+The tracking issue for this feature is: [#29661]
+
+[#29661]: https://github.com/rust-lang/rust/issues/29661
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_closure",
+ description: r##"# `async_closure`
+
+The tracking issue for this feature is: [#62290]
+
+[#62290]: https://github.com/rust-lang/rust/issues/62290
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_fn_in_trait",
+ description: r##"# `async_fn_in_trait`
+
+The tracking issue for this feature is: [#91611]
+
+[#91611]: https://github.com/rust-lang/rust/issues/91611
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_fn_track_caller",
+ description: r##"# `async_fn_track_caller`
+
+The tracking issue for this feature is: [#110011]
+
+[#110011]: https://github.com/rust-lang/rust/issues/110011
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_iter_from_iter",
+ description: r##"# `async_iter_from_iter`
+
+The tracking issue for this feature is: [#81798]
+
+[#81798]: https://github.com/rust-lang/rust/issues/81798
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_iterator",
+ description: r##"# `async_iterator`
+
+The tracking issue for this feature is: [#79024]
+
+[#79024]: https://github.com/rust-lang/rust/issues/79024
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "atomic_bool_fetch_not",
+ description: r##"# `atomic_bool_fetch_not`
+
+The tracking issue for this feature is: [#98485]
+
+[#98485]: https://github.com/rust-lang/rust/issues/98485
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "atomic_from_mut",
+ description: r##"# `atomic_from_mut`
+
+The tracking issue for this feature is: [#76314]
+
+[#76314]: https://github.com/rust-lang/rust/issues/76314
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "atomic_from_ptr",
+ description: r##"# `atomic_from_ptr`
+
+The tracking issue for this feature is: [#108652]
+
+[#108652]: https://github.com/rust-lang/rust/issues/108652
+
+------------------------
"##,
},
Lint {
@@ -1184,8 +1850,8 @@ that are automatically implemented for every type, unless the type, or a type it
has explicitly opted out via a negative impl. (Negative impls are separately controlled
by the `negative_impls` feature.)
-[`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
-[`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+[`Send`]: ../../std/marker/trait.Send.html
+[`Sync`]: ../../std/marker/trait.Sync.html
```rust,ignore (partial-example)
impl !Trait for Type {}
@@ -1278,6 +1944,116 @@ Auto traits cannot have supertraits. This is for soundness reasons, as the inter
"##,
},
Lint {
+ label: "avx512_target_feature",
+ description: r##"# `avx512_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "backtrace_frames",
+ description: r##"# `backtrace_frames`
+
+The tracking issue for this feature is: [#79676]
+
+[#79676]: https://github.com/rust-lang/rust/issues/79676
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "bigint_helper_methods",
+ description: r##"# `bigint_helper_methods`
+
+The tracking issue for this feature is: [#85532]
+
+[#85532]: https://github.com/rust-lang/rust/issues/85532
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "binary_heap_as_slice",
+ description: r##"# `binary_heap_as_slice`
+
+The tracking issue for this feature is: [#83659]
+
+[#83659]: https://github.com/rust-lang/rust/issues/83659
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "binary_heap_drain_sorted",
+ description: r##"# `binary_heap_drain_sorted`
+
+The tracking issue for this feature is: [#59278]
+
+[#59278]: https://github.com/rust-lang/rust/issues/59278
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "binary_heap_into_iter_sorted",
+ description: r##"# `binary_heap_into_iter_sorted`
+
+The tracking issue for this feature is: [#59278]
+
+[#59278]: https://github.com/rust-lang/rust/issues/59278
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "bound_as_ref",
+ description: r##"# `bound_as_ref`
+
+The tracking issue for this feature is: [#80996]
+
+[#80996]: https://github.com/rust-lang/rust/issues/80996
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "bound_map",
+ description: r##"# `bound_map`
+
+The tracking issue for this feature is: [#86026]
+
+[#86026]: https://github.com/rust-lang/rust/issues/86026
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "box_into_boxed_slice",
+ description: r##"# `box_into_boxed_slice`
+
+The tracking issue for this feature is: [#71582]
+
+[#71582]: https://github.com/rust-lang/rust/issues/71582
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "box_into_inner",
+ description: r##"# `box_into_inner`
+
+The tracking issue for this feature is: [#80437]
+
+[#80437]: https://github.com/rust-lang/rust/issues/80437
+
+------------------------
+"##,
+ },
+ Lint {
label: "box_patterns",
description: r##"# `box_patterns`
@@ -1285,8 +2061,6 @@ The tracking issue for this feature is: [#29641]
[#29641]: https://github.com/rust-lang/rust/issues/29641
-See also [`box_syntax`](box-syntax.md)
-
------------------------
Box patterns let you match on `Box<T>`s:
@@ -1299,10 +2073,10 @@ fn main() {
let b = Some(Box::new(5));
match b {
Some(box n) if n < 0 => {
- println!("Box contains negative number {}", n);
+ println!("Box contains negative number {n}");
},
Some(box n) if n >= 0 => {
- println!("Box contains non-negative number {}", n);
+ println!("Box contains non-negative number {n}");
},
None => {
println!("No box");
@@ -1314,29 +2088,102 @@ fn main() {
"##,
},
Lint {
- label: "box_syntax",
- description: r##"# `box_syntax`
+ label: "bpf_target_feature",
+ description: r##"# `bpf_target_feature`
-The tracking issue for this feature is: [#49733]
+The tracking issue for this feature is: [#44839]
-[#49733]: https://github.com/rust-lang/rust/issues/49733
+[#44839]: https://github.com/rust-lang/rust/issues/44839
-See also [`box_patterns`](box-patterns.md)
+------------------------
+"##,
+ },
+ Lint {
+ label: "btree_cursors",
+ description: r##"# `btree_cursors`
+
+The tracking issue for this feature is: [#107540]
+
+[#107540]: https://github.com/rust-lang/rust/issues/107540
------------------------
+"##,
+ },
+ Lint {
+ label: "btree_extract_if",
+ description: r##"# `btree_extract_if`
-Currently the only stable way to create a `Box` is via the `Box::new` method.
-Also it is not possible in stable Rust to destructure a `Box` in a match
-pattern. The unstable `box` keyword can be used to create a `Box`. An example
-usage would be:
+The tracking issue for this feature is: [#70530]
-```rust
-#![feature(box_syntax)]
+[#70530]: https://github.com/rust-lang/rust/issues/70530
-fn main() {
- let b = box 5;
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "btreemap_alloc",
+ description: r##"# `btreemap_alloc`
+
+The tracking issue for this feature is: [#32838]
+
+[#32838]: https://github.com/rust-lang/rust/issues/32838
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "buf_read_has_data_left",
+ description: r##"# `buf_read_has_data_left`
+
+The tracking issue for this feature is: [#86423]
+
+[#86423]: https://github.com/rust-lang/rust/issues/86423
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "builtin_syntax",
+ description: r##"# `builtin_syntax`
+
+The tracking issue for this feature is: [#110680]
+
+[#110680]: https://github.com/rust-lang/rust/issues/110680
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "byte_slice_trim_ascii",
+ description: r##"# `byte_slice_trim_ascii`
+
+The tracking issue for this feature is: [#94035]
+
+[#94035]: https://github.com/rust-lang/rust/issues/94035
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "c_size_t",
+ description: r##"# `c_size_t`
+
+The tracking issue for this feature is: [#88345]
+
+[#88345]: https://github.com/rust-lang/rust/issues/88345
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "c_str_literals",
+ description: r##"# `c_str_literals`
+
+The tracking issue for this feature is: [#105723]
+
+[#105723]: https://github.com/rust-lang/rust/issues/105723
+
+------------------------
"##,
},
Lint {
@@ -1349,9 +2196,20 @@ The tracking issue for this feature is: [#74990]
------------------------
-Introduces four new ABI strings: "C-unwind", "stdcall-unwind",
-"thiscall-unwind", and "system-unwind". These enable unwinding from other
-languages (such as C++) into Rust frames and from Rust into other languages.
+Introduces new ABI strings:
+- "C-unwind"
+- "cdecl-unwind"
+- "stdcall-unwind"
+- "fastcall-unwind"
+- "vectorcall-unwind"
+- "thiscall-unwind"
+- "aapcs-unwind"
+- "win64-unwind"
+- "sysv64-unwind"
+- "system-unwind"
+
+These enable unwinding from other languages (such as C++) into Rust frames and
+from Rust into other languages.
See [RFC 2945] for more information.
@@ -1369,7 +2227,7 @@ The tracking issue for this feature is: [#44930]
------------------------
The `c_variadic` language feature enables C-variadic functions to be
-defined in Rust. The may be called both from within Rust and via FFI.
+defined in Rust. They may be called both from within Rust and via FFI.
## Examples
@@ -1426,45 +2284,91 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "cfg_panic",
- description: r##"# `cfg_panic`
+ label: "can_vector",
+ description: r##"# `can_vector`
-The tracking issue for this feature is: [#77443]
+The tracking issue for this feature is: [#69941]
-[#77443]: https://github.com/rust-lang/rust/issues/77443
+[#69941]: https://github.com/rust-lang/rust/issues/69941
------------------------
+"##,
+ },
+ Lint {
+ label: "cell_leak",
+ description: r##"# `cell_leak`
-The `cfg_panic` feature makes it possible to execute different code
-depending on the panic strategy.
+The tracking issue for this feature is: [#69099]
-Possible values at the moment are `"unwind"` or `"abort"`, although
-it is possible that new panic strategies may be added to Rust in the
-future.
+[#69099]: https://github.com/rust-lang/rust/issues/69099
-## Examples
+------------------------
+"##,
+ },
+ Lint {
+ label: "cell_update",
+ description: r##"# `cell_update`
-```rust
-#![feature(cfg_panic)]
+The tracking issue for this feature is: [#50186]
-#[cfg(panic = "unwind")]
-fn a() {
- // ...
-}
+[#50186]: https://github.com/rust-lang/rust/issues/50186
-#[cfg(not(panic = "unwind"))]
-fn a() {
- // ...
-}
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_accessible",
+ description: r##"# `cfg_accessible`
-fn b() {
- if cfg!(panic = "abort") {
- // ...
- } else {
- // ...
- }
-}
-```
+The tracking issue for this feature is: [#64797]
+
+[#64797]: https://github.com/rust-lang/rust/issues/64797
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_eval",
+ description: r##"# `cfg_eval`
+
+The tracking issue for this feature is: [#82679]
+
+[#82679]: https://github.com/rust-lang/rust/issues/82679
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_match",
+ description: r##"# `cfg_match`
+
+The tracking issue for this feature is: [#115585]
+
+[#115585]: https://github.com/rust-lang/rust/issues/115585
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_overflow_checks",
+ description: r##"# `cfg_overflow_checks`
+
+The tracking issue for this feature is: [#111466]
+
+[#111466]: https://github.com/rust-lang/rust/issues/111466
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_relocation_model",
+ description: r##"# `cfg_relocation_model`
+
+The tracking issue for this feature is: [#114929]
+
+[#114929]: https://github.com/rust-lang/rust/issues/114929
+
+------------------------
"##,
},
Lint {
@@ -1506,6 +2410,61 @@ fn b() {
"##,
},
Lint {
+ label: "cfg_target_abi",
+ description: r##"# `cfg_target_abi`
+
+The tracking issue for this feature is: [#80970]
+
+[#80970]: https://github.com/rust-lang/rust/issues/80970
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_compact",
+ description: r##"# `cfg_target_compact`
+
+The tracking issue for this feature is: [#96901]
+
+[#96901]: https://github.com/rust-lang/rust/issues/96901
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_has_atomic",
+ description: r##"# `cfg_target_has_atomic`
+
+The tracking issue for this feature is: [#94039]
+
+[#94039]: https://github.com/rust-lang/rust/issues/94039
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_has_atomic_equal_alignment",
+ description: r##"# `cfg_target_has_atomic_equal_alignment`
+
+The tracking issue for this feature is: [#93822]
+
+[#93822]: https://github.com/rust-lang/rust/issues/93822
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_thread_local",
+ description: r##"# `cfg_target_thread_local`
+
+The tracking issue for this feature is: [#29594]
+
+[#29594]: https://github.com/rust-lang/rust/issues/29594
+
+------------------------
+"##,
+ },
+ Lint {
label: "cfg_version",
description: r##"# `cfg_version`
@@ -1545,10 +2504,72 @@ fn b() {
"##,
},
Lint {
- label: "char_error_internals",
- description: r##"# `char_error_internals`
+ label: "cfi_encoding",
+ description: r##"# `cfi_encoding`
-This feature is internal to the Rust compiler and is not intended for general use.
+The tracking issue for this feature is: [#89653]
+
+[#89653]: https://github.com/rust-lang/rust/issues/89653
+
+------------------------
+
+The `cfi_encoding` feature allows the user to define a CFI encoding for a type.
+It allows the user to use different names for types that otherwise would be
+required to have the same name as used in externally defined C functions.
+
+## Examples
+
+```rust
+#![feature(cfi_encoding, extern_types)]
+
+#[cfi_encoding = "3Foo"]
+pub struct Type1(i32);
+
+extern {
+ #[cfi_encoding = "3Bar"]
+ type Type2;
+}
+```
+"##,
+ },
+ Lint {
+ label: "char_indices_offset",
+ description: r##"# `char_indices_offset`
+
+The tracking issue for this feature is: [#83871]
+
+[#83871]: https://github.com/rust-lang/rust/issues/83871
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "char_internals",
+ description: r##"# `char_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "char_min",
+ description: r##"# `char_min`
+
+The tracking issue for this feature is: [#114298]
+
+[#114298]: https://github.com/rust-lang/rust/issues/114298
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "closure_lifetime_binder",
+ description: r##"# `closure_lifetime_binder`
+
+The tracking issue for this feature is: [#97362]
+
+[#97362]: https://github.com/rust-lang/rust/issues/97362
------------------------
"##,
@@ -1570,6 +2591,17 @@ available through `std::panic::Location::caller()`, just like using
"##,
},
Lint {
+ label: "cmp_minmax",
+ description: r##"# `cmp_minmax`
+
+The tracking issue for this feature is: [#115939]
+
+[#115939]: https://github.com/rust-lang/rust/issues/115939
+
+------------------------
+"##,
+ },
+ Lint {
label: "cmse_nonsecure_entry",
description: r##"# `cmse_nonsecure_entry`
@@ -1655,6 +2687,28 @@ $ arm-none-eabi-objdump -D function.o
"##,
},
Lint {
+ label: "coerce_unsized",
+ description: r##"# `coerce_unsized`
+
+The tracking issue for this feature is: [#18598]
+
+[#18598]: https://github.com/rust-lang/rust/issues/18598
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "collapse_debuginfo",
+ description: r##"# `collapse_debuginfo`
+
+The tracking issue for this feature is: [#100758]
+
+[#100758]: https://github.com/rust-lang/rust/issues/100758
+
+------------------------
+"##,
+ },
+ Lint {
label: "compiler_builtins",
description: r##"# `compiler_builtins`
@@ -1664,6 +2718,17 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "concat_bytes",
+ description: r##"# `concat_bytes`
+
+The tracking issue for this feature is: [#87555]
+
+[#87555]: https://github.com/rust-lang/rust/issues/87555
+
+------------------------
+"##,
+ },
+ Lint {
label: "concat_idents",
description: r##"# `concat_idents`
@@ -1690,14 +2755,1027 @@ fn main() {
"##,
},
Lint {
- label: "const_eval_limit",
- description: r##"# `const_eval_limit`
+ label: "const_align_of_val",
+ description: r##"# `const_align_of_val`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_align_of_val_raw",
+ description: r##"# `const_align_of_val_raw`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_align_offset",
+ description: r##"# `const_align_offset`
+
+The tracking issue for this feature is: [#90962]
+
+[#90962]: https://github.com/rust-lang/rust/issues/90962
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_alloc_error",
+ description: r##"# `const_alloc_error`
+
+The tracking issue for this feature is: [#92523]
+
+[#92523]: https://github.com/rust-lang/rust/issues/92523
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_alloc_layout",
+ description: r##"# `const_alloc_layout`
+
+The tracking issue for this feature is: [#67521]
+
+[#67521]: https://github.com/rust-lang/rust/issues/67521
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_arguments_as_str",
+ description: r##"# `const_arguments_as_str`
+
+The tracking issue for this feature is: [#103900]
+
+[#103900]: https://github.com/rust-lang/rust/issues/103900
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_array_from_ref",
+ description: r##"# `const_array_from_ref`
+
+The tracking issue for this feature is: [#90206]
+
+[#90206]: https://github.com/rust-lang/rust/issues/90206
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_array_into_iter_constructors",
+ description: r##"# `const_array_into_iter_constructors`
+
+The tracking issue for this feature is: [#91583]
+
+[#91583]: https://github.com/rust-lang/rust/issues/91583
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_assert_type2",
+ description: r##"# `const_assert_type2`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_assume",
+ description: r##"# `const_assume`
+
+The tracking issue for this feature is: [#76972]
+
+[#76972]: https://github.com/rust-lang/rust/issues/76972
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_async_blocks",
+ description: r##"# `const_async_blocks`
+
+The tracking issue for this feature is: [#85368]
+
+[#85368]: https://github.com/rust-lang/rust/issues/85368
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_bigint_helper_methods",
+ description: r##"# `const_bigint_helper_methods`
+
+The tracking issue for this feature is: [#85532]
+
+[#85532]: https://github.com/rust-lang/rust/issues/85532
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_black_box",
+ description: r##"# `const_black_box`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_box",
+ description: r##"# `const_box`
+
+The tracking issue for this feature is: [#92521]
+
+[#92521]: https://github.com/rust-lang/rust/issues/92521
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_btree_len",
+ description: r##"# `const_btree_len`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_caller_location",
+ description: r##"# `const_caller_location`
+
+The tracking issue for this feature is: [#76156]
+
+[#76156]: https://github.com/rust-lang/rust/issues/76156
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_cell_into_inner",
+ description: r##"# `const_cell_into_inner`
+
+The tracking issue for this feature is: [#78729]
+
+[#78729]: https://github.com/rust-lang/rust/issues/78729
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_char_from_u32_unchecked",
+ description: r##"# `const_char_from_u32_unchecked`
+
+The tracking issue for this feature is: [#89259]
+
+[#89259]: https://github.com/rust-lang/rust/issues/89259
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_closures",
+ description: r##"# `const_closures`
+
+The tracking issue for this feature is: [#106003]
+
+[#106003]: https://github.com/rust-lang/rust/issues/106003
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_collections_with_hasher",
+ description: r##"# `const_collections_with_hasher`
+
+The tracking issue for this feature is: [#102575]
+
+[#102575]: https://github.com/rust-lang/rust/issues/102575
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_cow_is_borrowed",
+ description: r##"# `const_cow_is_borrowed`
+
+The tracking issue for this feature is: [#65143]
+
+[#65143]: https://github.com/rust-lang/rust/issues/65143
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_cstr_from_ptr",
+ description: r##"# `const_cstr_from_ptr`
+
+The tracking issue for this feature is: [#113219]
+
+[#113219]: https://github.com/rust-lang/rust/issues/113219
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_discriminant",
+ description: r##"# `const_discriminant`
+
+The tracking issue for this feature is: [#69821]
+
+[#69821]: https://github.com/rust-lang/rust/issues/69821
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_eval_select",
+ description: r##"# `const_eval_select`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_exact_div",
+ description: r##"# `const_exact_div`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_extern_fn",
+ description: r##"# `const_extern_fn`
+
+The tracking issue for this feature is: [#64926]
+
+[#64926]: https://github.com/rust-lang/rust/issues/64926
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_float_bits_conv",
+ description: r##"# `const_float_bits_conv`
+
+The tracking issue for this feature is: [#72447]
+
+[#72447]: https://github.com/rust-lang/rust/issues/72447
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_float_classify",
+ description: r##"# `const_float_classify`
+
+The tracking issue for this feature is: [#72505]
+
+[#72505]: https://github.com/rust-lang/rust/issues/72505
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_fmt_arguments_new",
+ description: r##"# `const_fmt_arguments_new`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_fn_floating_point_arithmetic",
+ description: r##"# `const_fn_floating_point_arithmetic`
+
+The tracking issue for this feature is: [#57241]
+
+[#57241]: https://github.com/rust-lang/rust/issues/57241
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_for",
+ description: r##"# `const_for`
+
+The tracking issue for this feature is: [#87575]
+
+[#87575]: https://github.com/rust-lang/rust/issues/87575
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_format_args",
+ description: r##"# `const_format_args`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_hash",
+ description: r##"# `const_hash`
+
+The tracking issue for this feature is: [#104061]
+
+[#104061]: https://github.com/rust-lang/rust/issues/104061
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_heap",
+ description: r##"# `const_heap`
+
+The tracking issue for this feature is: [#79597]
+
+[#79597]: https://github.com/rust-lang/rust/issues/79597
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_index_range_slice_index",
+ description: r##"# `const_index_range_slice_index`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_inherent_unchecked_arith",
+ description: r##"# `const_inherent_unchecked_arith`
+
+The tracking issue for this feature is: [#85122]
+
+[#85122]: https://github.com/rust-lang/rust/issues/85122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_int_unchecked_arith",
+ description: r##"# `const_int_unchecked_arith`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intoiterator_identity",
+ description: r##"# `const_intoiterator_identity`
+
+The tracking issue for this feature is: [#90603]
+
+[#90603]: https://github.com/rust-lang/rust/issues/90603
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intrinsic_compare_bytes",
+ description: r##"# `const_intrinsic_compare_bytes`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intrinsic_forget",
+ description: r##"# `const_intrinsic_forget`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intrinsic_raw_eq",
+ description: r##"# `const_intrinsic_raw_eq`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_io_structs",
+ description: r##"# `const_io_structs`
+
+The tracking issue for this feature is: [#78812]
+
+[#78812]: https://github.com/rust-lang/rust/issues/78812
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ip",
+ description: r##"# `const_ip`
+
+The tracking issue for this feature is: [#76205]
+
+[#76205]: https://github.com/rust-lang/rust/issues/76205
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ipv4",
+ description: r##"# `const_ipv4`
+
+The tracking issue for this feature is: [#76205]
+
+[#76205]: https://github.com/rust-lang/rust/issues/76205
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ipv6",
+ description: r##"# `const_ipv6`
+
+The tracking issue for this feature is: [#76205]
+
+[#76205]: https://github.com/rust-lang/rust/issues/76205
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_likely",
+ description: r##"# `const_likely`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_location_fields",
+ description: r##"# `const_location_fields`
+
+The tracking issue for this feature is: [#102911]
+
+[#102911]: https://github.com/rust-lang/rust/issues/102911
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_array_assume_init",
+ description: r##"# `const_maybe_uninit_array_assume_init`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_as_mut_ptr",
+ description: r##"# `const_maybe_uninit_as_mut_ptr`
+
+The tracking issue for this feature is: [#75251]
+
+[#75251]: https://github.com/rust-lang/rust/issues/75251
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_assume_init",
+ description: r##"# `const_maybe_uninit_assume_init`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_assume_init_read",
+ description: r##"# `const_maybe_uninit_assume_init_read`
+
+The tracking issue for this feature is: [#63567]
+
+[#63567]: https://github.com/rust-lang/rust/issues/63567
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_uninit_array",
+ description: r##"# `const_maybe_uninit_uninit_array`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_write",
+ description: r##"# `const_maybe_uninit_write`
+
+The tracking issue for this feature is: [#63567]
+
+[#63567]: https://github.com/rust-lang/rust/issues/63567
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_zeroed",
+ description: r##"# `const_maybe_uninit_zeroed`
+
+The tracking issue for this feature is: [#91850]
+
+[#91850]: https://github.com/rust-lang/rust/issues/91850
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_mut_refs",
+ description: r##"# `const_mut_refs`
-The tracking issue for this feature is: [#67217]
+The tracking issue for this feature is: [#57349]
-[#67217]: https://github.com/rust-lang/rust/issues/67217
+[#57349]: https://github.com/rust-lang/rust/issues/57349
-The `const_eval_limit` allows someone to limit the evaluation steps the CTFE undertakes to evaluate a `const fn`.
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_nonnull_new",
+ description: r##"# `const_nonnull_new`
+
+The tracking issue for this feature is: [#93235]
+
+[#93235]: https://github.com/rust-lang/rust/issues/93235
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_num_midpoint",
+ description: r##"# `const_num_midpoint`
+
+The tracking issue for this feature is: [#110840]
+
+[#110840]: https://github.com/rust-lang/rust/issues/110840
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_option",
+ description: r##"# `const_option`
+
+The tracking issue for this feature is: [#67441]
+
+[#67441]: https://github.com/rust-lang/rust/issues/67441
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_option_ext",
+ description: r##"# `const_option_ext`
+
+The tracking issue for this feature is: [#91930]
+
+[#91930]: https://github.com/rust-lang/rust/issues/91930
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pin",
+ description: r##"# `const_pin`
+
+The tracking issue for this feature is: [#76654]
+
+[#76654]: https://github.com/rust-lang/rust/issues/76654
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pointer_byte_offsets",
+ description: r##"# `const_pointer_byte_offsets`
+
+The tracking issue for this feature is: [#96283]
+
+[#96283]: https://github.com/rust-lang/rust/issues/96283
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pointer_is_aligned",
+ description: r##"# `const_pointer_is_aligned`
+
+The tracking issue for this feature is: [#104203]
+
+[#104203]: https://github.com/rust-lang/rust/issues/104203
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_precise_live_drops",
+ description: r##"# `const_precise_live_drops`
+
+The tracking issue for this feature is: [#73255]
+
+[#73255]: https://github.com/rust-lang/rust/issues/73255
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pref_align_of",
+ description: r##"# `const_pref_align_of`
+
+The tracking issue for this feature is: [#91971]
+
+[#91971]: https://github.com/rust-lang/rust/issues/91971
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_as_ref",
+ description: r##"# `const_ptr_as_ref`
+
+The tracking issue for this feature is: [#91822]
+
+[#91822]: https://github.com/rust-lang/rust/issues/91822
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_is_null",
+ description: r##"# `const_ptr_is_null`
+
+The tracking issue for this feature is: [#74939]
+
+[#74939]: https://github.com/rust-lang/rust/issues/74939
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_sub_ptr",
+ description: r##"# `const_ptr_sub_ptr`
+
+The tracking issue for this feature is: [#95892]
+
+[#95892]: https://github.com/rust-lang/rust/issues/95892
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_write",
+ description: r##"# `const_ptr_write`
+
+The tracking issue for this feature is: [#86302]
+
+[#86302]: https://github.com/rust-lang/rust/issues/86302
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_range_bounds",
+ description: r##"# `const_range_bounds`
+
+The tracking issue for this feature is: [#108082]
+
+[#108082]: https://github.com/rust-lang/rust/issues/108082
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_raw_ptr_comparison",
+ description: r##"# `const_raw_ptr_comparison`
+
+The tracking issue for this feature is: [#53020]
+
+[#53020]: https://github.com/rust-lang/rust/issues/53020
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_refs_to_cell",
+ description: r##"# `const_refs_to_cell`
+
+The tracking issue for this feature is: [#80384]
+
+[#80384]: https://github.com/rust-lang/rust/issues/80384
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_replace",
+ description: r##"# `const_replace`
+
+The tracking issue for this feature is: [#83164]
+
+[#83164]: https://github.com/rust-lang/rust/issues/83164
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_result",
+ description: r##"# `const_result`
+
+The tracking issue for this feature is: [#82814]
+
+[#82814]: https://github.com/rust-lang/rust/issues/82814
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_size_of_val",
+ description: r##"# `const_size_of_val`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_size_of_val_raw",
+ description: r##"# `const_size_of_val_raw`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_first_last",
+ description: r##"# `const_slice_first_last`
+
+The tracking issue for this feature is: [#83570]
+
+[#83570]: https://github.com/rust-lang/rust/issues/83570
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_mut_ptr_range",
+ description: r##"# `const_slice_from_mut_ptr_range`
+
+The tracking issue for this feature is: [#89792]
+
+[#89792]: https://github.com/rust-lang/rust/issues/89792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_ptr_range",
+ description: r##"# `const_slice_from_ptr_range`
+
+The tracking issue for this feature is: [#89792]
+
+[#89792]: https://github.com/rust-lang/rust/issues/89792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_raw_parts_mut",
+ description: r##"# `const_slice_from_raw_parts_mut`
+
+The tracking issue for this feature is: [#67456]
+
+[#67456]: https://github.com/rust-lang/rust/issues/67456
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_ref",
+ description: r##"# `const_slice_from_ref`
+
+The tracking issue for this feature is: [#90206]
+
+[#90206]: https://github.com/rust-lang/rust/issues/90206
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_index",
+ description: r##"# `const_slice_index`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_ptr_len",
+ description: r##"# `const_slice_ptr_len`
+
+The tracking issue for this feature is: [#71146]
+
+[#71146]: https://github.com/rust-lang/rust/issues/71146
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_split_at_mut",
+ description: r##"# `const_slice_split_at_mut`
+
+The tracking issue for this feature is: [#101804]
+
+[#101804]: https://github.com/rust-lang/rust/issues/101804
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_str_from_utf8",
+ description: r##"# `const_str_from_utf8`
+
+The tracking issue for this feature is: [#91006]
+
+[#91006]: https://github.com/rust-lang/rust/issues/91006
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_str_from_utf8_unchecked_mut",
+ description: r##"# `const_str_from_utf8_unchecked_mut`
+
+The tracking issue for this feature is: [#91005]
+
+[#91005]: https://github.com/rust-lang/rust/issues/91005
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_swap",
+ description: r##"# `const_swap`
+
+The tracking issue for this feature is: [#83163]
+
+[#83163]: https://github.com/rust-lang/rust/issues/83163
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_trait_impl",
+ description: r##"# `const_trait_impl`
+
+The tracking issue for this feature is: [#67792]
+
+[#67792]: https://github.com/rust-lang/rust/issues/67792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_try",
+ description: r##"# `const_try`
+
+The tracking issue for this feature is: [#74935]
+
+[#74935]: https://github.com/rust-lang/rust/issues/74935
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_type_id",
+ description: r##"# `const_type_id`
+
+The tracking issue for this feature is: [#77125]
+
+[#77125]: https://github.com/rust-lang/rust/issues/77125
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_type_name",
+ description: r##"# `const_type_name`
+
+The tracking issue for this feature is: [#63084]
+
+[#63084]: https://github.com/rust-lang/rust/issues/63084
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_unicode_case_lookup",
+ description: r##"# `const_unicode_case_lookup`
+
+The tracking issue for this feature is: [#101400]
+
+[#101400]: https://github.com/rust-lang/rust/issues/101400
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_unsafecell_get_mut",
+ description: r##"# `const_unsafecell_get_mut`
+
+The tracking issue for this feature is: [#88836]
+
+[#88836]: https://github.com/rust-lang/rust/issues/88836
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_waker",
+ description: r##"# `const_waker`
+
+The tracking issue for this feature is: [#102012]
+
+[#102012]: https://github.com/rust-lang/rust/issues/102012
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "container_error_extra",
+ description: r##"# `container_error_extra`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "control_flow_enum",
+ description: r##"# `control_flow_enum`
+
+The tracking issue for this feature is: [#75744]
+
+[#75744]: https://github.com/rust-lang/rust/issues/75744
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "convert_float_to_int",
+ description: r##"# `convert_float_to_int`
+
+The tracking issue for this feature is: [#67057]
+
+[#67057]: https://github.com/rust-lang/rust/issues/67057
+
+------------------------
"##,
},
Lint {
@@ -1737,30 +3815,115 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "crate_visibility_modifier",
- description: r##"# `crate_visibility_modifier`
+ label: "coverage_attribute",
+ description: r##"# `coverage_attribute`
-The tracking issue for this feature is: [#53120]
+The tracking issue for this feature is: [#84605]
-[#53120]: https://github.com/rust-lang/rust/issues/53120
+[#84605]: https://github.com/rust-lang/rust/issues/84605
------
+---
-The `crate_visibility_modifier` feature allows the `crate` keyword to be used
-as a visibility modifier synonymous to `pub(crate)`, indicating that a type
-(function, _&c._) is to be visible to the entire enclosing crate, but not to
-other crates.
+The `coverage` attribute can be used to selectively disable coverage
+instrumentation in an annotated function. This might be useful to:
+
+- Avoid instrumentation overhead in a performance critical function
+- Avoid generating coverage for a function that is not meant to be executed,
+ but still target 100% coverage for the rest of the program.
+
+## Example
```rust
-#![feature(crate_visibility_modifier)]
+#![feature(coverage_attribute)]
-crate struct Foo {
- bar: usize,
+// `foo()` will get coverage instrumentation (by default)
+fn foo() {
+ // ...
+}
+
+#[coverage(off)]
+fn bar() {
+ // ...
}
```
"##,
},
Lint {
+ label: "cow_is_borrowed",
+ description: r##"# `cow_is_borrowed`
+
+The tracking issue for this feature is: [#65143]
+
+[#65143]: https://github.com/rust-lang/rust/issues/65143
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "csky_target_feature",
+ description: r##"# `csky_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cstr_count_bytes",
+ description: r##"# `cstr_count_bytes`
+
+The tracking issue for this feature is: [#114441]
+
+[#114441]: https://github.com/rust-lang/rust/issues/114441
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cursor_remaining",
+ description: r##"# `cursor_remaining`
+
+The tracking issue for this feature is: [#86369]
+
+[#86369]: https://github.com/rust-lang/rust/issues/86369
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "custom_code_classes_in_docs",
+ description: r##"# `custom_code_classes_in_docs`
+
+The tracking issue for this feature is: [#79483]
+
+[#79483]: https://github.com/rust-lang/rust/issues/79483
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "custom_inner_attributes",
+ description: r##"# `custom_inner_attributes`
+
+The tracking issue for this feature is: [#54726]
+
+[#54726]: https://github.com/rust-lang/rust/issues/54726
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "custom_mir",
+ description: r##"# `custom_mir`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "custom_test_frameworks",
description: r##"# `custom_test_frameworks`
@@ -1797,6 +3960,17 @@ const WILL_FAIL: i32 = 4;
"##,
},
Lint {
+ label: "deadline_api",
+ description: r##"# `deadline_api`
+
+The tracking issue for this feature is: [#46316]
+
+[#46316]: https://github.com/rust-lang/rust/issues/46316
+
+------------------------
+"##,
+ },
+ Lint {
label: "dec2flt",
description: r##"# `dec2flt`
@@ -1806,54 +3980,47 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "default_free_fn",
- description: r##"# `default_free_fn`
+ label: "decl_macro",
+ description: r##"# `decl_macro`
-The tracking issue for this feature is: [#73014]
+The tracking issue for this feature is: [#39412]
-[#73014]: https://github.com/rust-lang/rust/issues/73014
+[#39412]: https://github.com/rust-lang/rust/issues/39412
------------------------
+"##,
+ },
+ Lint {
+ label: "default_type_parameter_fallback",
+ description: r##"# `default_type_parameter_fallback`
-Adds a free `default()` function to the `std::default` module. This function
-just forwards to [`Default::default()`], but may remove repetition of the word
-"default" from the call site.
+The tracking issue for this feature is: [#27336]
-[`Default::default()`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html#tymethod.default
+[#27336]: https://github.com/rust-lang/rust/issues/27336
-Here is an example:
+------------------------
+"##,
+ },
+ Lint {
+ label: "deprecated_safe",
+ description: r##"# `deprecated_safe`
-```rust
-#![feature(default_free_fn)]
-use std::default::default;
+The tracking issue for this feature is: [#94978]
-#[derive(Default)]
-struct AppConfig {
- foo: FooConfig,
- bar: BarConfig,
-}
+[#94978]: https://github.com/rust-lang/rust/issues/94978
-#[derive(Default)]
-struct FooConfig {
- foo: i32,
-}
+------------------------
+"##,
+ },
+ Lint {
+ label: "deprecated_suggestion",
+ description: r##"# `deprecated_suggestion`
-#[derive(Default)]
-struct BarConfig {
- bar: f32,
- baz: u8,
-}
+The tracking issue for this feature is: [#94785]
-fn main() {
- let options = AppConfig {
- foo: default(),
- bar: BarConfig {
- bar: 10.1,
- ..default()
- },
- };
-}
-```
+[#94785]: https://github.com/rust-lang/rust/issues/94785
+
+------------------------
"##,
},
Lint {
@@ -1866,6 +4033,15 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "derive_const",
+ description: r##"# `derive_const`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "derive_eq",
description: r##"# `derive_eq`
@@ -1875,6 +4051,79 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "diagnostic_namespace",
+ description: r##"# `diagnostic_namespace`
+
+The tracking issue for this feature is: [#111996]
+
+[#111996]: https://github.com/rust-lang/rust/issues/111996
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dir_entry_ext2",
+ description: r##"# `dir_entry_ext2`
+
+The tracking issue for this feature is: [#85573]
+
+[#85573]: https://github.com/rust-lang/rust/issues/85573
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "discriminant_kind",
+ description: r##"# `discriminant_kind`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dispatch_from_dyn",
+ description: r##"# `dispatch_from_dyn`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "div_duration",
+ description: r##"# `div_duration`
+
+The tracking issue for this feature is: [#63139]
+
+[#63139]: https://github.com/rust-lang/rust/issues/63139
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "do_not_recommend",
+ description: r##"# `do_not_recommend`
+
+The tracking issue for this feature is: [#51992]
+
+[#51992]: https://github.com/rust-lang/rust/issues/51992
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "doc_auto_cfg",
+ description: r##"# `doc_auto_cfg`
+
+The tracking issue for this feature is: [#43781]
+
+[#43781]: https://github.com/rust-lang/rust/issues/43781
+
+------------------------
+"##,
+ },
+ Lint {
label: "doc_cfg",
description: r##"# `doc_cfg`
@@ -1885,7 +4134,7 @@ The tracking issue for this feature is: [#43781]
The `doc_cfg` feature allows an API be documented as only available in some specific platforms.
This attribute has two effects:
-1. In the annotated item's documentation, there will be a message saying "This is supported on
+1. In the annotated item's documentation, there will be a message saying "Available on
(platform) only".
2. The item's doc-tests will only run on the specific platform.
@@ -1925,6 +4174,17 @@ pub struct Icon {
"##,
},
Lint {
+ label: "doc_cfg_hide",
+ description: r##"# `doc_cfg_hide`
+
+The tracking issue for this feature is: [#43781]
+
+[#43781]: https://github.com/rust-lang/rust/issues/43781
+
+------------------------
+"##,
+ },
+ Lint {
label: "doc_masked",
description: r##"# `doc_masked`
@@ -1990,6 +4250,180 @@ See also its documentation in [the rustdoc book][rustdoc-book-notable_trait].
"##,
},
Lint {
+ label: "downcast_unchecked",
+ description: r##"# `downcast_unchecked`
+
+The tracking issue for this feature is: [#90850]
+
+[#90850]: https://github.com/rust-lang/rust/issues/90850
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "drain_keep_rest",
+ description: r##"# `drain_keep_rest`
+
+The tracking issue for this feature is: [#101122]
+
+[#101122]: https://github.com/rust-lang/rust/issues/101122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dropck_eyepatch",
+ description: r##"# `dropck_eyepatch`
+
+The tracking issue for this feature is: [#34761]
+
+[#34761]: https://github.com/rust-lang/rust/issues/34761
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "duration_constants",
+ description: r##"# `duration_constants`
+
+The tracking issue for this feature is: [#57391]
+
+[#57391]: https://github.com/rust-lang/rust/issues/57391
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "duration_consts_float",
+ description: r##"# `duration_consts_float`
+
+The tracking issue for this feature is: [#72440]
+
+[#72440]: https://github.com/rust-lang/rust/issues/72440
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dyn_star",
+ description: r##"# `dyn_star`
+
+The tracking issue for this feature is: [#102425]
+
+[#102425]: https://github.com/rust-lang/rust/issues/102425
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "edition_panic",
+ description: r##"# `edition_panic`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "effects",
+ description: r##"# `effects`
+
+The tracking issue for this feature is: [#102090]
+
+[#102090]: https://github.com/rust-lang/rust/issues/102090
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "entry_insert",
+ description: r##"# `entry_insert`
+
+The tracking issue for this feature is: [#65225]
+
+[#65225]: https://github.com/rust-lang/rust/issues/65225
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ermsb_target_feature",
+ description: r##"# `ermsb_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_generic_member_access",
+ description: r##"# `error_generic_member_access`
+
+The tracking issue for this feature is: [#99301]
+
+[#99301]: https://github.com/rust-lang/rust/issues/99301
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_in_core",
+ description: r##"# `error_in_core`
+
+The tracking issue for this feature is: [#103765]
+
+[#103765]: https://github.com/rust-lang/rust/issues/103765
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_iter",
+ description: r##"# `error_iter`
+
+The tracking issue for this feature is: [#58520]
+
+[#58520]: https://github.com/rust-lang/rust/issues/58520
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_reporter",
+ description: r##"# `error_reporter`
+
+The tracking issue for this feature is: [#90172]
+
+[#90172]: https://github.com/rust-lang/rust/issues/90172
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_type_id",
+ description: r##"# `error_type_id`
+
+The tracking issue for this feature is: [#60784]
+
+[#60784]: https://github.com/rust-lang/rust/issues/60784
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "exact_size_is_empty",
+ description: r##"# `exact_size_is_empty`
+
+The tracking issue for this feature is: [#35428]
+
+[#35428]: https://github.com/rust-lang/rust/issues/35428
+
+------------------------
+"##,
+ },
+ Lint {
label: "exclusive_range_pattern",
description: r##"# `exclusive_range_pattern`
@@ -2020,60 +4454,105 @@ stabilized.
"##,
},
Lint {
- label: "explicit_generic_args_with_impl_trait",
- description: r##"# `explicit_generic_args_with_impl_trait`
+ label: "exclusive_wrapper",
+ description: r##"# `exclusive_wrapper`
-The tracking issue for this feature is: [#83701]
+The tracking issue for this feature is: [#98407]
-[#83701]: https://github.com/rust-lang/rust/issues/83701
+[#98407]: https://github.com/rust-lang/rust/issues/98407
------------------------
+"##,
+ },
+ Lint {
+ label: "exhaustive_patterns",
+ description: r##"# `exhaustive_patterns`
-The `explicit_generic_args_with_impl_trait` feature gate lets you specify generic arguments even
-when `impl Trait` is used in argument position.
+The tracking issue for this feature is: [#51085]
-A simple example is:
+[#51085]: https://github.com/rust-lang/rust/issues/51085
-```rust
-#![feature(explicit_generic_args_with_impl_trait)]
+------------------------
+"##,
+ },
+ Lint {
+ label: "exit_status_error",
+ description: r##"# `exit_status_error`
-fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
+The tracking issue for this feature is: [#84908]
-fn main() {
- foo::<str>("".to_string());
-}
-```
+[#84908]: https://github.com/rust-lang/rust/issues/84908
-This is currently rejected:
+------------------------
+"##,
+ },
+ Lint {
+ label: "exitcode_exit_method",
+ description: r##"# `exitcode_exit_method`
-```text
-error[E0632]: cannot provide explicit generic arguments when `impl Trait` is used in argument position
- --> src/main.rs:6:11
- |
-6 | foo::<str>("".to_string());
- | ^^^ explicit generic argument not allowed
+The tracking issue for this feature is: [#97100]
-```
+[#97100]: https://github.com/rust-lang/rust/issues/97100
-However it would compile if `explicit_generic_args_with_impl_trait` is enabled.
+------------------------
+"##,
+ },
+ Lint {
+ label: "explicit_tail_calls",
+ description: r##"# `explicit_tail_calls`
-Note that the synthetic type parameters from `impl Trait` are still implicit and you
-cannot explicitly specify these:
+The tracking issue for this feature is: [#112788]
-```rust,compile_fail
-#![feature(explicit_generic_args_with_impl_trait)]
+[#112788]: https://github.com/rust-lang/rust/issues/112788
-fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
-fn bar<T: ?Sized, F: AsRef<T>>(_f: F) {}
+------------------------
+"##,
+ },
+ Lint {
+ label: "extend_one",
+ description: r##"# `extend_one`
-fn main() {
- bar::<str, _>("".to_string()); // Okay
- bar::<str, String>("".to_string()); // Okay
+The tracking issue for this feature is: [#72631]
- foo::<str>("".to_string()); // Okay
- foo::<str, String>("".to_string()); // Error, you cannot specify `impl Trait` explicitly
-}
-```
+[#72631]: https://github.com/rust-lang/rust/issues/72631
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "extended_varargs_abi_support",
+ description: r##"# `extended_varargs_abi_support`
+
+The tracking issue for this feature is: [#100189]
+
+[#100189]: https://github.com/rust-lang/rust/issues/100189
+
+------------------------
+
+This feature adds the possibility of using `sysv64`, `win64` or `efiapi` calling
+conventions on functions with varargs.
+"##,
+ },
+ Lint {
+ label: "extern_types",
+ description: r##"# `extern_types`
+
+The tracking issue for this feature is: [#43467]
+
+[#43467]: https://github.com/rust-lang/rust/issues/43467
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "extract_if",
+ description: r##"# `extract_if`
+
+The tracking issue for this feature is: [#43244]
+
+[#43244]: https://github.com/rust-lang/rust/issues/43244
+
+------------------------
"##,
},
Lint {
@@ -2211,6 +4690,72 @@ against are compatible with those of the `#[ffi_pure]`.
"##,
},
Lint {
+ label: "ffi_returns_twice",
+ description: r##"# `ffi_returns_twice`
+
+The tracking issue for this feature is: [#58314]
+
+[#58314]: https://github.com/rust-lang/rust/issues/58314
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "file_create_new",
+ description: r##"# `file_create_new`
+
+The tracking issue for this feature is: [#105135]
+
+[#105135]: https://github.com/rust-lang/rust/issues/105135
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "file_set_times",
+ description: r##"# `file_set_times`
+
+The tracking issue for this feature is: [#98245]
+
+[#98245]: https://github.com/rust-lang/rust/issues/98245
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "float_gamma",
+ description: r##"# `float_gamma`
+
+The tracking issue for this feature is: [#99842]
+
+[#99842]: https://github.com/rust-lang/rust/issues/99842
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "float_minimum_maximum",
+ description: r##"# `float_minimum_maximum`
+
+The tracking issue for this feature is: [#91079]
+
+[#91079]: https://github.com/rust-lang/rust/issues/91079
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "float_next_up_down",
+ description: r##"# `float_next_up_down`
+
+The tracking issue for this feature is: [#91399]
+
+[#91399]: https://github.com/rust-lang/rust/issues/91399
+
+------------------------
+"##,
+ },
+ Lint {
label: "flt2dec",
description: r##"# `flt2dec`
@@ -2220,6 +4765,15 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "fmt_helpers_for_derive",
+ description: r##"# `fmt_helpers_for_derive`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "fmt_internals",
description: r##"# `fmt_internals`
@@ -2229,6 +4783,26 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "fn_align",
+ description: r##"# `fn_align`
+
+The tracking issue for this feature is: [#82232]
+
+[#82232]: https://github.com/rust-lang/rust/issues/82232
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fn_ptr_trait",
+ description: r##"# `fn_ptr_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "fn_traits",
description: r##"# `fn_traits`
@@ -2243,7 +4817,7 @@ See Also: [`unboxed_closures`](../language-features/unboxed-closures.md)
The `fn_traits` feature allows for implementation of the [`Fn*`] traits
for creating custom closure-like types.
-[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+[`Fn*`]: ../../std/ops/trait.Fn.html
```rust
#![feature(unboxed_closures)]
@@ -2268,6 +4842,90 @@ fn main() {
"##,
},
Lint {
+ label: "forget_unsized",
+ description: r##"# `forget_unsized`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "format_args_nl",
+ description: r##"# `format_args_nl`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fs_try_exists",
+ description: r##"# `fs_try_exists`
+
+The tracking issue for this feature is: [#83186]
+
+[#83186]: https://github.com/rust-lang/rust/issues/83186
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fundamental",
+ description: r##"# `fundamental`
+
+The tracking issue for this feature is: [#29635]
+
+[#29635]: https://github.com/rust-lang/rust/issues/29635
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "future_join",
+ description: r##"# `future_join`
+
+The tracking issue for this feature is: [#91642]
+
+[#91642]: https://github.com/rust-lang/rust/issues/91642
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "gen_future",
+ description: r##"# `gen_future`
+
+The tracking issue for this feature is: [#50547]
+
+[#50547]: https://github.com/rust-lang/rust/issues/50547
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generator_clone",
+ description: r##"# `generator_clone`
+
+The tracking issue for this feature is: [#95360]
+
+[#95360]: https://github.com/rust-lang/rust/issues/95360
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generator_trait",
+ description: r##"# `generator_trait`
+
+The tracking issue for this feature is: [#43122]
+
+[#43122]: https://github.com/rust-lang/rust/issues/43122
+
+------------------------
+"##,
+ },
+ Lint {
label: "generators",
description: r##"# `generators`
@@ -2518,82 +5176,253 @@ does.
"##,
},
Lint {
- label: "half_open_range_patterns",
- description: r##"# `half_open_range_patterns`
+ label: "generic_arg_infer",
+ description: r##"# `generic_arg_infer`
+
+The tracking issue for this feature is: [#85077]
+
+[#85077]: https://github.com/rust-lang/rust/issues/85077
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_assert",
+ description: r##"# `generic_assert`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_assert_internals",
+ description: r##"# `generic_assert_internals`
+
+The tracking issue for this feature is: [#44838]
+
+[#44838]: https://github.com/rust-lang/rust/issues/44838
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_associated_types_extended",
+ description: r##"# `generic_associated_types_extended`
+
+The tracking issue for this feature is: [#95451]
+
+[#95451]: https://github.com/rust-lang/rust/issues/95451
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_const_exprs",
+ description: r##"# `generic_const_exprs`
+
+The tracking issue for this feature is: [#76560]
+
+[#76560]: https://github.com/rust-lang/rust/issues/76560
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_const_items",
+ description: r##"# `generic_const_items`
+
+The tracking issue for this feature is: [#113521]
+
+[#113521]: https://github.com/rust-lang/rust/issues/113521
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "get_many_mut",
+ description: r##"# `get_many_mut`
+
+The tracking issue for this feature is: [#104642]
+
+[#104642]: https://github.com/rust-lang/rust/issues/104642
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "get_mut_unchecked",
+ description: r##"# `get_mut_unchecked`
+
+The tracking issue for this feature is: [#63292]
+
+[#63292]: https://github.com/rust-lang/rust/issues/63292
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "half_open_range_patterns_in_slices",
+ description: r##"# `half_open_range_patterns_in_slices`
The tracking issue for this feature is: [#67264]
-It is part of the `#![exclusive_range_pattern]` feature,
+It is part of the `exclusive_range_pattern` feature,
tracked at [#37854].
[#67264]: https://github.com/rust-lang/rust/issues/67264
[#37854]: https://github.com/rust-lang/rust/issues/37854
-----
-The `half_open_range_patterns` feature allows RangeTo patterns
-(`..10`) to be used in appropriate pattern matching contexts.
-This requires also enabling the `exclusive_range_pattern` feature.
-
-It also enabled RangeFrom patterns but that has since been
-stabilized.
+This feature allow using top-level half-open range patterns in slices.
```rust
-#![feature(half_open_range_patterns)]
+#![feature(half_open_range_patterns_in_slices)]
#![feature(exclusive_range_pattern)]
- let x = 5;
- match x {
- ..0 => println!("negative!"), // "RangeTo" pattern. Unstable.
- 0 => println!("zero!"),
- 1.. => println!("positive!"), // "RangeFrom" pattern. Stable.
- }
+
+fn main() {
+ let xs = [13, 1, 5, 2, 3, 1, 21, 8];
+ let [a @ 3.., b @ ..3, c @ 4..6, ..] = xs else { return; };
+}
+```
+
+Note that this feature is not required if the patterns are wrapped between parenthesis.
+
+```rust
+fn main() {
+ let xs = [13, 1];
+ let [(a @ 3..), c] = xs else { return; };
+}
```
"##,
},
Lint {
- label: "infer_static_outlives_requirements",
- description: r##"# `infer_static_outlives_requirements`
+ label: "hash_extract_if",
+ description: r##"# `hash_extract_if`
-The tracking issue for this feature is: [#54185]
+The tracking issue for this feature is: [#59618]
-[#54185]: https://github.com/rust-lang/rust/issues/54185
+[#59618]: https://github.com/rust-lang/rust/issues/59618
------------------------
-The `infer_static_outlives_requirements` feature indicates that certain
-`'static` outlives requirements can be inferred by the compiler rather than
-stating them explicitly.
+"##,
+ },
+ Lint {
+ label: "hash_raw_entry",
+ description: r##"# `hash_raw_entry`
-Note: It is an accompanying feature to `infer_outlives_requirements`,
-which must be enabled to infer outlives requirements.
+The tracking issue for this feature is: [#56167]
-For example, currently generic struct definitions that contain
-references, require where-clauses of the form T: 'static. By using
-this feature the outlives predicates will be inferred, although
-they may still be written explicitly.
+[#56167]: https://github.com/rust-lang/rust/issues/56167
-```rust,ignore (pseudo-Rust)
-struct Foo<U> where U: 'static { // <-- currently required
- bar: Bar<U>
-}
-struct Bar<T: 'static> {
- x: T,
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "hash_set_entry",
+ description: r##"# `hash_set_entry`
+The tracking issue for this feature is: [#60896]
-## Examples:
+[#60896]: https://github.com/rust-lang/rust/issues/60896
-```rust,ignore (pseudo-Rust)
-#![feature(infer_outlives_requirements)]
-#![feature(infer_static_outlives_requirements)]
+------------------------
+"##,
+ },
+ Lint {
+ label: "hasher_prefixfree_extras",
+ description: r##"# `hasher_prefixfree_extras`
-#[rustc_outlives]
-// Implicitly infer U: 'static
-struct Foo<U> {
- bar: Bar<U>
-}
-struct Bar<T: 'static> {
- x: T,
-}
-```
+The tracking issue for this feature is: [#96762]
+
+[#96762]: https://github.com/rust-lang/rust/issues/96762
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "hashmap_internals",
+ description: r##"# `hashmap_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "hexagon_target_feature",
+ description: r##"# `hexagon_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "hint_must_use",
+ description: r##"# `hint_must_use`
+
+The tracking issue for this feature is: [#94745]
+
+[#94745]: https://github.com/rust-lang/rust/issues/94745
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "if_let_guard",
+ description: r##"# `if_let_guard`
+
+The tracking issue for this feature is: [#51114]
+
+[#51114]: https://github.com/rust-lang/rust/issues/51114
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "impl_trait_in_assoc_type",
+ description: r##"# `impl_trait_in_assoc_type`
+
+The tracking issue for this feature is: [#63063]
+
+[#63063]: https://github.com/rust-lang/rust/issues/63063
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "impl_trait_in_fn_trait_return",
+ description: r##"# `impl_trait_in_fn_trait_return`
+
+The tracking issue for this feature is: [#99697]
+
+[#99697]: https://github.com/rust-lang/rust/issues/99697
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "imported_main",
+ description: r##"# `imported_main`
+
+The tracking issue for this feature is: [#28937]
+
+[#28937]: https://github.com/rust-lang/rust/issues/28937
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "inherent_associated_types",
+ description: r##"# `inherent_associated_types`
+
+The tracking issue for this feature is: [#8995]
+
+[#8995]: https://github.com/rust-lang/rust/issues/8995
+
+------------------------
"##,
},
Lint {
@@ -2661,10 +5490,41 @@ match some_int {
"##,
},
Lint {
- label: "int_error_internals",
- description: r##"# `int_error_internals`
+ label: "inplace_iteration",
+ description: r##"# `inplace_iteration`
-This feature is internal to the Rust compiler and is not intended for general use.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "int_roundings",
+ description: r##"# `int_roundings`
+
+The tracking issue for this feature is: [#88581]
+
+[#88581]: https://github.com/rust-lang/rust/issues/88581
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "integer_atomics",
+ description: r##"# `integer_atomics`
+
+The tracking issue for this feature is: [#99069]
+
+[#99069]: https://github.com/rust-lang/rust/issues/99069
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "internal_impls_macro",
+ description: r##"# `internal_impls_macro`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
"##,
@@ -2718,12 +5578,13 @@ via a declaration like
```rust
#![feature(intrinsics)]
+#![allow(internal_features)]
# fn main() {}
extern "rust-intrinsic" {
fn transmute<T, U>(x: T) -> U;
- fn offset<T>(dst: *const T, offset: isize) -> *const T;
+ fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
}
```
@@ -2731,6 +5592,92 @@ As with any other FFI functions, these are always `unsafe` to call.
"##,
},
Lint {
+ label: "io_error_downcast",
+ description: r##"# `io_error_downcast`
+
+The tracking issue for this feature is: [#99262]
+
+[#99262]: https://github.com/rust-lang/rust/issues/99262
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "io_error_more",
+ description: r##"# `io_error_more`
+
+The tracking issue for this feature is: [#86442]
+
+[#86442]: https://github.com/rust-lang/rust/issues/86442
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "io_error_uncategorized",
+ description: r##"# `io_error_uncategorized`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "io_slice_advance",
+ description: r##"# `io_slice_advance`
+
+The tracking issue for this feature is: [#62726]
+
+[#62726]: https://github.com/rust-lang/rust/issues/62726
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ip",
+ description: r##"# `ip`
+
+The tracking issue for this feature is: [#27709]
+
+[#27709]: https://github.com/rust-lang/rust/issues/27709
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ip_bits",
+ description: r##"# `ip_bits`
+
+The tracking issue for this feature is: [#113744]
+
+[#113744]: https://github.com/rust-lang/rust/issues/113744
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ip_in_core",
+ description: r##"# `ip_in_core`
+
+The tracking issue for this feature is: [#108443]
+
+[#108443]: https://github.com/rust-lang/rust/issues/108443
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "is_ascii_octdigit",
+ description: r##"# `is_ascii_octdigit`
+
+The tracking issue for this feature is: [#101288]
+
+[#101288]: https://github.com/rust-lang/rust/issues/101288
+
+------------------------
+"##,
+ },
+ Lint {
label: "is_sorted",
description: r##"# `is_sorted`
@@ -2746,6 +5693,160 @@ add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to
"##,
},
Lint {
+ label: "isqrt",
+ description: r##"# `isqrt`
+
+The tracking issue for this feature is: [#116226]
+
+[#116226]: https://github.com/rust-lang/rust/issues/116226
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_advance_by",
+ description: r##"# `iter_advance_by`
+
+The tracking issue for this feature is: [#77404]
+
+[#77404]: https://github.com/rust-lang/rust/issues/77404
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_array_chunks",
+ description: r##"# `iter_array_chunks`
+
+The tracking issue for this feature is: [#100450]
+
+[#100450]: https://github.com/rust-lang/rust/issues/100450
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_collect_into",
+ description: r##"# `iter_collect_into`
+
+The tracking issue for this feature is: [#94780]
+
+[#94780]: https://github.com/rust-lang/rust/issues/94780
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_from_generator",
+ description: r##"# `iter_from_generator`
+
+The tracking issue for this feature is: [#43122]
+
+[#43122]: https://github.com/rust-lang/rust/issues/43122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_intersperse",
+ description: r##"# `iter_intersperse`
+
+The tracking issue for this feature is: [#79524]
+
+[#79524]: https://github.com/rust-lang/rust/issues/79524
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_is_partitioned",
+ description: r##"# `iter_is_partitioned`
+
+The tracking issue for this feature is: [#62544]
+
+[#62544]: https://github.com/rust-lang/rust/issues/62544
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_map_windows",
+ description: r##"# `iter_map_windows`
+
+The tracking issue for this feature is: [#87155]
+
+[#87155]: https://github.com/rust-lang/rust/issues/87155
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_next_chunk",
+ description: r##"# `iter_next_chunk`
+
+The tracking issue for this feature is: [#98326]
+
+[#98326]: https://github.com/rust-lang/rust/issues/98326
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_order_by",
+ description: r##"# `iter_order_by`
+
+The tracking issue for this feature is: [#64295]
+
+[#64295]: https://github.com/rust-lang/rust/issues/64295
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_partition_in_place",
+ description: r##"# `iter_partition_in_place`
+
+The tracking issue for this feature is: [#62543]
+
+[#62543]: https://github.com/rust-lang/rust/issues/62543
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_repeat_n",
+ description: r##"# `iter_repeat_n`
+
+The tracking issue for this feature is: [#104434]
+
+[#104434]: https://github.com/rust-lang/rust/issues/104434
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iterator_try_collect",
+ description: r##"# `iterator_try_collect`
+
+The tracking issue for this feature is: [#94047]
+
+[#94047]: https://github.com/rust-lang/rust/issues/94047
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iterator_try_reduce",
+ description: r##"# `iterator_try_reduce`
+
+The tracking issue for this feature is: [#87053]
+
+[#87053]: https://github.com/rust-lang/rust/issues/87053
+
+------------------------
+"##,
+ },
+ Lint {
label: "lang_items",
description: r##"# `lang_items`
@@ -2758,304 +5859,190 @@ functionality that isn't hard-coded into the language, but is
implemented in libraries, with a special marker to tell the compiler
it exists. The marker is the attribute `#[lang = "..."]` and there are
various different values of `...`, i.e. various different 'lang
-items'.
+items'. Most of them can only be defined once.
-For example, `Box` pointers require two lang items, one for allocation
-and one for deallocation. A freestanding program that uses the `Box`
-sugar for dynamic allocations via `malloc` and `free`:
+Lang items are loaded lazily by the compiler; e.g. if one never uses `Box`
+then there is no need to define a function for `exchange_malloc`.
+`rustc` will emit an error when an item is needed but not found in the current
+crate or any that it depends on.
+
+Some features provided by lang items:
+
+- overloadable operators via traits: the traits corresponding to the
+ `==`, `<`, dereferencing (`*`) and `+` (etc.) operators are all
+ marked with lang items; those specific four are `eq`, `partial_ord`,
+ `deref`/`deref_mut`, and `add` respectively.
+- panicking: the `panic` and `panic_impl` lang items, among others.
+- stack unwinding: the lang item `eh_personality` is a function used by the
+ failure mechanisms of the compiler. This is often mapped to GCC's personality
+ function (see the [`std` implementation][personality] for more information),
+ but programs which don't trigger a panic can be assured that this function is
+ never called. Additionally, a `eh_catch_typeinfo` static is needed for certain
+ targets which implement Rust panics on top of C++ exceptions.
+- the traits in `core::marker` used to indicate types of
+ various kinds; e.g. lang items `sized`, `sync` and `copy`.
+- memory allocation, see below.
+
+Most lang items are defined by `core`, but if you're trying to build
+an executable without the `std` crate, you might run into the need
+for lang item definitions.
+
+[personality]: https://github.com/rust-lang/rust/blob/master/library/std/src/sys/personality/gcc.rs
+
+## Example: Implementing a `Box`
+
+`Box` pointers require two lang items: one for the type itself and one for
+allocation. A freestanding program that uses the `Box` sugar for dynamic
+allocations via `malloc` and `free`:
```rust,ignore (libc-is-finicky)
-#![feature(lang_items, box_syntax, start, libc, core_intrinsics, rustc_private)]
+#![feature(lang_items, start, core_intrinsics, rustc_private, panic_unwind, rustc_attrs)]
+#![allow(internal_features)]
#![no_std]
+
+extern crate libc;
+extern crate unwind;
+
+use core::ffi::c_void;
use core::intrinsics;
use core::panic::PanicInfo;
+use core::ptr::NonNull;
-extern crate libc;
+pub struct Global; // the global allocator
+struct Unique<T>(NonNull<T>);
#[lang = "owned_box"]
-pub struct Box<T>(*mut T);
+pub struct Box<T, A = Global>(Unique<T>, A);
+
+impl<T> Box<T> {
+ pub fn new(x: T) -> Self {
+ #[rustc_box]
+ Box::new(x)
+ }
+}
+
+impl<T, A> Drop for Box<T, A> {
+ fn drop(&mut self) {
+ unsafe {
+ libc::free(self.0.0.as_ptr() as *mut c_void);
+ }
+ }
+}
#[lang = "exchange_malloc"]
unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
- let p = libc::malloc(size as libc::size_t) as *mut u8;
+ let p = libc::malloc(size) as *mut u8;
// Check if `malloc` failed:
- if p as usize == 0 {
+ if p.is_null() {
intrinsics::abort();
}
p
}
-#[lang = "box_free"]
-unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
- libc::free(ptr as *mut libc::c_void)
-}
-
#[start]
fn main(_argc: isize, _argv: *const *const u8) -> isize {
- let _x = box 1;
+ let _x = Box::new(1);
0
}
-#[lang = "eh_personality"] extern fn rust_eh_personality() {}
-#[lang = "panic_impl"] extern fn rust_begin_panic(info: &PanicInfo) -> ! { unsafe { intrinsics::abort() } }
-#[no_mangle] pub extern fn rust_eh_register_frames () {}
-#[no_mangle] pub extern fn rust_eh_unregister_frames () {}
+#[lang = "eh_personality"]
+fn rust_eh_personality() {}
+
+#[panic_handler]
+fn panic_handler(_info: &PanicInfo) -> ! { intrinsics::abort() }
```
Note the use of `abort`: the `exchange_malloc` lang item is assumed to
return a valid pointer, and so needs to do the check internally.
-Other features provided by lang items include:
-
-- overloadable operators via traits: the traits corresponding to the
- `==`, `<`, dereferencing (`*`) and `+` (etc.) operators are all
- marked with lang items; those specific four are `eq`, `ord`,
- `deref`, and `add` respectively.
-- stack unwinding and general failure; the `eh_personality`,
- `panic` and `panic_bounds_check` lang items.
-- the traits in `std::marker` used to indicate types of
- various kinds; lang items `send`, `sync` and `copy`.
-- the marker types and variance indicators found in
- `std::marker`; lang items `covariant_type`,
- `contravariant_lifetime`, etc.
-
-Lang items are loaded lazily by the compiler; e.g. if one never uses
-`Box` then there is no need to define functions for `exchange_malloc`
-and `box_free`. `rustc` will emit an error when an item is needed
-but not found in the current crate or any that it depends on.
-
-Most lang items are defined by `libcore`, but if you're trying to build
-an executable without the standard library, you'll run into the need
-for lang items. The rest of this page focuses on this use-case, even though
-lang items are a bit broader than that.
-
-### Using libc
-
-In order to build a `#[no_std]` executable we will need libc as a dependency.
-We can specify this using our `Cargo.toml` file:
-
-```toml
-[dependencies]
-libc = { version = "0.2.14", default-features = false }
-```
-
-Note that the default features have been disabled. This is a critical step -
-**the default features of libc include the standard library and so must be
-disabled.**
-
-### Writing an executable without stdlib
-
-Controlling the entry point is possible in two ways: the `#[start]` attribute,
-or overriding the default shim for the C `main` function with your own.
-
-The function marked `#[start]` is passed the command line parameters
-in the same format as C:
+## List of all language items
-```rust,ignore (libc-is-finicky)
-#![feature(lang_items, core_intrinsics, rustc_private)]
-#![feature(start)]
-#![no_std]
-use core::intrinsics;
-use core::panic::PanicInfo;
+An up-to-date list of all language items can be found [here] in the compiler code.
-// Pull in the system libc library for what crt0.o likely requires.
-extern crate libc;
+[here]: https://github.com/rust-lang/rust/blob/master/compiler/rustc_hir/src/lang_items.rs
+"##,
+ },
+ Lint {
+ label: "large_assignments",
+ description: r##"# `large_assignments`
-// Entry point for this program.
-#[start]
-fn start(_argc: isize, _argv: *const *const u8) -> isize {
- 0
-}
+The tracking issue for this feature is: [#83518]
-// These functions are used by the compiler, but not
-// for a bare-bones hello world. These are normally
-// provided by libstd.
-#[lang = "eh_personality"]
-#[no_mangle]
-pub extern fn rust_eh_personality() {
-}
+[#83518]: https://github.com/rust-lang/rust/issues/83518
-#[lang = "panic_impl"]
-#[no_mangle]
-pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
- unsafe { intrinsics::abort() }
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "layout_for_ptr",
+ description: r##"# `layout_for_ptr`
-To override the compiler-inserted `main` shim, one has to disable it
-with `#![no_main]` and then create the appropriate symbol with the
-correct ABI and the correct name, which requires overriding the
-compiler's name mangling too:
+The tracking issue for this feature is: [#69835]
-```rust,ignore (libc-is-finicky)
-#![feature(lang_items, core_intrinsics, rustc_private)]
-#![feature(start)]
-#![no_std]
-#![no_main]
-use core::intrinsics;
-use core::panic::PanicInfo;
+[#69835]: https://github.com/rust-lang/rust/issues/69835
-// Pull in the system libc library for what crt0.o likely requires.
-extern crate libc;
+------------------------
+"##,
+ },
+ Lint {
+ label: "lazy_cell",
+ description: r##"# `lazy_cell`
-// Entry point for this program.
-#[no_mangle] // ensure that this symbol is called `main` in the output
-pub extern fn main(_argc: i32, _argv: *const *const u8) -> i32 {
- 0
-}
+The tracking issue for this feature is: [#109736]
-// These functions are used by the compiler, but not
-// for a bare-bones hello world. These are normally
-// provided by libstd.
-#[lang = "eh_personality"]
-#[no_mangle]
-pub extern fn rust_eh_personality() {
-}
+[#109736]: https://github.com/rust-lang/rust/issues/109736
-#[lang = "panic_impl"]
-#[no_mangle]
-pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
- unsafe { intrinsics::abort() }
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "lazy_cell_consume",
+ description: r##"# `lazy_cell_consume`
-In many cases, you may need to manually link to the `compiler_builtins` crate
-when building a `no_std` binary. You may observe this via linker error messages
-such as "```undefined reference to `__rust_probestack'```".
+The tracking issue for this feature is: [#109736]
-## More about the language items
+[#109736]: https://github.com/rust-lang/rust/issues/109736
-The compiler currently makes a few assumptions about symbols which are
-available in the executable to call. Normally these functions are provided by
-the standard library, but without it you must define your own. These symbols
-are called "language items", and they each have an internal name, and then a
-signature that an implementation must conform to.
+------------------------
+"##,
+ },
+ Lint {
+ label: "lazy_type_alias",
+ description: r##"# `lazy_type_alias`
-The first of these functions, `rust_eh_personality`, is used by the failure
-mechanisms of the compiler. This is often mapped to GCC's personality function
-(see the [libstd implementation][unwind] for more information), but crates
-which do not trigger a panic can be assured that this function is never
-called. The language item's name is `eh_personality`.
+The tracking issue for this feature is: [#112792]
-[unwind]: https://github.com/rust-lang/rust/blob/master/library/panic_unwind/src/gcc.rs
+[#112792]: https://github.com/rust-lang/rust/issues/112792
-The second function, `rust_begin_panic`, is also used by the failure mechanisms of the
-compiler. When a panic happens, this controls the message that's displayed on
-the screen. While the language item's name is `panic_impl`, the symbol name is
-`rust_begin_panic`.
+------------------------
+"##,
+ },
+ Lint {
+ label: "let_chains",
+ description: r##"# `let_chains`
-Finally, a `eh_catch_typeinfo` static is needed for certain targets which
-implement Rust panics on top of C++ exceptions.
+The tracking issue for this feature is: [#53667]
-## List of all language items
+[#53667]: https://github.com/rust-lang/rust/issues/53667
-This is a list of all language items in Rust along with where they are located in
-the source code.
-
-- Primitives
- - `i8`: `libcore/num/mod.rs`
- - `i16`: `libcore/num/mod.rs`
- - `i32`: `libcore/num/mod.rs`
- - `i64`: `libcore/num/mod.rs`
- - `i128`: `libcore/num/mod.rs`
- - `isize`: `libcore/num/mod.rs`
- - `u8`: `libcore/num/mod.rs`
- - `u16`: `libcore/num/mod.rs`
- - `u32`: `libcore/num/mod.rs`
- - `u64`: `libcore/num/mod.rs`
- - `u128`: `libcore/num/mod.rs`
- - `usize`: `libcore/num/mod.rs`
- - `f32`: `libstd/f32.rs`
- - `f64`: `libstd/f64.rs`
- - `char`: `libcore/char.rs`
- - `slice`: `liballoc/slice.rs`
- - `str`: `liballoc/str.rs`
- - `const_ptr`: `libcore/ptr.rs`
- - `mut_ptr`: `libcore/ptr.rs`
- - `unsafe_cell`: `libcore/cell.rs`
-- Runtime
- - `start`: `libstd/rt.rs`
- - `eh_personality`: `libpanic_unwind/emcc.rs` (EMCC)
- - `eh_personality`: `libpanic_unwind/gcc.rs` (GNU)
- - `eh_personality`: `libpanic_unwind/seh.rs` (SEH)
- - `eh_catch_typeinfo`: `libpanic_unwind/emcc.rs` (EMCC)
- - `panic`: `libcore/panicking.rs`
- - `panic_bounds_check`: `libcore/panicking.rs`
- - `panic_impl`: `libcore/panicking.rs`
- - `panic_impl`: `libstd/panicking.rs`
-- Allocations
- - `owned_box`: `liballoc/boxed.rs`
- - `exchange_malloc`: `liballoc/heap.rs`
- - `box_free`: `liballoc/heap.rs`
-- Operands
- - `not`: `libcore/ops/bit.rs`
- - `bitand`: `libcore/ops/bit.rs`
- - `bitor`: `libcore/ops/bit.rs`
- - `bitxor`: `libcore/ops/bit.rs`
- - `shl`: `libcore/ops/bit.rs`
- - `shr`: `libcore/ops/bit.rs`
- - `bitand_assign`: `libcore/ops/bit.rs`
- - `bitor_assign`: `libcore/ops/bit.rs`
- - `bitxor_assign`: `libcore/ops/bit.rs`
- - `shl_assign`: `libcore/ops/bit.rs`
- - `shr_assign`: `libcore/ops/bit.rs`
- - `deref`: `libcore/ops/deref.rs`
- - `deref_mut`: `libcore/ops/deref.rs`
- - `index`: `libcore/ops/index.rs`
- - `index_mut`: `libcore/ops/index.rs`
- - `add`: `libcore/ops/arith.rs`
- - `sub`: `libcore/ops/arith.rs`
- - `mul`: `libcore/ops/arith.rs`
- - `div`: `libcore/ops/arith.rs`
- - `rem`: `libcore/ops/arith.rs`
- - `neg`: `libcore/ops/arith.rs`
- - `add_assign`: `libcore/ops/arith.rs`
- - `sub_assign`: `libcore/ops/arith.rs`
- - `mul_assign`: `libcore/ops/arith.rs`
- - `div_assign`: `libcore/ops/arith.rs`
- - `rem_assign`: `libcore/ops/arith.rs`
- - `eq`: `libcore/cmp.rs`
- - `ord`: `libcore/cmp.rs`
-- Functions
- - `fn`: `libcore/ops/function.rs`
- - `fn_mut`: `libcore/ops/function.rs`
- - `fn_once`: `libcore/ops/function.rs`
- - `generator_state`: `libcore/ops/generator.rs`
- - `generator`: `libcore/ops/generator.rs`
-- Other
- - `coerce_unsized`: `libcore/ops/unsize.rs`
- - `drop`: `libcore/ops/drop.rs`
- - `drop_in_place`: `libcore/ptr.rs`
- - `clone`: `libcore/clone.rs`
- - `copy`: `libcore/marker.rs`
- - `send`: `libcore/marker.rs`
- - `sized`: `libcore/marker.rs`
- - `unsize`: `libcore/marker.rs`
- - `sync`: `libcore/marker.rs`
- - `phantom_data`: `libcore/marker.rs`
- - `discriminant_kind`: `libcore/marker.rs`
- - `freeze`: `libcore/marker.rs`
- - `debug_trait`: `libcore/fmt/mod.rs`
- - `non_zero`: `libcore/nonzero.rs`
- - `arc`: `liballoc/sync.rs`
- - `rc`: `liballoc/rc.rs`
+------------------------
"##,
},
Lint {
- label: "libstd_sys_internals",
- description: r##"# `libstd_sys_internals`
+ label: "liballoc_internals",
+ description: r##"# `liballoc_internals`
-This feature is internal to the Rust compiler and is not intended for general use.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
"##,
},
Lint {
- label: "libstd_thread_internals",
- description: r##"# `libstd_thread_internals`
+ label: "libstd_sys_internals",
+ description: r##"# `libstd_sys_internals`
This feature is internal to the Rust compiler and is not intended for general use.
@@ -3072,197 +6059,124 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "llvm_asm",
- description: r##"# `llvm_asm`
+ label: "link_llvm_intrinsics",
+ description: r##"# `link_llvm_intrinsics`
-The tracking issue for this feature is: [#70173]
+The tracking issue for this feature is: [#29602]
-[#70173]: https://github.com/rust-lang/rust/issues/70173
+[#29602]: https://github.com/rust-lang/rust/issues/29602
------------------------
+"##,
+ },
+ Lint {
+ label: "linkage",
+ description: r##"# `linkage`
-For extremely low-level manipulations and performance reasons, one
-might wish to control the CPU directly. Rust supports using inline
-assembly to do this via the `llvm_asm!` macro.
-
-```rust,ignore (pseudo-code)
-llvm_asm!(assembly template
- : output operands
- : input operands
- : clobbers
- : options
- );
-```
-
-Any use of `llvm_asm` is feature gated (requires `#![feature(llvm_asm)]` on the
-crate to allow) and of course requires an `unsafe` block.
-
-> **Note**: the examples here are given in x86/x86-64 assembly, but
-> all platforms are supported.
-
-## Assembly template
+The tracking issue for this feature is: [#29603]
-The `assembly template` is the only required parameter and must be a
-literal string (i.e. `""`)
+[#29603]: https://github.com/rust-lang/rust/issues/29603
-```rust
-#![feature(llvm_asm)]
+------------------------
+"##,
+ },
+ Lint {
+ label: "linked_list_cursors",
+ description: r##"# `linked_list_cursors`
-#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-fn foo() {
- unsafe {
- llvm_asm!("NOP");
- }
-}
+The tracking issue for this feature is: [#58533]
-// Other platforms:
-#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-fn foo() { /* ... */ }
+[#58533]: https://github.com/rust-lang/rust/issues/58533
-fn main() {
- // ...
- foo();
- // ...
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "linked_list_remove",
+ description: r##"# `linked_list_remove`
-(The `feature(llvm_asm)` and `#[cfg]`s are omitted from now on.)
+The tracking issue for this feature is: [#69210]
-Output operands, input operands, clobbers and options are all optional
-but you must add the right number of `:` if you skip them:
+[#69210]: https://github.com/rust-lang/rust/issues/69210
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() { unsafe {
-llvm_asm!("xor %eax, %eax"
- :
- :
- : "eax"
- );
-# } }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "lint_reasons",
+ description: r##"# `lint_reasons`
-Whitespace also doesn't matter:
+The tracking issue for this feature is: [#54503]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() { unsafe {
-llvm_asm!("xor %eax, %eax" ::: "eax");
-# } }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+[#54503]: https://github.com/rust-lang/rust/issues/54503
-## Operands
+------------------------
+"##,
+ },
+ Lint {
+ label: "linux_pidfd",
+ description: r##"# `linux_pidfd`
-Input and output operands follow the same format: `:
-"constraints1"(expr1), "constraints2"(expr2), ..."`. Output operand
-expressions must be mutable place, or not yet assigned:
+The tracking issue for this feature is: [#82971]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-fn add(a: i32, b: i32) -> i32 {
- let c: i32;
- unsafe {
- llvm_asm!("add $2, $0"
- : "=r"(c)
- : "0"(a), "r"(b)
- );
- }
- c
-}
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn add(a: i32, b: i32) -> i32 { a + b }
+[#82971]: https://github.com/rust-lang/rust/issues/82971
-fn main() {
- assert_eq!(add(3, 14159), 14162)
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "log_syntax",
+ description: r##"# `log_syntax`
-If you would like to use real operands in this position, however,
-you are required to put curly braces `{}` around the register that
-you want, and you are required to put the specific size of the
-operand. This is useful for very low level programming, where
-which register you use is important:
+The tracking issue for this feature is: [#29598]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# unsafe fn read_byte_in(port: u16) -> u8 {
-let result: u8;
-llvm_asm!("in %dx, %al" : "={al}"(result) : "{dx}"(port));
-result
-# }
-```
+[#29598]: https://github.com/rust-lang/rust/issues/29598
-## Clobbers
+------------------------
+"##,
+ },
+ Lint {
+ label: "macro_metavar_expr",
+ description: r##"# `macro_metavar_expr`
-Some instructions modify registers which might otherwise have held
-different values so we use the clobbers list to indicate to the
-compiler not to assume any values loaded into those registers will
-stay valid.
+The tracking issue for this feature is: [#83527]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() { unsafe {
-// Put the value 0x200 in eax:
-llvm_asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
-# } }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+[#83527]: https://github.com/rust-lang/rust/issues/83527
-Input and output registers need not be listed since that information
-is already communicated by the given constraints. Otherwise, any other
-registers used either implicitly or explicitly should be listed.
+------------------------
+"##,
+ },
+ Lint {
+ label: "map_entry_replace",
+ description: r##"# `map_entry_replace`
-If the assembly changes the condition code register `cc` should be
-specified as one of the clobbers. Similarly, if the assembly modifies
-memory, `memory` should also be specified.
+The tracking issue for this feature is: [#44286]
-## Options
+[#44286]: https://github.com/rust-lang/rust/issues/44286
-The last section, `options` is specific to Rust. The format is comma
-separated literal strings (i.e. `:"foo", "bar", "baz"`). It's used to
-specify some extra info about the inline assembly:
+------------------------
+"##,
+ },
+ Lint {
+ label: "map_many_mut",
+ description: r##"# `map_many_mut`
-Current valid options are:
+The tracking issue for this feature is: [#97601]
-1. `volatile` - specifying this is analogous to
- `__asm__ __volatile__ (...)` in gcc/clang.
-2. `alignstack` - certain instructions expect the stack to be
- aligned a certain way (i.e. SSE) and specifying this indicates to
- the compiler to insert its usual stack alignment code
-3. `intel` - use intel syntax instead of the default AT&T.
+[#97601]: https://github.com/rust-lang/rust/issues/97601
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() {
-let result: i32;
-unsafe {
- llvm_asm!("mov eax, 2" : "={eax}"(result) : : : "intel")
-}
-println!("eax is currently {}", result);
-# }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "map_try_insert",
+ description: r##"# `map_try_insert`
-## More Information
+The tracking issue for this feature is: [#82766]
-The current implementation of the `llvm_asm!` macro is a direct binding to [LLVM's
-inline assembler expressions][llvm-docs], so be sure to check out [their
-documentation as well][llvm-docs] for more information about clobbers,
-constraints, etc.
+[#82766]: https://github.com/rust-lang/rust/issues/82766
-[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
+------------------------
"##,
},
Lint {
@@ -3305,6 +6219,116 @@ feature, which applied to all empty traits (without needing an opt-in).
"##,
},
Lint {
+ label: "maybe_uninit_array_assume_init",
+ description: r##"# `maybe_uninit_array_assume_init`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_as_bytes",
+ description: r##"# `maybe_uninit_as_bytes`
+
+The tracking issue for this feature is: [#93092]
+
+[#93092]: https://github.com/rust-lang/rust/issues/93092
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_slice",
+ description: r##"# `maybe_uninit_slice`
+
+The tracking issue for this feature is: [#63569]
+
+[#63569]: https://github.com/rust-lang/rust/issues/63569
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_uninit_array",
+ description: r##"# `maybe_uninit_uninit_array`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_uninit_array_transpose",
+ description: r##"# `maybe_uninit_uninit_array_transpose`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_write_slice",
+ description: r##"# `maybe_uninit_write_slice`
+
+The tracking issue for this feature is: [#79995]
+
+[#79995]: https://github.com/rust-lang/rust/issues/79995
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "mem_copy_fn",
+ description: r##"# `mem_copy_fn`
+
+The tracking issue for this feature is: [#98262]
+
+[#98262]: https://github.com/rust-lang/rust/issues/98262
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "min_specialization",
+ description: r##"# `min_specialization`
+
+The tracking issue for this feature is: [#31844]
+
+[#31844]: https://github.com/rust-lang/rust/issues/31844
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "mips_target_feature",
+ description: r##"# `mips_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "more_float_constants",
+ description: r##"# `more_float_constants`
+
+The tracking issue for this feature is: [#103883]
+
+[#103883]: https://github.com/rust-lang/rust/issues/103883
+
+------------------------
+"##,
+ },
+ Lint {
label: "more_qualified_paths",
description: r##"# `more_qualified_paths`
@@ -3338,68 +6362,61 @@ impl A for Foo {
"##,
},
Lint {
- label: "native_link_modifiers",
- description: r##"# `native_link_modifiers`
+ label: "multiple_supertrait_upcastable",
+ description: r##"# `multiple_supertrait_upcastable`
-The tracking issue for this feature is: [#81490]
-
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
-
-The `native_link_modifiers` feature allows you to use the `modifiers` syntax with the `#[link(..)]` attribute.
-
-Modifiers are specified as a comma-delimited string with each modifier prefixed with either a `+` or `-` to indicate that the modifier is enabled or disabled, respectively. The last boolean value specified for a given modifier wins.
"##,
},
Lint {
- label: "native_link_modifiers_as_needed",
- description: r##"# `native_link_modifiers_as_needed`
+ label: "must_not_suspend",
+ description: r##"# `must_not_suspend`
-The tracking issue for this feature is: [#81490]
+The tracking issue for this feature is: [#83310]
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+[#83310]: https://github.com/rust-lang/rust/issues/83310
------------------------
-
-The `native_link_modifiers_as_needed` feature allows you to use the `as-needed` modifier.
-
-`as-needed` is only compatible with the `dynamic` and `framework` linking kinds. Using any other kind will result in a compiler error.
-
-`+as-needed` means that the library will be actually linked only if it satisfies some undefined symbols at the point at which it is specified on the command line, making it similar to static libraries in this regard.
-
-This modifier translates to `--as-needed` for ld-like linkers, and to `-dead_strip_dylibs` / `-needed_library` / `-needed_framework` for ld64.
-The modifier does nothing for linkers that don't support it (e.g. `link.exe`).
-
-The default for this modifier is unclear, some targets currently specify it as `+as-needed`, some do not. We may want to try making `+as-needed` a default for all targets.
"##,
},
Lint {
- label: "native_link_modifiers_bundle",
- description: r##"# `native_link_modifiers_bundle`
+ label: "mutex_unlock",
+ description: r##"# `mutex_unlock`
-The tracking issue for this feature is: [#81490]
+The tracking issue for this feature is: [#81872]
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+[#81872]: https://github.com/rust-lang/rust/issues/81872
------------------------
+"##,
+ },
+ Lint {
+ label: "mutex_unpoison",
+ description: r##"# `mutex_unpoison`
-The `native_link_modifiers_bundle` feature allows you to use the `bundle` modifier.
+The tracking issue for this feature is: [#96469]
-Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
+[#96469]: https://github.com/rust-lang/rust/issues/96469
-`+bundle` means objects from the static library are bundled into the produced crate (a rlib, for example) and are used from this crate later during linking of the final binary.
+------------------------
+"##,
+ },
+ Lint {
+ label: "naked_functions",
+ description: r##"# `naked_functions`
-`-bundle` means the static library is included into the produced rlib "by name" and object files from it are included only during linking of the final binary, the file search by that name is also performed during final linking.
+The tracking issue for this feature is: [#32408]
-This modifier is supposed to supersede the `static-nobundle` linking kind defined by [RFC 1717](https://github.com/rust-lang/rfcs/pull/1717).
+[#32408]: https://github.com/rust-lang/rust/issues/32408
-The default for this modifier is currently `+bundle`, but it could be changed later on some future edition boundary.
+------------------------
"##,
},
Lint {
- label: "native_link_modifiers_verbatim",
- description: r##"# `native_link_modifiers_verbatim`
+ label: "native_link_modifiers_as_needed",
+ description: r##"# `native_link_modifiers_as_needed`
The tracking issue for this feature is: [#81490]
@@ -3407,40 +6424,36 @@ The tracking issue for this feature is: [#81490]
------------------------
-The `native_link_modifiers_verbatim` feature allows you to use the `verbatim` modifier.
+The `native_link_modifiers_as_needed` feature allows you to use the `as-needed` modifier.
-`+verbatim` means that rustc itself won't add any target-specified library prefixes or suffixes (like `lib` or `.a`) to the library name, and will try its best to ask for the same thing from the linker.
+`as-needed` is only compatible with the `dynamic` and `framework` linking kinds. Using any other kind will result in a compiler error.
-For `ld`-like linkers rustc will use the `-l:filename` syntax (note the colon) when passing the library, so the linker won't add any prefixes or suffixes as well.
-See [`-l namespec`](https://sourceware.org/binutils/docs/ld/Options.html) in ld documentation for more details.
-For linkers not supporting any verbatim modifiers (e.g. `link.exe` or `ld64`) the library name will be passed as is.
+`+as-needed` means that the library will be actually linked only if it satisfies some undefined symbols at the point at which it is specified on the command line, making it similar to static libraries in this regard.
-The default for this modifier is `-verbatim`.
+This modifier translates to `--as-needed` for ld-like linkers, and to `-dead_strip_dylibs` / `-needed_library` / `-needed_framework` for ld64.
+The modifier does nothing for linkers that don't support it (e.g. `link.exe`).
-This RFC changes the behavior of `raw-dylib` linking kind specified by [RFC 2627](https://github.com/rust-lang/rfcs/pull/2627). The `.dll` suffix (or other target-specified suffixes for other targets) is now added automatically.
-If your DLL doesn't have the `.dll` suffix, it can be specified with `+verbatim`.
+The default for this modifier is unclear, some targets currently specify it as `+as-needed`, some do not. We may want to try making `+as-needed` a default for all targets.
"##,
},
Lint {
- label: "native_link_modifiers_whole_archive",
- description: r##"# `native_link_modifiers_whole_archive`
+ label: "needs_panic_runtime",
+ description: r##"# `needs_panic_runtime`
-The tracking issue for this feature is: [#81490]
+The tracking issue for this feature is: [#32837]
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+[#32837]: https://github.com/rust-lang/rust/issues/32837
------------------------
+"##,
+ },
+ Lint {
+ label: "negative_bounds",
+ description: r##"# `negative_bounds`
-The `native_link_modifiers_whole_archive` feature allows you to use the `whole-archive` modifier.
-
-Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
-
-`+whole-archive` means that the static library is linked as a whole archive without throwing any object files away.
-
-This modifier translates to `--whole-archive` for `ld`-like linkers, to `/WHOLEARCHIVE` for `link.exe`, and to `-force_load` for `ld64`.
-The modifier does nothing for linkers that don't support it.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
-The default for this modifier is `-whole-archive`.
+------------------------
"##,
},
Lint {
@@ -3505,37 +6518,47 @@ This serves two purposes:
"##,
},
Lint {
- label: "coverage",
- description: r##"# `coverage`
+ label: "never_type",
+ description: r##"# `never_type`
-The tracking issue for this feature is: [#84605]
+The tracking issue for this feature is: [#35121]
-[#84605]: https://github.com/rust-lang/rust/issues/84605
+[#35121]: https://github.com/rust-lang/rust/issues/35121
----
+------------------------
+"##,
+ },
+ Lint {
+ label: "never_type_fallback",
+ description: r##"# `never_type_fallback`
-The `coverage` attribute can be used to selectively disable coverage
-instrumentation in an annotated function. This might be useful to:
+The tracking issue for this feature is: [#65992]
-- Avoid instrumentation overhead in a performance critical function
-- Avoid generating coverage for a function that is not meant to be executed,
- but still target 100% coverage for the rest of the program.
+[#65992]: https://github.com/rust-lang/rust/issues/65992
-## Example
+------------------------
+"##,
+ },
+ Lint {
+ label: "new_uninit",
+ description: r##"# `new_uninit`
-```rust
-#![feature(coverage)]
+The tracking issue for this feature is: [#63291]
-// `foo()` will get coverage instrumentation (by default)
-fn foo() {
- // ...
-}
+[#63291]: https://github.com/rust-lang/rust/issues/63291
-#[coverage(off)]
-fn bar() {
- // ...
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "no_core",
+ description: r##"# `no_core`
+
+The tracking issue for this feature is: [#29639]
+
+[#29639]: https://github.com/rust-lang/rust/issues/29639
+
+------------------------
"##,
},
Lint {
@@ -3572,6 +6595,319 @@ fn foo() {
"##,
},
Lint {
+ label: "non_exhaustive_omitted_patterns_lint",
+ description: r##"# `non_exhaustive_omitted_patterns_lint`
+
+The tracking issue for this feature is: [#89554]
+
+[#89554]: https://github.com/rust-lang/rust/issues/89554
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "non_lifetime_binders",
+ description: r##"# `non_lifetime_binders`
+
+The tracking issue for this feature is: [#108185]
+
+[#108185]: https://github.com/rust-lang/rust/issues/108185
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "nonzero_ops",
+ description: r##"# `nonzero_ops`
+
+The tracking issue for this feature is: [#84186]
+
+[#84186]: https://github.com/rust-lang/rust/issues/84186
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "noop_waker",
+ description: r##"# `noop_waker`
+
+The tracking issue for this feature is: [#98286]
+
+[#98286]: https://github.com/rust-lang/rust/issues/98286
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "num_midpoint",
+ description: r##"# `num_midpoint`
+
+The tracking issue for this feature is: [#110840]
+
+[#110840]: https://github.com/rust-lang/rust/issues/110840
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "numfmt",
+ description: r##"# `numfmt`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "object_safe_for_dispatch",
+ description: r##"# `object_safe_for_dispatch`
+
+The tracking issue for this feature is: [#43561]
+
+[#43561]: https://github.com/rust-lang/rust/issues/43561
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "offset_of",
+ description: r##"# `offset_of`
+
+The tracking issue for this feature is: [#106655]
+
+[#106655]: https://github.com/rust-lang/rust/issues/106655
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "omit_gdb_pretty_printer_section",
+ description: r##"# `omit_gdb_pretty_printer_section`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "once_cell_try",
+ description: r##"# `once_cell_try`
+
+The tracking issue for this feature is: [#109737]
+
+[#109737]: https://github.com/rust-lang/rust/issues/109737
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "one_sided_range",
+ description: r##"# `one_sided_range`
+
+The tracking issue for this feature is: [#69780]
+
+[#69780]: https://github.com/rust-lang/rust/issues/69780
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "optimize_attribute",
+ description: r##"# `optimize_attribute`
+
+The tracking issue for this feature is: [#54882]
+
+[#54882]: https://github.com/rust-lang/rust/issues/54882
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "option_get_or_insert_default",
+ description: r##"# `option_get_or_insert_default`
+
+The tracking issue for this feature is: [#82901]
+
+[#82901]: https://github.com/rust-lang/rust/issues/82901
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "option_take_if",
+ description: r##"# `option_take_if`
+
+The tracking issue for this feature is: [#98934]
+
+[#98934]: https://github.com/rust-lang/rust/issues/98934
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "option_zip",
+ description: r##"# `option_zip`
+
+The tracking issue for this feature is: [#70086]
+
+[#70086]: https://github.com/rust-lang/rust/issues/70086
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_abort",
+ description: r##"# `panic_abort`
+
+The tracking issue for this feature is: [#32837]
+
+[#32837]: https://github.com/rust-lang/rust/issues/32837
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_always_abort",
+ description: r##"# `panic_always_abort`
+
+The tracking issue for this feature is: [#84438]
+
+[#84438]: https://github.com/rust-lang/rust/issues/84438
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_backtrace_config",
+ description: r##"# `panic_backtrace_config`
+
+The tracking issue for this feature is: [#93346]
+
+[#93346]: https://github.com/rust-lang/rust/issues/93346
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_can_unwind",
+ description: r##"# `panic_can_unwind`
+
+The tracking issue for this feature is: [#92988]
+
+[#92988]: https://github.com/rust-lang/rust/issues/92988
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_info_message",
+ description: r##"# `panic_info_message`
+
+The tracking issue for this feature is: [#66745]
+
+[#66745]: https://github.com/rust-lang/rust/issues/66745
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_internals",
+ description: r##"# `panic_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_runtime",
+ description: r##"# `panic_runtime`
+
+The tracking issue for this feature is: [#32837]
+
+[#32837]: https://github.com/rust-lang/rust/issues/32837
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_unwind",
+ description: r##"# `panic_unwind`
+
+The tracking issue for this feature is: [#32837]
+
+[#32837]: https://github.com/rust-lang/rust/issues/32837
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_update_hook",
+ description: r##"# `panic_update_hook`
+
+The tracking issue for this feature is: [#92649]
+
+[#92649]: https://github.com/rust-lang/rust/issues/92649
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "path_file_prefix",
+ description: r##"# `path_file_prefix`
+
+The tracking issue for this feature is: [#86319]
+
+[#86319]: https://github.com/rust-lang/rust/issues/86319
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pattern",
+ description: r##"# `pattern`
+
+The tracking issue for this feature is: [#27721]
+
+[#27721]: https://github.com/rust-lang/rust/issues/27721
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "peer_credentials_unix_socket",
+ description: r##"# `peer_credentials_unix_socket`
+
+The tracking issue for this feature is: [#42839]
+
+[#42839]: https://github.com/rust-lang/rust/issues/42839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pin_deref_mut",
+ description: r##"# `pin_deref_mut`
+
+The tracking issue for this feature is: [#86918]
+
+[#86918]: https://github.com/rust-lang/rust/issues/86918
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "platform_intrinsics",
+ description: r##"# `platform_intrinsics`
+
+The tracking issue for this feature is: [#27731]
+
+[#27731]: https://github.com/rust-lang/rust/issues/27731
+
+------------------------
+"##,
+ },
+ Lint {
label: "plugin",
description: r##"# `plugin`
@@ -3608,24 +6944,24 @@ of a library.
Plugins can extend [Rust's lint
infrastructure](../../reference/attributes/diagnostics.md#lint-check-attributes) with
additional checks for code style, safety, etc. Now let's write a plugin
-[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui-fulldeps/auxiliary/lint-plugin-test.rs)
+[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui-fulldeps/plugin/auxiliary/lint-plugin-test.rs)
that warns about any item named `lintme`.
```rust,ignore (requires-stage-2)
-#![feature(box_syntax, rustc_private)]
+#![feature(rustc_private)]
extern crate rustc_ast;
// Load rustc as a plugin to get macros
extern crate rustc_driver;
-#[macro_use]
extern crate rustc_lint;
#[macro_use]
extern crate rustc_session;
-use rustc_driver::plugin::Registry;
-use rustc_lint::{EarlyContext, EarlyLintPass, LintArray, LintContext, LintPass};
use rustc_ast::ast;
+use rustc_driver::plugin::Registry;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+
declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
declare_lint_pass!(Pass => [TEST_LINT]);
@@ -3633,9 +6969,7 @@ declare_lint_pass!(Pass => [TEST_LINT]);
impl EarlyLintPass for Pass {
fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
if it.ident.name.as_str() == "lintme" {
- cx.lint(TEST_LINT, |lint| {
- lint.build("item is named 'lintme'").set_span(it.span).emit()
- });
+ cx.lint(TEST_LINT, "item is named 'lintme'", |lint| lint.set_span(it.span));
}
}
}
@@ -3643,7 +6977,7 @@ impl EarlyLintPass for Pass {
#[no_mangle]
fn __rustc_plugin_registrar(reg: &mut Registry) {
reg.lint_store.register_lints(&[&TEST_LINT]);
- reg.lint_store.register_early_pass(|| box Pass);
+ reg.lint_store.register_early_pass(|| Box::new(Pass));
}
```
@@ -3677,7 +7011,7 @@ The components of a lint plugin are:
Lint passes are syntax traversals, but they run at a late stage of compilation
where type information is available. `rustc`'s [built-in
-lints](https://github.com/rust-lang/rust/blob/master/src/librustc_session/lint/builtin.rs)
+lints](https://github.com/rust-lang/rust/blob/master/compiler/rustc_lint_defs/src/builtin.rs)
mostly use the same infrastructure as lint plugins, and provide examples of how
to access type information.
@@ -3692,6 +7026,88 @@ including those provided by plugins loaded by `foo.rs`.
"##,
},
Lint {
+ label: "pointer_byte_offsets",
+ description: r##"# `pointer_byte_offsets`
+
+The tracking issue for this feature is: [#96283]
+
+[#96283]: https://github.com/rust-lang/rust/issues/96283
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pointer_is_aligned",
+ description: r##"# `pointer_is_aligned`
+
+The tracking issue for this feature is: [#96284]
+
+[#96284]: https://github.com/rust-lang/rust/issues/96284
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pointer_like_trait",
+ description: r##"# `pointer_like_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "portable_simd",
+ description: r##"# `portable_simd`
+
+The tracking issue for this feature is: [#86656]
+
+[#86656]: https://github.com/rust-lang/rust/issues/86656
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "powerpc_target_feature",
+ description: r##"# `powerpc_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "precise_pointer_size_matching",
+ description: r##"# `precise_pointer_size_matching`
+
+The tracking issue for this feature is: [#56354]
+
+[#56354]: https://github.com/rust-lang/rust/issues/56354
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "prelude_2024",
+ description: r##"# `prelude_2024`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "prelude_import",
+ description: r##"# `prelude_import`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "print_internals",
description: r##"# `print_internals`
@@ -3701,6 +7117,123 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "proc_macro_byte_character",
+ description: r##"# `proc_macro_byte_character`
+
+The tracking issue for this feature is: [#115268]
+
+[#115268]: https://github.com/rust-lang/rust/issues/115268
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_def_site",
+ description: r##"# `proc_macro_def_site`
+
+The tracking issue for this feature is: [#54724]
+
+[#54724]: https://github.com/rust-lang/rust/issues/54724
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_diagnostic",
+ description: r##"# `proc_macro_diagnostic`
+
+The tracking issue for this feature is: [#54140]
+
+[#54140]: https://github.com/rust-lang/rust/issues/54140
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_expand",
+ description: r##"# `proc_macro_expand`
+
+The tracking issue for this feature is: [#90765]
+
+[#90765]: https://github.com/rust-lang/rust/issues/90765
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_hygiene",
+ description: r##"# `proc_macro_hygiene`
+
+The tracking issue for this feature is: [#54727]
+
+[#54727]: https://github.com/rust-lang/rust/issues/54727
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_internals",
+ description: r##"# `proc_macro_internals`
+
+The tracking issue for this feature is: [#27812]
+
+[#27812]: https://github.com/rust-lang/rust/issues/27812
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_quote",
+ description: r##"# `proc_macro_quote`
+
+The tracking issue for this feature is: [#54722]
+
+[#54722]: https://github.com/rust-lang/rust/issues/54722
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_span",
+ description: r##"# `proc_macro_span`
+
+The tracking issue for this feature is: [#54725]
+
+[#54725]: https://github.com/rust-lang/rust/issues/54725
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_tracked_env",
+ description: r##"# `proc_macro_tracked_env`
+
+The tracking issue for this feature is: [#99515]
+
+[#99515]: https://github.com/rust-lang/rust/issues/99515
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "process_exitcode_internals",
+ description: r##"# `process_exitcode_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "process_internals",
+ description: r##"# `process_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "profiler_runtime",
description: r##"# `profiler_runtime`
@@ -3719,41 +7252,204 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "raw_dylib",
- description: r##"# `raw_dylib`
+ label: "ptr_addr_eq",
+ description: r##"# `ptr_addr_eq`
-The tracking issue for this feature is: [#58713]
+The tracking issue for this feature is: [#116324]
-[#58713]: https://github.com/rust-lang/rust/issues/58713
+[#116324]: https://github.com/rust-lang/rust/issues/116324
------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_alignment_type",
+ description: r##"# `ptr_alignment_type`
-The `raw_dylib` feature allows you to link against the implementations of functions in an `extern`
-block without, on Windows, linking against an import library.
+The tracking issue for this feature is: [#102070]
-```rust,ignore (partial-example)
-#![feature(raw_dylib)]
+[#102070]: https://github.com/rust-lang/rust/issues/102070
-#[link(name="library", kind="raw-dylib")]
-extern {
- fn extern_function(x: i32);
-}
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_as_uninit",
+ description: r##"# `ptr_as_uninit`
-fn main() {
- unsafe {
- extern_function(14);
- }
-}
-```
+The tracking issue for this feature is: [#75402]
+
+[#75402]: https://github.com/rust-lang/rust/issues/75402
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_from_ref",
+ description: r##"# `ptr_from_ref`
+
+The tracking issue for this feature is: [#106116]
+
+[#106116]: https://github.com/rust-lang/rust/issues/106116
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_internals",
+ description: r##"# `ptr_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_mask",
+ description: r##"# `ptr_mask`
+
+The tracking issue for this feature is: [#98290]
+
+[#98290]: https://github.com/rust-lang/rust/issues/98290
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_metadata",
+ description: r##"# `ptr_metadata`
+
+The tracking issue for this feature is: [#81513]
+
+[#81513]: https://github.com/rust-lang/rust/issues/81513
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_sub_ptr",
+ description: r##"# `ptr_sub_ptr`
+
+The tracking issue for this feature is: [#95892]
+
+[#95892]: https://github.com/rust-lang/rust/issues/95892
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_to_from_bits",
+ description: r##"# `ptr_to_from_bits`
+
+The tracking issue for this feature is: [#91126]
+
+[#91126]: https://github.com/rust-lang/rust/issues/91126
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pub_crate_should_not_need_unstable_attr",
+ description: r##"# `pub_crate_should_not_need_unstable_attr`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_os_error_ty",
+ description: r##"# `raw_os_error_ty`
+
+The tracking issue for this feature is: [#107792]
+
+[#107792]: https://github.com/rust-lang/rust/issues/107792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_os_nonzero",
+ description: r##"# `raw_os_nonzero`
+
+The tracking issue for this feature is: [#82363]
+
+[#82363]: https://github.com/rust-lang/rust/issues/82363
-## Limitations
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_ref_op",
+ description: r##"# `raw_ref_op`
+
+The tracking issue for this feature is: [#64490]
+
+[#64490]: https://github.com/rust-lang/rust/issues/64490
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_slice_split",
+ description: r##"# `raw_slice_split`
+
+The tracking issue for this feature is: [#95595]
-Currently, this feature is only supported on `-windows-msvc` targets. Non-Windows platforms don't have import
-libraries, and an incompatibility between LLVM and the BFD linker means that it is not currently supported on
-`-windows-gnu` targets.
+[#95595]: https://github.com/rust-lang/rust/issues/95595
-On the `i686-pc-windows-msvc` target, this feature supports only the `cdecl`, `stdcall`, `system`, and `fastcall`
-calling conventions.
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_vec_internals",
+ description: r##"# `raw_vec_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "read_buf",
+ description: r##"# `read_buf`
+
+The tracking issue for this feature is: [#78485]
+
+[#78485]: https://github.com/rust-lang/rust/issues/78485
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ready_into_inner",
+ description: r##"# `ready_into_inner`
+
+The tracking issue for this feature is: [#101196]
+
+[#101196]: https://github.com/rust-lang/rust/issues/101196
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "receiver_trait",
+ description: r##"# `receiver_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "register_tool",
+ description: r##"# `register_tool`
+
+The tracking issue for this feature is: [#66079]
+
+[#66079]: https://github.com/rust-lang/rust/issues/66079
+
+------------------------
"##,
},
Lint {
@@ -3779,6 +7475,103 @@ enum Foo {
"##,
},
Lint {
+ label: "repr_simd",
+ description: r##"# `repr_simd`
+
+The tracking issue for this feature is: [#27731]
+
+[#27731]: https://github.com/rust-lang/rust/issues/27731
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "restricted_std",
+ description: r##"# `restricted_std`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "result_flattening",
+ description: r##"# `result_flattening`
+
+The tracking issue for this feature is: [#70142]
+
+[#70142]: https://github.com/rust-lang/rust/issues/70142
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "result_option_inspect",
+ description: r##"# `result_option_inspect`
+
+The tracking issue for this feature is: [#91345]
+
+[#91345]: https://github.com/rust-lang/rust/issues/91345
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "return_position_impl_trait_in_trait",
+ description: r##"# `return_position_impl_trait_in_trait`
+
+The tracking issue for this feature is: [#91611]
+
+[#91611]: https://github.com/rust-lang/rust/issues/91611
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "return_type_notation",
+ description: r##"# `return_type_notation`
+
+The tracking issue for this feature is: [#109417]
+
+[#109417]: https://github.com/rust-lang/rust/issues/109417
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "riscv_target_feature",
+ description: r##"# `riscv_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "round_char_boundary",
+ description: r##"# `round_char_boundary`
+
+The tracking issue for this feature is: [#93743]
+
+[#93743]: https://github.com/rust-lang/rust/issues/93743
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "round_ties_even",
+ description: r##"# `round_ties_even`
+
+The tracking issue for this feature is: [#96710]
+
+[#96710]: https://github.com/rust-lang/rust/issues/96710
+
+------------------------
+"##,
+ },
+ Lint {
label: "rt",
description: r##"# `rt`
@@ -3788,6 +7581,39 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "rtm_target_feature",
+ description: r##"# `rtm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rust_cold_cc",
+ description: r##"# `rust_cold_cc`
+
+The tracking issue for this feature is: [#97544]
+
+[#97544]: https://github.com/rust-lang/rust/issues/97544
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustc_allow_const_fn_unstable",
+ description: r##"# `rustc_allow_const_fn_unstable`
+
+The tracking issue for this feature is: [#69399]
+
+[#69399]: https://github.com/rust-lang/rust/issues/69399
+
+------------------------
+"##,
+ },
+ Lint {
label: "rustc_attrs",
description: r##"# `rustc_attrs`
@@ -3845,6 +7671,326 @@ error: aborting due to 2 previous errors
"##,
},
Lint {
+ label: "rustc_private",
+ description: r##"# `rustc_private`
+
+The tracking issue for this feature is: [#27812]
+
+[#27812]: https://github.com/rust-lang/rust/issues/27812
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustdoc_internals",
+ description: r##"# `rustdoc_internals`
+
+The tracking issue for this feature is: [#90418]
+
+[#90418]: https://github.com/rust-lang/rust/issues/90418
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustdoc_missing_doc_code_examples",
+ description: r##"# `rustdoc_missing_doc_code_examples`
+
+The tracking issue for this feature is: [#101730]
+
+[#101730]: https://github.com/rust-lang/rust/issues/101730
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sealed",
+ description: r##"# `sealed`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "seek_stream_len",
+ description: r##"# `seek_stream_len`
+
+The tracking issue for this feature is: [#59359]
+
+[#59359]: https://github.com/rust-lang/rust/issues/59359
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "set_ptr_value",
+ description: r##"# `set_ptr_value`
+
+The tracking issue for this feature is: [#75091]
+
+[#75091]: https://github.com/rust-lang/rust/issues/75091
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "setgroups",
+ description: r##"# `setgroups`
+
+The tracking issue for this feature is: [#90747]
+
+[#90747]: https://github.com/rust-lang/rust/issues/90747
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sgx_platform",
+ description: r##"# `sgx_platform`
+
+The tracking issue for this feature is: [#56975]
+
+[#56975]: https://github.com/rust-lang/rust/issues/56975
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "simd_ffi",
+ description: r##"# `simd_ffi`
+
+The tracking issue for this feature is: [#27731]
+
+[#27731]: https://github.com/rust-lang/rust/issues/27731
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sized_type_properties",
+ description: r##"# `sized_type_properties`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_as_chunks",
+ description: r##"# `slice_as_chunks`
+
+The tracking issue for this feature is: [#74985]
+
+[#74985]: https://github.com/rust-lang/rust/issues/74985
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_concat_ext",
+ description: r##"# `slice_concat_ext`
+
+The tracking issue for this feature is: [#27747]
+
+[#27747]: https://github.com/rust-lang/rust/issues/27747
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_concat_trait",
+ description: r##"# `slice_concat_trait`
+
+The tracking issue for this feature is: [#27747]
+
+[#27747]: https://github.com/rust-lang/rust/issues/27747
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_first_last_chunk",
+ description: r##"# `slice_first_last_chunk`
+
+The tracking issue for this feature is: [#111774]
+
+[#111774]: https://github.com/rust-lang/rust/issues/111774
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_flatten",
+ description: r##"# `slice_flatten`
+
+The tracking issue for this feature is: [#95629]
+
+[#95629]: https://github.com/rust-lang/rust/issues/95629
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_from_ptr_range",
+ description: r##"# `slice_from_ptr_range`
+
+The tracking issue for this feature is: [#89792]
+
+[#89792]: https://github.com/rust-lang/rust/issues/89792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_group_by",
+ description: r##"# `slice_group_by`
+
+The tracking issue for this feature is: [#80552]
+
+[#80552]: https://github.com/rust-lang/rust/issues/80552
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_index_methods",
+ description: r##"# `slice_index_methods`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_internals",
+ description: r##"# `slice_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_iter_mut_as_mut_slice",
+ description: r##"# `slice_iter_mut_as_mut_slice`
+
+The tracking issue for this feature is: [#93079]
+
+[#93079]: https://github.com/rust-lang/rust/issues/93079
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_partition_dedup",
+ description: r##"# `slice_partition_dedup`
+
+The tracking issue for this feature is: [#54279]
+
+[#54279]: https://github.com/rust-lang/rust/issues/54279
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_pattern",
+ description: r##"# `slice_pattern`
+
+The tracking issue for this feature is: [#56345]
+
+[#56345]: https://github.com/rust-lang/rust/issues/56345
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_ptr_get",
+ description: r##"# `slice_ptr_get`
+
+The tracking issue for this feature is: [#74265]
+
+[#74265]: https://github.com/rust-lang/rust/issues/74265
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_ptr_len",
+ description: r##"# `slice_ptr_len`
+
+The tracking issue for this feature is: [#71146]
+
+[#71146]: https://github.com/rust-lang/rust/issues/71146
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_range",
+ description: r##"# `slice_range`
+
+The tracking issue for this feature is: [#76393]
+
+[#76393]: https://github.com/rust-lang/rust/issues/76393
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_split_at_unchecked",
+ description: r##"# `slice_split_at_unchecked`
+
+The tracking issue for this feature is: [#76014]
+
+[#76014]: https://github.com/rust-lang/rust/issues/76014
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_swap_unchecked",
+ description: r##"# `slice_swap_unchecked`
+
+The tracking issue for this feature is: [#88539]
+
+[#88539]: https://github.com/rust-lang/rust/issues/88539
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_take",
+ description: r##"# `slice_take`
+
+The tracking issue for this feature is: [#62280]
+
+[#62280]: https://github.com/rust-lang/rust/issues/62280
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "solid_ext",
+ description: r##"# `solid_ext`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sort_floats",
+ description: r##"# `sort_floats`
+
+The tracking issue for this feature is: [#93396]
+
+[#93396]: https://github.com/rust-lang/rust/issues/93396
+
+------------------------
+"##,
+ },
+ Lint {
label: "sort_internals",
description: r##"# `sort_internals`
@@ -3854,6 +8000,184 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "spec_option_partial_eq",
+ description: r##"# `spec_option_partial_eq`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "specialization",
+ description: r##"# `specialization`
+
+The tracking issue for this feature is: [#31844]
+
+[#31844]: https://github.com/rust-lang/rust/issues/31844
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "split_array",
+ description: r##"# `split_array`
+
+The tracking issue for this feature is: [#90091]
+
+[#90091]: https://github.com/rust-lang/rust/issues/90091
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "split_as_slice",
+ description: r##"# `split_as_slice`
+
+The tracking issue for this feature is: [#96137]
+
+[#96137]: https://github.com/rust-lang/rust/issues/96137
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sse4a_target_feature",
+ description: r##"# `sse4a_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "staged_api",
+ description: r##"# `staged_api`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "start",
+ description: r##"# `start`
+
+The tracking issue for this feature is: [#29633]
+
+[#29633]: https://github.com/rust-lang/rust/issues/29633
+
+------------------------
+
+Allows you to mark a function as the entry point of the executable, which is
+necessary in `#![no_std]` environments.
+
+The function marked `#[start]` is passed the command line parameters in the same
+format as the C main function (aside from the integer types being used).
+It has to be non-generic and have the following signature:
+
+```rust,ignore (only-for-syntax-highlight)
+# let _:
+fn(isize, *const *const u8) -> isize
+# ;
+```
+
+This feature should not be confused with the `start` *lang item* which is
+defined by the `std` crate and is written `#[lang = "start"]`.
+
+## Usage together with the `std` crate
+
+`#[start]` can be used in combination with the `std` crate, in which case the
+normal `main` function (which would get called from the `std` crate) won't be
+used as an entry point.
+The initialization code in `std` will be skipped this way.
+
+Example:
+
+```rust
+#![feature(start)]
+
+#[start]
+fn start(_argc: isize, _argv: *const *const u8) -> isize {
+ 0
+}
+```
+
+Unwinding the stack past the `#[start]` function is currently considered
+Undefined Behavior (for any unwinding implementation):
+
+```rust,ignore (UB)
+#![feature(start)]
+
+#[start]
+fn start(_argc: isize, _argv: *const *const u8) -> isize {
+ std::panic::catch_unwind(|| {
+ panic!(); // panic safely gets caught or safely aborts execution
+ });
+
+ panic!(); // UB!
+
+ 0
+}
+```
+"##,
+ },
+ Lint {
+ label: "std_internals",
+ description: r##"# `std_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "stdio_makes_pipe",
+ description: r##"# `stdio_makes_pipe`
+
+The tracking issue for this feature is: [#98288]
+
+[#98288]: https://github.com/rust-lang/rust/issues/98288
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "stdsimd",
+ description: r##"# `stdsimd`
+
+The tracking issue for this feature is: [#48556]
+
+[#48556]: https://github.com/rust-lang/rust/issues/48556
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "step_trait",
+ description: r##"# `step_trait`
+
+The tracking issue for this feature is: [#42168]
+
+[#42168]: https://github.com/rust-lang/rust/issues/42168
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "stmt_expr_attributes",
+ description: r##"# `stmt_expr_attributes`
+
+The tracking issue for this feature is: [#15701]
+
+[#15701]: https://github.com/rust-lang/rust/issues/15701
+
+------------------------
+"##,
+ },
+ Lint {
label: "str_internals",
description: r##"# `str_internals`
@@ -3863,6 +8187,186 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "str_split_inclusive_remainder",
+ description: r##"# `str_split_inclusive_remainder`
+
+The tracking issue for this feature is: [#77998]
+
+[#77998]: https://github.com/rust-lang/rust/issues/77998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "str_split_remainder",
+ description: r##"# `str_split_remainder`
+
+The tracking issue for this feature is: [#77998]
+
+[#77998]: https://github.com/rust-lang/rust/issues/77998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "str_split_whitespace_remainder",
+ description: r##"# `str_split_whitespace_remainder`
+
+The tracking issue for this feature is: [#77998]
+
+[#77998]: https://github.com/rust-lang/rust/issues/77998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "strict_provenance",
+ description: r##"# `strict_provenance`
+
+The tracking issue for this feature is: [#95228]
+
+[#95228]: https://github.com/rust-lang/rust/issues/95228
+-----
+
+The `strict_provenance` feature allows to enable the `fuzzy_provenance_casts` and `lossy_provenance_casts` lints.
+These lint on casts between integers and pointers, that are recommended against or invalid in the strict provenance model.
+The same feature gate is also used for the experimental strict provenance API in `std` (actually `core`).
+
+## Example
+
+```rust
+#![feature(strict_provenance)]
+#![warn(fuzzy_provenance_casts)]
+
+fn main() {
+ let _dangling = 16_usize as *const u8;
+ //~^ WARNING: strict provenance disallows casting integer `usize` to pointer `*const u8`
+}
+```
+"##,
+ },
+ Lint {
+ label: "strict_provenance_atomic_ptr",
+ description: r##"# `strict_provenance_atomic_ptr`
+
+The tracking issue for this feature is: [#99108]
+
+[#99108]: https://github.com/rust-lang/rust/issues/99108
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "string_deref_patterns",
+ description: r##"# `string_deref_patterns`
+
+The tracking issue for this feature is: [#87121]
+
+[#87121]: https://github.com/rust-lang/rust/issues/87121
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "string_extend_from_within",
+ description: r##"# `string_extend_from_within`
+
+The tracking issue for this feature is: [#103806]
+
+[#103806]: https://github.com/rust-lang/rust/issues/103806
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "string_remove_matches",
+ description: r##"# `string_remove_matches`
+
+The tracking issue for this feature is: [#72826]
+
+[#72826]: https://github.com/rust-lang/rust/issues/72826
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "structural_match",
+ description: r##"# `structural_match`
+
+The tracking issue for this feature is: [#31434]
+
+[#31434]: https://github.com/rust-lang/rust/issues/31434
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sync_unsafe_cell",
+ description: r##"# `sync_unsafe_cell`
+
+The tracking issue for this feature is: [#95439]
+
+[#95439]: https://github.com/rust-lang/rust/issues/95439
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "target_feature_11",
+ description: r##"# `target_feature_11`
+
+The tracking issue for this feature is: [#69098]
+
+[#69098]: https://github.com/rust-lang/rust/issues/69098
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tbm_target_feature",
+ description: r##"# `tbm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tcp_linger",
+ description: r##"# `tcp_linger`
+
+The tracking issue for this feature is: [#88494]
+
+[#88494]: https://github.com/rust-lang/rust/issues/88494
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tcp_quickack",
+ description: r##"# `tcp_quickack`
+
+The tracking issue for this feature is: [#96256]
+
+[#96256]: https://github.com/rust-lang/rust/issues/96256
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tcplistener_into_incoming",
+ description: r##"# `tcplistener_into_incoming`
+
+The tracking issue for this feature is: [#88339]
+
+[#88339]: https://github.com/rust-lang/rust/issues/88339
+
+------------------------
+"##,
+ },
+ Lint {
label: "test",
description: r##"# `test`
@@ -4025,6 +8529,57 @@ even when using either of the above.
"##,
},
Lint {
+ label: "test_2018_feature",
+ description: r##"# `test_2018_feature`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "test_unstable_lint",
+ description: r##"# `test_unstable_lint`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thin_box",
+ description: r##"# `thin_box`
+
+The tracking issue for this feature is: [#92791]
+
+[#92791]: https://github.com/rust-lang/rust/issues/92791
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thread_id_value",
+ description: r##"# `thread_id_value`
+
+The tracking issue for this feature is: [#67939]
+
+[#67939]: https://github.com/rust-lang/rust/issues/67939
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thread_local",
+ description: r##"# `thread_local`
+
+The tracking issue for this feature is: [#29594]
+
+[#29594]: https://github.com/rust-lang/rust/issues/29594
+
+------------------------
+"##,
+ },
+ Lint {
label: "thread_local_internals",
description: r##"# `thread_local_internals`
@@ -4034,6 +8589,28 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "thread_sleep_until",
+ description: r##"# `thread_sleep_until`
+
+The tracking issue for this feature is: [#113752]
+
+[#113752]: https://github.com/rust-lang/rust/issues/113752
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thread_spawn_unchecked",
+ description: r##"# `thread_spawn_unchecked`
+
+The tracking issue for this feature is: [#55132]
+
+[#55132]: https://github.com/rust-lang/rust/issues/55132
+
+------------------------
+"##,
+ },
+ Lint {
label: "trace_macros",
description: r##"# `trace_macros`
@@ -4077,6 +8654,17 @@ note: trace_macro
"##,
},
Lint {
+ label: "track_path",
+ description: r##"# `track_path`
+
+The tracking issue for this feature is: [#99515]
+
+[#99515]: https://github.com/rust-lang/rust/issues/99515
+
+------------------------
+"##,
+ },
+ Lint {
label: "trait_alias",
description: r##"# `trait_alias`
@@ -4146,6 +8734,28 @@ let foo: &dyn Foo = bar;
"##,
},
Lint {
+ label: "transmutability",
+ description: r##"# `transmutability`
+
+The tracking issue for this feature is: [#99571]
+
+[#99571]: https://github.com/rust-lang/rust/issues/99571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "transmute_generic_consts",
+ description: r##"# `transmute_generic_consts`
+
+The tracking issue for this feature is: [#109929]
+
+[#109929]: https://github.com/rust-lang/rust/issues/109929
+
+------------------------
+"##,
+ },
+ Lint {
label: "transparent_unions",
description: r##"# `transparent_unions`
@@ -4233,6 +8843,59 @@ their application of these optimizations.
"##,
},
Lint {
+ label: "trivial_bounds",
+ description: r##"# `trivial_bounds`
+
+The tracking issue for this feature is: [#48214]
+
+[#48214]: https://github.com/rust-lang/rust/issues/48214
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_len",
+ description: r##"# `trusted_len`
+
+The tracking issue for this feature is: [#37572]
+
+[#37572]: https://github.com/rust-lang/rust/issues/37572
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_len_next_unchecked",
+ description: r##"# `trusted_len_next_unchecked`
+
+The tracking issue for this feature is: [#37572]
+
+[#37572]: https://github.com/rust-lang/rust/issues/37572
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_random_access",
+ description: r##"# `trusted_random_access`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_step",
+ description: r##"# `trusted_step`
+
+The tracking issue for this feature is: [#85731]
+
+[#85731]: https://github.com/rust-lang/rust/issues/85731
+
+------------------------
+"##,
+ },
+ Lint {
label: "try_blocks",
description: r##"# `try_blocks`
@@ -4267,6 +8930,92 @@ assert!(result.is_err());
"##,
},
Lint {
+ label: "try_find",
+ description: r##"# `try_find`
+
+The tracking issue for this feature is: [#63178]
+
+[#63178]: https://github.com/rust-lang/rust/issues/63178
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_reserve_kind",
+ description: r##"# `try_reserve_kind`
+
+The tracking issue for this feature is: [#48043]
+
+[#48043]: https://github.com/rust-lang/rust/issues/48043
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_trait_v2",
+ description: r##"# `try_trait_v2`
+
+The tracking issue for this feature is: [#84277]
+
+[#84277]: https://github.com/rust-lang/rust/issues/84277
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_trait_v2_residual",
+ description: r##"# `try_trait_v2_residual`
+
+The tracking issue for this feature is: [#91285]
+
+[#91285]: https://github.com/rust-lang/rust/issues/91285
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_trait_v2_yeet",
+ description: r##"# `try_trait_v2_yeet`
+
+The tracking issue for this feature is: [#96374]
+
+[#96374]: https://github.com/rust-lang/rust/issues/96374
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tuple_trait",
+ description: r##"# `tuple_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "type_alias_impl_trait",
+ description: r##"# `type_alias_impl_trait`
+
+The tracking issue for this feature is: [#63063]
+
+[#63063]: https://github.com/rust-lang/rust/issues/63063
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "type_ascription",
+ description: r##"# `type_ascription`
+
+The tracking issue for this feature is: [#23416]
+
+[#23416]: https://github.com/rust-lang/rust/issues/23416
+
+------------------------
+"##,
+ },
+ Lint {
label: "type_changing_struct_update",
description: r##"# `type_changing_struct_update`
@@ -4304,6 +9053,39 @@ fn main () {
"##,
},
Lint {
+ label: "type_name_of_val",
+ description: r##"# `type_name_of_val`
+
+The tracking issue for this feature is: [#66359]
+
+[#66359]: https://github.com/rust-lang/rust/issues/66359
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "type_privacy_lints",
+ description: r##"# `type_privacy_lints`
+
+The tracking issue for this feature is: [#48054]
+
+[#48054]: https://github.com/rust-lang/rust/issues/48054
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "uefi_std",
+ description: r##"# `uefi_std`
+
+The tracking issue for this feature is: [#100499]
+
+[#100499]: https://github.com/rust-lang/rust/issues/100499
+
+------------------------
+"##,
+ },
+ Lint {
label: "unboxed_closures",
description: r##"# `unboxed_closures`
@@ -4319,7 +9101,7 @@ The `unboxed_closures` feature allows you to write functions using the `"rust-ca
required for implementing the [`Fn*`] family of traits. `"rust-call"` functions must have
exactly one (non self) argument, a tuple representing the argument list.
-[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+[`Fn*`]: ../../std/ops/trait.Fn.html
```rust
#![feature(unboxed_closures)]
@@ -4333,6 +9115,200 @@ fn main() {}
"##,
},
Lint {
+ label: "unchecked_math",
+ description: r##"# `unchecked_math`
+
+The tracking issue for this feature is: [#85122]
+
+[#85122]: https://github.com/rust-lang/rust/issues/85122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unicode_internals",
+ description: r##"# `unicode_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unique_rc_arc",
+ description: r##"# `unique_rc_arc`
+
+The tracking issue for this feature is: [#112566]
+
+[#112566]: https://github.com/rust-lang/rust/issues/112566
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_file_vectored_at",
+ description: r##"# `unix_file_vectored_at`
+
+The tracking issue for this feature is: [#89517]
+
+[#89517]: https://github.com/rust-lang/rust/issues/89517
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_set_mark",
+ description: r##"# `unix_set_mark`
+
+The tracking issue for this feature is: [#96467]
+
+[#96467]: https://github.com/rust-lang/rust/issues/96467
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_sigpipe",
+ description: r##"# `unix_sigpipe`
+
+The tracking issue for this feature is: [#97889]
+
+[#97889]: https://github.com/rust-lang/rust/issues/97889
+
+---
+
+The `#[unix_sigpipe = "..."]` attribute on `fn main()` can be used to specify how libstd shall setup `SIGPIPE` on Unix platforms before invoking `fn main()`. This attribute is ignored on non-Unix targets. There are three variants:
+* `#[unix_sigpipe = "inherit"]`
+* `#[unix_sigpipe = "sig_dfl"]`
+* `#[unix_sigpipe = "sig_ign"]`
+
+## `#[unix_sigpipe = "inherit"]`
+
+Leave `SIGPIPE` untouched before entering `fn main()`. Unless the parent process has changed the default `SIGPIPE` handler from `SIG_DFL` to something else, this will behave the same as `#[unix_sigpipe = "sig_dfl"]`.
+
+## `#[unix_sigpipe = "sig_dfl"]`
+
+Set the `SIGPIPE` handler to `SIG_DFL`. This will result in your program getting killed if it tries to write to a closed pipe. This is normally what you want if your program produces textual output.
+
+### Example
+
+```rust,no_run
+#![feature(unix_sigpipe)]
+#[unix_sigpipe = "sig_dfl"]
+fn main() { loop { println!("hello world"); } }
+```
+
+```bash
+% ./main | head -n 1
+hello world
+```
+
+## `#[unix_sigpipe = "sig_ign"]`
+
+Set the `SIGPIPE` handler to `SIG_IGN` before invoking `fn main()`. This will result in `ErrorKind::BrokenPipe` errors if you program tries to write to a closed pipe. This is normally what you want if you for example write socket servers, socket clients, or pipe peers.
+
+This is what libstd has done by default since 2014. (However, see the note on child processes below.)
+
+### Example
+
+```rust,no_run
+#![feature(unix_sigpipe)]
+#[unix_sigpipe = "sig_ign"]
+fn main() { loop { println!("hello world"); } }
+```
+
+```bash
+% ./main | head -n 1
+hello world
+thread 'main' panicked at 'failed printing to stdout: Broken pipe (os error 32)', library/std/src/io/stdio.rs:1016:9
+note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
+```
+
+### Note on child processes
+
+When spawning child processes, the legacy Rust behavior if `#[unix_sigpipe]` is not specified is to
+reset `SIGPIPE` to `SIG_DFL`.
+
+If `#[unix_sigpipe = "..."]` is specified, no matter what its value is, the signal disposition of
+`SIGPIPE` is no longer reset. This means that the child inherits the parent's `SIGPIPE` behavior.
+"##,
+ },
+ Lint {
+ label: "unix_socket_ancillary_data",
+ description: r##"# `unix_socket_ancillary_data`
+
+The tracking issue for this feature is: [#76915]
+
+[#76915]: https://github.com/rust-lang/rust/issues/76915
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_socket_peek",
+ description: r##"# `unix_socket_peek`
+
+The tracking issue for this feature is: [#76923]
+
+[#76923]: https://github.com/rust-lang/rust/issues/76923
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unnamed_fields",
+ description: r##"# `unnamed_fields`
+
+The tracking issue for this feature is: [#49804]
+
+[#49804]: https://github.com/rust-lang/rust/issues/49804
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsafe_cell_from_mut",
+ description: r##"# `unsafe_cell_from_mut`
+
+The tracking issue for this feature is: [#111645]
+
+[#111645]: https://github.com/rust-lang/rust/issues/111645
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsafe_pin_internals",
+ description: r##"# `unsafe_pin_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsize",
+ description: r##"# `unsize`
+
+The tracking issue for this feature is: [#18598]
+
+[#18598]: https://github.com/rust-lang/rust/issues/18598
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsized_fn_params",
+ description: r##"# `unsized_fn_params`
+
+The tracking issue for this feature is: [#48055]
+
+[#48055]: https://github.com/rust-lang/rust/issues/48055
+
+------------------------
+"##,
+ },
+ Lint {
label: "unsized_locals",
description: r##"# `unsized_locals`
@@ -4543,6 +9519,17 @@ fn main() {
"##,
},
Lint {
+ label: "unwrap_infallible",
+ description: r##"# `unwrap_infallible`
+
+The tracking issue for this feature is: [#61695]
+
+[#61695]: https://github.com/rust-lang/rust/issues/61695
+
+------------------------
+"##,
+ },
+ Lint {
label: "update_panic_count",
description: r##"# `update_panic_count`
@@ -4552,6 +9539,149 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "used_with_arg",
+ description: r##"# `used_with_arg`
+
+The tracking issue for this feature is: [#93798]
+
+[#93798]: https://github.com/rust-lang/rust/issues/93798
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "utf16_extra",
+ description: r##"# `utf16_extra`
+
+The tracking issue for this feature is: [#94919]
+
+[#94919]: https://github.com/rust-lang/rust/issues/94919
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "utf16_extra_const",
+ description: r##"# `utf16_extra_const`
+
+The tracking issue for this feature is: [#94919]
+
+[#94919]: https://github.com/rust-lang/rust/issues/94919
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "utf8_chunks",
+ description: r##"# `utf8_chunks`
+
+The tracking issue for this feature is: [#99543]
+
+[#99543]: https://github.com/rust-lang/rust/issues/99543
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "variant_count",
+ description: r##"# `variant_count`
+
+The tracking issue for this feature is: [#73662]
+
+[#73662]: https://github.com/rust-lang/rust/issues/73662
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "vec_into_raw_parts",
+ description: r##"# `vec_into_raw_parts`
+
+The tracking issue for this feature is: [#65816]
+
+[#65816]: https://github.com/rust-lang/rust/issues/65816
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "vec_push_within_capacity",
+ description: r##"# `vec_push_within_capacity`
+
+The tracking issue for this feature is: [#100486]
+
+[#100486]: https://github.com/rust-lang/rust/issues/100486
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "vec_split_at_spare",
+ description: r##"# `vec_split_at_spare`
+
+The tracking issue for this feature is: [#81944]
+
+[#81944]: https://github.com/rust-lang/rust/issues/81944
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "waker_getters",
+ description: r##"# `waker_getters`
+
+The tracking issue for this feature is: [#87021]
+
+[#87021]: https://github.com/rust-lang/rust/issues/87021
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wasi_ext",
+ description: r##"# `wasi_ext`
+
+The tracking issue for this feature is: [#71213]
+
+[#71213]: https://github.com/rust-lang/rust/issues/71213
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wasm_abi",
+ description: r##"# `wasm_abi`
+
+The tracking issue for this feature is: [#83788]
+
+[#83788]: https://github.com/rust-lang/rust/issues/83788
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wasm_target_feature",
+ description: r##"# `wasm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_by_handle",
+ description: r##"# `windows_by_handle`
+
+The tracking issue for this feature is: [#63010]
+
+[#63010]: https://github.com/rust-lang/rust/issues/63010
+
+------------------------
+"##,
+ },
+ Lint {
label: "windows_c",
description: r##"# `windows_c`
@@ -4579,6 +9709,59 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "windows_process_exit_code_from",
+ description: r##"# `windows_process_exit_code_from`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_async_pipes",
+ description: r##"# `windows_process_extensions_async_pipes`
+
+The tracking issue for this feature is: [#98289]
+
+[#98289]: https://github.com/rust-lang/rust/issues/98289
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_force_quotes",
+ description: r##"# `windows_process_extensions_force_quotes`
+
+The tracking issue for this feature is: [#82227]
+
+[#82227]: https://github.com/rust-lang/rust/issues/82227
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_main_thread_handle",
+ description: r##"# `windows_process_extensions_main_thread_handle`
+
+The tracking issue for this feature is: [#96723]
+
+[#96723]: https://github.com/rust-lang/rust/issues/96723
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_raw_attribute",
+ description: r##"# `windows_process_extensions_raw_attribute`
+
+The tracking issue for this feature is: [#114854]
+
+[#114854]: https://github.com/rust-lang/rust/issues/114854
+
+------------------------
+"##,
+ },
+ Lint {
label: "windows_stdio",
description: r##"# `windows_stdio`
@@ -4587,10 +9770,95 @@ This feature is internal to the Rust compiler and is not intended for general us
------------------------
"##,
},
+ Lint {
+ label: "with_negative_coherence",
+ description: r##"# `with_negative_coherence`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wrapping_int_impl",
+ description: r##"# `wrapping_int_impl`
+
+The tracking issue for this feature is: [#32463]
+
+[#32463]: https://github.com/rust-lang/rust/issues/32463
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wrapping_next_power_of_two",
+ description: r##"# `wrapping_next_power_of_two`
+
+The tracking issue for this feature is: [#32463]
+
+[#32463]: https://github.com/rust-lang/rust/issues/32463
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "write_all_vectored",
+ description: r##"# `write_all_vectored`
+
+The tracking issue for this feature is: [#70436]
+
+[#70436]: https://github.com/rust-lang/rust/issues/70436
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "yeet_desugar_details",
+ description: r##"# `yeet_desugar_details`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "yeet_expr",
+ description: r##"# `yeet_expr`
+
+The tracking issue for this feature is: [#96373]
+
+[#96373]: https://github.com/rust-lang/rust/issues/96373
+
+------------------------
+
+The `yeet_expr` feature adds support for `do yeet` expressions,
+which can be used to early-exit from a function or `try` block.
+
+These are highly experimental, thus the placeholder syntax.
+
+```rust,edition2021
+#![feature(yeet_expr)]
+
+fn foo() -> Result<String, i32> {
+ do yeet 4;
+}
+assert_eq!(foo(), Err(4));
+
+fn bar() -> Option<String> {
+ do yeet;
+}
+assert_eq!(bar(), None);
+```
+"##,
+ },
];
pub const CLIPPY_LINTS: &[Lint] = &[
Lint {
+ label: "clippy::absolute_paths",
+ description: r##"Checks for usage of items through absolute paths, like `std::env::current_dir`."##,
+ },
+ Lint {
label: "clippy::absurd_extreme_comparisons",
description: r##"Checks for comparisons where one side of the relation is
either the minimum or maximum value for its type and warns if it involves a
@@ -4598,6 +9866,33 @@ case that is always true or always false. Only integer and boolean types are
checked."##,
},
Lint {
+ label: "clippy::alloc_instead_of_core",
+ description: r##"Finds items imported through `alloc` when available through `core`."##,
+ },
+ Lint {
+ label: "clippy::allow_attributes",
+ description: r##"Checks for usage of the `#[allow]` attribute and suggests replacing it with
+the `#[expect]` (See [RFC 2383](https://rust-lang.github.io/rfcs/2383-lint-reasons.html))
+
+The expect attribute is still unstable and requires the `lint_reasons`
+on nightly. It can be enabled by adding `#![feature(lint_reasons)]` to
+the crate root.
+
+This lint only warns outer attributes (`#[allow]`), as inner attributes
+(`#![allow]`) are usually used to enable or disable lints on a global scale."##,
+ },
+ Lint {
+ label: "clippy::allow_attributes_without_reason",
+ description: r##"Checks for attributes that allow lints without a reason.
+
+(This requires the `lint_reasons` feature)"##,
+ },
+ Lint {
+ label: "clippy::almost_complete_range",
+ description: r##"Checks for ranges which almost include the entire range of letters from 'a' to 'z'
+or digits from '0' to '9', but don't because they're a half open range."##,
+ },
+ Lint {
label: "clippy::almost_swapped",
description: r##"Checks for `foo = bar; bar = foo` sequences."##,
},
@@ -4611,22 +9906,50 @@ or
respectively, suggesting to use the predefined constant."##,
},
Lint {
+ label: "clippy::arc_with_non_send_sync",
+ description: r##".
+This lint warns when you use `Arc` with a type that does not implement `Send` or `Sync`."##,
+ },
+ Lint {
+ label: "clippy::arithmetic_side_effects",
+ description: r##"Checks any kind of arithmetic operation of any type.
+
+Operators like `+`, `-`, `*` or `<<` are usually capable of overflowing according to the [Rust
+Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
+or can panic (`/`, `%`).
+
+Known safe built-in types like `Wrapping` or `Saturating`, floats, operations in constant
+environments, allowed types and non-constant operations that won't overflow are ignored."##,
+ },
+ Lint {
label: "clippy::as_conversions",
description: r##"Checks for usage of `as` conversions.
Note that this lint is specialized in linting *every single* use of `as`
regardless of whether good alternatives exist or not.
If you want more precise lints for `as`, please consider using these separate lints:
-`unnecessary_cast`, `cast_lossless/possible_truncation/possible_wrap/precision_loss/sign_loss`,
+`unnecessary_cast`, `cast_lossless/cast_possible_truncation/cast_possible_wrap/cast_precision_loss/cast_sign_loss`,
`fn_to_numeric_cast(_with_truncation)`, `char_lit_as_u8`, `ref_to_mut` and `ptr_as_ptr`.
There is a good explanation the reason why this lint should work in this way and how it is useful
[in this issue](https://github.com/rust-lang/rust-clippy/issues/5122)."##,
},
Lint {
+ label: "clippy::as_ptr_cast_mut",
+ description: r##"Checks for the result of a `&self`-taking `as_ptr` being cast to a mutable pointer"##,
+ },
+ Lint {
+ label: "clippy::as_underscore",
+ description: r##"Checks for the usage of `as _` conversion using inferred type."##,
+ },
+ Lint {
label: "clippy::assertions_on_constants",
description: r##"Checks for `assert!(true)` and `assert!(false)` calls."##,
},
Lint {
+ label: "clippy::assertions_on_result_states",
+ description: r##"Checks for `assert!(r.is_ok())` or `assert!(r.is_err())` calls."##,
+ },
+ Lint {
label: "clippy::assign_op_pattern",
description: r##"Checks for `a = a op b` or `a = b commutative_op a`
patterns."##,
@@ -4641,14 +9964,17 @@ patterns."##,
that can themselves be awaited."##,
},
Lint {
+ label: "clippy::await_holding_invalid_type",
+ description: r##"Allows users to configure types which should not be held across `await`
+suspension points."##,
+ },
+ Lint {
label: "clippy::await_holding_lock",
- description: r##"Checks for calls to await while holding a
-non-async-aware MutexGuard."##,
+ description: r##"Checks for calls to await while holding a non-async-aware MutexGuard."##,
},
Lint {
label: "clippy::await_holding_refcell_ref",
- description: r##"Checks for calls to await while holding a
-`RefCell` `Ref` or `RefMut`."##,
+ description: r##"Checks for calls to await while holding a `RefCell` `Ref` or `RefMut`."##,
},
Lint {
label: "clippy::bad_bit_mask",
@@ -4659,14 +9985,18 @@ The formula for detecting if an expression of the type `_ <bit_op> m
{`!=`, `>=`, `>`, `!=`, `>=`, `>`}) can be determined from the following
table:
-|Comparison |Bit Op|Example |is always|Formula |
-|------------|------|------------|---------|----------------------|
-|`==` or `!=`| `&` |`x & 2 == 3`|`false` |`c & m != c` |
-|`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
-|`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
-|`==` or `!=`| `|` |`x | 1 == 0`|`false` |`c | m != c` |
-|`<` or `>=`| `|` |`x | 1 < 1` |`false` |`m >= c` |
-|`<=` or `>` | `|` |`x | 1 > 0` |`true` |`m > c` |"##,
+|Comparison |Bit Op|Example |is always|Formula |
+|------------|------|-------------|---------|----------------------|
+|`==` or `!=`| `&` |`x & 2 == 3` |`false` |`c & m != c` |
+|`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
+|`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
+|`==` or `!=`| `\\|` |`x \\| 1 == 0`|`false` |`c \\| m != c` |
+|`<` or `>=`| `\\|` |`x \\| 1 < 1` |`false` |`m >= c` |
+|`<=` or `>` | `\\|` |`x \\| 1 > 0` |`true` |`m > c` |"##,
+ },
+ Lint {
+ label: "clippy::big_endian_bytes",
+ description: r##"Checks for the usage of the `to_be_bytes` method and/or the function `from_be_bytes`."##,
},
Lint {
label: "clippy::bind_instead_of_map",
@@ -4674,11 +10004,6 @@ table:
`_.or_else(|x| Err(y))`."##,
},
Lint {
- label: "clippy::blacklisted_name",
- description: r##"Checks for usage of blacklisted names for variables, such
-as `foo`."##,
- },
- Lint {
label: "clippy::blanket_clippy_restriction_lints",
description: r##"Checks for `warn`/`deny`/`forbid` attributes targeting the whole clippy::restriction category."##,
},
@@ -4698,21 +10023,38 @@ expression, statements or conditions that use closures with blocks."##,
suggest using the variable directly."##,
},
Lint {
+ label: "clippy::bool_to_int_with_if",
+ description: r##"Instead of using an if statement to convert a bool to an int,
+this lint suggests using a `from()` function or an `as` coercion."##,
+ },
+ Lint {
+ label: "clippy::borrow_as_ptr",
+ description: r##"Checks for the usage of `&expr as *const T` or
+`&mut expr as *mut T`, and suggest using `ptr::addr_of` or
+`ptr::addr_of_mut` instead."##,
+ },
+ Lint { label: "clippy::borrow_deref_ref", description: r##"Checks for `&*(&T)`."## },
+ Lint {
label: "clippy::borrow_interior_mutable_const",
description: r##"Checks if `const` items which is interior mutable (e.g.,
contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.) has been borrowed directly."##,
},
Lint {
label: "clippy::borrowed_box",
- description: r##"Checks for use of `&Box<T>` anywhere in the code.
+ description: r##"Checks for usage of `&Box<T>` anywhere in the code.
Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
},
Lint {
label: "clippy::box_collection",
- description: r##"Checks for use of `Box<T>` where T is a collection such as Vec anywhere in the code.
+ description: r##"Checks for usage of `Box<T>` where T is a collection such as Vec anywhere in the code.
Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
},
Lint {
+ label: "clippy::box_default",
+ description: r##"checks for `Box::new(T::default())`, which is better written as
+`Box::<T>::default()`."##,
+ },
+ Lint {
label: "clippy::boxed_local",
description: r##"Checks for usage of `Box<T>` where an unboxed `T` would
work fine."##,
@@ -4727,6 +10069,11 @@ moved out of the blocks."##,
description: r##"Warns if a generic shadows a built-in type."##,
},
Lint {
+ label: "clippy::bytes_count_to_len",
+ description: r##"It checks for `str::bytes().count()` and suggests replacing it with
+`str::len()`."##,
+ },
+ Lint {
label: "clippy::bytes_nth",
description: r##"Checks for the use of `.bytes().nth()`."##,
},
@@ -4741,22 +10088,41 @@ moved out of the blocks."##,
and suggests to use a case-insensitive approach instead."##,
},
Lint {
+ label: "clippy::cast_abs_to_unsigned",
+ description: r##"Checks for usage of the `abs()` method that cast the result to unsigned."##,
+ },
+ Lint {
+ label: "clippy::cast_enum_constructor",
+ description: r##"Checks for casts from an enum tuple constructor to an integer."##,
+ },
+ Lint {
+ label: "clippy::cast_enum_truncation",
+ description: r##"Checks for casts from an enum type to an integral type which will definitely truncate the
+value."##,
+ },
+ Lint {
label: "clippy::cast_lossless",
description: r##"Checks for casts between numerical types that may
be replaced by safe conversion functions."##,
},
Lint {
+ label: "clippy::cast_nan_to_int",
+ description: r##"Checks for a known NaN float being cast to an integer"##,
+ },
+ Lint {
label: "clippy::cast_possible_truncation",
description: r##"Checks for casts between numerical types that may
truncate large values. This is expected behavior, so the cast is `Allow` by
-default."##,
+default. It suggests user either explicitly ignore the lint,
+or use `try_from()` and handle the truncation, default, or panic explicitly."##,
},
Lint {
label: "clippy::cast_possible_wrap",
description: r##"Checks for casts from an unsigned type to a signed type of
-the same size. Performing such a cast is a 'no-op' for the compiler,
-i.e., nothing is changed at the bit level, and the binary representation of
-the value is reinterpreted. This can cause wrapping if the value is too big
+the same size, or possibly smaller due to target dependent integers.
+Performing such a cast is a 'no-op' for the compiler, i.e., nothing is
+changed at the bit level, and the binary representation of the value is
+reinterpreted. This can cause wrapping if the value is too big
for the target signed type. However, the cast works as defined, so this lint
is `Allow` by default."##,
},
@@ -4776,10 +10142,6 @@ or any 64-bit integer to `f64`."##,
from a less-strictly-aligned pointer to a more-strictly-aligned pointer"##,
},
Lint {
- label: "clippy::cast_ref_to_mut",
- description: r##"Checks for casts of `&T` to `&mut T` anywhere in the code."##,
- },
- Lint {
label: "clippy::cast_sign_loss",
description: r##"Checks for casts from a signed to an unsigned numerical
type. In this case, negative values wrap around to large positive values,
@@ -4787,6 +10149,14 @@ which can be quite surprising in practice. However, as the cast works as
defined, this lint is `Allow` by default."##,
},
Lint {
+ label: "clippy::cast_slice_different_sizes",
+ description: r##"Checks for `as` casts between raw pointers to slices with differently sized elements."##,
+ },
+ Lint {
+ label: "clippy::cast_slice_from_raw_parts",
+ description: r##"Checks for a raw slice being cast to a slice pointer"##,
+ },
+ Lint {
label: "clippy::char_lit_as_u8",
description: r##"Checks for expressions where a character literal is cast
to `u8` and suggests using a byte literal instead."##,
@@ -4806,8 +10176,8 @@ if it starts with a given char."##,
description: r##"Checks for explicit bounds checking when casting."##,
},
Lint {
- label: "clippy::clone_double_ref",
- description: r##"Checks for usage of `.clone()` on an `&&T`."##,
+ label: "clippy::clear_with_drain",
+ description: r##"Checks for usage of `.drain(..)` for the sole purpose of clearing a container."##,
},
Lint {
label: "clippy::clone_on_copy",
@@ -4821,10 +10191,9 @@ function syntax instead (e.g., `Rc::clone(foo)`)."##,
},
Lint {
label: "clippy::cloned_instead_of_copied",
- description: r##"Checks for usages of `cloned()` on an `Iterator` or `Option` where
+ description: r##"Checks for usage of `cloned()` on an `Iterator` or `Option` where
`copied()` could be used instead."##,
},
- Lint { label: "clippy::cmp_nan", description: r##"Checks for comparisons to NaN."## },
Lint {
label: "clippy::cmp_null",
description: r##"This lint checks for equality comparisons with `ptr::null`"##,
@@ -4857,6 +10226,15 @@ Note that this lint is not intended to find _all_ cases where nested match patte
cases where merging would most likely make the code more readable."##,
},
Lint {
+ label: "clippy::collapsible_str_replace",
+ description: r##"Checks for consecutive calls to `str::replace` (2 or more)
+that can be collapsed into a single call."##,
+ },
+ Lint {
+ label: "clippy::collection_is_never_read",
+ description: r##"Checks for collections that are never queried."##,
+ },
+ Lint {
label: "clippy::comparison_chain",
description: r##"Checks comparison chains written with `if` that can be
rewritten with `match` and `cmp`."##,
@@ -4872,6 +10250,10 @@ and suggests using `.is_empty()` where applicable."##,
`Iterator`."##,
},
Lint {
+ label: "clippy::crate_in_macro_def",
+ description: r##"Checks for usage of `crate` as opposed to `$crate` in a macro definition."##,
+ },
+ Lint {
label: "clippy::create_dir",
description: r##"Checks usage of `std::fs::create_dir` and suggest using `std::fs::create_dir_all` instead."##,
},
@@ -4879,7 +10261,10 @@ and suggests using `.is_empty()` where applicable."##,
label: "clippy::crosspointer_transmute",
description: r##"Checks for transmutes between a type `T` and `*T`."##,
},
- Lint { label: "clippy::dbg_macro", description: r##"Checks for usage of dbg!() macro."## },
+ Lint {
+ label: "clippy::dbg_macro",
+ description: r##"Checks for usage of the [`dbg!`](https://doc.rust-lang.org/std/macro.dbg.html) macro."##,
+ },
Lint {
label: "clippy::debug_assert_with_mut_call",
description: r##"Checks for function/method calls with a mutable
@@ -4895,6 +10280,15 @@ parameter in `debug_assert!`, `debug_assert_eq!` and `debug_assert_ne!` macros."
mutable (e.g., contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.)."##,
},
Lint {
+ label: "clippy::default_constructed_unit_structs",
+ description: r##"Checks for construction on unit struct using `default`."##,
+ },
+ Lint {
+ label: "clippy::default_instead_of_iter_empty",
+ description: r##"It checks for `std::iter::Empty::default()` and suggests replacing it with
+`std::iter::empty()`."##,
+ },
+ Lint {
label: "clippy::default_numeric_fallback",
description: r##"Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
inference.
@@ -4910,6 +10304,10 @@ See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-in
description: r##"Checks for literal calls to `Default::default()`."##,
},
Lint {
+ label: "clippy::default_union_representation",
+ description: r##"Displays a warning when a union is declared with the default representation (without a `#[repr(C)]` attribute)."##,
+ },
+ Lint {
label: "clippy::deprecated_cfg_attr",
description: r##"Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
with `#[rustfmt::skip]`."##,
@@ -4924,22 +10322,46 @@ field that is not a valid semantic version."##,
description: r##"Checks for usage of `*&` and `*&mut` in expressions."##,
},
Lint {
+ label: "clippy::deref_by_slicing",
+ description: r##"Checks for slicing expressions which are equivalent to dereferencing the
+value."##,
+ },
+ Lint {
label: "clippy::derivable_impls",
description: r##"Detects manual `std::default::Default` implementations that are identical to a derived implementation."##,
},
Lint {
- label: "clippy::derive_hash_xor_eq",
- description: r##"Checks for deriving `Hash` but implementing `PartialEq`
-explicitly or vice versa."##,
+ label: "clippy::derive_ord_xor_partial_ord",
+ description: r##"Lints against manual `PartialOrd` and `Ord` implementations for types with a derived `Ord`
+or `PartialOrd` implementation."##,
},
Lint {
- label: "clippy::derive_ord_xor_partial_ord",
- description: r##"Checks for deriving `Ord` but implementing `PartialOrd`
-explicitly or vice versa."##,
+ label: "clippy::derive_partial_eq_without_eq",
+ description: r##"Checks for types that derive `PartialEq` and could implement `Eq`."##,
+ },
+ Lint {
+ label: "clippy::derived_hash_with_manual_eq",
+ description: r##"Lints against manual `PartialEq` implementations for types with a derived `Hash`
+implementation."##,
+ },
+ Lint {
+ label: "clippy::disallowed_macros",
+ description: r##"Denies the configured macros in clippy.toml
+
+Note: Even though this lint is warn-by-default, it will only trigger if
+macros are defined in the clippy.toml file."##,
},
Lint {
label: "clippy::disallowed_methods",
- description: r##"Denies the configured methods and functions in clippy.toml"##,
+ description: r##"Denies the configured methods and functions in clippy.toml
+
+Note: Even though this lint is warn-by-default, it will only trigger if
+methods are defined in the clippy.toml file."##,
+ },
+ Lint {
+ label: "clippy::disallowed_names",
+ description: r##"Checks for usage of disallowed names for variables, such
+as `foo`."##,
},
Lint {
label: "clippy::disallowed_script_idents",
@@ -4958,7 +10380,10 @@ See also: [`non_ascii_idents`].
},
Lint {
label: "clippy::disallowed_types",
- description: r##"Denies the configured types in clippy.toml."##,
+ description: r##"Denies the configured types in clippy.toml.
+
+Note: Even though this lint is warn-by-default, it will only trigger if
+types are defined in the clippy.toml file."##,
},
Lint {
label: "clippy::diverging_sub_expression",
@@ -4966,6 +10391,11 @@ See also: [`non_ascii_idents`].
statements."##,
},
Lint {
+ label: "clippy::doc_link_with_quotes",
+ description: r##"Detects the syntax `['foo']` in documentation comments (notice quotes instead of backticks)
+outside of code blocks"##,
+ },
+ Lint {
label: "clippy::doc_markdown",
description: r##"Checks for the presence of `_`, `::` or camel-case words
outside ticks in documentation."##,
@@ -4989,14 +10419,19 @@ marked as `#[must_use]`."##,
description: r##"Checks for unnecessary double parentheses."##,
},
Lint {
- label: "clippy::drop_copy",
- description: r##"Checks for calls to `std::mem::drop` with a value
-that derives the Copy trait"##,
+ label: "clippy::drain_collect",
+ description: r##"Checks for calls to `.drain()` that clear the collection, immediately followed by a call to `.collect()`.
+
+> Collection in this context refers to any type with a `drain` method:
+> `Vec`, `VecDeque`, `BinaryHeap`, `HashSet`,`HashMap`, `String`"##,
},
Lint {
- label: "clippy::drop_ref",
- description: r##"Checks for calls to `std::mem::drop` with a reference
-instead of an owned value."##,
+ label: "clippy::drop_non_drop",
+ description: r##"Checks for calls to `std::mem::drop` with a value that does not implement `Drop`."##,
+ },
+ Lint {
+ label: "clippy::duplicate_mod",
+ description: r##"Checks for files that are included as modules multiple times."##,
},
Lint {
label: "clippy::duplicate_underscore_argument",
@@ -5014,6 +10449,10 @@ from other `Duration` methods."##,
but without a final `else` branch."##,
},
Lint {
+ label: "clippy::empty_drop",
+ description: r##"Checks for empty `Drop` implementations."##,
+ },
+ Lint {
label: "clippy::empty_enum",
description: r##"Checks for `enum`s with no variants.
@@ -5022,11 +10461,19 @@ nightly-only experimental API. Therefore, this lint is only triggered
if the `never_type` is enabled."##,
},
Lint {
+ label: "clippy::empty_line_after_doc_comments",
+ description: r##"Checks for empty lines after documenation comments."##,
+ },
+ Lint {
label: "clippy::empty_line_after_outer_attr",
description: r##"Checks for empty lines after outer attributes"##,
},
Lint { label: "clippy::empty_loop", description: r##"Checks for empty `loop` expressions."## },
Lint {
+ label: "clippy::empty_structs_with_brackets",
+ description: r##"Finds structs without fields (a so-called empty struct) that are declared with brackets."##,
+ },
+ Lint {
label: "clippy::enum_clike_unportable_variant",
description: r##"Checks for C-like enumerations that are
`repr(isize/usize)` and have values that don't fit into an `i32`."##,
@@ -5052,10 +10499,18 @@ bitwise, difference and division binary operators (`==`, `>`, etc., `&&`,
description: r##"Checks for erasing operations, e.g., `x * 0`."##,
},
Lint {
- label: "clippy::eval_order_dependence",
- description: r##"Checks for a read and a write to the same variable where
-whether the read occurs before or after the write depends on the evaluation
-order of sub-expressions."##,
+ label: "clippy::err_expect",
+ description: r##"Checks for `.err().expect()` calls on the `Result` type."##,
+ },
+ Lint {
+ label: "clippy::error_impl_error",
+ description: r##"Checks for types named `Error` that implement `Error`."##,
+ },
+ Lint {
+ label: "clippy::excessive_nesting",
+ description: r##"Checks for blocks which are nested beyond a certain threshold.
+
+Note: Even though this lint is warn-by-default, it will only trigger if a maximum nesting level is defined in the clippy.toml file."##,
},
Lint {
label: "clippy::excessive_precision",
@@ -5072,8 +10527,7 @@ than that supported by the underlying type."##,
},
Lint {
label: "clippy::exit",
- description: r##"`exit()` terminates the program and doesn't provide a
-stack trace."##,
+ description: r##"Detects calls to the `exit()` function which terminates the program."##,
},
Lint {
label: "clippy::expect_fun_call",
@@ -5082,7 +10536,7 @@ etc., and suggests to use `unwrap_or_else` instead"##,
},
Lint {
label: "clippy::expect_used",
- description: r##"Checks for `.expect()` calls on `Option`s and `Result`s."##,
+ description: r##"Checks for `.expect()` or `.expect_err()` calls on `Result`s and `.expect()` call on `Option`s."##,
},
Lint {
label: "clippy::expl_impl_clone_on_copy",
@@ -5090,6 +10544,10 @@ etc., and suggests to use `unwrap_or_else` instead"##,
types."##,
},
Lint {
+ label: "clippy::explicit_auto_deref",
+ description: r##"Checks for dereferencing expressions which would be covered by auto-deref."##,
+ },
+ Lint {
label: "clippy::explicit_counter_loop",
description: r##"Checks `for` loops over slices with an explicit counter
and suggests the use of `.enumerate()`."##,
@@ -5127,6 +10585,10 @@ replaced with `(e)print!()` / `(e)println!()`"##,
anywhere else."##,
},
Lint {
+ label: "clippy::extra_unused_type_parameters",
+ description: r##"Checks for type parameters in generics that are never used anywhere else."##,
+ },
+ Lint {
label: "clippy::fallible_impl_from",
description: r##"Checks for impls of `From<..>` that contain `panic!()` or `unwrap()`"##,
},
@@ -5144,6 +10606,10 @@ with Default::default()."##,
description: r##"Nothing. This lint has been deprecated."##,
},
Lint {
+ label: "clippy::filter_map_bool_then",
+ description: r##"Checks for usage of `bool::then` in `Iterator::filter_map`."##,
+ },
+ Lint {
label: "clippy::filter_map_identity",
description: r##"Checks for usage of `filter_map(|x| x)`."##,
},
@@ -5162,7 +10628,7 @@ with Default::default()."##,
},
Lint {
label: "clippy::flat_map_option",
- description: r##"Checks for usages of `Iterator::flat_map()` where `filter_map()` could be
+ description: r##"Checks for usage of `Iterator::flat_map()` where `filter_map()` could be
used instead."##,
},
Lint { label: "clippy::float_arithmetic", description: r##"Checks for float arithmetic."## },
@@ -5211,18 +10677,12 @@ store address."##,
ignoring either the keys or values."##,
},
Lint {
- label: "clippy::for_loops_over_fallibles",
- description: r##"Checks for `for` loops over `Option` or `Result` values."##,
+ label: "clippy::forget_non_drop",
+ description: r##"Checks for calls to `std::mem::forget` with a value that does not implement `Drop`."##,
},
Lint {
- label: "clippy::forget_copy",
- description: r##"Checks for calls to `std::mem::forget` with a value that
-derives the Copy trait"##,
- },
- Lint {
- label: "clippy::forget_ref",
- description: r##"Checks for calls to `std::mem::forget` with a reference
-instead of an owned value."##,
+ label: "clippy::format_collect",
+ description: r##"Checks for usage of `.map(|_| format!(..)).collect::<String>()`."##,
},
Lint {
label: "clippy::format_in_format_args",
@@ -5231,6 +10691,15 @@ formatting such as `format!` itself, `write!` or `println!`. Suggests
inlining the `format!` call."##,
},
Lint {
+ label: "clippy::format_push_string",
+ description: r##"Detects cases where the result of a `format!` call is
+appended to an existing `String`."##,
+ },
+ Lint {
+ label: "clippy::four_forward_slashes",
+ description: r##"Checks for outer doc comments written with 4 forward slashes (`////`)."##,
+ },
+ Lint {
label: "clippy::from_iter_instead_of_collect",
description: r##"Checks for `from_iter()` function calls on types that implement the `FromIterator`
trait."##,
@@ -5240,6 +10709,10 @@ trait."##,
description: r##"Searches for implementations of the `Into<..>` trait and suggests to implement `From<..>` instead."##,
},
Lint {
+ label: "clippy::from_raw_with_void_ptr",
+ description: r##"Checks if we're passing a `c_void` raw pointer to `{Box,Rc,Arc,Weak}::from_raw(_)`"##,
+ },
+ Lint {
label: "clippy::from_str_radix_10",
description: r##"Checks for function invocations of the form `primitive::from_str_radix(s, 10)`"##,
},
@@ -5251,16 +10724,25 @@ used by library authors (public and internal) that target an audience where
multithreaded executors are likely to be used for running these Futures."##,
},
Lint {
+ label: "clippy::get_first",
+ description: r##"Checks for usage of `x.get(0)` instead of
+`x.first()`."##,
+ },
+ Lint {
label: "clippy::get_last_with_len",
- description: r##"Checks for using `x.get(x.len() - 1)` instead of
+ description: r##"Checks for usage of `x.get(x.len() - 1)` instead of
`x.last()`."##,
},
Lint {
label: "clippy::get_unwrap",
- description: r##"Checks for use of `.get().unwrap()` (or
+ description: r##"Checks for usage of `.get().unwrap()` (or
`.get_mut().unwrap`) on a standard library type which implements `Index`"##,
},
Lint {
+ label: "clippy::host_endian_bytes",
+ description: r##"Checks for the usage of the `to_ne_bytes` method and/or the function `from_ne_bytes`."##,
+ },
+ Lint {
label: "clippy::identity_op",
description: r##"Checks for identity operations, e.g., `x + 0`."##,
},
@@ -5285,13 +10767,21 @@ and the *else* part."##,
},
Lint {
label: "clippy::if_then_some_else_none",
- description: r##"Checks for if-else that could be written to `bool::then`."##,
+ description: r##"Checks for if-else that could be written using either `bool::then` or `bool::then_some`."##,
},
Lint {
label: "clippy::ifs_same_cond",
description: r##"Checks for consecutive `if`s with the same condition."##,
},
Lint {
+ label: "clippy::ignored_unit_patterns",
+ description: r##"Checks for usage of `_` in patterns of type `()`."##,
+ },
+ Lint {
+ label: "clippy::impl_trait_in_params",
+ description: r##"Lints when `impl Trait` is being used in a function's parameters."##,
+ },
+ Lint {
label: "clippy::implicit_clone",
description: r##"Checks for the usage of `_.to_owned()`, `vec.to_vec()`, or similar when calling `_.clone()` would be clearer."##,
},
@@ -5306,10 +10796,25 @@ algorithm (`SipHash`)."##,
description: r##"Checks for missing return statements at the end of a block."##,
},
Lint {
+ label: "clippy::implicit_saturating_add",
+ description: r##"Checks for implicit saturating addition."##,
+ },
+ Lint {
label: "clippy::implicit_saturating_sub",
description: r##"Checks for implicit saturating subtraction."##,
},
Lint {
+ label: "clippy::implied_bounds_in_impls",
+ description: r##"Looks for bounds in `impl Trait` in return position that are implied by other bounds.
+This can happen when a trait is specified that another trait already has as a supertrait
+(e.g. `fn() -> impl Deref + DerefMut<Target = i32>` has an unnecessary `Deref` bound,
+because `Deref` is a supertrait of `DerefMut`)"##,
+ },
+ Lint {
+ label: "clippy::impossible_comparisons",
+ description: r##"Checks for double comparisons that can never succeed"##,
+ },
+ Lint {
label: "clippy::imprecise_flops",
description: r##"Looks for floating-point expressions that
can be expressed using built-in methods to improve accuracy
@@ -5343,10 +10848,10 @@ lint on constant `usize` indexing on arrays because that is handled by rustc's `
without changing the outcome. The basic structure can be seen in the
following table:
-|Comparison| Bit Op |Example |equals |
-|----------|---------|-----------|-------|
-|`>` / `<=`|`|` / `^`|`x | 2 > 3`|`x > 3`|
-|`<` / `>=`|`|` / `^`|`x ^ 1 < 4`|`x < 4`|"##,
+|Comparison| Bit Op |Example |equals |
+|----------|----------|------------|-------|
+|`>` / `<=`|`\\|` / `^`|`x \\| 2 > 3`|`x > 3`|
+|`<` / `>=`|`\\|` / `^`|`x ^ 1 < 4` |`x < 4`|"##,
},
Lint {
label: "clippy::inefficient_to_string",
@@ -5371,6 +10876,12 @@ or tuple struct where a `let` will suffice."##,
description: r##"Checks for the definition of inherent methods with a signature of `to_string(&self) -> String` and if the type implementing this method also implements the `Display` trait."##,
},
Lint {
+ label: "clippy::init_numbered_fields",
+ description: r##"Checks for tuple structs initialized with field syntax.
+It will however not lint if a base initializer is present.
+The lint will also ignore code in macros."##,
+ },
+ Lint {
label: "clippy::inline_always",
description: r##"Checks for items annotated with `#[inline(always)]`,
unless the annotated function is empty or simply panics."##,
@@ -5395,16 +10906,6 @@ unless the annotated function is empty or simply panics."##,
label: "clippy::int_plus_one",
description: r##"Checks for usage of `x >= y + 1` or `x - 1 >= y` (and `<=`) in a block"##,
},
- Lint {
- label: "clippy::integer_arithmetic",
- description: r##"Checks for integer arithmetic operations which could overflow or panic.
-
-Specifically, checks for any operators (`+`, `-`, `*`, `<<`, etc) which are capable
-of overflowing according to the [Rust
-Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
-or which can panic (`/`, `%`). No bounds analysis or sophisticated reasoning is
-attempted."##,
- },
Lint { label: "clippy::integer_division", description: r##"Checks for division of integers"## },
Lint {
label: "clippy::into_iter_on_ref",
@@ -5412,6 +10913,12 @@ attempted."##,
or `iter_mut`."##,
},
Lint {
+ label: "clippy::into_iter_without_iter",
+ description: r##"This is the opposite of the `iter_without_into_iter` lint.
+It looks for `IntoIterator for (&|&mut) Type` implementations without an inherent `iter` or `iter_mut` method
+on the type or on any of the types in its `Deref` chain."##,
+ },
+ Lint {
label: "clippy::invalid_null_ptr_usage",
description: r##"This lint checks for invalid usages of `ptr::null`."##,
},
@@ -5432,10 +10939,20 @@ necessary. Only integer types are checked."##,
description: r##"Checks for invisible Unicode characters in the code."##,
},
Lint {
+ label: "clippy::is_digit_ascii_radix",
+ description: r##"Finds usages of [`char::is_digit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_digit) that
+can be replaced with [`is_ascii_digit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_ascii_digit) or
+[`is_ascii_hexdigit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_ascii_hexdigit)."##,
+ },
+ Lint {
label: "clippy::items_after_statements",
description: r##"Checks for items declared after some statement in a block."##,
},
Lint {
+ label: "clippy::items_after_test_module",
+ description: r##"Triggers if an item is declared after the testing module marked with `#[cfg(test)]`."##,
+ },
+ Lint {
label: "clippy::iter_cloned_collect",
description: r##"Checks for the use of `.cloned().collect()` on slice to
create a `Vec`."##,
@@ -5444,6 +10961,11 @@ create a `Vec`."##,
label: "clippy::iter_count",
description: r##"Checks for the use of `.iter().count()`."##,
},
+ Lint {
+ label: "clippy::iter_kv_map",
+ description: r##"Checks for iterating a map (`HashMap` or `BTreeMap`) and
+ignoring either the keys or values."##,
+ },
Lint { label: "clippy::iter_next_loop", description: r##"Checks for loops on `x.next()`."## },
Lint {
label: "clippy::iter_next_slice",
@@ -5455,7 +10977,7 @@ create a `Vec`."##,
},
Lint {
label: "clippy::iter_nth",
- description: r##"Checks for use of `.iter().nth()` (and the related
+ description: r##"Checks for usage of `.iter().nth()` (and the related
`.iter_mut().nth()`) on standard library types with *O*(1) element access."##,
},
Lint {
@@ -5463,8 +10985,37 @@ create a `Vec`."##,
description: r##"Checks for the use of `iter.nth(0)`."##,
},
Lint {
+ label: "clippy::iter_on_empty_collections",
+ description: r##"Checks for calls to `iter`, `iter_mut` or `into_iter` on empty collections"##,
+ },
+ Lint {
+ label: "clippy::iter_on_single_items",
+ description: r##"Checks for calls to `iter`, `iter_mut` or `into_iter` on collections containing a single item"##,
+ },
+ Lint {
+ label: "clippy::iter_out_of_bounds",
+ description: r##"Looks for iterator combinator calls such as `.take(x)` or `.skip(x)`
+where `x` is greater than the amount of items that an iterator will produce."##,
+ },
+ Lint {
+ label: "clippy::iter_overeager_cloned",
+ description: r##"Checks for usage of `_.cloned().<func>()` where call to `.cloned()` can be postponed."##,
+ },
+ Lint {
label: "clippy::iter_skip_next",
- description: r##"Checks for use of `.skip(x).next()` on iterators."##,
+ description: r##"Checks for usage of `.skip(x).next()` on iterators."##,
+ },
+ Lint {
+ label: "clippy::iter_skip_zero",
+ description: r##"Checks for usage of `.skip(0)` on iterators."##,
+ },
+ Lint {
+ label: "clippy::iter_with_drain",
+ description: r##"Checks for usage of `.drain(..)` on `Vec` and `VecDeque` for iteration."##,
+ },
+ Lint {
+ label: "clippy::iter_without_into_iter",
+ description: r##"Looks for `iter` and `iter_mut` methods without an associated `IntoIterator for (&|&mut) Type` implementation."##,
},
Lint {
label: "clippy::iterator_step_by_zero",
@@ -5492,10 +11043,29 @@ are too large."##,
`enum`s."##,
},
Lint {
+ label: "clippy::large_futures",
+ description: r##"It checks for the size of a `Future` created by `async fn` or `async {}`."##,
+ },
+ Lint {
+ label: "clippy::large_include_file",
+ description: r##"Checks for the inclusion of large files via `include_bytes!()`
+and `include_str!()`"##,
+ },
+ Lint {
label: "clippy::large_stack_arrays",
description: r##"Checks for local arrays that may be too large."##,
},
Lint {
+ label: "clippy::large_stack_frames",
+ description: r##"Checks for functions that use a lot of stack space.
+
+This often happens when constructing a large type, such as an array with a lot of elements,
+or constructing *many* smaller-but-still-large structs, or copying around a lot of large types.
+
+This lint is a more general version of [`large_stack_arrays`](https://rust-lang.github.io/rust-clippy/master/#large_stack_arrays)
+that is intended to look at functions as a whole instead of only individual array expressions inside of a function."##,
+ },
+ Lint {
label: "clippy::large_types_passed_by_value",
description: r##"Checks for functions taking arguments by value, where
the argument type is `Copy` and large enough to be worth considering
@@ -5519,29 +11089,42 @@ just to compare to zero, and suggests using `.is_empty()` where applicable."##,
returned."##,
},
Lint {
- label: "clippy::let_underscore_drop",
- description: r##"Checks for `let _ = <expr>`
-where expr has a type that implements `Drop`"##,
+ label: "clippy::let_underscore_future",
+ description: r##"Checks for `let _ = <expr>` where the resulting type of expr implements `Future`"##,
},
Lint {
label: "clippy::let_underscore_lock",
- description: r##"Checks for `let _ = sync_lock`.
-This supports `mutex` and `rwlock` in `std::sync` and `parking_lot`."##,
+ description: r##"Checks for `let _ = sync_lock`. This supports `mutex` and `rwlock` in
+`parking_lot`. For `std` locks see the `rustc` lint
+[`let_underscore_lock`](https://doc.rust-lang.org/nightly/rustc/lints/listing/deny-by-default.html#let-underscore-lock)"##,
},
Lint {
label: "clippy::let_underscore_must_use",
description: r##"Checks for `let _ = <expr>` where expr is `#[must_use]`"##,
},
+ Lint {
+ label: "clippy::let_underscore_untyped",
+ description: r##"Checks for `let _ = <expr>` without a type annotation, and suggests to either provide one,
+or remove the `let` keyword altogether."##,
+ },
Lint { label: "clippy::let_unit_value", description: r##"Checks for binding a unit value."## },
Lint {
+ label: "clippy::let_with_type_underscore",
+ description: r##"Detects when a variable is declared with an explicit type of `_`."##,
+ },
+ Lint {
+ label: "clippy::lines_filter_map_ok",
+ description: r##"Checks for usage of `lines.filter_map(Result::ok)` or `lines.flat_map(Result::ok)`
+when `lines` has type `std::io::Lines`."##,
+ },
+ Lint {
label: "clippy::linkedlist",
description: r##"Checks for usage of any `LinkedList`, suggesting to use a
`Vec` or a `VecDeque` (formerly called `RingBuf`)."##,
},
Lint {
- label: "clippy::logic_bug",
- description: r##"Checks for boolean expressions that contain terminals that
-can be eliminated."##,
+ label: "clippy::little_endian_bytes",
+ description: r##"Checks for the usage of the `to_le_bytes` method and/or the function `from_le_bytes`."##,
},
Lint {
label: "clippy::lossy_float_literal",
@@ -5565,23 +11148,74 @@ cannot be represented as the underlying type without loss."##,
description: r##"It checks for manual implementations of `async` functions."##,
},
Lint {
+ label: "clippy::manual_bits",
+ description: r##"Checks for usage of `std::mem::size_of::<T>() * 8` when
+`T::BITS` is available."##,
+ },
+ Lint {
+ label: "clippy::manual_clamp",
+ description: r##"Identifies good opportunities for a clamp function from std or core, and suggests using it."##,
+ },
+ Lint {
+ label: "clippy::manual_filter",
+ description: r##"Checks for usage of `match` which could be implemented using `filter`"##,
+ },
+ Lint {
label: "clippy::manual_filter_map",
description: r##"Checks for usage of `_.filter(_).map(_)` that can be written more simply
as `filter_map(_)`."##,
},
Lint {
+ label: "clippy::manual_find",
+ description: r##"Checks for manual implementations of Iterator::find"##,
+ },
+ Lint {
label: "clippy::manual_find_map",
description: r##"Checks for usage of `_.find(_).map(_)` that can be written more simply
as `find_map(_)`."##,
},
Lint {
label: "clippy::manual_flatten",
- description: r##"Check for unnecessary `if let` usage in a for loop
+ description: r##"Checks for unnecessary `if let` usage in a for loop
where only the `Some` or `Ok` variant of the iterator element is used."##,
},
Lint {
+ label: "clippy::manual_hash_one",
+ description: r##"Checks for cases where [`BuildHasher::hash_one`] can be used.
+
+[`BuildHasher::hash_one`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html#method.hash_one"##,
+ },
+ Lint {
+ label: "clippy::manual_instant_elapsed",
+ description: r##"Lints subtraction between `Instant::now()` and another `Instant`."##,
+ },
+ Lint {
+ label: "clippy::manual_is_ascii_check",
+ description: r##"Suggests to use dedicated built-in methods,
+`is_ascii_(lowercase|uppercase|digit)` for checking on corresponding ascii range"##,
+ },
+ Lint {
+ label: "clippy::manual_is_finite",
+ description: r##"Checks for manual `is_finite` reimplementations
+(i.e., `x != <float>::INFINITY && x != <float>::NEG_INFINITY`)."##,
+ },
+ Lint {
+ label: "clippy::manual_is_infinite",
+ description: r##"Checks for manual `is_infinite` reimplementations
+(i.e., `x == <float>::INFINITY || x == <float>::NEG_INFINITY`)."##,
+ },
+ Lint {
+ label: "clippy::manual_let_else",
+ description: r##"Warn of cases where `let...else` could be used"##,
+ },
+ Lint {
+ label: "clippy::manual_main_separator_str",
+ description: r##"Checks for references on `std::path::MAIN_SEPARATOR.to_string()` used
+to build a `&str`."##,
+ },
+ Lint {
label: "clippy::manual_map",
- description: r##"Checks for usages of `match` which could be implemented using `map`"##,
+ description: r##"Checks for usage of `match` which could be implemented using `map`"##,
},
Lint {
label: "clippy::manual_memcpy",
@@ -5589,6 +11223,10 @@ where only the `Some` or `Ok` variant of the iterator element is used."##,
slices that could be optimized by having a memcpy."##,
},
Lint {
+ label: "clippy::manual_next_back",
+ description: r##"Checks for `.rev().next()` on a `DoubleEndedIterator`"##,
+ },
+ Lint {
label: "clippy::manual_non_exhaustive",
description: r##"Checks for manual implementations of the non-exhaustive pattern."##,
},
@@ -5602,28 +11240,66 @@ slices that could be optimized by having a memcpy."##,
be more readably expressed as `(3..8).contains(x)`."##,
},
Lint {
+ label: "clippy::manual_range_patterns",
+ description: r##"Looks for combined OR patterns that are all contained in a specific range,
+e.g. `6 | 4 | 5 | 9 | 7 | 8` can be rewritten as `4..=9`."##,
+ },
+ Lint {
+ label: "clippy::manual_rem_euclid",
+ description: r##"Checks for an expression like `((x % 4) + 4) % 4` which is a common manual reimplementation
+of `x.rem_euclid(4)`."##,
+ },
+ Lint {
+ label: "clippy::manual_retain",
+ description: r##"Checks for code to be replaced by `.retain()`."##,
+ },
+ Lint {
label: "clippy::manual_saturating_arithmetic",
description: r##"Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`."##,
},
Lint {
+ label: "clippy::manual_slice_size_calculation",
+ description: r##"When `a` is `&[T]`, detect `a.len() * size_of::<T>()` and suggest `size_of_val(a)`
+instead."##,
+ },
+ Lint {
label: "clippy::manual_split_once",
- description: r##"Checks for usages of `str::splitn(2, _)`"##,
+ description: r##"Checks for usage of `str::splitn(2, _)`"##,
},
Lint {
label: "clippy::manual_str_repeat",
description: r##"Checks for manual implementations of `str::repeat`"##,
},
Lint {
+ label: "clippy::manual_string_new",
+ description: r##"Checks for usage of `` to create a `String`, such as `.to_string()`, `.to_owned()`,
+`String::from()` and others."##,
+ },
+ Lint {
label: "clippy::manual_strip",
description: r##"Suggests using `strip_{prefix,suffix}` over `str::{starts,ends}_with` and slicing using
the pattern's length."##,
},
- Lint { label: "clippy::manual_swap", description: r##"Checks for manual swapping."## },
+ Lint {
+ label: "clippy::manual_swap",
+ description: r##"Checks for manual swapping.
+
+Note that the lint will not be emitted in const blocks, as the suggestion would not be applicable."##,
+ },
+ Lint {
+ label: "clippy::manual_try_fold",
+ description: r##"Checks for usage of `Iterator::fold` with a type that implements `Try`."##,
+ },
Lint {
label: "clippy::manual_unwrap_or",
description: r##"Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`."##,
},
Lint {
+ label: "clippy::manual_while_let_some",
+ description: r##"Looks for loops that check for emptiness of a `Vec` in the condition and pop an element
+in the body as a separate operation."##,
+ },
+ Lint {
label: "clippy::many_single_char_names",
description: r##"Checks for too many variables whose name consists of a
single character."##,
@@ -5640,7 +11316,7 @@ and suggests `cloned()` or `copied()` instead"##,
},
Lint {
label: "clippy::map_entry",
- description: r##"Checks for uses of `contains_key` + `insert` on `HashMap`
+ description: r##"Checks for usage of `contains_key` + `insert` on `HashMap`
or `BTreeMap`."##,
},
Lint {
@@ -5695,7 +11371,10 @@ instead. It also checks for `if let &foo = bar` blocks."##,
},
Lint {
label: "clippy::match_same_arms",
- description: r##"Checks for `match` with identical arm bodies."##,
+ description: r##"Checks for `match` with identical arm bodies.
+
+Note: Does not lint on wildcards if the `non_exhaustive_omitted_patterns_lint` feature is
+enabled and disallowed."##,
},
Lint {
label: "clippy::match_single_binding",
@@ -5719,9 +11398,14 @@ and take drastic actions like `panic!`."##,
description: r##"Checks for iteration that may be infinite."##,
},
Lint {
+ label: "clippy::maybe_misused_cfg",
+ description: r##"Checks for `#[cfg(features = ...)]` and suggests to replace it with
+`#[cfg(feature = ...)]`."##,
+ },
+ Lint {
label: "clippy::mem_forget",
description: r##"Checks for usage of `std::mem::forget(t)` where `t` is
-`Drop`."##,
+`Drop` or has a field that implements `Drop`."##,
},
Lint {
label: "clippy::mem_replace_option_with_none",
@@ -5739,6 +11423,13 @@ and take drastic actions like `panic!`."##,
and `mem::replace(&mut _, mem::zeroed())`."##,
},
Lint {
+ label: "clippy::min_ident_chars",
+ description: r##"Checks for idents which comprise of a single letter.
+
+Note: This lint can be very noisy when enabled; it may be desirable to only enable it
+temporarily."##,
+ },
+ Lint {
label: "clippy::min_max",
description: r##"Checks for expressions where `std::cmp::min` and `max` are
used to clamp values, but switched so that the result is constant."##,
@@ -5752,17 +11443,37 @@ used to clamp values, but switched so that the result is constant."##,
description: r##"Checks for cfg attributes having operating systems used in target family position."##,
},
Lint {
+ label: "clippy::mismatching_type_param_order",
+ description: r##"Checks for type parameters which are positioned inconsistently between
+a type definition and impl block. Specifically, a parameter in an impl
+block which has the same name as a parameter in the type def, but is in
+a different place."##,
+ },
+ Lint {
+ label: "clippy::misnamed_getters",
+ description: r##"Checks for getter methods that return a field that doesn't correspond
+to the name of the method, when there is a field's whose name matches that of the method."##,
+ },
+ Lint {
label: "clippy::misrefactored_assign_op",
description: r##"Checks for `a op= a op b` or `a op= b op a` patterns."##,
},
Lint {
+ label: "clippy::missing_assert_message",
+ description: r##"Checks assertions without a custom panic message."##,
+ },
+ Lint {
+ label: "clippy::missing_asserts_for_indexing",
+ description: r##"Checks for repeated slice indexing without asserting beforehand that the length
+is greater than the largest index used to index into the slice."##,
+ },
+ Lint {
label: "clippy::missing_const_for_fn",
description: r##"Suggests the use of `const` in functions and methods where possible."##,
},
Lint {
label: "clippy::missing_docs_in_private_items",
- description: r##"Warns if there is missing doc for any documentable item
-(public or private)."##,
+ description: r##"Warns if there is missing doc for any private documentable item"##,
},
Lint {
label: "clippy::missing_enforced_import_renames",
@@ -5775,6 +11486,10 @@ in the `enforce-import-renames` config option."##,
return a `Result` type and warns if there is no `# Errors` section."##,
},
Lint {
+ label: "clippy::missing_fields_in_debug",
+ description: r##"Checks for manual [`core::fmt::Debug`](https://doc.rust-lang.org/core/fmt/trait.Debug.html) implementations that do not use all fields."##,
+ },
+ Lint {
label: "clippy::missing_inline_in_public_items",
description: r##"It lints if an exported function, method, trait method with default impl,
or trait method impl is not `#[inline]`."##,
@@ -5789,6 +11504,17 @@ may panic and warns if there is no `# Panics` section."##,
description: r##"Checks for the doc comments of publicly visible
unsafe functions and warns if there is no `# Safety` section."##,
},
+ Lint { label: "clippy::missing_spin_loop", description: r##"Checks for empty spin loops"## },
+ Lint {
+ label: "clippy::missing_trait_methods",
+ description: r##"Checks if a provided method is used implicitly by a trait
+implementation. A usage example would be a wrapper where every method
+should perform some operation before delegating to the inner type's
+implementation.
+
+This lint should typically be enabled on a specific trait `impl` item
+rather than globally."##,
+ },
Lint {
label: "clippy::mistyped_literal_suffixes",
description: r##"Warns for mistyped suffix in literals"##,
@@ -5799,8 +11525,14 @@ unsafe functions and warns if there is no `# Safety` section."##,
digits."##,
},
Lint {
+ label: "clippy::mixed_read_write_in_expression",
+ description: r##"Checks for a read and a write to the same variable where
+whether the read occurs before or after the write depends on the evaluation
+order of sub-expressions."##,
+ },
+ Lint {
label: "clippy::mod_module_files",
- description: r##"Checks that module layout uses only self named module files, bans mod.rs files."##,
+ description: r##"Checks that module layout uses only self named module files, bans `mod.rs` files."##,
},
Lint {
label: "clippy::module_inception",
@@ -5818,6 +11550,7 @@ containing module's name."##,
description: r##"Checks for getting the remainder of a division by one or minus
one."##,
},
+ Lint { label: "clippy::multi_assignments", description: r##"Checks for nested assignments."## },
Lint {
label: "clippy::multiple_crate_versions",
description: r##"Checks to see if multiple versions of a crate are being
@@ -5828,6 +11561,10 @@ used."##,
description: r##"Checks for multiple inherent implementations of a struct"##,
},
Lint {
+ label: "clippy::multiple_unsafe_ops_per_block",
+ description: r##"Checks for `unsafe` blocks that contain more than one unsafe operation."##,
+ },
+ Lint {
label: "clippy::must_use_candidate",
description: r##"Checks for public functions that have no
`#[must_use]` attribute, but return something not already marked
@@ -5840,8 +11577,12 @@ unit-returning functions and methods."##,
},
Lint {
label: "clippy::mut_from_ref",
- description: r##"This lint checks for functions that take immutable
-references and return mutable ones."##,
+ description: r##"This lint checks for functions that take immutable references and return
+mutable ones. This will not trigger if no unsafe code exists as there
+are multiple safe functions which will do this transformation
+
+To be on the conservative side, if there's at least one mutable
+reference with the output lifetime, this lint will not trigger."##,
},
Lint {
label: "clippy::mut_mut",
@@ -5861,11 +11602,11 @@ references and return mutable ones."##,
},
Lint {
label: "clippy::mutex_atomic",
- description: r##"Checks for usages of `Mutex<X>` where an atomic will do."##,
+ description: r##"Checks for usage of `Mutex<X>` where an atomic will do."##,
},
Lint {
label: "clippy::mutex_integer",
- description: r##"Checks for usages of `Mutex<X>` where `X` is an integral
+ description: r##"Checks for usage of `Mutex<X>` where `X` is an integral
type."##,
},
Lint { label: "clippy::naive_bytecount", description: r##"Checks for naive byte counts"## },
@@ -5876,7 +11617,7 @@ specify the `Self`-type explicitly"##,
},
Lint {
label: "clippy::needless_bitwise_bool",
- description: r##"Checks for uses of bitwise and/or operators between booleans, where performance may be improved by using
+ description: r##"Checks for usage of bitwise and/or operators between booleans, where performance may be improved by using
a lazy and."##,
},
Lint {
@@ -5885,16 +11626,27 @@ a lazy and."##,
false }` (or vice versa) and suggests using the condition directly."##,
},
Lint {
+ label: "clippy::needless_bool_assign",
+ description: r##"Checks for expressions of the form `if c { x = true } else { x = false }`
+(or vice versa) and suggest assigning the variable directly from the
+condition."##,
+ },
+ Lint {
label: "clippy::needless_borrow",
description: r##"Checks for address of operations (`&`) that are going to
be dereferenced immediately by the compiler."##,
},
Lint {
label: "clippy::needless_borrowed_reference",
- description: r##"Checks for bindings that destructure a reference and borrow the inner
+ description: r##"Checks for bindings that needlessly destructure a reference and borrow the inner
value with `&ref`."##,
},
Lint {
+ label: "clippy::needless_borrows_for_generic_args",
+ description: r##"Checks for borrow operations (`&`) that used as a generic argument to a
+function when the borrowed value could be used."##,
+ },
+ Lint {
label: "clippy::needless_collect",
description: r##"Checks for functions collecting an iterator when collect
is not needed."##,
@@ -5910,12 +11662,17 @@ rearrangement of code can make the code easier to understand."##,
label: "clippy::needless_doctest_main",
description: r##"Checks for `fn main() { .. }` in doctests"##,
},
+ Lint { label: "clippy::needless_else", description: r##"Checks for empty `else` branches."## },
Lint {
label: "clippy::needless_for_each",
description: r##"Checks for usage of `for_each` that would be more simply written as a
`for` loop."##,
},
Lint {
+ label: "clippy::needless_if",
+ description: r##"Checks for empty `if` branches with no else branch."##,
+ },
+ Lint {
label: "clippy::needless_late_init",
description: r##"Checks for late initializations that can be replaced by a `let` statement
with an initializer."##,
@@ -5926,17 +11683,42 @@ with an initializer."##,
relying on lifetime elision."##,
},
Lint {
+ label: "clippy::needless_match",
+ description: r##"Checks for unnecessary `match` or match-like `if let` returns for `Option` and `Result`
+when function signatures are the same."##,
+ },
+ Lint {
label: "clippy::needless_option_as_deref",
- description: r##"Checks for no-op uses of Option::{as_deref,as_deref_mut},
+ description: r##"Checks for no-op uses of `Option::{as_deref, as_deref_mut}`,
for example, `Option<&T>::as_deref()` returns the same type."##,
},
Lint {
+ label: "clippy::needless_option_take",
+ description: r##"Checks for calling `take` function after `as_ref`."##,
+ },
+ Lint {
+ label: "clippy::needless_parens_on_range_literals",
+ description: r##"The lint checks for parenthesis on literals in range statements that are
+superfluous."##,
+ },
+ Lint {
+ label: "clippy::needless_pass_by_ref_mut",
+ description: r##"Check if a `&mut` function argument is actually used mutably.
+
+Be careful if the function is publicly reexported as it would break compatibility with
+users of this function."##,
+ },
+ Lint {
label: "clippy::needless_pass_by_value",
description: r##"Checks for functions taking arguments by value, but not
consuming them in its
body."##,
},
Lint {
+ label: "clippy::needless_pub_self",
+ description: r##"Checks for usage of `pub(self)` and `pub(in self)`."##,
+ },
+ Lint {
label: "clippy::needless_question_mark",
description: r##"Suggests alternatives for useless applications of `?` in terminating expressions"##,
},
@@ -5946,12 +11728,24 @@ body."##,
collection just to get the values by index."##,
},
Lint {
+ label: "clippy::needless_raw_string_hashes",
+ description: r##"Checks for raw string literals with an unnecessary amount of hashes around them."##,
+ },
+ Lint {
+ label: "clippy::needless_raw_strings",
+ description: r##"Checks for raw string literals where a string literal can be used instead."##,
+ },
+ Lint {
label: "clippy::needless_return",
description: r##"Checks for return statements at the end of a block."##,
},
Lint {
+ label: "clippy::needless_return_with_question_mark",
+ description: r##"Checks for return statements on `Err` paired with the `?` operator."##,
+ },
+ Lint {
label: "clippy::needless_splitn",
- description: r##"Checks for usages of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same."##,
+ description: r##"Checks for usage of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same."##,
},
Lint {
label: "clippy::needless_update",
@@ -5985,7 +11779,7 @@ This lint is not applied to structs marked with
},
Lint {
label: "clippy::new_without_default",
- description: r##"Checks for types with a `fn new() -> Self` method and no
+ description: r##"Checks for public types with a `pub fn new() -> Self` method and no
implementation of
[`Default`](https://doc.rust-lang.org/std/default/trait.Default.html)."##,
},
@@ -5994,14 +11788,34 @@ implementation of
description: r##"Checks for statements which have no effect."##,
},
Lint {
+ label: "clippy::no_effect_replace",
+ description: r##"Checks for `replace` statements which have no effect."##,
+ },
+ Lint {
label: "clippy::no_effect_underscore_binding",
description: r##"Checks for binding to underscore prefixed variable without side-effects."##,
},
Lint {
+ label: "clippy::no_mangle_with_rust_abi",
+ description: r##"Checks for Rust ABI functions with the `#[no_mangle]` attribute."##,
+ },
+ Lint {
label: "clippy::non_ascii_literal",
description: r##"Checks for non-ASCII characters in string and char literals."##,
},
Lint {
+ label: "clippy::non_canonical_clone_impl",
+ description: r##"Checks for non-canonical implementations of `Clone` when `Copy` is already implemented."##,
+ },
+ Lint {
+ label: "clippy::non_canonical_partial_ord_impl",
+ description: r##"Checks for non-canonical implementations of `PartialOrd` when `Ord` is already implemented."##,
+ },
+ Lint {
+ label: "clippy::non_minimal_cfg",
+ description: r##"Checks for `any` and `all` combinators in `cfg` with only one condition."##,
+ },
+ Lint {
label: "clippy::non_octal_unix_permissions",
description: r##"Checks for non-octal values used to set Unix file permissions."##,
},
@@ -6033,12 +11847,20 @@ that make no sense."##,
arguments but are not marked `unsafe`."##,
},
Lint {
+ label: "clippy::obfuscated_if_else",
+ description: r##"Checks for usage of `.then_some(..).unwrap_or(..)`"##,
+ },
+ Lint {
label: "clippy::octal_escapes",
description: r##"Checks for `\\0` escapes in string and byte literals that look like octal
character escapes in C."##,
},
Lint { label: "clippy::ok_expect", description: r##"Checks for usage of `ok().expect(..)`."## },
Lint {
+ label: "clippy::only_used_in_recursion",
+ description: r##"Checks for arguments that are only used in recursion with no side-effects."##,
+ },
+ Lint {
label: "clippy::op_ref",
description: r##"Checks for arguments to `==` which have their address
taken to satisfy a bound
@@ -6046,7 +11868,7 @@ and suggests to dereference the other argument instead"##,
},
Lint {
label: "clippy::option_as_ref_deref",
- description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or it's aliases (such as String::as_str)."##,
+ description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or its aliases (such as String::as_str)."##,
},
Lint {
label: "clippy::option_env_unwrap",
@@ -6059,7 +11881,8 @@ suggests usage of the `env!` macro."##,
},
Lint {
label: "clippy::option_if_let_else",
- description: r##"Lints usage of `if let Some(v) = ... { y } else { x }` which is more
+ description: r##"Lints usage of `if let Some(v) = ... { y } else { x }` and
+`match .. { Some(v) => y, None/_ => x }` which are more
idiomatically done with `Option::map_or` (if the else bit is a pure
expression) or `Option::map_or_else` (if the else bit is an impure
expression)."##,
@@ -6075,14 +11898,19 @@ or closure that returns the unit type `()`."##,
},
Lint {
label: "clippy::option_option",
- description: r##"Checks for use of `Option<Option<_>>` in function signatures and type
+ description: r##"Checks for usage of `Option<Option<_>>` in function signatures and type
definitions"##,
},
Lint {
label: "clippy::or_fun_call",
description: r##"Checks for calls to `.or(foo(..))`, `.unwrap_or(foo(..))`,
-etc., and suggests to use `or_else`, `unwrap_or_else`, etc., or
-`unwrap_or_default` instead."##,
+`.or_insert(foo(..))` etc., and suggests to use `.or_else(|| foo(..))`,
+`.unwrap_or_else(|| foo(..))`, `.unwrap_or_default()` or `.or_default()`
+etc. instead."##,
+ },
+ Lint {
+ label: "clippy::or_then_unwrap",
+ description: r##"Checks for `.or(…).unwrap()` calls to Options and Results."##,
},
Lint {
label: "clippy::out_of_bounds_indexing",
@@ -6093,25 +11921,49 @@ index."##,
label: "clippy::overflow_check_conditional",
description: r##"Detects classic underflow/overflow checks."##,
},
+ Lint {
+ label: "clippy::overly_complex_bool_expr",
+ description: r##"Checks for boolean expressions that contain terminals that
+can be eliminated."##,
+ },
Lint { label: "clippy::panic", description: r##"Checks for usage of `panic!`."## },
Lint {
label: "clippy::panic_in_result_fn",
- description: r##"Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result."##,
+ description: r##"Checks for usage of `panic!` or assertions in a function of type result."##,
},
Lint {
label: "clippy::panicking_unwrap",
description: r##"Checks for calls of `unwrap[_err]()` that will always fail."##,
},
Lint {
+ label: "clippy::partial_pub_fields",
+ description: r##"Checks whether partial fields of a struct are public.
+
+Either make all fields of a type public, or make none of them public"##,
+ },
+ Lint {
label: "clippy::partialeq_ne_impl",
description: r##"Checks for manual re-implementations of `PartialEq::ne`."##,
},
Lint {
+ label: "clippy::partialeq_to_none",
+ description: r##"Checks for binary comparisons to a literal `Option::None`."##,
+ },
+ Lint {
label: "clippy::path_buf_push_overwrite",
description: r##"* Checks for [push](https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.push)
calls on `PathBuf` that can cause overwrites."##,
},
Lint {
+ label: "clippy::path_ends_with_ext",
+ description: r##"Looks for calls to `Path::ends_with` calls where the argument looks like a file extension.
+
+By default, Clippy has a short list of known filenames that start with a dot
+but aren't necessarily file extensions (e.g. the `.git` folder), which are allowed by default.
+The `allowed-dotfiles` configuration can be used to allow additional
+file extensions that Clippy should not lint."##,
+ },
+ Lint {
label: "clippy::pattern_type_mismatch",
description: r##"Checks for patterns that aren't exact representations of the types
they are applied to.
@@ -6133,6 +11985,10 @@ this lint can still be used to highlight areas of interest and ensure a good und
of ownership semantics."##,
},
Lint {
+ label: "clippy::permissions_set_readonly_false",
+ description: r##"Checks for calls to `std::fs::Permissions.set_readonly` with argument `false`."##,
+ },
+ Lint {
label: "clippy::possible_missing_comma",
description: r##"Checks for possible missing comma in an array. It lints if
an array element is a binary operator expression and it lies on two lines."##,
@@ -6148,6 +12004,11 @@ numeric literal)
followed by a method call"##,
},
Lint {
+ label: "clippy::print_in_format_impl",
+ description: r##"Checks for usage of `println`, `print`, `eprintln` or `eprint` in an
+implementation of a formatting trait."##,
+ },
+ Lint {
label: "clippy::print_literal",
description: r##"This lint warns about the use of literals as `print!`/`println!` args."##,
},
@@ -6173,16 +12034,20 @@ print a newline."##,
},
Lint {
label: "clippy::ptr_arg",
- description: r##"This lint checks for function arguments of type `&String`
-or `&Vec` unless the references are mutable. It will also suggest you
-replace `.clone()` calls with the appropriate `.to_owned()`/`to_string()`
-calls."##,
+ description: r##"This lint checks for function arguments of type `&String`, `&Vec`,
+`&PathBuf`, and `Cow<_>`. It will also suggest you replace `.clone()` calls
+with the appropriate `.to_owned()`/`to_string()` calls."##,
},
Lint {
label: "clippy::ptr_as_ptr",
description: r##"Checks for `as` casts between raw pointers without changing its mutability,
namely `*const T` to `*const U` and `*mut T` to `*mut U`."##,
},
+ Lint {
+ label: "clippy::ptr_cast_constness",
+ description: r##"Checks for `as` casts between raw pointers which change its constness, namely `*const T` to
+`*mut T` and `*mut T` to `*const T`."##,
+ },
Lint { label: "clippy::ptr_eq", description: r##"Use `std::ptr::eq` when applicable"## },
Lint {
label: "clippy::ptr_offset_with_cast",
@@ -6193,11 +12058,27 @@ namely `*const T` to `*const U` and `*mut T` to `*mut U`."##,
label: "clippy::pub_enum_variant_names",
description: r##"Nothing. This lint has been deprecated."##,
},
+ Lint { label: "clippy::pub_use", description: r##"Restricts the usage of `pub use ...`"## },
+ Lint {
+ label: "clippy::pub_with_shorthand",
+ description: r##"Checks for usage of `pub(<loc>)` with `in`."##,
+ },
+ Lint {
+ label: "clippy::pub_without_shorthand",
+ description: r##"Checks for usage of `pub(<loc>)` without `in`.
+
+Note: As you cannot write a module's path in `pub(<loc>)`, this will only trigger on
+`pub(super)` and the like."##,
+ },
Lint {
label: "clippy::question_mark",
description: r##"Checks for expressions that could be replaced by the question mark operator."##,
},
Lint {
+ label: "clippy::question_mark_used",
+ description: r##"Checks for expressions that use the question mark operator and rejects them."##,
+ },
+ Lint {
label: "clippy::range_minus_one",
description: r##"Checks for inclusive ranges where 1 is subtracted from
the upper bound, e.g., `x..=(y-1)`."##,
@@ -6220,10 +12101,49 @@ upper bound, e.g., `x..(y+1)`."##,
label: "clippy::rc_buffer",
description: r##"Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`."##,
},
+ Lint {
+ label: "clippy::rc_clone_in_vec_init",
+ description: r##"Checks for reference-counted pointers (`Arc`, `Rc`, `rc::Weak`, and `sync::Weak`)
+in `vec![elem; len]`"##,
+ },
Lint { label: "clippy::rc_mutex", description: r##"Checks for `Rc<Mutex<T>>`."## },
Lint {
+ label: "clippy::read_line_without_trim",
+ description: r##"Looks for calls to [`Stdin::read_line`] to read a line from the standard input
+into a string, then later attempting to parse this string into a type without first trimming it, which will
+always fail because the string has a trailing newline in it."##,
+ },
+ Lint {
+ label: "clippy::read_zero_byte_vec",
+ description: r##"This lint catches reads into a zero-length `Vec`.
+Especially in the case of a call to `with_capacity`, this lint warns that read
+gets the number of bytes from the `Vec`'s length, not its capacity."##,
+ },
+ Lint {
+ label: "clippy::readonly_write_lock",
+ description: r##"Looks for calls to `RwLock::write` where the lock is only used for reading."##,
+ },
+ Lint {
+ label: "clippy::recursive_format_impl",
+ description: r##"Checks for format trait implementations (e.g. `Display`) with a recursive call to itself
+which uses `self` as a parameter.
+This is typically done indirectly with the `write!` macro or with `to_string()`."##,
+ },
+ Lint {
label: "clippy::redundant_allocation",
- description: r##"Checks for use of redundant allocations anywhere in the code."##,
+ description: r##"Checks for usage of redundant allocations anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::redundant_as_str",
+ description: r##"Checks for usage of `as_str()` on a `String`` chained with a method available on the `String` itself."##,
+ },
+ Lint {
+ label: "clippy::redundant_async_block",
+ description: r##"Checks for `async` block that only returns `await` on a future."##,
+ },
+ Lint {
+ label: "clippy::redundant_at_rest_pattern",
+ description: r##"Checks for `[all @ ..]` patterns."##,
},
Lint {
label: "clippy::redundant_clone",
@@ -6247,6 +12167,10 @@ are defined."##,
argument and can be replaced by referencing the method directly."##,
},
Lint {
+ label: "clippy::redundant_comparisons",
+ description: r##"Checks for ineffective double comparisons against constants."##,
+ },
+ Lint {
label: "clippy::redundant_else",
description: r##"Checks for `else` blocks that can be removed without changing semantics."##,
},
@@ -6260,6 +12184,14 @@ argument and can be replaced by referencing the method directly."##,
could be used."##,
},
Lint {
+ label: "clippy::redundant_guards",
+ description: r##"Checks for unnecessary guards in match expressions."##,
+ },
+ Lint {
+ label: "clippy::redundant_locals",
+ description: r##"Checks for redundant redefinitions of local bindings."##,
+ },
+ Lint {
label: "clippy::redundant_pattern",
description: r##"Checks for patterns in the form `name @ _`."##,
},
@@ -6283,19 +12215,22 @@ do not change the type."##,
description: r##"Checks for constants and statics with an explicit `'static` lifetime."##,
},
Lint {
- label: "clippy::ref_binding_to_reference",
- description: r##"Checks for `ref` bindings which create a reference to a reference."##,
+ label: "clippy::redundant_type_annotations",
+ description: r##"Warns about needless / redundant type annotations."##,
},
Lint {
- label: "clippy::ref_in_deref",
- description: r##"Checks for references in expressions that use
-auto dereference."##,
+ label: "clippy::ref_binding_to_reference",
+ description: r##"Checks for `ref` bindings which create a reference to a reference."##,
},
Lint {
label: "clippy::ref_option_ref",
description: r##"Checks for usage of `&Option<&T>`."##,
},
Lint {
+ label: "clippy::ref_patterns",
+ description: r##"Checks for usages of the `ref` keyword."##,
+ },
+ Lint {
label: "clippy::regex_macro",
description: r##"Nothing. This lint has been deprecated."##,
},
@@ -6314,10 +12249,19 @@ they are equivalent to `1`. (Related discussion in [rust-clippy#7306](https://gi
description: r##"Nothing. This lint has been deprecated."##,
},
Lint {
+ label: "clippy::reserve_after_initialization",
+ description: r##"Informs the user about a more concise way to create a vector with a known capacity."##,
+ },
+ Lint {
label: "clippy::rest_pat_in_fully_bound_structs",
description: r##"Checks for unnecessary '..' pattern binding on struct when all fields are explicitly matched."##,
},
Lint {
+ label: "clippy::result_large_err",
+ description: r##"Checks for functions that return `Result` with an unusually large
+`Err`-variant."##,
+ },
+ Lint {
label: "clippy::result_map_or_into_option",
description: r##"Checks for usage of `_.map_or(None, Some)`."##,
},
@@ -6339,7 +12283,7 @@ implements `std::error::Error`."##,
Lint {
label: "clippy::reversed_empty_ranges",
description: r##"Checks for range expressions `x..y` where both `x` and `y`
-are constant and `x` is greater or equal to `y`."##,
+are constant and `x` is greater to `y`. Also triggers if `x` is equal to `y` when they are conditions to a `for` loop."##,
},
Lint {
label: "clippy::same_functions_in_if_condition",
@@ -6361,6 +12305,16 @@ one from a trait, another not from trait."##,
`position()`, or `rposition()`) followed by a call to `is_some()` or `is_none()`."##,
},
Lint {
+ label: "clippy::seek_from_current",
+ description: r##"Checks an argument of `seek` method of `Seek` trait
+and if it start seek from `SeekFrom::Current(0)`, suggests `stream_position` instead."##,
+ },
+ Lint {
+ label: "clippy::seek_to_start_instead_of_rewind",
+ description: r##"Checks for jumps to the start of a stream that implements `Seek`
+and uses the `seek` method providing `Start` as parameter."##,
+ },
+ Lint {
label: "clippy::self_assignment",
description: r##"Checks for explicit self-assignments."##,
},
@@ -6370,7 +12324,7 @@ one from a trait, another not from trait."##,
},
Lint {
label: "clippy::self_named_module_files",
- description: r##"Checks that module layout uses only mod.rs files."##,
+ description: r##"Checks that module layout uses only `mod.rs` files."##,
},
Lint {
label: "clippy::semicolon_if_nothing_returned",
@@ -6378,6 +12332,15 @@ one from a trait, another not from trait."##,
`()` but is not followed by a semicolon."##,
},
Lint {
+ label: "clippy::semicolon_inside_block",
+ description: r##"Suggests moving the semicolon after a block to the inside of the block, after its last
+expression."##,
+ },
+ Lint {
+ label: "clippy::semicolon_outside_block",
+ description: r##"Suggests moving the semicolon from a block's final expression outside of the block."##,
+ },
+ Lint {
label: "clippy::separated_literal_suffix",
description: r##"Warns if literal suffixes are separated by an underscore.
To enforce separated literal suffix style,
@@ -6421,8 +12384,31 @@ post](http://llogiq.github.io/2015/07/30/traits.html) for further
information) instead of an inherent implementation."##,
},
Lint {
+ label: "clippy::should_panic_without_expect",
+ description: r##"Checks for `#[should_panic]` attributes without specifying the expected panic message."##,
+ },
+ Lint {
+ label: "clippy::significant_drop_in_scrutinee",
+ description: r##"Checks for temporaries returned from function calls in a match scrutinee that have the
+`clippy::has_significant_drop` attribute."##,
+ },
+ Lint {
+ label: "clippy::significant_drop_tightening",
+ description: r##"Searches for elements marked with `#[clippy::has_significant_drop]` that could be early
+dropped but are in fact dropped at the end of their scopes. In other words, enforces the
+tightening of their possible lifetimes."##,
+ },
+ Lint {
label: "clippy::similar_names",
- description: r##"Checks for names that are very similar and thus confusing."##,
+ description: r##"Checks for names that are very similar and thus confusing.
+
+Note: this lint looks for similar names throughout each
+scope. To allow it, you need to allow it on the scope
+level, not on the name that is reported."##,
+ },
+ Lint {
+ label: "clippy::single_call_fn",
+ description: r##"Checks for functions that are only used once. Does not lint tests."##,
},
Lint {
label: "clippy::single_char_add_str",
@@ -6430,6 +12416,11 @@ information) instead of an inherent implementation."##,
where `push`/`insert` with a `char` would work fine."##,
},
Lint {
+ label: "clippy::single_char_lifetime_names",
+ description: r##"Checks for lifetimes with names which are one character
+long."##,
+ },
+ Lint {
label: "clippy::single_char_pattern",
description: r##"Checks for string methods that receive a single-character
`str` as an argument, e.g., `_.split(x)`."##,
@@ -6445,7 +12436,12 @@ where `push`/`insert` with a `char` would work fine."##,
Lint {
label: "clippy::single_match",
description: r##"Checks for matches with a single arm where an `if let`
-will usually suffice."##,
+will usually suffice.
+
+This intentionally does not lint if there are comments
+inside of the other arm, so as to allow the user to document
+why having another explicit pattern with an empty body is necessary,
+or because the comments need to be preserved for other reasons."##,
},
Lint {
label: "clippy::single_match_else",
@@ -6453,12 +12449,21 @@ will usually suffice."##,
usually suffice."##,
},
Lint {
+ label: "clippy::single_range_in_vec_init",
+ description: r##"Checks for `Vec` or array initializations that contain only one range."##,
+ },
+ Lint {
label: "clippy::size_of_in_element_count",
description: r##"Detects expressions where
`size_of::<T>` or `size_of_val::<T>` is used as a
count of elements of type `T`"##,
},
Lint {
+ label: "clippy::size_of_ref",
+ description: r##"Checks for calls to `std::mem::size_of_val()` where the argument is
+a reference to a reference."##,
+ },
+ Lint {
label: "clippy::skip_while_next",
description: r##"Checks for usage of `_.skip_while(condition).next()`."##,
},
@@ -6469,10 +12474,18 @@ count of elements of type `T`"##,
Lint {
label: "clippy::stable_sort_primitive",
description: r##"When sorting primitive values (integers, bools, chars, as well
-as arrays, slices, and tuples of such items), it is better to
+as arrays, slices, and tuples of such items), it is typically better to
use an unstable sort than a stable sort."##,
},
Lint {
+ label: "clippy::std_instead_of_alloc",
+ description: r##"Finds items imported through `std` when available through `alloc`."##,
+ },
+ Lint {
+ label: "clippy::std_instead_of_core",
+ description: r##"Finds items imported through `std` when available through `core`."##,
+ },
+ Lint {
label: "clippy::str_to_string",
description: r##"This lint checks for `.to_string()` method calls on values of type `&str`."##,
},
@@ -6502,6 +12515,10 @@ match."##,
that contain only ASCII characters."##,
},
Lint {
+ label: "clippy::string_lit_chars_any",
+ description: r##"Checks for `<string_lit>.chars().any(|i| i == c)`."##,
+ },
+ Lint {
label: "clippy::string_slice",
description: r##"Checks for slice operations on strings"##,
},
@@ -6532,10 +12549,19 @@ subtracting elements in an Add impl."##,
},
Lint {
label: "clippy::suspicious_assignment_formatting",
- description: r##"Checks for use of the nonexistent `=*`, `=!` and `=-`
+ description: r##"Checks for usage of the non-existent `=*`, `=!` and `=-`
operators."##,
},
Lint {
+ label: "clippy::suspicious_command_arg_space",
+ description: r##"Checks for `Command::arg()` invocations that look like they
+should be multiple arguments instead, such as `arg(-t ext2)`."##,
+ },
+ Lint {
+ label: "clippy::suspicious_doc_comments",
+ description: r##"Detects the use of outer doc comments (`///`, `/**`) followed by a bang (`!`): `///!`"##,
+ },
+ Lint {
label: "clippy::suspicious_else_formatting",
description: r##"Checks for formatting of `else`. It lints if the `else`
is followed immediately by a newline or the `else` seems to be missing."##,
@@ -6562,12 +12588,24 @@ of binary operators nearby."##,
related functions with either zero or one splits."##,
},
Lint {
+ label: "clippy::suspicious_to_owned",
+ description: r##"Checks for the usage of `_.to_owned()`, on a `Cow<'_, _>`."##,
+ },
+ Lint {
label: "clippy::suspicious_unary_op_formatting",
description: r##"Checks the formatting of a unary operator on the right hand side
of a binary operator. It lints if there is no space between the binary and unary operators,
but there is a space between the unary and its operand."##,
},
Lint {
+ label: "clippy::suspicious_xor_used_as_pow",
+ description: r##"Warns for a Bitwise XOR (`^`) operator being probably confused as a powering. It will not trigger if any of the numbers are not in decimal."##,
+ },
+ Lint {
+ label: "clippy::swap_ptr_to_ref",
+ description: r##"Checks for calls to `core::mem::swap` where either parameter is derived from a pointer"##,
+ },
+ Lint {
label: "clippy::tabs_in_doc_comments",
description: r##"Checks doc comments for usage of tab characters."##,
},
@@ -6577,12 +12615,13 @@ but there is a space between the unary and its operand."##,
assign a value in it."##,
},
Lint {
- label: "clippy::to_digit_is_some",
- description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##,
+ label: "clippy::tests_outside_test_module",
+ description: r##"Triggers when a testing function (marked with the `#[test]` attribute) isn't inside a testing module
+(marked with `#[cfg(test)]`)."##,
},
Lint {
- label: "clippy::to_string_in_display",
- description: r##"Checks for uses of `to_string()` in `Display` traits."##,
+ label: "clippy::to_digit_is_some",
+ description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##,
},
Lint {
label: "clippy::to_string_in_format_args",
@@ -6610,7 +12649,7 @@ in a macro that does formatting."##,
},
Lint {
label: "clippy::trait_duplication_in_bounds",
- description: r##"Checks for cases where generics are being used and multiple
+ description: r##"Checks for cases where generics or trait objects are being used and multiple
syntax specifications for trait bounds are used simultaneously."##,
},
Lint {
@@ -6634,6 +12673,15 @@ syntax specifications for trait bounds are used simultaneously."##,
description: r##"Checks for transmutes from an integer to a float."##,
},
Lint {
+ label: "clippy::transmute_int_to_non_zero",
+ description: r##"Checks for transmutes from integers to `NonZero*` types, and suggests their `new_unchecked`
+method instead."##,
+ },
+ Lint {
+ label: "clippy::transmute_null_to_fn",
+ description: r##"Checks for null function pointer creation through transmute."##,
+ },
+ Lint {
label: "clippy::transmute_num_to_bytes",
description: r##"Checks for transmutes from a number to an array of `u8`"##,
},
@@ -6647,6 +12695,11 @@ from a reference to a reference."##,
description: r##"Checks for transmutes from a pointer to a reference."##,
},
Lint {
+ label: "clippy::transmute_undefined_repr",
+ description: r##"Checks for transmutes between types which do not have a representation defined relative to
+each other."##,
+ },
+ Lint {
label: "clippy::transmutes_expressible_as_ptr_casts",
description: r##"Checks for transmutes that could be a pointer cast."##,
},
@@ -6655,6 +12708,10 @@ from a reference to a reference."##,
description: r##"Checks for transmute calls which would receive a null pointer."##,
},
Lint {
+ label: "clippy::trim_split_whitespace",
+ description: r##"Warns about calling `str::trim` (or variants) before `str::split_whitespace`."##,
+ },
+ Lint {
label: "clippy::trivial_regex",
description: r##"Checks for trivial [regex](https://crates.io/crates/regex)
creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`)."##,
@@ -6665,25 +12722,51 @@ creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`)."##,
the argument type is `Copy` and small enough to be more efficient to always
pass by value."##,
},
- Lint { label: "clippy::try_err", description: r##"Checks for usages of `Err(x)?`."## },
+ Lint { label: "clippy::try_err", description: r##"Checks for usage of `Err(x)?`."## },
+ Lint {
+ label: "clippy::tuple_array_conversions",
+ description: r##"Checks for tuple<=>array conversions that are not done with `.into()`."##,
+ },
Lint {
label: "clippy::type_complexity",
description: r##"Checks for types used in structs, parameters and `let`
declarations above a certain complexity threshold."##,
},
Lint {
+ label: "clippy::type_id_on_box",
+ description: r##"Looks for calls to `<Box<dyn Any> as Any>::type_id`."##,
+ },
+ Lint {
label: "clippy::type_repetition_in_bounds",
description: r##"This lint warns about unnecessary type repetitions in trait bounds"##,
},
Lint {
- label: "clippy::undocumented_unsafe_blocks",
- description: r##"Checks for `unsafe` blocks without a `// Safety: ` comment
-explaining why the unsafe operations performed inside
-the block are safe."##,
+ label: "clippy::unchecked_duration_subtraction",
+ description: r##"Lints subtraction between an [`Instant`] and a [`Duration`]."##,
},
Lint {
- label: "clippy::undropped_manually_drops",
- description: r##"Prevents the safe `std::mem::drop` function from being called on `std::mem::ManuallyDrop`."##,
+ label: "clippy::undocumented_unsafe_blocks",
+ description: r##"Checks for `unsafe` blocks and impls without a `// SAFETY: ` comment
+explaining why the unsafe operations performed inside
+the block are safe.
+
+Note the comment must appear on the line(s) preceding the unsafe block
+with nothing appearing in between. The following is ok:
+```rust
+foo(
+ // SAFETY:
+ // This is a valid safety comment
+ unsafe { *x }
+)
+```
+But neither of these are:
+```rust
+// SAFETY:
+// This is not a valid safety comment
+foo(
+ /* SAFETY: Neither is this */ unsafe { *x },
+);
+```"##,
},
Lint {
label: "clippy::unicode_not_nfc",
@@ -6706,6 +12789,11 @@ This is commonly caused by calling `set_len()` right after allocating or
reserving a buffer with `new()`, `default()`, `with_capacity()`, or `reserve()`."##,
},
Lint {
+ label: "clippy::uninlined_format_args",
+ description: r##"Detect when a variable is not inlined in a format string,
+and suggests to inline it."##,
+ },
+ Lint {
label: "clippy::unit_arg",
description: r##"Checks for passing a unit value as an argument to a function without using a
unit literal (`()`)."##,
@@ -6723,23 +12811,40 @@ Fn(...) -> Ord where the implemented closure returns the unit type.
The lint also suggests to remove the semi-colon at the end of the statement if present."##,
},
Lint {
+ label: "clippy::unnecessary_box_returns",
+ description: r##"Checks for a return type containing a `Box<T>` where `T` implements `Sized`
+
+The lint ignores `Box<T>` where `T` is larger than `unnecessary_box_size`,
+as returning a large `T` directly may be detrimental to performance."##,
+ },
+ Lint {
label: "clippy::unnecessary_cast",
- description: r##"Checks for casts to the same type, casts of int literals to integer types
-and casts of float literals to float types."##,
+ description: r##"Checks for casts to the same type, casts of int literals to integer types, casts of float
+literals to float types and casts between raw pointers without changing type or constness."##,
},
Lint {
label: "clippy::unnecessary_filter_map",
- description: r##"Checks for `filter_map` calls which could be replaced by `filter` or `map`.
+ description: r##"Checks for `filter_map` calls that could be replaced by `filter` or `map`.
More specifically it checks if the closure provided is only performing one of the
filter or map operations and suggests the appropriate option."##,
},
Lint {
+ label: "clippy::unnecessary_find_map",
+ description: r##"Checks for `find_map` calls that could be replaced by `find` or `map`. More
+specifically it checks if the closure provided is only performing one of the
+find or map operations and suggests the appropriate option."##,
+ },
+ Lint {
label: "clippy::unnecessary_fold",
- description: r##"Checks for using `fold` when a more succinct alternative exists.
+ description: r##"Checks for usage of `fold` when a more succinct alternative exists.
Specifically, this checks for `fold`s which could be replaced by `any`, `all`,
`sum` or `product`."##,
},
Lint {
+ label: "clippy::unnecessary_join",
+ description: r##"Checks for usage of `.collect::<Vec<String>>().join()` on iterators."##,
+ },
+ Lint {
label: "clippy::unnecessary_lazy_evaluations",
description: r##"As the counterpart to `or_fun_call`, this lint looks for unnecessary
lazily evaluated closures on `Option` and `Result`.
@@ -6750,7 +12855,16 @@ simpler code:
- `and_then` to `and`
- `or_else` to `or`
- `get_or_insert_with` to `get_or_insert`
- - `ok_or_else` to `ok_or`"##,
+ - `ok_or_else` to `ok_or`
+ - `then` to `then_some` (for msrv >= 1.62.0)"##,
+ },
+ Lint {
+ label: "clippy::unnecessary_literal_unwrap",
+ description: r##"Checks for `.unwrap()` related calls on `Result`s and `Option`s that are constructed."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_map_on_constructor",
+ description: r##"Suggest removing the use of a may (or map_err) method when an Option or Result is being construted."##,
},
Lint {
label: "clippy::unnecessary_mut_passed",
@@ -6763,15 +12877,33 @@ requires an immutable reference."##,
sub-expression."##,
},
Lint {
+ label: "clippy::unnecessary_owned_empty_strings",
+ description: r##"Detects cases of owned empty strings being passed as an argument to a function expecting `&str`"##,
+ },
+ Lint {
+ label: "clippy::unnecessary_safety_comment",
+ description: r##"Checks for `// SAFETY: ` comments on safe code."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_safety_doc",
+ description: r##"Checks for the doc comments of publicly visible
+safe functions and traits and warns if there is a `# Safety` section."##,
+ },
+ Lint {
label: "clippy::unnecessary_self_imports",
description: r##"Checks for imports ending in `::{self}`."##,
},
Lint {
label: "clippy::unnecessary_sort_by",
- description: r##"Detects uses of `Vec::sort_by` passing in a closure
+ description: r##"Checks for usage of `Vec::sort_by` passing in a closure
which compares the two arguments, either directly or indirectly."##,
},
Lint {
+ label: "clippy::unnecessary_struct_initialization",
+ description: r##"Checks for initialization of a `struct` by copying a base without setting
+any field."##,
+ },
+ Lint {
label: "clippy::unnecessary_to_owned",
description: r##"Checks for unnecessary calls to [`ToOwned::to_owned`](https://doc.rust-lang.org/std/borrow/trait.ToOwned.html#tymethod.to_owned)
and other `to_owned`-like functions."##,
@@ -6855,10 +12987,24 @@ types have different ABI, size or alignment."##,
description: r##"Nothing. This lint has been deprecated."##,
},
Lint {
+ label: "clippy::unused_format_specs",
+ description: r##"Detects [formatting parameters] that have no effect on the output of
+`format!()`, `println!()` or similar macros."##,
+ },
+ Lint {
label: "clippy::unused_io_amount",
description: r##"Checks for unused written/read amount."##,
},
Lint {
+ label: "clippy::unused_peekable",
+ description: r##"Checks for the creation of a `peekable` iterator that is never `.peek()`ed"##,
+ },
+ Lint {
+ label: "clippy::unused_rounding",
+ description: r##"Detects cases where a whole-number literal float is being rounded, using
+the `floor`, `ceil`, or `round` methods."##,
+ },
+ Lint {
label: "clippy::unused_self",
description: r##"Checks methods that contain a `self` argument but don't use it"##,
},
@@ -6876,13 +13022,17 @@ by nibble or byte."##,
description: r##"Checks for functions of type `Result` that contain `expect()` or `unwrap()`"##,
},
Lint {
- label: "clippy::unwrap_or_else_default",
- description: r##"Checks for usages of `_.unwrap_or_else(Default::default)` on `Option` and
-`Result` values."##,
+ label: "clippy::unwrap_or_default",
+ description: r##"Checks for usages of the following functions with an argument that constructs a default value
+(e.g., `Default::default` or `String::new`):
+- `unwrap_or`
+- `unwrap_or_else`
+- `or_insert`
+- `or_insert_with`"##,
},
Lint {
label: "clippy::unwrap_used",
- description: r##"Checks for `.unwrap()` calls on `Option`s and on `Result`s."##,
+ description: r##"Checks for `.unwrap()` or `.unwrap_err()` calls on `Result`s and `.unwrap()` call on `Option`s."##,
},
Lint {
label: "clippy::upper_case_acronyms",
@@ -6890,7 +13040,7 @@ by nibble or byte."##,
},
Lint {
label: "clippy::use_debug",
- description: r##"Checks for use of `Debug` formatting. The purpose of this
+ description: r##"Checks for usage of `Debug` formatting. The purpose of this
lint is to catch debugging remnants."##,
},
Lint {
@@ -6913,10 +13063,17 @@ types before and after the call are the same."##,
description: r##"Checks for `extern crate` and `use` items annotated with
lint attributes.
-This lint permits `#[allow(unused_imports)]`, `#[allow(deprecated)]`,
-`#[allow(unreachable_pub)]`, `#[allow(clippy::wildcard_imports)]` and
-`#[allow(clippy::enum_glob_use)]` on `use` items and `#[allow(unused_imports)]` on
-`extern crate` items with a `#[macro_use]` attribute."##,
+This lint permits lint attributes for lints emitted on the items themself.
+For `use` items these lints are:
+* deprecated
+* unreachable_pub
+* unused_imports
+* clippy::enum_glob_use
+* clippy::macro_use_imports
+* clippy::wildcard_imports
+
+For `extern crate` items these lints are:
+* `unused_imports` on items with `#[macro_use]`"##,
},
Lint {
label: "clippy::useless_conversion",
@@ -6940,17 +13097,24 @@ and transmutes that could be a cast."##,
},
Lint {
label: "clippy::useless_vec",
- description: r##"Checks for usage of `&vec![..]` when using `&[..]` would
+ description: r##"Checks for usage of `vec![..]` when using `[..]` would
be possible."##,
},
Lint {
label: "clippy::vec_box",
- description: r##"Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
+ description: r##"Checks for usage of `Vec<Box<T>>` where T: Sized anywhere in the code.
Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
},
Lint {
label: "clippy::vec_init_then_push",
- description: r##"Checks for calls to `push` immediately after creating a new `Vec`."##,
+ description: r##"Checks for calls to `push` immediately after creating a new `Vec`.
+
+If the `Vec` is created using `with_capacity` this will only lint if the capacity is a
+constant and the number of pushes is greater than or equal to the initial capacity.
+
+If the `Vec` is extended after the initial sequence of pushes and it was default initialized
+then this will only lint after there were at least four pushes. This number may change in
+the future."##,
},
Lint {
label: "clippy::vec_resize_to_zero",
@@ -6963,7 +13127,7 @@ to `trailing_zeros`"##,
},
Lint {
label: "clippy::verbose_file_reads",
- description: r##"Checks for use of File::read_to_end and File::read_to_string."##,
+ description: r##"Checks for usage of File::read_to_end and File::read_to_string."##,
},
Lint {
label: "clippy::vtable_address_comparisons",
@@ -7020,18 +13184,19 @@ print a newline."##,
},
Lint {
label: "clippy::wrong_self_convention",
- description: r##"Checks for methods with certain name prefixes and which
-doesn't match how self is taken. The actual rules are:
-
-|Prefix |Postfix |`self` taken | `self` type |
-|-------|------------|-----------------------|--------------|
-|`as_` | none |`&self` or `&mut self` | any |
-|`from_`| none | none | any |
-|`into_`| none |`self` | any |
-|`is_` | none |`&self` or none | any |
-|`to_` | `_mut` |`&mut self` | any |
-|`to_` | not `_mut` |`self` | `Copy` |
-|`to_` | not `_mut` |`&self` | not `Copy` |
+ description: r##"Checks for methods with certain name prefixes or suffixes, and which
+do not adhere to standard conventions regarding how `self` is taken.
+The actual rules are:
+
+|Prefix |Postfix |`self` taken | `self` type |
+|-------|------------|-------------------------------|--------------|
+|`as_` | none |`&self` or `&mut self` | any |
+|`from_`| none | none | any |
+|`into_`| none |`self` | any |
+|`is_` | none |`&mut self` or `&self` or none | any |
+|`to_` | `_mut` |`&mut self` | any |
+|`to_` | not `_mut` |`self` | `Copy` |
+|`to_` | not `_mut` |`&self` | not `Copy` |
Note: Clippy doesn't trigger methods with `to_` prefix in:
- Traits definition.
@@ -7086,15 +13251,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::complexity",
- description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrowed_box, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::manual_filter_map, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_borrowed_reference, clippy::needless_lifetimes, clippy::needless_option_as_deref, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_closure_call, clippy::redundant_slicing, clippy::ref_in_deref, clippy::repeat_once, clippy::result_map_unit_fn, clippy::search_is_some, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, 
clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##,
+ description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, 
clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##,
},
children: &[
"clippy::bind_instead_of_map",
"clippy::bool_comparison",
+ "clippy::borrow_deref_ref",
"clippy::borrowed_box",
+ "clippy::bytes_count_to_len",
"clippy::char_lit_as_u8",
"clippy::clone_on_copy",
"clippy::crosspointer_transmute",
+ "clippy::default_constructed_unit_structs",
"clippy::deprecated_cfg_attr",
"clippy::deref_addrof",
"clippy::derivable_impls",
@@ -7102,9 +13270,12 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::double_comparisons",
"clippy::double_parens",
"clippy::duration_subsec",
+ "clippy::excessive_nesting",
+ "clippy::explicit_auto_deref",
"clippy::explicit_counter_loop",
"clippy::explicit_write",
"clippy::extra_unused_lifetimes",
+ "clippy::extra_unused_type_parameters",
"clippy::filter_map_identity",
"clippy::filter_next",
"clippy::flat_map_identity",
@@ -7113,9 +13284,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::inspect_for_each",
"clippy::int_plus_one",
"clippy::iter_count",
+ "clippy::iter_kv_map",
+ "clippy::let_with_type_underscore",
+ "clippy::manual_filter",
"clippy::manual_filter_map",
+ "clippy::manual_find",
"clippy::manual_find_map",
"clippy::manual_flatten",
+ "clippy::manual_hash_one",
+ "clippy::manual_main_separator_str",
+ "clippy::manual_range_patterns",
+ "clippy::manual_rem_euclid",
+ "clippy::manual_slice_size_calculation",
"clippy::manual_split_once",
"clippy::manual_strip",
"clippy::manual_swap",
@@ -7126,29 +13306,41 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::match_single_binding",
"clippy::needless_arbitrary_self_type",
"clippy::needless_bool",
+ "clippy::needless_bool_assign",
"clippy::needless_borrowed_reference",
+ "clippy::needless_if",
"clippy::needless_lifetimes",
+ "clippy::needless_match",
"clippy::needless_option_as_deref",
+ "clippy::needless_option_take",
"clippy::needless_question_mark",
"clippy::needless_splitn",
"clippy::needless_update",
"clippy::neg_cmp_op_on_partial_ord",
"clippy::no_effect",
"clippy::nonminimal_bool",
+ "clippy::only_used_in_recursion",
"clippy::option_as_ref_deref",
"clippy::option_filter_map",
"clippy::option_map_unit_fn",
+ "clippy::or_then_unwrap",
"clippy::overflow_check_conditional",
"clippy::partialeq_ne_impl",
"clippy::precedence",
"clippy::ptr_offset_with_cast",
"clippy::range_zip_with_len",
+ "clippy::redundant_as_str",
+ "clippy::redundant_async_block",
+ "clippy::redundant_at_rest_pattern",
"clippy::redundant_closure_call",
+ "clippy::redundant_guards",
"clippy::redundant_slicing",
- "clippy::ref_in_deref",
"clippy::repeat_once",
+ "clippy::reserve_after_initialization",
"clippy::result_map_unit_fn",
"clippy::search_is_some",
+ "clippy::seek_from_current",
+ "clippy::seek_to_start_instead_of_rewind",
"clippy::short_circuit_statement",
"clippy::single_element_loop",
"clippy::skip_while_next",
@@ -7161,6 +13353,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::transmute_int_to_bool",
"clippy::transmute_int_to_char",
"clippy::transmute_int_to_float",
+ "clippy::transmute_int_to_non_zero",
"clippy::transmute_num_to_bytes",
"clippy::transmute_ptr_to_ref",
"clippy::transmutes_expressible_as_ptr_casts",
@@ -7168,13 +13361,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::unit_arg",
"clippy::unnecessary_cast",
"clippy::unnecessary_filter_map",
+ "clippy::unnecessary_find_map",
+ "clippy::unnecessary_literal_unwrap",
+ "clippy::unnecessary_map_on_constructor",
"clippy::unnecessary_operation",
"clippy::unnecessary_sort_by",
"clippy::unnecessary_unwrap",
"clippy::unneeded_wildcard_pattern",
+ "clippy::unused_format_specs",
"clippy::useless_asref",
"clippy::useless_conversion",
"clippy::useless_format",
+ "clippy::useless_transmute",
"clippy::vec_box",
"clippy::while_let_loop",
"clippy::wildcard_in_or_patterns",
@@ -7185,7 +13383,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::correctness",
- description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_ref_to_mut, clippy::clone_double_ref, clippy::cmp_nan, clippy::deprecated_semver, clippy::derive_hash_xor_eq, clippy::derive_ord_xor_partial_ord, clippy::drop_copy, clippy::drop_ref, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::forget_copy, clippy::forget_ref, clippy::if_let_mutex, clippy::if_same_then_else, clippy::ifs_same_cond, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::logic_bug, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::to_string_in_display, clippy::transmuting_null, clippy::undropped_manually_drops, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::vtable_address_comparisons, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##,
+ description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::if_same_then_else, clippy::ifs_same_cond, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::read_zero_byte_vec, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::vtable_address_comparisons, clippy::while_immutable_condition, clippy::wrong_transmute, 
clippy::zst_offset"##,
},
children: &[
"clippy::absurd_extreme_comparisons",
@@ -7193,23 +13391,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::approx_constant",
"clippy::async_yields_async",
"clippy::bad_bit_mask",
- "clippy::cast_ref_to_mut",
- "clippy::clone_double_ref",
- "clippy::cmp_nan",
+ "clippy::cast_slice_different_sizes",
"clippy::deprecated_semver",
- "clippy::derive_hash_xor_eq",
"clippy::derive_ord_xor_partial_ord",
- "clippy::drop_copy",
- "clippy::drop_ref",
+ "clippy::derived_hash_with_manual_eq",
"clippy::enum_clike_unportable_variant",
"clippy::eq_op",
"clippy::erasing_op",
"clippy::fn_address_comparisons",
- "clippy::forget_copy",
- "clippy::forget_ref",
"clippy::if_let_mutex",
"clippy::if_same_then_else",
"clippy::ifs_same_cond",
+ "clippy::impossible_comparisons",
"clippy::ineffective_bit_mask",
"clippy::infinite_iter",
"clippy::inherent_to_string_shadow_display",
@@ -7218,9 +13411,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::invalid_regex",
"clippy::invisible_characters",
"clippy::iter_next_loop",
+ "clippy::iter_skip_zero",
"clippy::iterator_step_by_zero",
"clippy::let_underscore_lock",
- "clippy::logic_bug",
"clippy::match_str_case_mismatch",
"clippy::mem_replace_with_uninit",
"clippy::min_max",
@@ -7234,16 +13427,21 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::not_unsafe_ptr_arg_deref",
"clippy::option_env_unwrap",
"clippy::out_of_bounds_indexing",
+ "clippy::overly_complex_bool_expr",
"clippy::panicking_unwrap",
"clippy::possible_missing_comma",
+ "clippy::read_line_without_trim",
+ "clippy::read_zero_byte_vec",
+ "clippy::recursive_format_impl",
+ "clippy::redundant_comparisons",
+ "clippy::redundant_locals",
"clippy::reversed_empty_ranges",
"clippy::self_assignment",
"clippy::serde_api_misuse",
"clippy::size_of_in_element_count",
"clippy::suspicious_splitn",
- "clippy::to_string_in_display",
+ "clippy::transmute_null_to_fn",
"clippy::transmuting_null",
- "clippy::undropped_manually_drops",
"clippy::uninit_assumed_init",
"clippy::uninit_vec",
"clippy::unit_cmp",
@@ -7286,45 +13484,66 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::nursery",
- description: r##"lint group for: clippy::branches_sharing_code, clippy::cognitive_complexity, clippy::debug_assert_with_mut_call, clippy::disallowed_methods, clippy::disallowed_types, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::index_refutable_slice, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::path_buf_push_overwrite, clippy::redundant_pub_crate, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trivial_regex, clippy::use_self, clippy::useless_let_if_seq, clippy::useless_transmute"##,
+ description: r##"lint group for: clippy::as_ptr_cast_mut, clippy::branches_sharing_code, clippy::clear_with_drain, clippy::cognitive_complexity, clippy::collection_is_never_read, clippy::debug_assert_with_mut_call, clippy::derive_partial_eq_without_eq, clippy::empty_line_after_doc_comments, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::implied_bounds_in_impls, clippy::imprecise_flops, clippy::iter_on_empty_collections, clippy::iter_on_single_items, clippy::iter_with_drain, clippy::large_stack_frames, clippy::manual_clamp, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::needless_collect, clippy::needless_pass_by_ref_mut, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::or_fun_call, clippy::path_buf_push_overwrite, clippy::readonly_write_lock, clippy::redundant_clone, clippy::redundant_pub_crate, clippy::significant_drop_in_scrutinee, clippy::significant_drop_tightening, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trait_duplication_in_bounds, clippy::transmute_undefined_repr, clippy::trivial_regex, clippy::tuple_array_conversions, clippy::type_repetition_in_bounds, clippy::unnecessary_struct_initialization, clippy::unused_peekable, clippy::unused_rounding, clippy::use_self, clippy::useless_let_if_seq"##,
},
children: &[
+ "clippy::as_ptr_cast_mut",
"clippy::branches_sharing_code",
+ "clippy::clear_with_drain",
"clippy::cognitive_complexity",
+ "clippy::collection_is_never_read",
"clippy::debug_assert_with_mut_call",
- "clippy::disallowed_methods",
- "clippy::disallowed_types",
+ "clippy::derive_partial_eq_without_eq",
+ "clippy::empty_line_after_doc_comments",
"clippy::empty_line_after_outer_attr",
"clippy::equatable_if_let",
"clippy::fallible_impl_from",
"clippy::future_not_send",
+ "clippy::implied_bounds_in_impls",
"clippy::imprecise_flops",
- "clippy::index_refutable_slice",
+ "clippy::iter_on_empty_collections",
+ "clippy::iter_on_single_items",
+ "clippy::iter_with_drain",
+ "clippy::large_stack_frames",
+ "clippy::manual_clamp",
"clippy::missing_const_for_fn",
"clippy::mutex_integer",
+ "clippy::needless_collect",
+ "clippy::needless_pass_by_ref_mut",
"clippy::non_send_fields_in_send_ty",
"clippy::nonstandard_macro_braces",
"clippy::option_if_let_else",
+ "clippy::or_fun_call",
"clippy::path_buf_push_overwrite",
+ "clippy::readonly_write_lock",
+ "clippy::redundant_clone",
"clippy::redundant_pub_crate",
+ "clippy::significant_drop_in_scrutinee",
+ "clippy::significant_drop_tightening",
"clippy::string_lit_as_bytes",
"clippy::suboptimal_flops",
"clippy::suspicious_operation_groupings",
"clippy::trailing_empty_array",
+ "clippy::trait_duplication_in_bounds",
+ "clippy::transmute_undefined_repr",
"clippy::trivial_regex",
+ "clippy::tuple_array_conversions",
+ "clippy::type_repetition_in_bounds",
+ "clippy::unnecessary_struct_initialization",
+ "clippy::unused_peekable",
+ "clippy::unused_rounding",
"clippy::use_self",
"clippy::useless_let_if_seq",
- "clippy::useless_transmute",
],
},
LintGroup {
lint: Lint {
label: "clippy::pedantic",
- description: r##"lint group for: clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::implicit_clone, clippy::implicit_hasher, clippy::implicit_saturating_sub, clippy::inconsistent_struct_constructor, clippy::inefficient_to_string, clippy::inline_always, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::large_digit_groups, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::let_underscore_drop, clippy::let_unit_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_ok_or, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::no_effect_underscore_binding, clippy::option_option, clippy::ptr_as_ptr, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, 
clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::similar_names, clippy::single_match_else, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::trait_duplication_in_bounds, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::type_repetition_in_bounds, clippy::unicode_not_nfc, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
+ description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_instant_elapsed, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, 
clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
},
children: &[
- "clippy::await_holding_lock",
- "clippy::await_holding_refcell_ref",
+ "clippy::bool_to_int_with_if",
+ "clippy::borrow_as_ptr",
"clippy::case_sensitive_file_extension_comparisons",
"clippy::cast_lossless",
"clippy::cast_possible_truncation",
@@ -7336,6 +13555,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::cloned_instead_of_copied",
"clippy::copy_iterator",
"clippy::default_trait_access",
+ "clippy::doc_link_with_quotes",
"clippy::doc_markdown",
"clippy::empty_enum",
"clippy::enum_glob_use",
@@ -7349,24 +13569,29 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::fn_params_excessive_bools",
"clippy::from_iter_instead_of_collect",
"clippy::if_not_else",
+ "clippy::ignored_unit_patterns",
"clippy::implicit_clone",
"clippy::implicit_hasher",
- "clippy::implicit_saturating_sub",
"clippy::inconsistent_struct_constructor",
+ "clippy::index_refutable_slice",
"clippy::inefficient_to_string",
"clippy::inline_always",
+ "clippy::into_iter_without_iter",
"clippy::invalid_upcast_comparisons",
"clippy::items_after_statements",
"clippy::iter_not_returning_iterator",
+ "clippy::iter_without_into_iter",
"clippy::large_digit_groups",
+ "clippy::large_futures",
"clippy::large_stack_arrays",
"clippy::large_types_passed_by_value",
- "clippy::let_underscore_drop",
- "clippy::let_unit_value",
"clippy::linkedlist",
"clippy::macro_use_imports",
"clippy::manual_assert",
+ "clippy::manual_instant_elapsed",
+ "clippy::manual_let_else",
"clippy::manual_ok_or",
+ "clippy::manual_string_new",
"clippy::many_single_char_names",
"clippy::map_unwrap_or",
"clippy::match_bool",
@@ -7375,7 +13600,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::match_wild_err_arm",
"clippy::match_wildcard_for_single_variants",
"clippy::maybe_infinite_iter",
+ "clippy::mismatching_type_param_order",
"clippy::missing_errors_doc",
+ "clippy::missing_fields_in_debug",
"clippy::missing_panics_doc",
"clippy::module_name_repetitions",
"clippy::must_use_candidate",
@@ -7385,27 +13612,35 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::needless_continue",
"clippy::needless_for_each",
"clippy::needless_pass_by_value",
+ "clippy::needless_raw_string_hashes",
"clippy::no_effect_underscore_binding",
+ "clippy::no_mangle_with_rust_abi",
"clippy::option_option",
"clippy::ptr_as_ptr",
+ "clippy::ptr_cast_constness",
"clippy::range_minus_one",
"clippy::range_plus_one",
"clippy::redundant_closure_for_method_calls",
"clippy::redundant_else",
"clippy::ref_binding_to_reference",
"clippy::ref_option_ref",
+ "clippy::return_self_not_must_use",
"clippy::same_functions_in_if_condition",
"clippy::semicolon_if_nothing_returned",
+ "clippy::should_panic_without_expect",
"clippy::similar_names",
"clippy::single_match_else",
+ "clippy::stable_sort_primitive",
"clippy::string_add_assign",
"clippy::struct_excessive_bools",
"clippy::too_many_lines",
- "clippy::trait_duplication_in_bounds",
"clippy::transmute_ptr_to_ptr",
"clippy::trivially_copy_pass_by_ref",
- "clippy::type_repetition_in_bounds",
+ "clippy::unchecked_duration_subtraction",
"clippy::unicode_not_nfc",
+ "clippy::uninlined_format_args",
+ "clippy::unnecessary_box_returns",
+ "clippy::unnecessary_join",
"clippy::unnecessary_wraps",
"clippy::unnested_or_patterns",
"clippy::unreadable_literal",
@@ -7421,29 +13656,33 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::perf",
- description: r##"lint group for: clippy::box_collection, clippy::boxed_local, clippy::cmp_owned, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_in_format_args, clippy::iter_nth, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_str_repeat, clippy::map_entry, clippy::mutex_atomic, clippy::needless_collect, clippy::or_fun_call, clippy::redundant_allocation, clippy::redundant_clone, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::stable_sort_primitive, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push"##,
+ description: r##"lint group for: clippy::box_collection, clippy::box_default, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_nth, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_spin_loop, clippy::redundant_allocation, clippy::result_large_err, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push"##,
},
children: &[
"clippy::box_collection",
+ "clippy::box_default",
"clippy::boxed_local",
"clippy::cmp_owned",
+ "clippy::collapsible_str_replace",
+ "clippy::drain_collect",
"clippy::expect_fun_call",
"clippy::extend_with_drain",
+ "clippy::format_collect",
"clippy::format_in_format_args",
"clippy::iter_nth",
+ "clippy::iter_overeager_cloned",
"clippy::large_const_arrays",
"clippy::large_enum_variant",
"clippy::manual_memcpy",
+ "clippy::manual_retain",
"clippy::manual_str_repeat",
+ "clippy::manual_try_fold",
"clippy::map_entry",
- "clippy::mutex_atomic",
- "clippy::needless_collect",
- "clippy::or_fun_call",
+ "clippy::missing_spin_loop",
"clippy::redundant_allocation",
- "clippy::redundant_clone",
+ "clippy::result_large_err",
"clippy::single_char_pattern",
"clippy::slow_vector_initialization",
- "clippy::stable_sort_primitive",
"clippy::to_string_in_format_args",
"clippy::unnecessary_to_owned",
"clippy::useless_vec",
@@ -7453,17 +13692,30 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::restriction",
- description: r##"lint group for: clippy::as_conversions, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::get_unwrap, clippy::if_then_some_else_none, clippy::implicit_return, clippy::indexing_slicing, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_arithmetic, clippy::integer_division, clippy::let_underscore_must_use, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::missing_docs_in_private_items, clippy::missing_enforced_import_renames, clippy::missing_inline_in_public_items, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::rc_buffer, clippy::rc_mutex, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::str_to_string, clippy::string_add, clippy::string_slice, clippy::string_to_string, clippy::todo, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##,
+ description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_enforced_import_renames, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, 
clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##,
},
children: &[
+ "clippy::absolute_paths",
+ "clippy::alloc_instead_of_core",
+ "clippy::allow_attributes",
+ "clippy::allow_attributes_without_reason",
+ "clippy::arithmetic_side_effects",
"clippy::as_conversions",
+ "clippy::as_underscore",
+ "clippy::assertions_on_result_states",
+ "clippy::big_endian_bytes",
"clippy::clone_on_ref_ptr",
"clippy::create_dir",
"clippy::dbg_macro",
"clippy::decimal_literal_representation",
"clippy::default_numeric_fallback",
+ "clippy::default_union_representation",
+ "clippy::deref_by_slicing",
"clippy::disallowed_script_idents",
"clippy::else_if_without_else",
+ "clippy::empty_drop",
+ "clippy::empty_structs_with_brackets",
+ "clippy::error_impl_error",
"clippy::exhaustive_enums",
"clippy::exhaustive_structs",
"clippy::exit",
@@ -7472,46 +13724,78 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::float_arithmetic",
"clippy::float_cmp_const",
"clippy::fn_to_numeric_cast_any",
+ "clippy::format_push_string",
"clippy::get_unwrap",
+ "clippy::host_endian_bytes",
"clippy::if_then_some_else_none",
+ "clippy::impl_trait_in_params",
"clippy::implicit_return",
"clippy::indexing_slicing",
"clippy::inline_asm_x86_att_syntax",
"clippy::inline_asm_x86_intel_syntax",
- "clippy::integer_arithmetic",
"clippy::integer_division",
+ "clippy::large_include_file",
"clippy::let_underscore_must_use",
+ "clippy::let_underscore_untyped",
+ "clippy::little_endian_bytes",
"clippy::lossy_float_literal",
"clippy::map_err_ignore",
"clippy::mem_forget",
+ "clippy::min_ident_chars",
+ "clippy::missing_assert_message",
+ "clippy::missing_asserts_for_indexing",
"clippy::missing_docs_in_private_items",
"clippy::missing_enforced_import_renames",
"clippy::missing_inline_in_public_items",
+ "clippy::missing_trait_methods",
+ "clippy::mixed_read_write_in_expression",
"clippy::mod_module_files",
"clippy::modulo_arithmetic",
"clippy::multiple_inherent_impl",
+ "clippy::multiple_unsafe_ops_per_block",
+ "clippy::mutex_atomic",
+ "clippy::needless_raw_strings",
"clippy::non_ascii_literal",
"clippy::panic",
"clippy::panic_in_result_fn",
+ "clippy::partial_pub_fields",
"clippy::pattern_type_mismatch",
"clippy::print_stderr",
"clippy::print_stdout",
+ "clippy::pub_use",
+ "clippy::pub_with_shorthand",
+ "clippy::pub_without_shorthand",
+ "clippy::question_mark_used",
"clippy::rc_buffer",
"clippy::rc_mutex",
+ "clippy::redundant_type_annotations",
+ "clippy::ref_patterns",
"clippy::rest_pat_in_fully_bound_structs",
"clippy::same_name_method",
"clippy::self_named_module_files",
+ "clippy::semicolon_inside_block",
+ "clippy::semicolon_outside_block",
"clippy::separated_literal_suffix",
"clippy::shadow_reuse",
"clippy::shadow_same",
"clippy::shadow_unrelated",
+ "clippy::single_call_fn",
+ "clippy::single_char_lifetime_names",
+ "clippy::std_instead_of_alloc",
+ "clippy::std_instead_of_core",
"clippy::str_to_string",
"clippy::string_add",
+ "clippy::string_lit_chars_any",
"clippy::string_slice",
"clippy::string_to_string",
+ "clippy::suspicious_xor_used_as_pow",
+ "clippy::tests_outside_test_module",
"clippy::todo",
+ "clippy::try_err",
"clippy::undocumented_unsafe_blocks",
"clippy::unimplemented",
+ "clippy::unnecessary_safety_comment",
+ "clippy::unnecessary_safety_doc",
"clippy::unnecessary_self_imports",
"clippy::unneeded_field_pattern",
"clippy::unreachable",
@@ -7526,12 +13810,11 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::style",
- description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blacklisted_name, clippy::blocks_in_if_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::into_iter_on_ref, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_map, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_doctest_main, clippy::needless_late_init, clippy::needless_range_loop, clippy::needless_return, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_none, 
clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::try_err, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unsafe_removed_from_name, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_else_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##,
+ description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_if_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, 
clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##,
},
children: &[
"clippy::assertions_on_constants",
"clippy::assign_op_pattern",
- "clippy::blacklisted_name",
"clippy::blocks_in_if_conditions",
"clippy::bool_assert_comparison",
"clippy::borrow_interior_mutable_const",
@@ -7546,21 +13829,34 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::comparison_chain",
"clippy::comparison_to_empty",
"clippy::declare_interior_mutable_const",
+ "clippy::default_instead_of_iter_empty",
+ "clippy::disallowed_macros",
+ "clippy::disallowed_methods",
+ "clippy::disallowed_names",
+ "clippy::disallowed_types",
"clippy::double_must_use",
"clippy::double_neg",
"clippy::duplicate_underscore_argument",
"clippy::enum_variant_names",
+ "clippy::err_expect",
"clippy::excessive_precision",
"clippy::field_reassign_with_default",
+ "clippy::filter_map_bool_then",
"clippy::fn_to_numeric_cast",
"clippy::fn_to_numeric_cast_with_truncation",
"clippy::for_kv_map",
"clippy::from_over_into",
"clippy::from_str_radix_10",
+ "clippy::get_first",
+ "clippy::implicit_saturating_add",
+ "clippy::implicit_saturating_sub",
"clippy::inconsistent_digit_grouping",
"clippy::infallible_destructuring_match",
"clippy::inherent_to_string",
+ "clippy::init_numbered_fields",
"clippy::into_iter_on_ref",
+ "clippy::is_digit_ascii_radix",
+ "clippy::items_after_test_module",
"clippy::iter_cloned_collect",
"clippy::iter_next_slice",
"clippy::iter_nth_zero",
@@ -7569,12 +13865,19 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::len_without_is_empty",
"clippy::len_zero",
"clippy::let_and_return",
+ "clippy::let_unit_value",
"clippy::main_recursion",
"clippy::manual_async_fn",
+ "clippy::manual_bits",
+ "clippy::manual_is_ascii_check",
+ "clippy::manual_is_finite",
+ "clippy::manual_is_infinite",
"clippy::manual_map",
+ "clippy::manual_next_back",
"clippy::manual_non_exhaustive",
"clippy::manual_range_contains",
"clippy::manual_saturating_arithmetic",
+ "clippy::manual_while_let_some",
"clippy::map_clone",
"clippy::map_collect_result_unit",
"clippy::match_like_matches_macro",
@@ -7589,16 +13892,24 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::must_use_unit",
"clippy::mut_mutex_lock",
"clippy::needless_borrow",
+ "clippy::needless_borrows_for_generic_args",
"clippy::needless_doctest_main",
+ "clippy::needless_else",
"clippy::needless_late_init",
+ "clippy::needless_parens_on_range_literals",
+ "clippy::needless_pub_self",
"clippy::needless_range_loop",
"clippy::needless_return",
+ "clippy::needless_return_with_question_mark",
"clippy::neg_multiply",
"clippy::new_ret_no_self",
"clippy::new_without_default",
+ "clippy::non_minimal_cfg",
+ "clippy::obfuscated_if_else",
"clippy::ok_expect",
"clippy::op_ref",
"clippy::option_map_or_none",
+ "clippy::partialeq_to_none",
"clippy::print_literal",
"clippy::print_with_newline",
"clippy::println_empty_string",
@@ -7622,14 +13933,15 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::tabs_in_doc_comments",
"clippy::to_digit_is_some",
"clippy::toplevel_ref_arg",
- "clippy::try_err",
+ "clippy::trim_split_whitespace",
"clippy::unnecessary_fold",
"clippy::unnecessary_lazy_evaluations",
"clippy::unnecessary_mut_passed",
+ "clippy::unnecessary_owned_empty_strings",
"clippy::unsafe_removed_from_name",
"clippy::unused_unit",
"clippy::unusual_byte_groupings",
- "clippy::unwrap_or_else_default",
+ "clippy::unwrap_or_default",
"clippy::upper_case_acronyms",
"clippy::while_let_on_iterator",
"clippy::write_literal",
@@ -7642,25 +13954,58 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::suspicious",
- description: r##"lint group for: clippy::blanket_clippy_restriction_lints, clippy::empty_loop, clippy::eval_order_dependence, clippy::float_equality_without_abs, clippy::for_loops_over_fallibles, clippy::misrefactored_assign_op, clippy::mut_range_bound, clippy::mutable_key_type, clippy::octal_escapes, clippy::return_self_not_must_use, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_unary_op_formatting"##,
+ description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::iter_out_of_bounds, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::multi_assignments, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::type_id_on_box"##,
},
children: &[
+ "clippy::almost_complete_range",
+ "clippy::arc_with_non_send_sync",
+ "clippy::await_holding_invalid_type",
+ "clippy::await_holding_lock",
+ "clippy::await_holding_refcell_ref",
"clippy::blanket_clippy_restriction_lints",
+ "clippy::cast_abs_to_unsigned",
+ "clippy::cast_enum_constructor",
+ "clippy::cast_enum_truncation",
+ "clippy::cast_nan_to_int",
+ "clippy::cast_slice_from_raw_parts",
+ "clippy::crate_in_macro_def",
+ "clippy::drop_non_drop",
+ "clippy::duplicate_mod",
"clippy::empty_loop",
- "clippy::eval_order_dependence",
"clippy::float_equality_without_abs",
- "clippy::for_loops_over_fallibles",
+ "clippy::forget_non_drop",
+ "clippy::four_forward_slashes",
+ "clippy::from_raw_with_void_ptr",
+ "clippy::iter_out_of_bounds",
+ "clippy::let_underscore_future",
+ "clippy::lines_filter_map_ok",
+ "clippy::maybe_misused_cfg",
+ "clippy::misnamed_getters",
"clippy::misrefactored_assign_op",
+ "clippy::multi_assignments",
"clippy::mut_range_bound",
"clippy::mutable_key_type",
+ "clippy::no_effect_replace",
+ "clippy::non_canonical_clone_impl",
+ "clippy::non_canonical_partial_ord_impl",
"clippy::octal_escapes",
- "clippy::return_self_not_must_use",
+ "clippy::path_ends_with_ext",
+ "clippy::permissions_set_readonly_false",
+ "clippy::print_in_format_impl",
+ "clippy::rc_clone_in_vec_init",
+ "clippy::single_range_in_vec_init",
+ "clippy::size_of_ref",
"clippy::suspicious_arithmetic_impl",
"clippy::suspicious_assignment_formatting",
+ "clippy::suspicious_command_arg_space",
+ "clippy::suspicious_doc_comments",
"clippy::suspicious_else_formatting",
"clippy::suspicious_map",
"clippy::suspicious_op_assign_impl",
+ "clippy::suspicious_to_owned",
"clippy::suspicious_unary_op_formatting",
+ "clippy::swap_ptr_to_ref",
+ "clippy::type_id_on_box",
],
},
];
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index 330af442f..9363bdfa1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -3,7 +3,7 @@
use std::collections::VecDeque;
use base_db::{FileId, SourceDatabaseExt};
-use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
+use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
use syntax::{
ast::{self, make},
AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
@@ -117,7 +117,7 @@ pub fn get_definition(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> Option<Definition> {
- for token in sema.descend_into_macros(token, 0.into()) {
+ for token in sema.descend_into_macros(DescendPreference::None, token) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() {
return Some(x);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index e475c5cd6..a4f0a6df7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -1,14 +1,14 @@
//! Look up accessible paths for items.
+
use hir::{
- AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef,
+ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef, Name,
PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type,
};
-use itertools::Itertools;
-use rustc_hash::FxHashSet;
+use itertools::{EitherOrBoth, Itertools};
+use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make, HasName},
- utils::path_to_string_stripping_turbo_fish,
- AstNode, SyntaxNode,
+ AstNode, SmolStr, SyntaxNode,
};
use crate::{
@@ -51,39 +51,39 @@ pub struct TraitImportCandidate {
#[derive(Debug)]
pub struct PathImportCandidate {
/// Optional qualifier before name.
- pub qualifier: Option<FirstSegmentUnresolved>,
+ pub qualifier: Option<Vec<SmolStr>>,
/// The name the item (struct, trait, enum, etc.) should have.
pub name: NameToImport,
}
-/// A qualifier that has a first segment and it's unresolved.
-#[derive(Debug)]
-pub struct FirstSegmentUnresolved {
- fist_segment: ast::NameRef,
- full_qualifier: ast::Path,
-}
-
/// A name that will be used during item lookups.
#[derive(Debug, Clone)]
pub enum NameToImport {
/// Requires items with names that exactly match the given string, bool indicates case-sensitivity.
Exact(String, bool),
- /// Requires items with names that case-insensitively contain all letters from the string,
+ /// Requires items with names that match the given string by prefix, bool indicates case-sensitivity.
+ Prefix(String, bool),
+ /// Requires items with names contain all letters from the string,
/// in the same order, but not necessary adjacent.
- Fuzzy(String),
+ Fuzzy(String, bool),
}
impl NameToImport {
pub fn exact_case_sensitive(s: String) -> NameToImport {
NameToImport::Exact(s, true)
}
-}
-impl NameToImport {
+ pub fn fuzzy(s: String) -> NameToImport {
+ // unless all chars are lowercase, we do a case sensitive search
+ let case_sensitive = s.chars().any(|c| c.is_uppercase());
+ NameToImport::Fuzzy(s, case_sensitive)
+ }
+
pub fn text(&self) -> &str {
match self {
- NameToImport::Exact(text, _) => text.as_str(),
- NameToImport::Fuzzy(text) => text.as_str(),
+ NameToImport::Prefix(text, _)
+ | NameToImport::Exact(text, _)
+ | NameToImport::Fuzzy(text, _) => text.as_str(),
}
}
}
@@ -165,7 +165,7 @@ impl ImportAssets {
Some(Self {
import_candidate: ImportCandidate::TraitMethod(TraitImportCandidate {
receiver_ty,
- assoc_item_name: NameToImport::Fuzzy(fuzzy_method_name),
+ assoc_item_name: NameToImport::fuzzy(fuzzy_method_name),
}),
module_with_candidate: module_with_method_call,
candidate_node,
@@ -188,18 +188,11 @@ pub struct LocatedImport {
/// the original item is the associated constant, but the import has to be a trait that
/// defines this constant.
pub original_item: ItemInNs,
- /// A path of the original item.
- pub original_path: Option<ModPath>,
}
impl LocatedImport {
- pub fn new(
- import_path: ModPath,
- item_to_import: ItemInNs,
- original_item: ItemInNs,
- original_path: Option<ModPath>,
- ) -> Self {
- Self { import_path, item_to_import, original_item, original_path }
+ pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self {
+ Self { import_path, item_to_import, original_item }
}
}
@@ -213,9 +206,10 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_imports");
- self.search_for(sema, Some(prefix_kind), prefer_no_std)
+ self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude)
}
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
@@ -223,17 +217,36 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_relative_paths");
- self.search_for(sema, None, prefer_no_std)
+ self.search_for(sema, None, prefer_no_std, prefer_prelude)
}
- pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
+ /// Requires imports to by prefix instead of fuzzily.
+ pub fn path_fuzzy_name_to_prefix(&mut self) {
if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
&mut self.import_candidate
{
- let name = match to_import {
- NameToImport::Fuzzy(name) => std::mem::take(name),
+ let (name, case_sensitive) = match to_import {
+ NameToImport::Fuzzy(name, case_sensitive) => {
+ (std::mem::take(name), *case_sensitive)
+ }
+ _ => return,
+ };
+ *to_import = NameToImport::Prefix(name, case_sensitive);
+ }
+ }
+
+ /// Requires imports to match exactly instead of fuzzily.
+ pub fn path_fuzzy_name_to_exact(&mut self) {
+ if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
+ &mut self.import_candidate
+ {
+ let (name, case_sensitive) = match to_import {
+ NameToImport::Fuzzy(name, case_sensitive) => {
+ (std::mem::take(name), *case_sensitive)
+ }
_ => return,
};
*to_import = NameToImport::Exact(name, case_sensitive);
@@ -245,6 +258,7 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for");
@@ -256,6 +270,7 @@ impl ImportAssets {
&self.module_with_candidate,
prefixed,
prefer_no_std,
+ prefer_prelude,
)
};
@@ -322,64 +337,75 @@ fn path_applicable_imports(
)
.filter_map(|item| {
let mod_path = mod_path(item)?;
- Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path)))
- })
- .collect()
- }
- Some(first_segment_unresolved) => {
- let unresolved_qualifier =
- path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier);
- let unresolved_first_segment = first_segment_unresolved.fist_segment.text();
- items_locator::items_with_name(
- sema,
- current_crate,
- path_candidate.name.clone(),
- AssocSearchMode::Include,
- Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
- )
- .filter_map(|item| {
- import_for_item(
- sema.db,
- mod_path,
- &unresolved_first_segment,
- &unresolved_qualifier,
- item,
- )
+ Some(LocatedImport::new(mod_path, item, item))
})
.collect()
}
+ Some(qualifier) => items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ AssocSearchMode::Include,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item))
+ .collect(),
}
}
fn import_for_item(
db: &RootDatabase,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
- unresolved_first_segment: &str,
- unresolved_qualifier: &str,
+ unresolved_qualifier: &[SmolStr],
original_item: ItemInNs,
) -> Option<LocatedImport> {
let _p = profile::span("import_assets::import_for_item");
+ let [first_segment, ..] = unresolved_qualifier else { return None };
- let original_item_candidate = item_for_path_search(db, original_item)?;
+ let item_as_assoc = item_as_assoc(db, original_item);
+
+ let (original_item_candidate, trait_item_to_import) = match item_as_assoc {
+ Some(assoc_item) => match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => {
+ let trait_ = ItemInNs::from(ModuleDef::from(trait_));
+ (trait_, Some(trait_))
+ }
+ AssocItemContainer::Impl(impl_) => {
+ (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None)
+ }
+ },
+ None => (original_item, None),
+ };
let import_path_candidate = mod_path(original_item_candidate)?;
- let import_path_string = import_path_candidate.display(db).to_string();
- let expected_import_end = if item_as_assoc(db, original_item).is_some() {
- unresolved_qualifier.to_string()
- } else {
- format!("{unresolved_qualifier}::{}", item_name(db, original_item)?.display(db))
+ let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev();
+ let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it {
+ // segments match, check next one
+ EitherOrBoth::Both(a, b) if b.as_str() == Some(&**a) => None,
+ // segments mismatch / qualifier is longer than the path, bail out
+ EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
+ // all segments match and we have exhausted the qualifier, proceed
+ EitherOrBoth::Right(_) => Some(true),
};
- if !import_path_string.contains(unresolved_first_segment)
- || !import_path_string.ends_with(&expected_import_end)
- {
+ if item_as_assoc.is_none() {
+ let item_name = item_name(db, original_item)?.as_text()?;
+ let last_segment = import_path_candidate_segments.next()?;
+ if last_segment.as_str() != Some(&*item_name) {
+ return None;
+ }
+ }
+ let ends_with = unresolved_qualifier
+ .iter()
+ .rev()
+ .zip_longest(import_path_candidate_segments)
+ .find_map(predicate)
+ .unwrap_or(true);
+ if !ends_with {
return None;
}
- let segment_import =
- find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
- let trait_item_to_import = item_as_assoc(db, original_item)
- .and_then(|assoc| assoc.containing_trait(db))
- .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
+ let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?;
+
Some(match (segment_import == original_item_candidate, trait_item_to_import) {
(true, Some(_)) => {
// FIXME we should be able to import both the trait and the segment,
@@ -387,42 +413,37 @@ fn import_for_item(
// especially in case of lazy completion edit resolutions.
return None;
}
- (false, Some(trait_to_import)) => LocatedImport::new(
- mod_path(trait_to_import)?,
- trait_to_import,
- original_item,
- mod_path(original_item),
- ),
- (true, None) => LocatedImport::new(
- import_path_candidate,
- original_item_candidate,
- original_item,
- mod_path(original_item),
- ),
- (false, None) => LocatedImport::new(
- mod_path(segment_import)?,
- segment_import,
- original_item,
- mod_path(original_item),
- ),
+ (false, Some(trait_to_import)) => {
+ LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
+ }
+ (true, None) => {
+ LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
+ }
+ (false, None) => {
+ LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
+ }
})
}
pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option<ItemInNs> {
Some(match item {
ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
- Some(assoc_item) => match assoc_item.container(db) {
- AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
- AssocItemContainer::Impl(impl_) => {
- ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
- }
- },
+ Some(assoc_item) => item_for_path_search_assoc(db, assoc_item)?,
None => item,
},
ItemInNs::Macros(_) => item,
})
}
+fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Option<ItemInNs> {
+ Some(match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+ AssocItemContainer::Impl(impl_) => {
+ ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
+ }
+ })
+}
+
fn find_import_for_segment(
db: &RootDatabase,
original_item: ItemInNs,
@@ -499,6 +520,7 @@ fn trait_applicable_items(
.collect();
let mut located_imports = FxHashSet::default();
+ let mut trait_import_paths = FxHashMap::default();
if trait_assoc_item {
trait_candidate.receiver_ty.iterate_path_candidates(
@@ -516,12 +538,14 @@ fn trait_applicable_items(
}
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
- let original_item = assoc_to_item(assoc);
+ let import_path = trait_import_paths
+ .entry(trait_item)
+ .or_insert_with(|| mod_path(trait_item))
+ .clone()?;
located_imports.insert(LocatedImport::new(
- mod_path(trait_item)?,
+ import_path,
trait_item,
- original_item,
- mod_path(original_item),
+ assoc_to_item(assoc),
));
}
None::<()>
@@ -539,12 +563,14 @@ fn trait_applicable_items(
if required_assoc_items.contains(&assoc) {
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
- let original_item = assoc_to_item(assoc);
+ let import_path = trait_import_paths
+ .entry(trait_item)
+ .or_insert_with(|| mod_path(trait_item))
+ .clone()?;
located_imports.insert(LocatedImport::new(
- mod_path(trait_item)?,
+ import_path,
trait_item,
- original_item,
- mod_path(original_item),
+ assoc_to_item(assoc),
));
}
None::<()>
@@ -569,11 +595,18 @@ fn get_mod_path(
module_with_candidate: &Module,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed {
- module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind, prefer_no_std)
+ module_with_candidate.find_use_path_prefixed(
+ db,
+ item_to_search,
+ prefix_kind,
+ prefer_no_std,
+ prefer_prelude,
+ )
} else {
- module_with_candidate.find_use_path(db, item_to_search, prefer_no_std)
+ module_with_candidate.find_use_path(db, item_to_search, prefer_no_std, prefer_prelude)
}
}
@@ -623,7 +656,7 @@ impl ImportCandidate {
fuzzy_name: String,
sema: &Semantics<'_, RootDatabase>,
) -> Option<Self> {
- path_import_candidate(sema, qualifier, NameToImport::Fuzzy(fuzzy_name))
+ path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
}
}
@@ -635,18 +668,13 @@ fn path_import_candidate(
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
None => {
- let qualifier_start =
- qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
- let qualifier_start_path =
- qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
- if sema.resolve_path(&qualifier_start_path).is_none() {
- ImportCandidate::Path(PathImportCandidate {
- qualifier: Some(FirstSegmentUnresolved {
- fist_segment: qualifier_start,
- full_qualifier: qualifier,
- }),
- name,
- })
+ if qualifier.first_qualifier().map_or(true, |it| sema.resolve_path(&it).is_none()) {
+ let mut qualifier = qualifier
+ .segments_of_this_path_only_rev()
+ .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
+ .collect::<Option<Vec<_>>>()?;
+ qualifier.reverse();
+ ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name })
} else {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
index 9be1d3663..a0cfd3836 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -9,7 +9,7 @@ use syntax::{
algo,
ast::{
self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility,
- PathSegmentKind,
+ PathSegmentKind, UseTree,
},
ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
};
@@ -157,6 +157,29 @@ impl ImportScope {
/// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur.
pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
+ insert_use_with_alias_option(scope, path, cfg, None);
+}
+
+pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
+ let text: &str = "use foo as _";
+ let parse = syntax::SourceFile::parse(text);
+ let node = parse
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(UseTree::cast)
+ .expect("Failed to make ast node `Rename`");
+ let alias = node.rename();
+
+ insert_use_with_alias_option(scope, path, cfg, alias);
+}
+
+fn insert_use_with_alias_option(
+ scope: &ImportScope,
+ path: ast::Path,
+ cfg: &InsertUseConfig,
+ alias: Option<ast::Rename>,
+) {
let _p = profile::span("insert_use");
let mut mb = match cfg.granularity {
ImportGranularity::Crate => Some(MergeBehavior::Crate),
@@ -176,7 +199,8 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
}
let use_item =
- make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update();
+ make::use_(None, make::use_tree(path.clone(), None, alias, false)).clone_for_update();
+
// merge into existing imports if possible
if let Some(mb) = mb {
let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it));
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
index b92e367f7..01d2f1970 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
@@ -993,6 +993,46 @@ use foo::bar::qux;
);
}
+#[test]
+fn insert_with_renamed_import_simple_use() {
+ check_with_config(
+ "use self::foo::Foo",
+ r#"
+use self::foo::Foo as _;
+"#,
+ r#"
+use self::foo::Foo;
+"#,
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::BySelf,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ );
+}
+
+#[test]
+fn insert_with_renamed_import_complex_use() {
+ check_with_config(
+ "use self::foo::Foo;",
+ r#"
+use self::foo::{self, Foo as _, Bar};
+"#,
+ r#"
+use self::foo::{self, Foo, Bar};
+"#,
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::BySelf,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ );
+}
+
fn check_with_config(
path: &str,
ra_fixture_before: &str,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
index 27b6321f3..ff84e9ffa 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -78,6 +78,10 @@ fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehav
{
lhs.split_prefix(&lhs_prefix);
rhs.split_prefix(&rhs_prefix);
+ } else {
+ ted::replace(lhs.syntax(), rhs.syntax());
+ // we can safely return here, in this case `recursive_merge` doesn't do anything
+ return Some(());
}
recursive_merge(lhs, rhs, merge)
}
@@ -123,6 +127,13 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
// so they need to be handled explicitly
.or_else(|| tree.star_token().map(|_| false))
};
+
+ if lhs_t.rename().and_then(|x| x.underscore_token()).is_some() {
+ ted::replace(lhs_t.syntax(), rhs_t.syntax());
+ *lhs_t = rhs_t;
+ continue;
+ }
+
match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
(Some(true), None) => continue,
(None, Some(true)) => {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
index 3f7a3ec2d..4a5d234f7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -3,13 +3,13 @@
//! The main reason for this module to exist is the fact that project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
-use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
+use hir::{import_map, Crate, ItemInNs, Semantics};
use limit::Limit;
use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
/// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
+pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
pub use import_map::AssocSearchMode;
@@ -31,26 +31,36 @@ pub fn items_with_name<'a>(
)
});
+ let prefix = matches!(name, NameToImport::Prefix(..));
let (mut local_query, mut external_query) = match name {
- NameToImport::Exact(exact_name, case_sensitive) => {
+ NameToImport::Prefix(exact_name, case_sensitive)
+ | NameToImport::Exact(exact_name, case_sensitive) => {
let mut local_query = symbol_index::Query::new(exact_name.clone());
- local_query.exact();
-
- let external_query = import_map::Query::new(exact_name);
-
- (
- local_query,
- if case_sensitive { external_query.case_sensitive() } else { external_query },
- )
+ let mut external_query =
+ // import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search);
+ import_map::Query::new(exact_name);
+ if prefix {
+ local_query.prefix();
+ external_query = external_query.prefix();
+ } else {
+ local_query.exact();
+ external_query = external_query.exact();
+ }
+ if case_sensitive {
+ local_query.case_sensitive();
+ external_query = external_query.case_sensitive();
+ }
+ (local_query, external_query)
}
- NameToImport::Fuzzy(fuzzy_search_string) => {
+ NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
+ local_query.fuzzy();
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
.fuzzy()
.assoc_search_mode(assoc_item_search);
- if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
+ if case_sensitive {
local_query.case_sensitive();
external_query = external_query.case_sensitive();
}
@@ -93,8 +103,8 @@ fn find_items<'a>(
.into_iter()
.filter(move |candidate| match assoc_item_search {
AssocSearchMode::Include => true,
- AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(),
- AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(),
+ AssocSearchMode::Exclude => !candidate.is_assoc,
+ AssocSearchMode::AssocItemsOnly => candidate.is_assoc,
})
.map(|local_candidate| match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 226def4d5..fefc05e53 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -2,7 +2,7 @@
//!
//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod apply_change;
@@ -144,6 +144,7 @@ impl RootDatabase {
db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_parse_query_lru_capacity(lru_capacity);
+ db.setup_syntax_context_root();
db
}
@@ -156,7 +157,6 @@ impl RootDatabase {
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// macro expansions are usually rather small, so we can afford to keep more of them alive
hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
- hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
}
pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
@@ -174,12 +174,6 @@ impl RootDatabase {
.copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
);
- hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(
- lru_capacities
- .get(stringify!(MacroExpandQuery))
- .copied()
- .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
- );
macro_rules! update_lru_capacity_per_query {
($( $module:ident :: $query:ident )*) => {$(
@@ -204,11 +198,10 @@ impl RootDatabase {
hir_db::AstIdMapQuery
// hir_db::ParseMacroExpansionQuery
// hir_db::InternMacroCallQuery
- hir_db::MacroArgNodeQuery
+ hir_db::MacroArgQuery
hir_db::DeclMacroExpanderQuery
// hir_db::MacroExpandQuery
hir_db::ExpandProcMacroQuery
- hir_db::HygieneFrameQuery
hir_db::ParseMacroExpansionErrorQuery
// DefDatabase
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index fb75b5b45..fb4c0c126 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -2,7 +2,7 @@
use crate::helpers::mod_path_to_ast;
use either::Either;
-use hir::{AsAssocItem, HirDisplay, SemanticsScope};
+use hir::{AsAssocItem, HirDisplay, ModuleDef, SemanticsScope};
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, make, AstNode},
@@ -183,6 +183,7 @@ impl<'a> PathTransform<'a> {
lifetime_substs,
target_module,
source_scope: self.source_scope,
+ same_self_type: self.target_scope.has_same_self_type(self.source_scope),
};
ctx.transform_default_values(defaulted_params);
ctx
@@ -195,6 +196,7 @@ struct Ctx<'a> {
lifetime_substs: FxHashMap<LifetimeName, ast::Lifetime>,
target_module: hir::Module,
source_scope: &'a SemanticsScope<'a>,
+ same_self_type: bool,
}
fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
@@ -277,6 +279,7 @@ impl Ctx<'_> {
self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref),
false,
+ true,
)?;
match make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty),
@@ -311,8 +314,12 @@ impl Ctx<'_> {
}
}
- let found_path =
- self.target_module.find_use_path(self.source_scope.db.upcast(), def, false)?;
+ let found_path = self.target_module.find_use_path(
+ self.source_scope.db.upcast(),
+ def,
+ false,
+ true,
+ )?;
let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
@@ -327,8 +334,42 @@ impl Ctx<'_> {
ted::replace(path.syntax(), subst.clone_subtree().clone_for_update());
}
}
+ hir::PathResolution::SelfType(imp) => {
+ // keep Self type if it does not need to be replaced
+ if self.same_self_type {
+ return None;
+ }
+
+ let ty = imp.self_ty(self.source_scope.db);
+ let ty_str = &ty
+ .display_source_code(
+ self.source_scope.db,
+ self.source_scope.module().into(),
+ true,
+ )
+ .ok()?;
+ let ast_ty = make::ty(&ty_str).clone_for_update();
+
+ if let Some(adt) = ty.as_adt() {
+ if let ast::Type::PathType(path_ty) = &ast_ty {
+ let found_path = self.target_module.find_use_path(
+ self.source_scope.db.upcast(),
+ ModuleDef::from(adt),
+ false,
+ true,
+ )?;
+
+ if let Some(qual) = mod_path_to_ast(&found_path).qualifier() {
+ let res = make::path_concat(qual, path_ty.path()?).clone_for_update();
+ ted::replace(path.syntax(), res.syntax());
+ return Some(());
+ }
+ }
+ }
+
+ ted::replace(path.syntax(), ast_ty.syntax());
+ }
hir::PathResolution::Local(_)
- | hir::PathResolution::SelfType(_)
| hir::PathResolution::Def(_)
| hir::PathResolution::BuiltinAttr(_)
| hir::PathResolution::ToolModule(_)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 353a9749a..d2b6a7326 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -22,10 +22,10 @@
//! Our current behavior is ¯\_(ツ)_/¯.
use std::fmt;
-use base_db::{AnchoredPathBuf, FileId, FileRange};
+use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange};
use either::Either;
-use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics};
-use stdx::never;
+use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
+use stdx::{never, TupleExt};
use syntax::{
ast::{self, HasName},
AstNode, SyntaxKind, TextRange, T,
@@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder};
use crate::{
defs::Definition,
- search::FileReference,
+ search::{FileReference, FileReferenceNode},
source_change::{FileSystemEdit, SourceChange},
syntax_helpers::node_ext::expr_as_name_ref,
traits::convert_to_def_in_trait,
@@ -103,6 +103,7 @@ impl Definition {
/// renamed and extern crate names will report its range, though a rename will introduce
/// an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
+ let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range);
let res = match self {
Definition::Macro(mac) => {
let src = mac.source(sema.db)?;
@@ -110,14 +111,18 @@ impl Definition {
Either::Left(it) => it.name()?,
Either::Right(it) => it.name()?,
};
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
Definition::Field(field) => {
let src = field.source(sema.db)?;
match &src.value {
FieldSource::Named(record_field) => {
let name = record_field.name()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
FieldSource::Pos(_) => None,
}
@@ -125,25 +130,31 @@ impl Definition {
Definition::Module(module) => {
let src = module.declaration_source(sema.db)?;
let name = src.value.name()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
- Definition::Function(it) => name_range(it, sema),
+ Definition::Function(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Adt(adt) => match adt {
- hir::Adt::Struct(it) => name_range(it, sema),
- hir::Adt::Union(it) => name_range(it, sema),
- hir::Adt::Enum(it) => name_range(it, sema),
+ hir::Adt::Struct(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ hir::Adt::Union(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ hir::Adt::Enum(it) => name_range(it, sema).and_then(syn_ctx_is_root),
},
- Definition::Variant(it) => name_range(it, sema),
- Definition::Const(it) => name_range(it, sema),
- Definition::Static(it) => name_range(it, sema),
- Definition::Trait(it) => name_range(it, sema),
- Definition::TraitAlias(it) => name_range(it, sema),
- Definition::TypeAlias(it) => name_range(it, sema),
- Definition::Local(it) => name_range(it.primary_source(sema.db), sema),
+ Definition::Variant(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Local(it) => {
+ name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root)
+ }
Definition::GenericParam(generic_param) => match generic_param {
hir::GenericParam::LifetimeParam(lifetime_param) => {
let src = lifetime_param.source(sema.db)?;
- src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db)
+ src.with_value(src.value.lifetime()?.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
_ => {
let x = match generic_param {
@@ -156,22 +167,30 @@ impl Definition {
Either::Left(x) => x.name()?,
Either::Right(_) => return None,
};
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
},
Definition::Label(label) => {
let src = label.source(sema.db);
let lifetime = src.value.lifetime()?;
- src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
+ src.with_value(lifetime.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
Definition::ExternCrateDecl(it) => {
let src = it.source(sema.db)?;
if let Some(rename) = src.value.rename() {
let name = rename.name()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
} else {
let name = src.value.name_ref()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
}
Definition::BuiltinType(_) => return None,
@@ -183,7 +202,10 @@ impl Definition {
};
return res;
- fn name_range<D>(def: D, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange>
+ fn name_range<D>(
+ def: D,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<(FileRange, SyntaxContextId)>
where
D: HasSource,
D::Ast: ast::HasName,
@@ -256,8 +278,10 @@ fn rename_mod(
let file_id = src.file_id.original_file(sema.db);
match src.value.name() {
Some(name) => {
- if let Some(file_range) =
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ if let Some(file_range) = src
+ .with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .map(TupleExt::head)
{
source_change.insert_source_edit(
file_id,
@@ -337,7 +361,7 @@ pub fn source_edit_from_references(
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
let mut edited_ranges = Vec::new();
for &FileReference { range, ref name, .. } in references {
- let name_range = name.syntax().text_range();
+ let name_range = name.text_range();
if name_range.len() != range.len() {
// This usage comes from a different token kind that was downmapped to a NameLike in a macro
// Renaming this will most likely break things syntax-wise
@@ -347,17 +371,17 @@ pub fn source_edit_from_references(
// if the ranges differ then the node is inside a macro call, we can't really attempt
// to make special rewrites like shorthand syntax and such, so just rename the node in
// the macro input
- ast::NameLike::NameRef(name_ref) if name_range == range => {
+ FileReferenceNode::NameRef(name_ref) if name_range == range => {
source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
}
- ast::NameLike::Name(name) if name_range == range => {
+ FileReferenceNode::Name(name) if name_range == range => {
source_edit_from_name(&mut edit, name, new_name)
}
_ => false,
};
if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
let (range, new_name) = match name {
- ast::NameLike::Lifetime(_) => (
+ FileReferenceNode::Lifetime(_) => (
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
),
@@ -493,7 +517,12 @@ fn source_edit_from_def(
for source in local.sources(sema.db) {
let source = match source.source.clone().original_ast_node(sema.db) {
Some(source) => source,
- None => match source.source.syntax().original_file_range_opt(sema.db) {
+ None => match source
+ .source
+ .syntax()
+ .original_file_range_opt(sema.db)
+ .map(TupleExt::head)
+ {
Some(FileRange { file_id: file_id2, range }) => {
file_id = Some(file_id2);
edit.replace(range, new_name.to_owned());
@@ -504,7 +533,7 @@ fn source_edit_from_def(
}
},
};
- file_id = source.file_id.file_id();
+ file_id = Some(source.file_id);
if let Either::Left(pat) = source.value {
let name_range = pat.name().unwrap().syntax().text_range();
// special cases required for renaming fields/locals in Record patterns
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 9c4f0ac8c..dbef36026 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -8,13 +8,14 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{
- AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility,
+ AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile,
+ InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use nohash_hasher::IntMap;
use once_cell::unsync::Lazy;
use parser::SyntaxKind;
-use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
+use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize};
use triomphe::Arc;
use crate::{
@@ -62,10 +63,67 @@ pub struct FileReference {
/// The range of the reference in the original file
pub range: TextRange,
/// The node of the reference in the (macro-)file
- pub name: ast::NameLike,
+ pub name: FileReferenceNode,
pub category: Option<ReferenceCategory>,
}
+#[derive(Debug, Clone)]
+pub enum FileReferenceNode {
+ Name(ast::Name),
+ NameRef(ast::NameRef),
+ Lifetime(ast::Lifetime),
+ FormatStringEntry(ast::String, TextRange),
+}
+
+impl FileReferenceNode {
+ pub fn text_range(&self) -> TextRange {
+ match self {
+ FileReferenceNode::Name(it) => it.syntax().text_range(),
+ FileReferenceNode::NameRef(it) => it.syntax().text_range(),
+ FileReferenceNode::Lifetime(it) => it.syntax().text_range(),
+ FileReferenceNode::FormatStringEntry(_, range) => *range,
+ }
+ }
+ pub fn syntax(&self) -> SyntaxElement {
+ match self {
+ FileReferenceNode::Name(it) => it.syntax().clone().into(),
+ FileReferenceNode::NameRef(it) => it.syntax().clone().into(),
+ FileReferenceNode::Lifetime(it) => it.syntax().clone().into(),
+ FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(),
+ }
+ }
+ pub fn into_name_like(self) -> Option<ast::NameLike> {
+ match self {
+ FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)),
+ FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)),
+ FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)),
+ FileReferenceNode::FormatStringEntry(_, _) => None,
+ }
+ }
+ pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
+ match self {
+ FileReferenceNode::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ }
+ }
+ pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
+ match self {
+ FileReferenceNode::Lifetime(lifetime) => Some(lifetime),
+ _ => None,
+ }
+ }
+ pub fn text(&self) -> syntax::TokenText<'_> {
+ match self {
+ FileReferenceNode::NameRef(name_ref) => name_ref.text(),
+ FileReferenceNode::Name(name) => name.text(),
+ FileReferenceNode::Lifetime(lifetime) => lifetime.text(),
+ FileReferenceNode::FormatStringEntry(it, range) => {
+ syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()])
+ }
+ }
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferenceCategory {
// FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
@@ -132,7 +190,8 @@ impl SearchScope {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
- if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db)
+ {
(file_id, Some(call_source.text_range()))
} else {
(
@@ -465,7 +524,9 @@ impl<'a> FindUsages<'a> {
// every textual hit. That function is notoriously
// expensive even for things that do not get down mapped
// into macros.
- sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
+ sema.descend_into_macros(DescendPreference::None, token)
+ .into_iter()
+ .filter_map(|it| it.parent())
})
};
@@ -475,6 +536,17 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the items name
for offset in match_indices(&text, finder, search_range) {
+ tree.token_at_offset(offset).into_iter().for_each(|token| {
+ let Some(str_token) = ast::String::cast(token.clone()) else { return };
+ if let Some((range, nameres)) =
+ sema.check_for_format_args_template(token.clone(), offset)
+ {
+ if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
+ return;
+ }
+ }
+ });
+
for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) {
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@@ -584,12 +656,12 @@ impl<'a> FindUsages<'a> {
) -> bool {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(Definition::SelfType(impl_)))
- if impl_.self_ty(self.sema.db) == *self_ty =>
+ if impl_.self_ty(self.sema.db).as_adt() == self_ty.as_adt() =>
{
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: None,
};
sink(file_id, reference)
@@ -608,7 +680,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
};
sink(file_id, reference)
@@ -617,6 +689,27 @@ impl<'a> FindUsages<'a> {
}
}
+ fn found_format_args_ref(
+ &self,
+ file_id: FileId,
+ range: TextRange,
+ token: ast::String,
+ res: Option<PathResolution>,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match res.map(Definition::from) {
+ Some(def) if def == self.def => {
+ let reference = FileReference {
+ range,
+ name: FileReferenceNode::FormatStringEntry(token, range),
+ category: Some(ReferenceCategory::Read),
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+
fn found_lifetime(
&self,
lifetime: &ast::Lifetime,
@@ -627,7 +720,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Lifetime(lifetime.clone()),
+ name: FileReferenceNode::Lifetime(lifetime.clone()),
category: None,
};
sink(file_id, reference)
@@ -651,7 +744,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@@ -667,7 +760,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@@ -677,7 +770,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@@ -701,7 +794,7 @@ impl<'a> FindUsages<'a> {
};
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: access,
};
sink(file_id, reference)
@@ -724,7 +817,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Name(name.clone()),
+ name: FileReferenceNode::Name(name.clone()),
// FIXME: mutable patterns should have `Write` access
category: Some(ReferenceCategory::Read),
};
@@ -734,7 +827,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Name(name.clone()),
+ name: FileReferenceNode::Name(name.clone()),
category: None,
};
sink(file_id, reference)
@@ -759,7 +852,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Name(name.clone()),
+ name: FileReferenceNode::Name(name.clone()),
category: None,
};
sink(file_id, reference)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 39763479c..c7188f1f7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -140,10 +140,10 @@ impl SnippetEdit {
.with_position()
.map(|pos| {
let (snippet, index) = match pos {
- itertools::Position::First(it) | itertools::Position::Middle(it) => it,
+ (itertools::Position::First, it) | (itertools::Position::Middle, it) => it,
// last/only snippet gets index 0
- itertools::Position::Last((snippet, _))
- | itertools::Position::Only((snippet, _)) => (snippet, 0),
+ (itertools::Position::Last, (snippet, _))
+ | (itertools::Position::Only, (snippet, _)) => (snippet, 0),
};
let range = match snippet {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index f699f999b..be8566b75 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -43,13 +43,20 @@ use triomphe::Arc;
use crate::RootDatabase;
-#[derive(Debug)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum SearchMode {
+ Fuzzy,
+ Exact,
+ Prefix,
+}
+
+#[derive(Debug, Clone)]
pub struct Query {
query: String,
lowercased: String,
only_types: bool,
libs: bool,
- exact: bool,
+ mode: SearchMode,
case_sensitive: bool,
limit: usize,
}
@@ -62,7 +69,7 @@ impl Query {
lowercased,
only_types: false,
libs: false,
- exact: false,
+ mode: SearchMode::Fuzzy,
case_sensitive: false,
limit: usize::max_value(),
}
@@ -76,8 +83,16 @@ impl Query {
self.libs = true;
}
+ pub fn fuzzy(&mut self) {
+ self.mode = SearchMode::Fuzzy;
+ }
+
pub fn exact(&mut self) {
- self.exact = true;
+ self.mode = SearchMode::Exact;
+ }
+
+ pub fn prefix(&mut self) {
+ self.mode = SearchMode::Prefix;
}
pub fn case_sensitive(&mut self) {
@@ -329,13 +344,23 @@ impl Query {
{
continue;
}
- if self.exact {
- if symbol.name != self.query {
- continue;
+ let skip = match self.mode {
+ SearchMode::Fuzzy => {
+ self.case_sensitive
+ && self.query.chars().any(|c| !symbol.name.contains(c))
}
- } else if self.case_sensitive
- && self.query.chars().any(|c| !symbol.name.contains(c))
- {
+ SearchMode::Exact => symbol.name != self.query,
+ SearchMode::Prefix if self.case_sensitive => {
+ !symbol.name.starts_with(&self.query)
+ }
+ SearchMode::Prefix => symbol
+ .name
+ .chars()
+ .zip(self.lowercased.chars())
+ .all(|(n, q)| n.to_lowercase().next() == Some(q)),
+ };
+
+ if skip {
continue;
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index 7834c6603..7c01ac069 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -21,21 +21,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 83..119,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 109..118,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 109..118,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Struct",
@@ -50,21 +51,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "mul1",
@@ -79,21 +81,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "mul2",
@@ -108,21 +111,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s1",
@@ -137,21 +141,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s1",
@@ -166,21 +171,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 83..119,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 109..118,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 109..118,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s2",
@@ -195,21 +201,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
],
),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 87ad5844c..c9875c7f8 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -19,21 +19,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: TYPE_ALIAS,
range: 397..417,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 402..407,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 402..407,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "CONST",
@@ -46,21 +47,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: CONST,
range: 340..361,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 346..351,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 346..351,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "CONST_WITH_INNER",
@@ -73,21 +75,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: CONST,
range: 520..592,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 526..542,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 526..542,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Enum",
@@ -102,21 +105,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: ENUM,
range: 185..207,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 190..194,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 190..194,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ItemLikeMacro",
@@ -131,21 +135,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 654..676,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 663..676,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 663..676,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Macro",
@@ -160,21 +165,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MACRO_DEF,
range: 153..168,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 159..164,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 159..164,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "STATIC",
@@ -187,21 +193,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STATIC,
range: 362..396,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 369..375,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 369..375,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Struct",
@@ -216,21 +223,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 170..184,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 177..183,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 177..183,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructFromMacro",
@@ -245,23 +253,22 @@
),
loc: DeclarationLocation {
hir_file_id: MacroFile(
- MacroFile {
- macro_call_id: MacroCallId(
- 0,
- ),
- },
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..22,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 6..21,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 6..21,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInFn",
@@ -276,23 +283,24 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 318..336,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 325..335,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 325..335,
+ },
+ ),
},
container_name: Some(
"main",
),
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInNamedConst",
@@ -307,23 +315,24 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 555..581,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 562..580,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 562..580,
+ },
+ ),
},
container_name: Some(
"CONST_WITH_INNER",
),
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInUnnamedConst",
@@ -338,21 +347,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 479..507,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 486..506,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 486..506,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Trait",
@@ -365,21 +375,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: TRAIT,
range: 261..300,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 267..272,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 267..272,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Trait",
@@ -394,21 +405,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 682..696,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 691..696,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 691..696,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Union",
@@ -423,21 +435,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: UNION,
range: 208..222,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 214..219,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 214..219,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "a_mod",
@@ -452,21 +465,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MODULE,
range: 419..457,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 423..428,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 423..428,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "b_mod",
@@ -481,21 +495,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MODULE,
range: 594..604,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 598..603,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 598..603,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "define_struct",
@@ -510,21 +525,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
range: 51..131,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 64..77,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 64..77,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "impl_fn",
@@ -537,21 +553,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: FN,
range: 242..257,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 245..252,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 245..252,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: true,
},
FileSymbol {
name: "macro_rules_macro",
@@ -566,21 +583,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
range: 1..48,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 14..31,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 14..31,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "main",
@@ -593,21 +611,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: FN,
range: 302..338,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 305..309,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 305..309,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "really_define_struct",
@@ -622,21 +641,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 611..648,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 628..648,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 628..648,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "trait_fn",
@@ -649,23 +669,24 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: FN,
range: 279..298,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 282..290,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 282..290,
+ },
+ ),
},
container_name: Some(
"Trait",
),
is_alias: false,
+ is_assoc: true,
},
],
),
@@ -691,21 +712,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 435..455,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 442..454,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 442..454,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
],
),
@@ -731,21 +753,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 111..143,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 127..143,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 127..143,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInModB",
@@ -760,21 +783,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..20,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 7..19,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 7..19,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "SuperItemLikeMacro",
@@ -789,21 +813,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 25..59,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 41..59,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 41..59,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ThisStruct",
@@ -818,21 +843,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 65..105,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 95..105,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 95..105,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ThisStruct",
@@ -847,21 +873,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 65..105,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 95..105,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 95..105,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
],
),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
index c7d5f3613..c8cf87d3c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
@@ -1,4 +1,5 @@
-//! Generates descriptors structure for unstable feature from Unstable Book
+//! Generates descriptor structures for unstable features from the unstable book
+//! and lints from rustc, rustdoc, and clippy.
use std::{borrow::Cow, fs, path::Path};
use itertools::Itertools;
@@ -6,6 +7,8 @@ use stdx::format_to;
use test_utils::project_root;
use xshell::{cmd, Shell};
+const DESTINATION: &str = "crates/ide-db/src/generated/lints.rs";
+
/// This clones rustc repo, and so is not worth to keep up-to-date. We update
/// manually by un-ignoring the test from time to time.
#[test]
@@ -14,11 +17,21 @@ fn sourcegen_lint_completions() {
let sh = &Shell::new().unwrap();
let rust_repo = project_root().join("./target/rust");
- if !rust_repo.exists() {
+ if rust_repo.exists() {
+ cmd!(sh, "git -C {rust_repo} pull --rebase").run().unwrap();
+ } else {
cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}")
.run()
.unwrap();
}
+ // need submodules for Cargo to parse the workspace correctly
+ cmd!(
+ sh,
+ "git -C {rust_repo} submodule update --init --recursive --depth=1 --
+ compiler library src/tools"
+ )
+ .run()
+ .unwrap();
let mut contents = String::from(
r"
@@ -27,17 +40,28 @@ pub struct Lint {
pub label: &'static str,
pub description: &'static str,
}
+
pub struct LintGroup {
pub lint: Lint,
pub children: &'static [&'static str],
}
+
",
);
generate_lint_descriptor(sh, &mut contents);
contents.push('\n');
- generate_feature_descriptor(&mut contents, &rust_repo.join("src/doc/unstable-book/src"));
+ let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
+ let unstable_book = project_root().join("./target/unstable-book-gen");
+ cmd!(
+ sh,
+ "{cargo} run --manifest-path {rust_repo}/src/tools/unstable-book-gen/Cargo.toml --
+ {rust_repo}/library {rust_repo}/compiler {rust_repo}/src {unstable_book}"
+ )
+ .run()
+ .unwrap();
+ generate_feature_descriptor(&mut contents, &unstable_book.join("src"));
contents.push('\n');
let lints_json = project_root().join("./target/clippy_lints.json");
@@ -51,41 +75,60 @@ pub struct LintGroup {
let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
- let destination = project_root().join("crates/ide_db/src/generated/lints.rs");
+ let destination = project_root().join(DESTINATION);
sourcegen::ensure_file_contents(destination.as_path(), &contents);
}
+/// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`.
+///
+/// As of writing, the output of `rustc -Whelp` (not rustdoc) has the following format:
+///
+/// ```text
+/// Lint checks provided by rustc:
+///
+/// name default meaning
+/// ---- ------- -------
+///
+/// ...
+///
+/// Lint groups provided by rustc:
+///
+/// name sub-lints
+/// ---- ---------
+///
+/// ...
+/// ```
+///
+/// `rustdoc -Whelp` (and any other custom `rustc` driver) adds another two
+/// tables after the `rustc` ones, with a different title but the same format.
fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
- // FIXME: rustdoc currently requires an input file for -Whelp cc https://github.com/rust-lang/rust/pull/88831
- let file = project_root().join(file!());
- let stdout = cmd!(sh, "rustdoc -W help {file}").read().unwrap();
- let start_lints = stdout.find("---- ------- -------").unwrap();
- let start_lint_groups = stdout.find("---- ---------").unwrap();
- let start_lints_rustdoc =
- stdout.find("Lint checks provided by plugins loaded by this crate:").unwrap();
- let start_lint_groups_rustdoc =
- stdout.find("Lint groups provided by plugins loaded by this crate:").unwrap();
+ let stdout = cmd!(sh, "rustdoc -Whelp").read().unwrap();
+ let lints_pat = "---- ------- -------\n";
+ let lint_groups_pat = "---- ---------\n";
+ let lints = find_and_slice(&stdout, lints_pat);
+ let lint_groups = find_and_slice(lints, lint_groups_pat);
+ let lints_rustdoc = find_and_slice(lint_groups, lints_pat);
+ let lint_groups_rustdoc = find_and_slice(lints_rustdoc, lint_groups_pat);
buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
buf.push('\n');
- let lints = stdout[start_lints..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
+ let lints = lints.lines().take_while(|l| !l.is_empty()).map(|line| {
let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), Cow::Borrowed(description.trim()), vec![])
});
- let lint_groups =
- stdout[start_lint_groups..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
- let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
- (
- name.trim(),
- format!("lint group for: {}", lints.trim()).into(),
- lints
- .split_ascii_whitespace()
- .map(|s| s.trim().trim_matches(',').replace('-', "_"))
- .collect(),
- )
- });
+ let lint_groups = lint_groups.lines().take_while(|l| !l.is_empty()).map(|line| {
+ let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+ (
+ name.trim(),
+ format!("lint group for: {}", lints.trim()).into(),
+ lints
+ .split_ascii_whitespace()
+ .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+ .collect(),
+ )
+ });
let lints = lints
.chain(lint_groups)
@@ -94,7 +137,8 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
for (name, description, ..) in &lints {
push_lint_completion(buf, &name.replace('-', "_"), description);
}
- buf.push_str("];\n");
+ buf.push_str("];\n\n");
+
buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
for (name, description, children) in &lints {
if !children.is_empty() {
@@ -115,27 +159,23 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
buf.push_str(r#"pub const RUSTDOC_LINTS: &[Lint] = &["#);
buf.push('\n');
- let lints_rustdoc =
- stdout[start_lints_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(|line| {
- let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
- let (_default_level, description) =
- rest.trim().split_once(char::is_whitespace).unwrap();
- (name.trim(), Cow::Borrowed(description.trim()), vec![])
- });
+ let lints_rustdoc = lints_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
+ let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+ let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
+ (name.trim(), Cow::Borrowed(description.trim()), vec![])
+ });
let lint_groups_rustdoc =
- stdout[start_lint_groups_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(
- |line| {
- let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
- (
- name.trim(),
- format!("lint group for: {}", lints.trim()).into(),
- lints
- .split_ascii_whitespace()
- .map(|s| s.trim().trim_matches(',').replace('-', "_"))
- .collect(),
- )
- },
- );
+ lint_groups_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
+ let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+ (
+ name.trim(),
+ format!("lint group for: {}", lints.trim()).into(),
+ lints
+ .split_ascii_whitespace()
+ .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+ .collect(),
+ )
+ });
let lints_rustdoc = lints_rustdoc
.chain(lint_groups_rustdoc)
@@ -145,7 +185,7 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
for (name, description, ..) in &lints_rustdoc {
push_lint_completion(buf, &name.replace('-', "_"), description)
}
- buf.push_str("];\n");
+ buf.push_str("];\n\n");
buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
for (name, description, children) in &lints_rustdoc {
@@ -157,14 +197,24 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
buf.push_str("];\n");
}
+#[track_caller]
+fn find_and_slice<'a>(i: &'a str, p: &str) -> &'a str {
+ let idx = i.find(p).unwrap();
+ &i[idx + p.len()..]
+}
+
+/// Parses the unstable book `src_dir` and prints a constant with the list of
+/// unstable features into `buf`.
+///
+/// It does this by looking for all `.md` files in the `language-features` and
+/// `library-features` directories, and using the file name as the feature
+/// name, and the file contents as the feature description.
fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
let mut features = ["language-features", "library-features"]
.into_iter()
.flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
- .filter(|path| {
- // Get all `.md ` files
- path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
- })
+ // Get all `.md` files
+ .filter(|path| path.extension() == Some("md".as_ref()))
.map(|path| {
let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace('-', "_");
let doc = fs::read_to_string(path).unwrap();
@@ -196,7 +246,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
let mut clippy_lints: Vec<ClippyLint> = Vec::new();
let mut clippy_groups: std::collections::BTreeMap<String, Vec<String>> = Default::default();
- for line in file_content.lines().map(|line| line.trim()) {
+ for line in file_content.lines().map(str::trim) {
if let Some(line) = line.strip_prefix(r#""id": ""#) {
let clippy_lint = ClippyLint {
id: line.strip_suffix(r#"","#).expect("should be suffixed by comma").into(),
@@ -211,12 +261,19 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
.push(clippy_lints.last().unwrap().id.clone());
}
} else if let Some(line) = line.strip_prefix(r#""docs": ""#) {
- let prefix_to_strip = r#" ### What it does"#;
- let line = match line.strip_prefix(prefix_to_strip) {
- Some(line) => line,
+ let header = "### What it does";
+ let line = match line.find(header) {
+ Some(idx) => &line[idx + header.len()..],
None => {
- eprintln!("unexpected clippy prefix for {}", clippy_lints.last().unwrap().id);
- continue;
+ let id = &clippy_lints.last().unwrap().id;
+ // these just don't have the common header
+ let allowed = ["allow_attributes", "read_line_without_trim"];
+ if allowed.contains(&id.as_str()) {
+ line
+ } else {
+ eprintln!("\nunexpected clippy prefix for {id}, line={line:?}\n",);
+ continue;
+ }
}
};
// Only take the description, any more than this is a lot of additional data we would embed into the exe
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index 14aa39401..f4055024c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -13,9 +13,9 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-either = "1.7.0"
-itertools = "0.10.5"
-serde_json = "1.0.86"
+either.workspace = true
+itertools.workspace = true
+serde_json.workspace = true
once_cell = "1.17.0"
# local deps
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
index 3b69640af..45fc6f8e6 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -1,7 +1,10 @@
//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
//! expressions and patterns.
-use ide_db::{base_db::FileId, source_change::SourceChange};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ source_change::SourceChange,
+};
use syntax::{ast, match_ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
@@ -49,7 +52,7 @@ fn check_expr_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct initialization",
- field_range,
+ FileRange { file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_expr_field_shorthand",
@@ -93,7 +96,7 @@ fn check_pat_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct pattern",
- field_range,
+ FileRange { file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_pat_field_shorthand",
@@ -166,7 +169,7 @@ fn main() {
check_diagnostics(
r#"
struct A { a: &'static str }
-fn f(a: A) { let A { a: hello } = a; }
+fn f(a: A) { let A { a: _hello } = a; }
"#,
);
check_diagnostics(
@@ -181,12 +184,14 @@ fn f(a: A) { let A { 0: 0 } = a; }
struct A { a: &'static str }
fn f(a: A) {
let A { a$0: a } = a;
+ _ = a;
}
"#,
r#"
struct A { a: &'static str }
fn f(a: A) {
let A { a } = a;
+ _ = a;
}
"#,
);
@@ -196,12 +201,14 @@ fn f(a: A) {
struct A { a: &'static str, b: &'static str }
fn f(a: A) {
let A { a$0: a, b } = a;
+ _ = (a, b);
}
"#,
r#"
struct A { a: &'static str, b: &'static str }
fn f(a: A) {
let A { a, b } = a;
+ _ = (a, b);
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 9eb763d3e..3b2e15a17 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -31,7 +31,7 @@ pub(crate) fn inactive_code(
let res = Diagnostic::new(
DiagnosticCode::Ra("inactive-code", Severity::WeakWarning),
message,
- ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ ctx.sema.diagnostics_display_range(d.node.clone()),
)
.with_unused(true);
Some(res)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 235062bf5..0f12e814b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -113,6 +113,31 @@ fn some_fn() {
}
"#,
);
+
+ check_fix(
+ r#"
+static S: i32 = M::A;
+
+mod $0M {
+ pub const A: i32 = 10;
+}
+
+mod other {
+ use crate::M::A;
+}
+"#,
+ r#"
+static S: i32 = m::A;
+
+mod m {
+ pub const A: i32 = 10;
+}
+
+mod other {
+ use crate::m::A;
+}
+"#,
+ );
}
#[test]
@@ -175,10 +200,10 @@ fn NonSnakeCaseName() {}
fn incorrect_function_params() {
check_diagnostics(
r#"
-fn foo(SomeParam: u8) {}
+fn foo(SomeParam: u8) { _ = SomeParam; }
// ^^^^^^^^^ 💡 warn: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
-fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
+fn foo2(ok_param: &str, CAPS_PARAM: u8) { _ = (ok_param, CAPS_PARAM); }
// ^^^^^^^^^^ 💡 warn: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
"#,
);
@@ -188,6 +213,7 @@ fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
fn incorrect_variable_names() {
check_diagnostics(
r#"
+#[allow(unused)]
fn foo() {
let SOME_VALUE = 10;
// ^^^^^^^^^^ 💡 warn: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
@@ -294,6 +320,7 @@ impl someStruct {
// ^^^^^^^^ 💡 warn: Function `SomeFunc` should have snake_case name, e.g. `some_func`
let WHY_VAR_IS_CAPS = 10;
// ^^^^^^^^^^^^^^^ 💡 warn: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
+ _ = WHY_VAR_IS_CAPS;
}
}
"#,
@@ -306,6 +333,7 @@ impl someStruct {
r#"
enum Option { Some, None }
+#[allow(unused)]
fn main() {
match Option::None {
None => (),
@@ -322,6 +350,7 @@ fn main() {
r#"
enum Option { Some, None }
+#[allow(unused)]
fn main() {
match Option::None {
SOME_VAR @ None => (),
@@ -349,7 +378,9 @@ enum E {
}
mod F {
- fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {}
+ fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {
+ _ = BAD_NAME_HI;
+ }
}
"#,
);
@@ -395,7 +426,7 @@ fn qualify() {
#[test] // Issue #8809.
fn parenthesized_parameter() {
- check_diagnostics(r#"fn f((O): _) {}"#)
+ check_diagnostics(r#"fn f((O): _) { _ = O; }"#)
}
#[test]
@@ -472,7 +503,9 @@ mod CheckBadStyle {
mod F {
#![allow(non_snake_case)]
- fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {}
+ fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {
+ _ = BAD_NAME_HI;
+ }
}
#[allow(non_snake_case, non_camel_case_types)]
@@ -510,17 +543,20 @@ fn NonSnakeCaseName(some_var: u8) -> u8 {
#[deny(nonstandard_style)]
mod CheckNonstandardStyle {
+ //^^^^^^^^^^^^^^^^^^^^^ 💡 error: Module `CheckNonstandardStyle` should have snake_case name, e.g. `check_nonstandard_style`
fn HiImABadFnName() {}
//^^^^^^^^^^^^^^ 💡 error: Function `HiImABadFnName` should have snake_case name, e.g. `hi_im_abad_fn_name`
}
#[deny(warnings)]
mod CheckBadStyle {
+ //^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style`
struct fooo;
//^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
}
mod F {
+ //^ 💡 warn: Module `F` should have snake_case name, e.g. `f`
#![deny(non_snake_case)]
fn CheckItWorksWithModAttr() {}
//^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr`
@@ -563,12 +599,12 @@ fn main() {
//^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
fn BAZ() {
//^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
- let INNER_INNER = 42;
- //^^^^^^^^^^^ 💡 warn: Variable `INNER_INNER` should have snake_case name, e.g. `inner_inner`
+ let _INNER_INNER = 42;
+ //^^^^^^^^^^^^ 💡 warn: Variable `_INNER_INNER` should have snake_case name, e.g. `_inner_inner`
}
- let INNER_LOCAL = 42;
- //^^^^^^^^^^^ 💡 warn: Variable `INNER_LOCAL` should have snake_case name, e.g. `inner_local`
+ let _INNER_LOCAL = 42;
+ //^^^^^^^^^^^^ 💡 warn: Variable `_INNER_LOCAL` should have snake_case name, e.g. `_inner_local`
}
}
"#,
@@ -641,4 +677,30 @@ enum E {
"#,
);
}
+
+ #[test]
+ fn module_name_inline() {
+ check_diagnostics(
+ r#"
+mod M {
+ //^ 💡 warn: Module `M` should have snake_case name, e.g. `m`
+ mod IncorrectCase {}
+ //^^^^^^^^^^^^^ 💡 warn: Module `IncorrectCase` should have snake_case name, e.g. `incorrect_case`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn module_name_decl() {
+ check_diagnostics(
+ r#"
+//- /Foo.rs
+
+//- /main.rs
+mod Foo;
+ //^^^ 💡 warn: Module `Foo` should have snake_case name, e.g. `foo`
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index 1ec17952b..f68f5b44b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
ctx: &DiagnosticsContext<'_>,
d: &hir::InvalidDeriveTarget,
) -> Diagnostic {
- let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone());
Diagnostic::new(
DiagnosticCode::RustcHardError("E0774"),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index a337e2660..d330973aa 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -3,7 +3,7 @@
use hir::{PathResolution, Semantics};
use ide_db::{
- base_db::FileId,
+ base_db::{FileId, FileRange},
helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope},
source_change::SourceChangeBuilder,
@@ -119,7 +119,7 @@ pub(crate) fn json_in_items(
Diagnostic::new(
DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
- range,
+ FileRange { file_id, range },
)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(file_id);
@@ -136,6 +136,7 @@ pub(crate) fn json_in_items(
it,
config.insert_use.prefix_kind,
config.prefer_no_std,
+ config.prefer_prelude,
) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
}
@@ -148,6 +149,7 @@ pub(crate) fn json_in_items(
it,
config.insert_use.prefix_kind,
config.prefer_no_std,
+ config.prefer_prelude,
) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 7ca0a0eab..099de4528 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -60,9 +60,6 @@ macro_rules! compile_error { () => {} }
#[test]
fn eager_macro_concat() {
- // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
- // See: https://github.com/rust-lang/rust-analyzer/issues/10300
-
check_diagnostics(
r#"
//- /lib.rs crate:lib deps:core
@@ -80,7 +77,6 @@ macro_rules! m {
fn f() {
m!();
- //^^^^ error: unresolved macro $crate::private::concat
}
//- /core.rs crate:core
@@ -268,4 +264,24 @@ fn f() {
"#,
)
}
+
+ #[test]
+ fn include_does_not_break_diagnostics() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("inactive-code".to_string());
+ config.disabled.insert("unlinked-file".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+//- minicore: include
+//- /lib.rs crate:lib
+include!("include-me.rs");
+//- /include-me.rs
+/// long doc that pushes the diagnostic range beyond the first file's text length
+ #[err]
+//^^^^^^error: unresolved macro `err`
+mod prim_never {}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index fc57dde69..6202d1585 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -7,7 +7,7 @@ pub(crate) fn malformed_derive(
ctx: &DiagnosticsContext<'_>,
d: &hir::MalformedDerive,
) -> Diagnostic {
- let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone());
Diagnostic::new(
DiagnosticCode::RustcHardError("E0777"),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 8265e0b1c..829601802 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -1,8 +1,9 @@
use either::Either;
use hir::InFile;
+use ide_db::base_db::FileRange;
use syntax::{
ast::{self, HasArgList},
- AstNode, SyntaxNodePtr, TextRange,
+ AstNode, SyntaxNodePtr,
};
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
@@ -23,12 +24,7 @@ pub(crate) fn mismatched_tuple_struct_pat_arg_count(
Diagnostic::new(
DiagnosticCode::RustcHardError("E0023"),
message,
- invalid_args_range(
- ctx,
- d.expr_or_pat.clone().map(|it| it.either(Into::into, Into::into)),
- d.expected,
- d.found,
- ),
+ invalid_args_range(ctx, d.expr_or_pat.clone().map(Into::into), d.expected, d.found),
)
}
@@ -53,7 +49,7 @@ fn invalid_args_range(
source: InFile<SyntaxNodePtr>,
expected: usize,
found: usize,
-) -> TextRange {
+) -> FileRange {
adjusted_display_range::<Either<ast::Expr, ast::TupleStructPat>>(ctx, source, &|expr| {
let (text_range, r_paren_token, expected_arg) = match expr {
Either::Left(ast::Expr::CallExpr(call)) => {
@@ -131,7 +127,7 @@ fn f() { zero(); }
fn simple_free_fn_one() {
check_diagnostics(
r#"
-fn one(arg: u8) {}
+fn one(_arg: u8) {}
fn f() { one(); }
//^^ error: expected 1 argument, found 0
"#,
@@ -139,7 +135,7 @@ fn f() { one(); }
check_diagnostics(
r#"
-fn one(arg: u8) {}
+fn one(_arg: u8) {}
fn f() { one(1); }
"#,
);
@@ -176,7 +172,7 @@ fn f() {
check_diagnostics(
r#"
struct S;
-impl S { fn method(&self, arg: u8) {} }
+impl S { fn method(&self, _arg: u8) {} }
fn f() {
S.method();
@@ -187,7 +183,7 @@ impl S { fn method(&self, arg: u8) {} }
check_diagnostics(
r#"
struct S;
-impl S { fn method(&self, arg: u8) {} }
+impl S { fn method(&self, _arg: u8) {} }
fn f() {
S::method(&S, 0);
@@ -335,8 +331,8 @@ struct S;
impl S {
fn method(#[cfg(NEVER)] self) {}
- fn method2(#[cfg(NEVER)] self, arg: u8) {}
- fn method3(self, #[cfg(NEVER)] arg: u8) {}
+ fn method2(#[cfg(NEVER)] self, _arg: u8) {}
+ fn method3(self, #[cfg(NEVER)] _arg: u8) {}
}
extern "C" {
@@ -365,8 +361,8 @@ fn main() {
r#"
#[rustc_legacy_const_generics(1, 3)]
fn mixed<const N1: &'static str, const N2: bool>(
- a: u8,
- b: i8,
+ _a: u8,
+ _b: i8,
) {}
fn f() {
@@ -376,8 +372,8 @@ fn f() {
#[rustc_legacy_const_generics(1, 3)]
fn b<const N1: u8, const N2: u8>(
- a: u8,
- b: u8,
+ _a: u8,
+ _b: u8,
) {}
fn g() {
@@ -403,7 +399,7 @@ fn f(
// ^^ error: this pattern has 0 fields, but the corresponding tuple struct has 2 fields
S(e, f, .., g, d): S
// ^^^^^^^^^ error: this pattern has 4 fields, but the corresponding tuple struct has 2 fields
-) {}
+) { _ = (a, b, c, d, e, f, g); }
"#,
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index acc31cd11..cb38bc54d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -1,7 +1,7 @@
use either::Either;
use hir::{
db::{ExpandDatabase, HirDatabase},
- known, AssocItem, HirDisplay, InFile, Type,
+ known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type,
};
use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
@@ -39,7 +39,7 @@ pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingField
d.field_list_parent_path
.clone()
.map(SyntaxNodePtr::from)
- .unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
+ .unwrap_or_else(|| d.field_list_parent.clone().into()),
);
Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr)
@@ -58,10 +58,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let root = ctx.sema.db.parse_or_expand(d.file);
- let current_module = match &d.field_list_parent {
- Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
- Either::Right(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
- };
+ let current_module =
+ ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let build_text_edit = |parent_syntax, new_syntax: &SyntaxNode, old_syntax| {
let edit = {
@@ -87,9 +85,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
)])
};
- match &d.field_list_parent {
- Either::Left(record_expr) => {
- let field_list_parent = record_expr.to_node(&root);
+ match &d.field_list_parent.to_node(&root) {
+ Either::Left(field_list_parent) => {
let missing_fields = ctx.sema.record_literal_missing_fields(&field_list_parent);
let mut locals = FxHashMap::default();
@@ -125,6 +122,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
use_trivial_constructor(
@@ -152,8 +150,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
old_field_list.syntax(),
)
}
- Either::Right(record_pat) => {
- let field_list_parent = record_pat.to_node(&root);
+ Either::Right(field_list_parent) => {
let missing_fields = ctx.sema.record_pattern_missing_fields(&field_list_parent);
let old_field_list = field_list_parent.record_pat_field_list()?;
@@ -290,6 +287,7 @@ fn x(a: S) {
struct S { s: u32 }
fn x(a: S) {
let S { ref s } = a;
+ _ = s;
}
",
)
@@ -626,7 +624,7 @@ struct TestStruct { one: i32, two: i64 }
fn test_fn() {
let one = 1;
- let s = TestStruct{ one, two: 2 };
+ let _s = TestStruct{ one, two: 2 };
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 06b03d3d1..ef6a273ed 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -17,14 +17,32 @@ pub(crate) fn missing_match_arms(
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
+ #[track_caller]
fn check_diagnostics_no_bails(ra_fixture: &str) {
cov_mark::check_count!(validate_match_bailed_out, 0);
crate::tests::check_diagnostics(ra_fixture)
}
#[test]
+ fn empty_body() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+fn main() {
+ match 0;
+}
+"#,
+ );
+ }
+
+ #[test]
fn empty_tuple() {
check_diagnostics_no_bails(
r#"
@@ -564,6 +582,7 @@ fn bang(never: !) {
r#"
enum Option<T> { Some(T), None }
+#[allow(unused)]
fn main() {
// `Never` is deliberately not defined so that it's an uninferred type.
match Option::<Never>::None {
@@ -719,7 +738,7 @@ fn main() {
r#"
struct S { a: char}
fn main(v: S) {
- match v { S{ a } => {} }
+ match v { S{ a } => { _ = a; } }
match v { S{ a: _x } => {} }
match v { S{ a: 'a' } => {} }
match v { S{..} => {} }
@@ -901,7 +920,7 @@ enum E{ A, B }
fn foo() {
match &E::A {
E::A => {}
- x => {}
+ _x => {}
}
}",
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 70b26009b..f93a35cf1 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1,4 +1,5 @@
use hir::db::ExpandDatabase;
+use hir::HirFileIdExt;
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode};
@@ -100,9 +101,9 @@ mod tests {
r#"
fn main() {
let x = &5 as *const usize;
- unsafe { let y = *x; }
- let z = *x;
-} //^^💡 error: this operation is unsafe and requires an unsafe function or block
+ unsafe { let _y = *x; }
+ let _z = *x;
+} //^^💡 error: this operation is unsafe and requires an unsafe function or block
"#,
)
}
@@ -116,13 +117,13 @@ struct HasUnsafe;
impl HasUnsafe {
unsafe fn unsafe_fn(&self) {
let x = &5 as *const usize;
- let y = *x;
+ let _y = *x;
}
}
unsafe fn unsafe_fn() {
let x = &5 as *const usize;
- let y = *x;
+ let _y = *x;
}
fn main() {
@@ -152,10 +153,10 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
- let x = STATIC_MUT.a;
- //^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
+ let _x = STATIC_MUT.a;
+ //^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
- let x = STATIC_MUT.a;
+ let _x = STATIC_MUT.a;
}
}
"#,
@@ -187,13 +188,13 @@ fn main() {
r#"
fn main() {
let x = &5 as *const usize;
- let z = *x$0;
+ let _z = *x$0;
}
"#,
r#"
fn main() {
let x = &5 as *const usize;
- let z = unsafe { *x };
+ let _z = unsafe { *x };
}
"#,
);
@@ -231,7 +232,7 @@ struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
- let z = *x;
+ let _z = *x;
}
}
fn main() {
@@ -244,7 +245,7 @@ struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
- let z = *x;
+ let _z = *x;
}
}
fn main() {
@@ -267,7 +268,7 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
- let x = STATIC_MUT$0.a;
+ let _x = STATIC_MUT$0.a;
}
"#,
r#"
@@ -278,7 +279,7 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
- let x = unsafe { STATIC_MUT.a };
+ let _x = unsafe { STATIC_MUT.a };
}
"#,
)
@@ -382,16 +383,16 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x;
- x = STATIC_MUT$0;
+ let _x;
+ _x = STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x;
- x = unsafe { STATIC_MUT };
+ let _x;
+ _x = unsafe { STATIC_MUT };
}
"#,
)
@@ -405,14 +406,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = STATIC_MUT$0 + 1;
+ let _x = STATIC_MUT$0 + 1;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = unsafe { STATIC_MUT } + 1;
+ let _x = unsafe { STATIC_MUT } + 1;
}
"#,
)
@@ -425,14 +426,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = &STATIC_MUT$0;
+ let _x = &STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = unsafe { &STATIC_MUT };
+ let _x = unsafe { &STATIC_MUT };
}
"#,
)
@@ -445,14 +446,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = &&STATIC_MUT$0;
+ let _x = &&STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = unsafe { &&STATIC_MUT };
+ let _x = unsafe { &&STATIC_MUT };
}
"#,
)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 3aa4aa970..886aefeb5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -29,6 +29,7 @@ fn main() {
let a = &X;
let b = *a;
//^ error: cannot move `X` out of reference
+ _ = b;
}
"#,
);
@@ -46,6 +47,7 @@ fn main() {
let b = a.0;
//^ error: cannot move `X` out of reference
let y = a.1;
+ _ = (b, y);
}
"#,
);
@@ -59,8 +61,8 @@ fn main() {
struct X;
fn main() {
static S: X = X;
- let s = S;
- //^ error: cannot move `X` out of reference
+ let _s = S;
+ //^^ error: cannot move `X` out of reference
}
"#,
);
@@ -165,7 +167,7 @@ enum X {
fn main() {
let x = &X::Bar;
- let c = || match *x {
+ let _c = || match *x {
X::Foo(t) => t,
_ => 5,
};
@@ -173,4 +175,19 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn regression_15787() {
+ check_diagnostics(
+ r#"
+//- minicore: coerce_unsized, slice, copy
+fn foo(mut slice: &[u32]) -> usize {
+ slice = match slice {
+ [0, rest @ ..] | rest => rest,
+ };
+ slice.len()
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index d056e5c85..187511149 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -324,6 +324,7 @@ fn main() {
let x_own = 2;
let ref mut x_ref = x_own;
//^^^^^^^^^^^^^ 💡 error: cannot mutate immutable variable `x_own`
+ _ = x_ref;
}
"#,
);
@@ -331,7 +332,7 @@ fn main() {
r#"
struct Foo;
impl Foo {
- fn method(&mut self, x: i32) {}
+ fn method(&mut self, _x: i32) {}
}
fn main() {
let x = Foo;
@@ -391,6 +392,7 @@ fn main() {
//^^^^^ 💡 warn: variable does not need to be mutable
x = 7;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
}
}
}
@@ -404,12 +406,14 @@ fn main() {
// there would be no mutability error for locals in dead code. Rustc tries to
// not emit `unused_mut` in this case, but since it works without `mut`, and
// special casing it is not trivial, we emit it.
+
+ // Update: now MIR based `unused-variable` is taking over `unused-mut` for the same reason.
check_diagnostics(
r#"
fn main() {
return;
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -419,7 +423,7 @@ fn main() {
fn main() {
loop {}
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -440,7 +444,7 @@ fn main(b: bool) {
g();
}
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -454,7 +458,7 @@ fn main(b: bool) {
return;
}
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -536,6 +540,7 @@ fn main() {
(k @ 5, ref mut t) if { continue; } => {
//^^^^^^^^^ 💡 error: cannot mutate immutable variable `z`
*t = 5;
+ _ = k;
}
_ => {
let y = (1, 2);
@@ -588,6 +593,7 @@ fn main() {
b = 1;
c = (2, 3);
d = 3;
+ _ = (c, b, d);
}
}
"#,
@@ -600,6 +606,7 @@ fn main() {
r#"
fn f(mut x: i32) {
//^^^^^ 💡 warn: variable does not need to be mutable
+ f(x + 2);
}
"#,
);
@@ -615,8 +622,11 @@ fn f(x: i32) {
r#"
fn f((x, y): (i32, i32)) {
let t = [0; 2];
- x = 5;
- //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ x = 5;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ _ = x;
+ _ = y;
+ _ = t;
}
"#,
);
@@ -645,6 +655,7 @@ fn f(x: [(i32, u8); 10]) {
//^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
+ _ = b;
}
}
"#,
@@ -666,6 +677,7 @@ fn f(x: [(i32, u8); 10]) {
//^^^^^ 💡 error: cannot mutate immutable variable `a`
c = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `c`
+ _ = (b, d);
}
}
}
@@ -696,18 +708,18 @@ fn f() {
fn overloaded_index() {
check_diagnostics(
r#"
-//- minicore: index
+//- minicore: index, copy
use core::ops::{Index, IndexMut};
struct Foo;
impl Index<usize> for Foo {
type Output = (i32, u8);
- fn index(&self, index: usize) -> &(i32, u8) {
+ fn index(&self, _index: usize) -> &(i32, u8) {
&(5, 2)
}
}
impl IndexMut<usize> for Foo {
- fn index_mut(&mut self, index: usize) -> &mut (i32, u8) {
+ fn index_mut(&mut self, _index: usize) -> &mut (i32, u8) {
&mut (5, 2)
}
}
@@ -715,26 +727,32 @@ fn f() {
let mut x = Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &x[2];
+ _ = (x, y);
let x = Foo;
let y = &mut x[2];
//^💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let mut x = &mut Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y: &mut (i32, u8) = &mut x[2];
+ _ = (x, y);
let x = Foo;
let ref mut y = x[7];
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let (ref mut y, _) = x[3];
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
match x[10] {
//^ 💡 error: cannot mutate immutable variable `x`
- (ref y, _) => (),
- (_, ref mut y) => (),
+ (ref y, 5) => _ = y,
+ (_, ref mut y) => _ = y,
}
let mut x = Foo;
let mut i = 5;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &mut x[i];
+ _ = y;
}
"#,
);
@@ -744,7 +762,7 @@ fn f() {
fn overloaded_deref() {
check_diagnostics(
r#"
-//- minicore: deref_mut
+//- minicore: deref_mut, copy
use core::ops::{Deref, DerefMut};
struct Foo;
@@ -763,21 +781,27 @@ fn f() {
let mut x = Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &*x;
+ _ = (x, y);
let x = Foo;
let y = &mut *x;
//^^ 💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let x = Foo;
+ //^ warn: unused variable
let x = Foo;
let y: &mut (i32, u8) = &mut x;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let ref mut y = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
let (ref mut y, _) = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
match *x {
//^^ 💡 error: cannot mutate immutable variable `x`
- (ref y, _) => (),
- (_, ref mut y) => (),
+ (ref y, 5) => _ = y,
+ (_, ref mut y) => _ = y,
}
}
"#,
@@ -866,6 +890,7 @@ pub fn test() {
data: 0
}
);
+ _ = tree;
}
"#,
);
@@ -925,6 +950,7 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
let x = X;
let closure4 = || { x.mutate(); };
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = (closure2, closure3, closure4);
}
"#,
);
@@ -941,7 +967,9 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
z = 3;
let mut k = z;
//^^^^^ 💡 warn: variable does not need to be mutable
+ _ = k;
};
+ _ = (x, closure);
}
"#,
);
@@ -958,6 +986,7 @@ fn f() {
}
}
};
+ _ = closure;
}
"#,
);
@@ -972,7 +1001,8 @@ fn f() {
let mut x = X;
let c2 = || { x = X; x };
let mut x = X;
- let c2 = move || { x = X; };
+ let c3 = move || { x = X; };
+ _ = (c1, c2, c3);
}
"#,
);
@@ -1023,7 +1053,7 @@ fn x(t: &[u8]) {
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
-
+ _ = b;
}
_ => {}
}
@@ -1079,6 +1109,7 @@ fn f() {
let x = Box::new(5);
let closure = || *x = 2;
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = closure;
}
"#,
);
@@ -1156,6 +1187,7 @@ macro_rules! mac {
fn main2() {
let mut x = mac![];
//^^^^^ 💡 warn: variable does not need to be mutable
+ _ = x;
}
"#,
);
@@ -1196,4 +1228,20 @@ fn foo(mut foo: Foo) {
"#,
);
}
+
+ #[test]
+ fn regression_15670() {
+ check_diagnostics(
+ r#"
+//- minicore: fn
+
+pub struct A {}
+pub unsafe fn foo(a: *mut A) {
+ let mut b = || -> *mut A { &mut *a };
+ //^^^^^ 💡 warn: variable does not need to be mutable
+ let _ = b();
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index 290c16c9d..0abcbffe7 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
+use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use syntax::{
ast::{self, edit::IndentLevel, make},
@@ -13,7 +13,7 @@ use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
//
// This diagnostic is triggered if created structure does not have field provided in record.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
- let node = d.field.clone().map(|it| it.either(Into::into, Into::into));
+ let node = d.field.clone().map(Into::into);
if d.private {
// FIXME: quickfix to add required visibility
Diagnostic::new_with_syntax_node_ptr(
@@ -35,15 +35,13 @@ pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
// FIXME: quickfix for pattern
- match &d.field.value {
- Either::Left(ptr) => {
- let root = ctx.sema.db.parse_or_expand(d.field.file_id);
- missing_record_expr_field_fixes(
- &ctx.sema,
- d.field.file_id.original_file(ctx.sema.db),
- &ptr.to_node(&root),
- )
- }
+ let root = ctx.sema.db.parse_or_expand(d.field.file_id);
+ match &d.field.value.to_node(&root) {
+ Either::Left(node) => missing_record_expr_field_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ node,
+ ),
_ => None,
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index c44d28e77..a828b8b4f 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -1,5 +1,3 @@
-use either::Either;
-
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-assoc-item
@@ -28,13 +26,7 @@ pub(crate) fn private_assoc_item(
},
name,
),
- d.expr_or_pat.clone().map(|it| match it {
- Either::Left(it) => it.into(),
- Either::Right(it) => match it {
- Either::Left(it) => it.into(),
- Either::Right(it) => it.into(),
- },
- }),
+ d.expr_or_pat.clone().map(Into::into),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 083ef3e8d..258ac6cd8 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, InFile};
+use hir::{db::ExpandDatabase, HirFileIdExt, InFile};
use ide_db::source_change::SourceChange;
use syntax::{
ast::{self, HasArgList},
@@ -74,8 +74,8 @@ mod tests {
r#"
//- minicore: iterators
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
"#,
);
}
@@ -117,7 +117,7 @@ fn foo() {
fn foo() {
let mut m = core::iter::repeat(())
.filter_map(|()| Some(92));
- let n = m.next();
+ let _n = m.next();
}
"#,
);
@@ -148,22 +148,22 @@ fn foo() {
fn foo() {
#[allow(clippy::filter_map_next)]
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
}
#[deny(clippy::filter_map_next)]
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
#[warn(clippy::filter_map_next)]
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
new file mode 100644
index 000000000..251a64529
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
@@ -0,0 +1,129 @@
+use hir::InFile;
+use syntax::ast;
+
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
+
+// Diagnostic: trait-impl-incorrect-safety
+//
+// Diagnoses incorrect safety annotations of trait impls.
+pub(crate) fn trait_impl_incorrect_safety(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplIncorrectSafety,
+) -> Diagnostic {
+ Diagnostic::new(
+ DiagnosticCode::Ra("trait-impl-incorrect-safety", Severity::Error),
+ if d.should_be_safe {
+ "unsafe impl for safe trait"
+ } else {
+ "impl for unsafe trait needs to be unsafe"
+ },
+ adjusted_display_range::<ast::Impl>(
+ ctx,
+ InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
+ &|impl_| {
+ if d.should_be_safe {
+ Some(match (impl_.unsafe_token(), impl_.impl_token()) {
+ (None, None) => return None,
+ (None, Some(t)) | (Some(t), None) => t.text_range(),
+ (Some(t1), Some(t2)) => t1.text_range().cover(t2.text_range()),
+ })
+ } else {
+ impl_.impl_token().map(|t| t.text_range())
+ }
+ },
+ ),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn simple() {
+ check_diagnostics(
+ r#"
+trait Safe {}
+unsafe trait Unsafe {}
+
+ impl Safe for () {}
+
+ impl Unsafe for () {}
+//^^^^ error: impl for unsafe trait needs to be unsafe
+
+ unsafe impl Safe for () {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+ unsafe impl Unsafe for () {}
+"#,
+ );
+ }
+
+ #[test]
+ fn drop_may_dangle() {
+ check_diagnostics(
+ r#"
+#[lang = "drop"]
+trait Drop {}
+struct S<T>;
+struct L<'l>;
+
+ impl<T> Drop for S<T> {}
+
+ impl<#[may_dangle] T> Drop for S<T> {}
+//^^^^ error: impl for unsafe trait needs to be unsafe
+
+ unsafe impl<T> Drop for S<T> {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+ unsafe impl<#[may_dangle] T> Drop for S<T> {}
+
+ impl<'l> Drop for L<'l> {}
+
+ impl<#[may_dangle] 'l> Drop for L<'l> {}
+//^^^^ error: impl for unsafe trait needs to be unsafe
+
+ unsafe impl<'l> Drop for L<'l> {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+ unsafe impl<#[may_dangle] 'l> Drop for L<'l> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn negative() {
+ check_diagnostics(
+ r#"
+trait Trait {}
+
+ impl !Trait for () {}
+
+ unsafe impl !Trait for () {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+unsafe trait UnsafeTrait {}
+
+ impl !UnsafeTrait for () {}
+
+ unsafe impl !UnsafeTrait for () {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+"#,
+ );
+ }
+
+ #[test]
+ fn inherent() {
+ check_diagnostics(
+ r#"
+struct S;
+
+ impl S {}
+
+ unsafe impl S {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
new file mode 100644
index 000000000..56188cddf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -0,0 +1,129 @@
+use hir::InFile;
+use itertools::Itertools;
+use syntax::{ast, AstNode};
+
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: trait-impl-missing-assoc-item
+//
+// Diagnoses missing trait items in a trait impl.
+pub(crate) fn trait_impl_missing_assoc_item(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplMissingAssocItems,
+) -> Diagnostic {
+ let missing = d.missing.iter().format_with(", ", |(name, item), f| {
+ f(&match *item {
+ hir::AssocItem::Function(_) => "`fn ",
+ hir::AssocItem::Const(_) => "`const ",
+ hir::AssocItem::TypeAlias(_) => "`type ",
+ })?;
+ f(&name.display(ctx.sema.db))?;
+ f(&"`")
+ });
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("E0046"),
+ format!("not all trait items implemented, missing: {missing}"),
+ adjusted_display_range::<ast::Impl>(
+ ctx,
+ InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
+ &|impl_| impl_.trait_().map(|t| t.syntax().text_range()),
+ ),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn trait_with_default_value() {
+ check_diagnostics(
+ r#"
+trait Marker {
+ const FLAG: bool = false;
+}
+struct Foo;
+impl Marker for Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn simple() {
+ check_diagnostics(
+ r#"
+trait Trait {
+ const C: ();
+ type T;
+ fn f();
+}
+
+impl Trait for () {
+ const C: () = ();
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`, `type T`, `fn f`
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn default() {
+ check_diagnostics(
+ r#"
+trait Trait {
+ const C: ();
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ const C: () = ();
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+ type T = ();
+ }
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn negative_impl() {
+ check_diagnostics(
+ r#"
+trait Trait {
+ fn item();
+}
+
+// Negative impls don't require any items (in fact, they forbid providing any)
+impl !Trait for () {}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
new file mode 100644
index 000000000..159d87d26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
@@ -0,0 +1,106 @@
+use hir::InFile;
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: trait-impl-orphan
+//
+// Only traits defined in the current crate can be implemented for arbitrary types
+pub(crate) fn trait_impl_orphan(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplOrphan,
+) -> Diagnostic {
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0117"),
+ format!("only traits defined in the current crate can be implemented for arbitrary types"),
+ InFile::new(d.file_id, d.impl_.clone().into()),
+ )
+ // Not yet checked for false positives
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn simple() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo {}
+//- /bar.rs crate:bar
+pub struct Bar;
+//- /main.rs crate:main deps:foo,bar
+struct LocalType;
+trait LocalTrait {}
+ impl foo::Foo for bar::Bar {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+impl foo::Foo for LocalType {}
+impl LocalTrait for bar::Bar {}
+"#,
+ );
+ }
+
+ #[test]
+ fn generics() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo<T> {}
+//- /bar.rs crate:bar
+pub struct Bar<T>(T);
+//- /main.rs crate:main deps:foo,bar
+struct LocalType<T>;
+trait LocalTrait<T> {}
+ impl<T> foo::Foo<T> for bar::Bar<T> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+
+ impl<T> foo::Foo<T> for bar::Bar<LocalType<T>> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+
+ impl<T> foo::Foo<LocalType<T>> for bar::Bar<T> {}
+
+ impl<T> foo::Foo<bar::Bar<LocalType<T>>> for bar::Bar<LocalType<T>> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+"#,
+ );
+ }
+
+ #[test]
+ fn fundamental() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo<T> {}
+//- /bar.rs crate:bar
+pub struct Bar<T>(T);
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T>(T);
+//- /main.rs crate:main deps:foo,bar
+struct LocalType;
+ impl<T> foo::Foo<T> for bar::Box<T> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+ impl<T> foo::Foo<T> for &LocalType {}
+ impl<T> foo::Foo<T> for bar::Box<LocalType> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn dyn_object() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo<T> {}
+//- /bar.rs crate:bar
+pub struct Bar;
+//- /main.rs crate:main deps:foo,bar
+trait LocalTrait {}
+impl<T> foo::Foo<T> for dyn LocalTrait {}
+impl<T> foo::Foo<dyn LocalTrait> for Bar {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
new file mode 100644
index 000000000..820014391
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -0,0 +1,79 @@
+use hir::{Const, Function, HasSource, TypeAlias};
+use ide_db::base_db::FileRange;
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: trait-impl-redundant-assoc-item
+//
+// Diagnoses redundant trait items in a trait impl.
+pub(crate) fn trait_impl_redundant_assoc_item(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplRedundantAssocItems,
+) -> Diagnostic {
+ let name = d.assoc_item.0.clone();
+ let assoc_item = d.assoc_item.1;
+ let db = ctx.sema.db;
+
+ let default_range = d.impl_.syntax_node_ptr().text_range();
+ let trait_name = d.trait_.name(db).to_smol_str();
+
+ let (redundant_item_name, diagnostic_range) = match assoc_item {
+ hir::AssocItem::Function(id) => (
+ format!("`fn {}`", name.display(db)),
+ Function::from(id)
+ .source(db)
+ .map(|it| it.syntax().value.text_range())
+ .unwrap_or(default_range),
+ ),
+ hir::AssocItem::Const(id) => (
+ format!("`const {}`", name.display(db)),
+ Const::from(id)
+ .source(db)
+ .map(|it| it.syntax().value.text_range())
+ .unwrap_or(default_range),
+ ),
+ hir::AssocItem::TypeAlias(id) => (
+ format!("`type {}`", name.display(db)),
+ TypeAlias::from(id)
+ .source(db)
+ .map(|it| it.syntax().value.text_range())
+ .unwrap_or(default_range),
+ ),
+ };
+
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("E0407"),
+ format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
+ FileRange { file_id: d.file_id.file_id().unwrap(), range: diagnostic_range },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn trait_with_default_value() {
+ check_diagnostics(
+ r#"
+trait Marker {
+ const FLAG: bool = false;
+ fn boo();
+ fn foo () {}
+}
+struct Foo;
+impl Marker for Foo {
+ type T = i32;
+ //^^^^^^^^^^^^^ error: `type T` is not a member of trait `Marker`
+
+ const FLAG: bool = true;
+
+ fn bar() {}
+ //^^^^^^^^^^^ error: `fn bar` is not a member of trait `Marker`
+
+ fn boo() {}
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 15bd28c00..70beb9468 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,5 +1,4 @@
-use either::Either;
-use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type};
+use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
ast::{self, BlockExpr, ExprStmt},
@@ -14,9 +13,11 @@ use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, Dia
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
- let display_range = match &d.expr_or_pat {
- Either::Left(expr) => {
- adjusted_display_range::<ast::Expr>(ctx, expr.clone().map(|it| it.into()), &|expr| {
+ let display_range = match &d.expr_or_pat.value {
+ expr if ast::Expr::can_cast(expr.kind()) => adjusted_display_range::<ast::Expr>(
+ ctx,
+ InFile { file_id: d.expr_or_pat.file_id, value: expr.syntax_node_ptr() },
+ &|expr| {
let salient_token_range = match expr {
ast::Expr::IfExpr(it) => it.if_token()?.text_range(),
ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(),
@@ -32,11 +33,12 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
cov_mark::hit!(type_mismatch_range_adjustment);
Some(salient_token_range)
- })
- }
- Either::Right(pat) => {
- ctx.sema.diagnostics_display_range(pat.clone().map(|it| it.into())).range
- }
+ },
+ ),
+ pat => ctx.sema.diagnostics_display_range(InFile {
+ file_id: d.expr_or_pat.file_id,
+ value: pat.syntax_node_ptr(),
+ }),
};
let mut diag = Diagnostic::new(
DiagnosticCode::RustcHardError("E0308"),
@@ -57,14 +59,12 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
- match &d.expr_or_pat {
- Either::Left(expr_ptr) => {
- add_reference(ctx, d, expr_ptr, &mut fixes);
- add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
- remove_semicolon(ctx, d, expr_ptr, &mut fixes);
- str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
- }
- Either::Right(_pat_ptr) => {}
+ if let Some(expr_ptr) = d.expr_or_pat.value.clone().cast::<ast::Expr>() {
+ let expr_ptr = &InFile { file_id: d.expr_or_pat.file_id, value: expr_ptr.clone() };
+ add_reference(ctx, d, expr_ptr, &mut fixes);
+ add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
+ remove_semicolon(ctx, d, expr_ptr, &mut fixes);
+ str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
}
if fixes.is_empty() {
@@ -80,7 +80,7 @@ fn add_reference(
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
- let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())).range;
+ let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into()));
let (_, mutability) = d.expected.as_reference()?;
let actual_with_ref = Type::reference(&d.actual, mutability);
@@ -90,10 +90,9 @@ fn add_reference(
let ampersands = format!("&{}", mutability.as_keyword_for_ref());
- let edit = TextEdit::insert(range.start(), ampersands);
- let source_change =
- SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
- acc.push(fix("add_reference_here", "Add reference here", source_change, range));
+ let edit = TextEdit::insert(range.range.start(), ampersands);
+ let source_change = SourceChange::from_text_edit(range.file_id, edit);
+ acc.push(fix("add_reference_here", "Add reference here", source_change, range.range));
Some(())
}
@@ -205,7 +204,7 @@ fn main() {
test(123);
//^^^ 💡 error: expected &i32, found i32
}
-fn test(arg: &i32) {}
+fn test(_arg: &i32) {}
"#,
);
}
@@ -217,13 +216,13 @@ fn test(arg: &i32) {}
fn main() {
test(123$0);
}
-fn test(arg: &i32) {}
+fn test(_arg: &i32) {}
"#,
r#"
fn main() {
test(&123);
}
-fn test(arg: &i32) {}
+fn test(_arg: &i32) {}
"#,
);
}
@@ -235,13 +234,13 @@ fn test(arg: &i32) {}
fn main() {
test($0123);
}
-fn test(arg: &mut i32) {}
+fn test(_arg: &mut i32) {}
"#,
r#"
fn main() {
test(&mut 123);
}
-fn test(arg: &mut i32) {}
+fn test(_arg: &mut i32) {}
"#,
);
}
@@ -254,13 +253,13 @@ fn test(arg: &mut i32) {}
fn main() {
test($0[1, 2, 3]);
}
-fn test(arg: &[i32]) {}
+fn test(_arg: &[i32]) {}
"#,
r#"
fn main() {
test(&[1, 2, 3]);
}
-fn test(arg: &[i32]) {}
+fn test(_arg: &[i32]) {}
"#,
);
}
@@ -274,24 +273,26 @@ struct Foo;
struct Bar;
impl core::ops::Deref for Foo {
type Target = Bar;
+ fn deref(&self) -> &Self::Target { loop {} }
}
fn main() {
test($0Foo);
}
-fn test(arg: &Bar) {}
+fn test(_arg: &Bar) {}
"#,
r#"
struct Foo;
struct Bar;
impl core::ops::Deref for Foo {
type Target = Bar;
+ fn deref(&self) -> &Self::Target { loop {} }
}
fn main() {
test(&Foo);
}
-fn test(arg: &Bar) {}
+fn test(_arg: &Bar) {}
"#,
);
}
@@ -305,7 +306,7 @@ fn main() {
}
struct Test;
impl Test {
- fn call_by_ref(&self, arg: &i32) {}
+ fn call_by_ref(&self, _arg: &i32) {}
}
"#,
r#"
@@ -314,7 +315,7 @@ fn main() {
}
struct Test;
impl Test {
- fn call_by_ref(&self, arg: &i32) {}
+ fn call_by_ref(&self, _arg: &i32) {}
}
"#,
);
@@ -345,7 +346,7 @@ macro_rules! thousand {
1000_u64
};
}
-fn test(foo: &u64) {}
+fn test(_foo: &u64) {}
fn main() {
test($0thousand!());
}
@@ -356,7 +357,7 @@ macro_rules! thousand {
1000_u64
};
}
-fn test(foo: &u64) {}
+fn test(_foo: &u64) {}
fn main() {
test(&thousand!());
}
@@ -369,12 +370,12 @@ fn main() {
check_fix(
r#"
fn main() {
- let test: &mut i32 = $0123;
+ let _test: &mut i32 = $0123;
}
"#,
r#"
fn main() {
- let test: &mut i32 = &mut 123;
+ let _test: &mut i32 = &mut 123;
}
"#,
);
@@ -411,7 +412,7 @@ fn div(x: i32, y: i32) -> Option<i32> {
fn f<const N: u64>() -> Rate<N> { // FIXME: add some error
loop {}
}
- fn run(t: Rate<5>) {
+ fn run(_t: Rate<5>) {
}
fn main() {
run(f()) // FIXME: remove this error
@@ -426,7 +427,7 @@ fn div(x: i32, y: i32) -> Option<i32> {
check_diagnostics(
r#"
pub struct Rate<T, const NOM: u32, const DENOM: u32>(T);
- fn run(t: Rate<u32, 1, 1>) {
+ fn run(_t: Rate<u32, 1, 1>) {
}
fn main() {
run(Rate::<_, _, _>(5));
@@ -650,7 +651,7 @@ fn h() {
r#"
struct X<T>(T);
-fn foo(x: X<Unknown>) {}
+fn foo(_x: X<Unknown>) {}
fn test1() {
// Unknown might be `i32`, so we should not emit type mismatch here.
foo(X(42));
@@ -736,4 +737,19 @@ fn g() { return; }
"#,
);
}
+
+ #[test]
+ fn smoke_test_inner_items() {
+ check_diagnostics(
+ r#"
+fn f() {
+ fn inner() -> i32 {
+ return;
+ // ^^^^^^ error: expected i32, found ()
+ 0
+ }
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 4af672271..a740e332b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -26,14 +26,14 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
)
};
- Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range)
+ Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range)
.with_fixes(fixes)
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id);
- let original_range =
+ let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
let mut assists = vec![];
@@ -142,8 +142,8 @@ fn t<T>() -> T { loop {} }
check_diagnostics(
r#"
fn main() {
- let x = [(); _];
- let y: [(); 10] = [(); _];
+ let _x = [(); _];
+ let _y: [(); 10] = [(); _];
_ = 0;
(_,) = (1,);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index e04f27c27..becc24ab2 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -4,7 +4,7 @@ use std::iter;
use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
use ide_db::{
- base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
+ base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt},
source_change::SourceChange,
RootDatabase,
};
@@ -46,8 +46,12 @@ pub(crate) fn unlinked_file(
.unwrap_or(range);
acc.push(
- Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range)
- .with_fixes(fixes),
+ Diagnostic::new(
+ DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning),
+ message,
+ FileRange { file_id, range },
+ )
+ .with_fixes(fixes),
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 0758706e4..321459412 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, AstPtr};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-field
//
@@ -22,15 +22,24 @@ pub(crate) fn unresolved_field(
} else {
""
};
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
+ Diagnostic::new(
DiagnosticCode::RustcHardError("E0559"),
format!(
"no field `{}` on type `{}`{method_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- d.expr.clone().map(|it| it.into()),
+ adjusted_display_range_new(ctx, d.expr, &|expr| {
+ Some(
+ match expr {
+ ast::Expr::MethodCallExpr(it) => it.name_ref(),
+ ast::Expr::FieldExpr(it) => it.name_ref(),
+ _ => None,
+ }?
+ .syntax()
+ .text_range(),
+ )
+ }),
)
.with_fixes(fixes(ctx, d))
.experimental()
@@ -79,7 +88,7 @@ mod tests {
r#"
fn main() {
().foo;
- // ^^^^^^ error: no field `foo` on type `()`
+ // ^^^ error: no field `foo` on type `()`
}
"#,
);
@@ -95,7 +104,7 @@ impl Foo {
}
fn foo() {
Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+ // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
}
"#,
);
@@ -112,7 +121,7 @@ trait Bar {
impl Bar for Foo {}
fn foo() {
Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+ // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
}
"#,
);
@@ -131,7 +140,7 @@ impl Bar for Foo {
}
fn foo() {
Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+ // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 33e7c2e37..c8ff54cba 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -70,4 +70,16 @@ self::m!(); self::m2!();
"#,
);
}
+
+ #[test]
+ fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
+ check_diagnostics(
+ r#"
+ mod _test_inner {
+ #![empty_attr]
+ //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+ }
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index ae9f6744c..464b0a710 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, TextRange};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-method
//
@@ -22,15 +22,24 @@ pub(crate) fn unresolved_method(
} else {
""
};
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
+ Diagnostic::new(
DiagnosticCode::RustcHardError("E0599"),
format!(
"no method `{}` on type `{}`{field_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- d.expr.clone().map(|it| it.into()),
+ adjusted_display_range_new(ctx, d.expr, &|expr| {
+ Some(
+ match expr {
+ ast::Expr::MethodCallExpr(it) => it.name_ref(),
+ ast::Expr::FieldExpr(it) => it.name_ref(),
+ _ => None,
+ }?
+ .syntax()
+ .text_range(),
+ )
+ }),
)
.with_fixes(fixes(ctx, d))
.experimental()
@@ -92,7 +101,41 @@ mod tests {
r#"
fn main() {
().foo();
- // ^^^^^^^^ error: no method `foo` on type `()`
+ // ^^^ error: no method `foo` on type `()`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn smoke_test_in_macro_def_site() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ ($rcv:expr) => {
+ $rcv.foo()
+ }
+}
+fn main() {
+ m!(());
+ // ^^^^^^ error: no method `foo` on type `()`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn smoke_test_in_macro_call_site() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ ($ident:ident) => {
+ ().$ident()
+ }
+}
+fn main() {
+ m!(foo);
+ // ^^^ error: no method `foo` on type `()`
}
"#,
);
@@ -105,7 +148,7 @@ fn main() {
struct Foo { bar: i32 }
fn foo() {
Foo { bar: i32 }.bar();
- // ^^^^^^^^^^^^^^^^^^^^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
+ // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index be24e50c9..e90d385ba 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -1,4 +1,4 @@
-use hir::db::ExpandDatabase;
+use hir::{db::ExpandDatabase, HirFileIdExt};
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools;
use syntax::AstNode;
@@ -87,7 +87,12 @@ mod baz {}
"E0583",
),
message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
- range: 0..8,
+ range: FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 0..8,
+ },
severity: Error,
unused: false,
experimental: false,
@@ -150,11 +155,9 @@ mod baz {}
],
),
main_node: Some(
- InFile {
+ InFileWrapper {
file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
value: MODULE@0..8
MOD_KW@0..3 "mod"
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
new file mode 100644
index 000000000..28ccf474b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -0,0 +1,111 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: unused-variables
+//
+// This diagnostic is triggered when a local variable is not used.
+pub(crate) fn unused_variables(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnusedVariable,
+) -> Diagnostic {
+ let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcLint("unused_variables"),
+ "unused variable",
+ ast,
+ )
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn unused_variables_simple() {
+ check_diagnostics(
+ r#"
+//- minicore: fn
+struct Foo { f1: i32, f2: i64 }
+
+fn f(kkk: i32) {}
+ //^^^ warn: unused variable
+fn main() {
+ let a = 2;
+ //^ warn: unused variable
+ let b = 5;
+ // note: `unused variable` implies `unused mut`, so we should not emit both at the same time.
+ let mut c = f(b);
+ //^^^^^ warn: unused variable
+ let (d, e) = (3, 5);
+ //^ warn: unused variable
+ let _ = e;
+ let f1 = 2;
+ let f2 = 5;
+ let f = Foo { f1, f2 };
+ match f {
+ Foo { f1, f2 } => {
+ //^^ warn: unused variable
+ _ = f2;
+ }
+ }
+ let g = false;
+ if g {}
+ let h: fn() -> i32 = || 2;
+ let i = h();
+ //^ warn: unused variable
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unused_self() {
+ check_diagnostics(
+ r#"
+struct S {
+}
+impl S {
+ fn owned_self(self, u: i32) {}
+ //^ warn: unused variable
+ fn ref_self(&self, u: i32) {}
+ //^ warn: unused variable
+ fn ref_mut_self(&mut self, u: i32) {}
+ //^ warn: unused variable
+ fn owned_mut_self(mut self) {}
+ //^^^^^^^^ 💡 warn: variable does not need to be mutable
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn allow_unused_variables_for_identifiers_starting_with_underline() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let _x = 2;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn respect_lint_attributes_for_unused_variables() {
+ check_diagnostics(
+ r#"
+fn main() {
+ #[allow(unused_variables)]
+ let x = 2;
+}
+
+#[deny(unused)]
+fn main2() {
+ let x = 2;
+ //^ error: unused variable
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
index c4ac59ec2..8dce2af23 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -1,5 +1,8 @@
use hir::InFile;
-use ide_db::{base_db::FileId, source_change::SourceChange};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ source_change::SourceChange,
+};
use itertools::Itertools;
use syntax::{ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
@@ -38,7 +41,7 @@ pub(crate) fn useless_braces(
Diagnostic::new(
DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_string(),
- use_range,
+ FileRange { file_id, range: use_range },
)
.with_main_node(InFile::new(file_id.into(), node.clone()))
.with_fixes(Some(vec![fix(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index ebe197a67..579386c72 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -23,7 +23,7 @@
//! There are also a couple of ad-hoc diagnostics implemented directly here, we
//! don't yet have a great pattern for how to do them properly.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod handlers {
pub(crate) mod break_outside_of_loop;
@@ -44,6 +44,10 @@ mod handlers {
pub(crate) mod private_assoc_item;
pub(crate) mod private_field;
pub(crate) mod replace_filter_map_next_with_find_map;
+ pub(crate) mod trait_impl_orphan;
+ pub(crate) mod trait_impl_incorrect_safety;
+ pub(crate) mod trait_impl_missing_assoc_item;
+ pub(crate) mod trait_impl_redundant_assoc_item;
pub(crate) mod typed_hole;
pub(crate) mod type_mismatch;
pub(crate) mod unimplemented_builtin_macro;
@@ -56,6 +60,7 @@ mod handlers {
pub(crate) mod unresolved_proc_macro;
pub(crate) mod undeclared_label;
pub(crate) mod unreachable_label;
+ pub(crate) mod unused_variables;
// The handlers below are unusual, the implement the diagnostics as well.
pub(crate) mod field_shorthand;
@@ -85,11 +90,11 @@ use stdx::never;
use syntax::{
algo::find_node_at_range,
ast::{self, AstNode},
- SyntaxNode, SyntaxNodePtr, TextRange,
+ AstPtr, SyntaxNode, SyntaxNodePtr, TextRange,
};
// FIXME: Make this an enum
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum DiagnosticCode {
RustcHardError(&'static str),
RustcLint(&'static str),
@@ -129,7 +134,7 @@ impl DiagnosticCode {
pub struct Diagnostic {
pub code: DiagnosticCode,
pub message: String,
- pub range: TextRange,
+ pub range: FileRange,
pub severity: Severity,
pub unused: bool,
pub experimental: bool,
@@ -139,7 +144,7 @@ pub struct Diagnostic {
}
impl Diagnostic {
- fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ fn new(code: DiagnosticCode, message: impl Into<String>, range: FileRange) -> Diagnostic {
let message = message.into();
Diagnostic {
code,
@@ -168,7 +173,7 @@ impl Diagnostic {
node: InFile<SyntaxNodePtr>,
) -> Diagnostic {
let file_id = node.file_id;
- Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range)
+ Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()))
.with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id))))
}
@@ -193,7 +198,7 @@ impl Diagnostic {
}
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Severity {
Error,
Warning,
@@ -224,6 +229,7 @@ pub struct DiagnosticsConfig {
// FIXME: We may want to include a whole `AssistConfig` here
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+ pub prefer_prelude: bool,
}
impl DiagnosticsConfig {
@@ -246,6 +252,7 @@ impl DiagnosticsConfig {
skip_glob_imports: false,
},
prefer_no_std: false,
+ prefer_prelude: true,
}
}
}
@@ -261,7 +268,7 @@ impl DiagnosticsContext<'_> {
&self,
node: &InFile<SyntaxNodePtr>,
precise_location: Option<TextRange>,
- ) -> TextRange {
+ ) -> FileRange {
let sema = &self.sema;
(|| {
let precise_location = precise_location?;
@@ -274,10 +281,11 @@ impl DiagnosticsContext<'_> {
}
})()
.unwrap_or_else(|| sema.diagnostics_display_range(node.clone()))
- .range
}
}
+/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files
+/// due to macros.
pub fn diagnostics(
db: &RootDatabase,
config: &DiagnosticsConfig,
@@ -294,7 +302,7 @@ pub fn diagnostics(
Diagnostic::new(
DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error: {err}"),
- err.range(),
+ FileRange { file_id, range: err.range() },
)
}));
@@ -355,6 +363,10 @@ pub fn diagnostics(
AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d),
AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d),
AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
+ AnyDiagnostic::TraitImplIncorrectSafety(d) => handlers::trait_impl_incorrect_safety::trait_impl_incorrect_safety(&ctx, &d),
+ AnyDiagnostic::TraitImplMissingAssocItems(d) => handlers::trait_impl_missing_assoc_item::trait_impl_missing_assoc_item(&ctx, &d),
+ AnyDiagnostic::TraitImplRedundantAssocItems(d) => handlers::trait_impl_redundant_assoc_item::trait_impl_redundant_assoc_item(&ctx, &d),
+ AnyDiagnostic::TraitImplOrphan(d) => handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d),
AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d),
AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
AnyDiagnostic::UndeclaredLabel(d) => handlers::undeclared_label::undeclared_label(&ctx, &d),
@@ -368,6 +380,7 @@ pub fn diagnostics(
AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
AnyDiagnostic::UnusedMut(d) => handlers::mutability_errors::unused_mut(&ctx, &d),
+ AnyDiagnostic::UnusedVariable(d) => handlers::unused_variables::unused_variables(&ctx, &d),
AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
AnyDiagnostic::MismatchedTupleStructPatArgCount(d) => handlers::mismatched_arg_count::mismatched_tuple_struct_pat_arg_count(&ctx, &d),
};
@@ -559,12 +572,28 @@ fn adjusted_display_range<N: AstNode>(
ctx: &DiagnosticsContext<'_>,
diag_ptr: InFile<SyntaxNodePtr>,
adj: &dyn Fn(N) -> Option<TextRange>,
-) -> TextRange {
+) -> FileRange {
let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);
let source_file = ctx.sema.db.parse(file_id);
- find_node_at_range::<N>(&source_file.syntax_node(), range)
- .filter(|it| it.syntax().text_range() == range)
- .and_then(adj)
- .unwrap_or(range)
+ FileRange {
+ file_id,
+ range: find_node_at_range::<N>(&source_file.syntax_node(), range)
+ .filter(|it| it.syntax().text_range() == range)
+ .and_then(adj)
+ .unwrap_or(range),
+ }
+}
+
+// FIXME Replace the one above with this one?
+fn adjusted_display_range_new<N: AstNode>(
+ ctx: &DiagnosticsContext<'_>,
+ diag_ptr: InFile<AstPtr<N>>,
+ adj: &dyn Fn(N) -> Option<TextRange>,
+) -> FileRange {
+ let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id);
+ let node = diag_ptr.value.to_node(&source_file);
+ diag_ptr
+ .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range()))
+ .original_node_file_range_rooted(ctx.sema.db)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index ee0e03549..48e0363c9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -5,8 +5,9 @@ use expect_test::Expect;
use ide_db::{
assists::AssistResolveStrategy,
base_db::{fixture::WithFixture, SourceDatabaseExt},
- RootDatabase,
+ LineIndexDatabase, RootDatabase,
};
+use itertools::Itertools;
use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations, MiniCore};
@@ -43,7 +44,8 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.pop()
.expect("no diagnostics");
- let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
+ let fix =
+ &diagnostic.fixes.expect(&format!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
@@ -102,32 +104,39 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) {
#[track_caller]
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
let (db, files) = RootDatabase::with_many_files(ra_fixture);
+ let mut annotations = files
+ .iter()
+ .copied()
+ .flat_map(|file_id| {
+ super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map(
+ |d| {
+ let mut annotation = String::new();
+ if let Some(fixes) = &d.fixes {
+ assert!(!fixes.is_empty());
+ annotation.push_str("💡 ")
+ }
+ annotation.push_str(match d.severity {
+ Severity::Error => "error",
+ Severity::WeakWarning => "weak",
+ Severity::Warning => "warn",
+ Severity::Allow => "allow",
+ });
+ annotation.push_str(": ");
+ annotation.push_str(&d.message);
+ (d.range, annotation)
+ },
+ )
+ })
+ .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
+ .into_group_map();
for file_id in files {
- let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+ let line_index = db.line_index(file_id);
+ let mut actual = annotations.remove(&file_id).unwrap_or_default();
let expected = extract_annotations(&db.file_text(file_id));
- let mut actual = diagnostics
- .into_iter()
- .map(|d| {
- let mut annotation = String::new();
- if let Some(fixes) = &d.fixes {
- assert!(!fixes.is_empty());
- annotation.push_str("💡 ")
- }
- annotation.push_str(match d.severity {
- Severity::Error => "error",
- Severity::WeakWarning => "weak",
- Severity::Warning => "warn",
- Severity::Allow => "allow",
- });
- annotation.push_str(": ");
- annotation.push_str(&d.message);
- (d.range, annotation)
- })
- .collect::<Vec<_>>();
actual.sort_by_key(|(range, _)| range.start());
if expected.is_empty() {
- // makes minicore smoke test debugable
+ // makes minicore smoke test debuggable
for (e, _) in &actual {
eprintln!(
"Code in range {e:?} = {}",
@@ -136,8 +145,16 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
}
}
if expected != actual {
- let fneg = expected.iter().filter(|x| !actual.contains(x)).collect::<Vec<_>>();
- let fpos = actual.iter().filter(|x| !expected.contains(x)).collect::<Vec<_>>();
+ let fneg = expected
+ .iter()
+ .filter(|x| !actual.contains(x))
+ .map(|(range, s)| (line_index.line_col(range.start()), range, s))
+ .collect::<Vec<_>>();
+ let fpos = actual
+ .iter()
+ .filter(|x| !expected.contains(x))
+ .map(|(range, s)| (line_index.line_col(range.start()), range, s))
+ .collect::<Vec<_>>();
panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}");
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
index 70ed6dea5..56b29f92b 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
+itertools.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index 66832a0be..d756e7a63 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -3,7 +3,7 @@
//! Allows searching the AST for code that matches one or more patterns and then replacing that code
//! based on a template.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
// Feature: Structural Search and Replace
//
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index 60fcbbbd3..0312a0f11 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -651,7 +651,7 @@ impl Match {
for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
let mod_path =
- module.find_use_path(sema.db, module_def, false).ok_or_else(|| {
+ module.find_use_path(sema.db, module_def, false, true).ok_or_else(|| {
match_error!("Failed to render template path `{}` at match location")
})?;
self.rendered_template_paths.insert(path.clone(), mod_path);
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
index 2aee203c4..0943574ec 100644
--- a/src/tools/rust-analyzer/crates/ide/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -14,9 +14,10 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
crossbeam-channel = "0.5.5"
-either = "1.7.0"
-itertools = "0.10.5"
-tracing = "0.1.35"
+arrayvec = "0.7.4"
+either.workspace = true
+itertools.workspace = true
+tracing.workspace = true
oorandom = "11.1.3"
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.1", default-features = false }
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
index fb79b5dc2..d7f82b4af 100644
--- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, InFile, Semantics};
+use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::Definition,
@@ -149,8 +149,8 @@ pub(crate) fn annotations(
node: InFile<T>,
source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> {
- if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
- if file_id == source_file_id.into() {
+ if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) {
+ if file_id == source_file_id {
return Some((
value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()),
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
index f834f2ce5..458b852e2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -1,6 +1,8 @@
//! Entry point for call-hierarchy
-use hir::Semantics;
+use std::iter;
+
+use hir::{DescendPreference, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
@@ -66,7 +68,10 @@ pub(crate) fn incoming_calls(
def.try_to_nav(sema.db)
});
if let Some(nav) = nav {
- calls.add(nav, sema.original_range(name.syntax()).range);
+ calls.add(nav.call_site, sema.original_range(name.syntax()).range);
+ if let Some(other) = nav.def_site {
+ calls.add(other, sema.original_range(name.syntax()).range);
+ }
}
}
}
@@ -87,7 +92,7 @@ pub(crate) fn outgoing_calls(
})?;
let mut calls = CallLocations::default();
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
.filter_map(|item| match item {
@@ -117,8 +122,9 @@ pub(crate) fn outgoing_calls(
function.try_to_nav(db).zip(Some(range))
}
}?;
- Some((nav_target, range))
+ Some(nav_target.into_iter().zip(iter::repeat(range)))
})
+ .flatten()
.for_each(|(nav, range)| calls.add(nav, range));
Some(calls.into_items())
@@ -149,7 +155,7 @@ mod tests {
fn check_hierarchy(
ra_fixture: &str,
- expected: Expect,
+ expected_nav: Expect,
expected_incoming: Expect,
expected_outgoing: Expect,
) {
@@ -158,7 +164,7 @@ mod tests {
let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
assert_eq!(navs.len(), 1);
let nav = navs.pop().unwrap();
- expected.assert_eq(&nav.debug_render());
+ expected_nav.assert_eq(&nav.debug_render());
let item_pos =
FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 37a177622..9760f9daf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -12,7 +12,9 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions
use stdx::format_to;
use url::Url;
-use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
+use hir::{
+ db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs,
+};
use ide_db::{
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
defs::{Definition, NameClass, NameRefClass},
@@ -144,7 +146,7 @@ pub(crate) fn external_docs(
kind if kind.is_trivia() => 0,
_ => 1,
})?;
- let token = sema.descend_into_macros_single(token, offset);
+ let token = sema.descend_into_macros_single(DescendPreference::None, token);
let node = token.parent()?;
let definition = match_ast! {
@@ -286,7 +288,7 @@ impl DocCommentToken {
let original_start = doc_token.text_range().start();
let relative_comment_offset = offset - original_start - prefix_len;
- sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
+ sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| {
let (node, descended_prefix_len) = match_ast! {
match t {
ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
@@ -602,7 +604,17 @@ fn filename_and_frag_for_def(
}
Definition::Const(c) => format!("const.{}.html", c.name(db)?.display(db.upcast())),
Definition::Static(s) => format!("static.{}.html", s.name(db).display(db.upcast())),
- Definition::Macro(mac) => format!("macro.{}.html", mac.name(db).display(db.upcast())),
+ Definition::Macro(mac) => match mac.kind(db) {
+ hir::MacroKind::Declarative
+ | hir::MacroKind::BuiltIn
+ | hir::MacroKind::Attr
+ | hir::MacroKind::ProcMacro => {
+ format!("macro.{}.html", mac.name(db).display(db.upcast()))
+ }
+ hir::MacroKind::Derive => {
+ format!("derive.{}.html", mac.name(db).display(db.upcast()))
+ }
+ },
Definition::Field(field) => {
let def = match field.parent_def(db) {
hir::VariantDef::Struct(it) => Definition::Adt(it.into()),
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
index 9ae70ae66..f388aea4c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -1,4 +1,4 @@
-use std::ffi::OsStr;
+use std::{ffi::OsStr, iter};
use expect_test::{expect, Expect};
use hir::Semantics;
@@ -63,10 +63,12 @@ fn check_doc_links(ra_fixture: &str) {
let defs = extract_definitions_from_docs(&docs);
let actual: Vec<_> = defs
.into_iter()
- .map(|(_, link, ns)| {
+ .flat_map(|(_, link, ns)| {
let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns)
.unwrap_or_else(|| panic!("Failed to resolve {link}"));
- let nav_target = def.try_to_nav(sema.db).unwrap();
+ def.try_to_nav(sema.db).unwrap().into_iter().zip(iter::repeat(link))
+ })
+ .map(|(nav_target, link)| {
let range =
FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
(range, link)
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index 119a9c7c3..024053eff 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -1,4 +1,4 @@
-use hir::Semantics;
+use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics};
use ide_db::{
base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
@@ -14,12 +14,12 @@ pub struct ExpandedMacro {
// Feature: Expand Macro Recursively
//
-// Shows the full macro expansion of the macro at current cursor.
+// Shows the full macro expansion of the macro at the current caret position.
//
// |===
// | Editor | Action Name
//
-// | VS Code | **rust-analyzer: Expand macro recursively**
+// | VS Code | **rust-analyzer: Expand macro recursively at caret**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
@@ -40,16 +40,20 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
// struct Bar;
// ```
- let derive =
- sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
- let hir_file = sema.hir_file_for(&descended.parent()?);
- if !hir_file.is_derive_attr_pseudo_expansion(db) {
+ let derive = sema
+ .descend_into_macros(DescendPreference::None, tok.clone())
+ .into_iter()
+ .find_map(|descended| {
+ let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?;
+ if !macro_file.is_derive_attr_pseudo_expansion(db) {
return None;
}
let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
// up map out of the #[derive] expansion
- let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+ let InFile { file_id, value: tokens } =
+ hir::InMacroFile::new(macro_file, descended).upmap_once(db);
+ let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?;
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
let expansions = sema.expand_derive_macro(&attr)?;
let idx = attr
diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
index 3d89599c5..b706e959d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
@@ -1,6 +1,6 @@
use std::iter::successors;
-use hir::Semantics;
+use hir::{DescendPreference, Semantics};
use ide_db::RootDatabase;
use syntax::{
algo::{self, skip_trivia_token},
@@ -108,7 +108,7 @@ fn try_extend_selection(
let node = shallowest_node(&node);
- if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
+ if node.parent().is_some_and(|n| list_kinds.contains(&n.kind())) {
if let Some(range) = extend_list_item(&node) {
return Some(range);
}
@@ -141,9 +141,9 @@ fn extend_tokens_from_range(
// compute original mapped token range
let extended = {
let fst_expanded =
- sema.descend_into_macros_single(first_token.clone(), original_range.start());
+ sema.descend_into_macros_single(DescendPreference::None, first_token.clone());
let lst_expanded =
- sema.descend_into_macros_single(last_token.clone(), original_range.end());
+ sema.descend_into_macros_single(DescendPreference::None, last_token.clone());
let mut lca =
algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
lca = shallowest_node(&lca);
@@ -154,10 +154,10 @@ fn extend_tokens_from_range(
};
// Compute parent node range
- let validate = |offset: TextSize| {
+ let validate = || {
let extended = &extended;
move |token: &SyntaxToken| -> bool {
- let expanded = sema.descend_into_macros_single(token.clone(), offset);
+ let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone());
let parent = match expanded.parent() {
Some(it) => it,
None => return false,
@@ -171,14 +171,14 @@ fn extend_tokens_from_range(
let token = token.prev_token()?;
skip_trivia_token(token, Direction::Prev)
})
- .take_while(validate(original_range.start()))
+ .take_while(validate())
.last()?;
let last = successors(Some(last_token), |token| {
let token = token.next_token()?;
skip_trivia_token(token, Direction::Next)
})
- .take_while(validate(original_range.end()))
+ .take_while(validate())
.last()?;
let range = first.text_range().cover(last.text_range());
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index 7e0fab426..fae100743 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, Semantics};
+use hir::{AsAssocItem, DescendPreference, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
RootDatabase,
@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema
- .descend_into_macros(original_token, offset)
+ .descend_into_macros(DescendPreference::None, original_token)
.iter()
.filter_map(|token| {
let parent = token.parent()?;
@@ -66,6 +66,7 @@ pub(crate) fn goto_declaration(
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
item.try_to_nav(db)
})
+ .flatten()
.collect();
if info.is_empty() {
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index e09b9f391..7491879a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -4,7 +4,7 @@ use crate::{
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
RangeInfo, TryToNav,
};
-use hir::{AsAssocItem, AssocItem, Semantics};
+use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader},
defs::{Definition, IdentClass},
@@ -52,21 +52,34 @@ pub(crate) fn goto_definition(
if let Some(doc_comment) = token_as_doc_comment(&original_token) {
return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| {
let nav = def.try_to_nav(db)?;
- Some(RangeInfo::new(link_range, vec![nav]))
+ Some(RangeInfo::new(link_range, nav.collect()))
});
}
+
+ if let Some((range, resolution)) =
+ sema.check_for_format_args_template(original_token.clone(), offset)
+ {
+ return Some(RangeInfo::new(
+ range,
+ match resolution {
+ Some(res) => def_to_nav(db, Definition::from(res)),
+ None => vec![],
+ },
+ ));
+ }
+
let navs = sema
- .descend_into_macros(original_token.clone(), offset)
+ .descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
- if let Some(tt) = ast::TokenTree::cast(parent) {
+ if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
return Some(vec![x]);
}
}
Some(
- IdentClass::classify_token(sema, &token)?
+ IdentClass::classify_node(sema, &parent)?
.definitions()
.into_iter()
.flat_map(|def| {
@@ -75,6 +88,7 @@ pub(crate) fn goto_definition(
.resolved_crate(db)
.map(|it| it.root_module().to_nav(sema.db))
.into_iter()
+ .flatten()
.collect();
}
try_filter_trait_item_definition(sema, &def)
@@ -125,6 +139,7 @@ fn try_lookup_include_path(
docs: None,
})
}
+
/// finds the trait definition of an impl'd item, except function
/// e.g.
/// ```rust
@@ -153,13 +168,13 @@ fn try_filter_trait_item_definition(
.iter()
.filter(|itm| discriminant(*itm) == discri_value)
.find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten())
- .map(|it| vec![it])
+ .map(|it| it.collect())
}
}
}
fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
- def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default()
+ def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default()
}
#[cfg(test)]
@@ -399,11 +414,11 @@ fn bar() {
//- /lib.rs
macro_rules! define_fn {
() => (fn foo() {})
+ //^^^
}
define_fn!();
//^^^^^^^^^^^^^
-
fn bar() {
$0foo();
}
@@ -807,18 +822,13 @@ mod confuse_index { fn foo(); }
fn goto_through_format() {
check(
r#"
+//- minicore: fmt
#[macro_export]
macro_rules! format {
($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
}
-#[rustc_builtin_macro]
-#[macro_export]
-macro_rules! format_args {
- ($fmt:expr) => ({ /* compiler built-in */ });
- ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
-}
pub mod __export {
- pub use crate::format_args;
+ pub use core::format_args;
fn foo() {} // for index confusion
}
fn foo() -> i8 {}
@@ -1738,9 +1748,9 @@ macro_rules! foo {
fn $ident(Foo { $ident }: Foo) {}
}
}
-foo!(foo$0);
- //^^^
- //^^^
+ foo!(foo$0);
+ //^^^
+ //^^^
"#,
);
check(
@@ -2057,4 +2067,18 @@ fn f2() {
"#,
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+ let a = "world";
+ // ^
+ format_args!("hello {a$0}");
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index 544c6b423..6384db39d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, Impl, Semantics};
+use hir::{AsAssocItem, DescendPreference, Impl, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
@@ -34,7 +34,7 @@ pub(crate) fn goto_implementation(
})?;
let range = original_token.text_range();
let navs =
- sema.descend_into_macros(original_token, offset)
+ sema.descend_into_macros(DescendPreference::None, original_token)
.into_iter()
.filter_map(|token| token.parent().and_then(ast::NameLike::cast))
.filter_map(|node| match &node {
@@ -82,7 +82,11 @@ pub(crate) fn goto_implementation(
}
fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
- Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect()
+ Impl::all_for_type(sema.db, ty)
+ .into_iter()
+ .filter_map(|imp| imp.try_to_nav(sema.db))
+ .flatten()
+ .collect()
}
fn impls_for_trait(
@@ -92,6 +96,7 @@ fn impls_for_trait(
Impl::all_for_trait(sema.db, trait_)
.into_iter()
.filter_map(|imp| imp.try_to_nav(sema.db))
+ .flatten()
.collect()
}
@@ -109,6 +114,7 @@ fn impls_for_trait_item(
})?;
item.try_to_nav(sema.db)
})
+ .flatten()
.collect()
}
@@ -249,7 +255,7 @@ impl T for &Foo {}
r#"
//- minicore: copy, derive
#[derive(Copy)]
-//^^^^^^^^^^^^^^^
+ //^^^^
struct Foo$0;
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
index 955923d76..ad393d980 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -1,3 +1,4 @@
+use hir::{DescendPreference, GenericParam};
use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
@@ -30,14 +31,45 @@ pub(crate) fn goto_type_definition(
let mut res = Vec::new();
let mut push = |def: Definition| {
- if let Some(nav) = def.try_to_nav(db) {
- if !res.contains(&nav) {
- res.push(nav);
+ if let Some(navs) = def.try_to_nav(db) {
+ for nav in navs {
+ if !res.contains(&nav) {
+ res.push(nav);
+ }
}
}
};
+ let mut process_ty = |ty: hir::Type| {
+ // collect from each `ty` into the `res` result vec
+ let ty = ty.strip_references();
+ ty.walk(db, |t| {
+ if let Some(adt) = t.as_adt() {
+ push(adt.into());
+ } else if let Some(trait_) = t.as_dyn_trait() {
+ push(trait_.into());
+ } else if let Some(traits) = t.as_impl_traits(db) {
+ traits.for_each(|it| push(it.into()));
+ } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+ push(trait_.into());
+ }
+ });
+ };
+ if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
+ if let Some(ty) = resolution.and_then(|res| match Definition::from(res) {
+ Definition::Const(it) => Some(it.ty(db)),
+ Definition::Static(it) => Some(it.ty(db)),
+ Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)),
+ Definition::Local(it) => Some(it.ty(db)),
+ Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)),
+ _ => None,
+ }) {
+ process_ty(ty);
+ }
+ return Some(RangeInfo::new(range, res));
+ }
+
let range = token.text_range();
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|token| {
let ty = sema
@@ -75,21 +107,7 @@ pub(crate) fn goto_type_definition(
});
ty
})
- .for_each(|ty| {
- // collect from each `ty` into the `res` result vec
- let ty = ty.strip_references();
- ty.walk(db, |t| {
- if let Some(adt) = t.as_adt() {
- push(adt.into());
- } else if let Some(trait_) = t.as_dyn_trait() {
- push(trait_.into());
- } else if let Some(traits) = t.as_impl_traits(db) {
- traits.for_each(|it| push(it.into()));
- } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
- push(trait_.into());
- }
- });
- });
+ .for_each(process_ty);
Some(RangeInfo::new(range, res))
}
@@ -328,4 +346,40 @@ fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {}
"#,
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+struct Bar;
+ // ^^^
+ fn test() {
+ let a = Bar;
+ format_args!("hello {a$0}");
+}
+"#,
+ );
+ check(
+ r#"
+//- minicore: fmt
+struct Bar;
+ // ^^^
+ fn test() {
+ format_args!("hello {Bar$0}");
+}
+"#,
+ );
+ check(
+ r#"
+//- minicore: fmt
+struct Bar;
+ // ^^^
+const BAR: Bar = Bar;
+fn test() {
+ format_args!("hello {BAR$0}");
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 46a0464e9..3aed007f3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -1,4 +1,6 @@
-use hir::Semantics;
+use std::iter;
+
+use hir::{DescendPreference, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::{Definition, IdentClass},
@@ -15,7 +17,6 @@ use syntax::{
SyntaxKind::{self, IDENT, INT_NUMBER},
SyntaxNode, SyntaxToken, TextRange, T,
};
-use text_edit::TextSize;
use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
@@ -43,7 +44,7 @@ pub struct HighlightRelatedConfig {
//
// . if on an identifier, highlights all references to that identifier in the current file
// .. additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope
-// . if on an `async` or `await token, highlights all yield points for that async context
+// . if on an `async` or `await` token, highlights all yield points for that async context
// . if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
// . if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure.
@@ -116,7 +117,7 @@ fn highlight_closure_captures(
local
.sources(sema.db)
.into_iter()
- .map(|x| x.to_nav(sema.db))
+ .flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == file_id)
.filter_map(|decl| decl.focus_range)
.map(move |range| HighlightedRange { range, category })
@@ -132,7 +133,16 @@ fn highlight_references(
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
- let defs = find_defs(sema, token.clone(), offset);
+ let defs = if let Some((range, resolution)) =
+ sema.check_for_format_args_template(token.clone(), offset)
+ {
+ match resolution.map(Definition::from) {
+ Some(def) => iter::once(def).collect(),
+ None => return Some(vec![HighlightedRange { range, category: None }]),
+ }
+ } else {
+ find_defs(sema, token.clone())
+ };
let usages = defs
.iter()
.filter_map(|&d| {
@@ -206,7 +216,7 @@ fn highlight_references(
local
.sources(sema.db)
.into_iter()
- .map(|x| x.to_nav(sema.db))
+ .flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == file_id)
.filter_map(|decl| decl.focus_range)
.map(|range| HighlightedRange { range, category })
@@ -215,21 +225,27 @@ fn highlight_references(
});
}
def => {
- let hl_range = match def {
+ let navs = match def {
Definition::Module(module) => {
- Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ NavigationTarget::from_module_to_decl(sema.db, module)
+ }
+ def => match def.try_to_nav(sema.db) {
+ Some(it) => it,
+ None => continue,
+ },
+ };
+ for nav in navs {
+ if nav.file_id != file_id {
+ continue;
+ }
+ let hl_range = nav.focus_range.map(|range| {
+ let category = references::decl_mutability(&def, node, range)
+ .then_some(ReferenceCategory::Write);
+ HighlightedRange { range, category }
+ });
+ if let Some(hl_range) = hl_range {
+ res.insert(hl_range);
}
- def => def.try_to_nav(sema.db),
- }
- .filter(|decl| decl.file_id == file_id)
- .and_then(|decl| decl.focus_range)
- .map(|range| {
- let category = references::decl_mutability(&def, node, range)
- .then_some(ReferenceCategory::Write);
- HighlightedRange { range, category }
- });
- if let Some(hl_range) = hl_range {
- res.insert(hl_range);
}
}
}
@@ -456,12 +472,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
}
}
-fn find_defs(
- sema: &Semantics<'_, RootDatabase>,
- token: SyntaxToken,
- offset: TextSize,
-) -> FxHashSet<Definition> {
- sema.descend_into_macros(token, offset)
+fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
+ sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|token| IdentClass::classify_token(sema, &token))
.map(IdentClass::definitions_no_ops)
@@ -1623,4 +1635,21 @@ fn f2<T: Foo>(t: T) {
"#,
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+ let a = "foo";
+ // ^
+ format_args!("hello {a} {a$0} {}", a);
+ // ^read
+ // ^read
+ // ^read
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 21934b948..5ad119ace 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -6,7 +6,7 @@ mod tests;
use std::iter;
use either::Either;
-use hir::{db::DefDatabase, HasSource, LangItem, Semantics};
+use hir::{db::DefDatabase, DescendPreference, HasSource, LangItem, Semantics};
use ide_db::{
base_db::FileRange,
defs::{Definition, IdentClass, NameRefClass, OperatorClass},
@@ -21,6 +21,7 @@ use crate::{
doc_links::token_as_doc_comment,
markdown_remove::remove_markdown,
markup::Markup,
+ navigation_target::UpmappingResult,
runnables::{runnable_fn, runnable_mod},
FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
};
@@ -73,7 +74,7 @@ impl HoverAction {
it.module(db)?,
it.name(db).map(|name| name.display(db).to_string()),
),
- nav: it.try_to_nav(db)?,
+ nav: it.try_to_nav(db)?.call_site(),
})
})
.collect();
@@ -150,6 +151,19 @@ fn hover_simple(
});
}
+ if let Some((range, resolution)) =
+ sema.check_for_format_args_template(original_token.clone(), offset)
+ {
+ let res = hover_for_definition(
+ sema,
+ file_id,
+ Definition::from(resolution?),
+ &original_token.parent()?,
+ config,
+ )?;
+ return Some(RangeInfo::new(range, res));
+ }
+
let in_attr = original_token
.parent_ancestors()
.filter_map(ast::Item::cast)
@@ -161,11 +175,10 @@ fn hover_simple(
// prefer descending the same token kind in attribute expansions, in normal macros text
// equivalency is more important
- let descended = if in_attr {
- [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into()
- } else {
- sema.descend_into_macros_with_same_text(original_token.clone(), offset)
- };
+ let descended = sema.descend_into_macros(
+ if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText },
+ original_token.clone(),
+ );
let descended = || descended.iter();
let result = descended()
@@ -180,26 +193,24 @@ fn hover_simple(
descended()
.filter_map(|token| {
let node = token.parent()?;
- let class = IdentClass::classify_token(sema, token)?;
- if let IdentClass::Operator(OperatorClass::Await(_)) = class {
+ match IdentClass::classify_node(sema, &node)? {
// It's better for us to fall back to the keyword hover here,
// rendering poll is very confusing
- return None;
+ IdentClass::Operator(OperatorClass::Await(_)) => None,
+
+ IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
+ decl,
+ ..
+ }) => Some(vec![(Definition::ExternCrateDecl(decl), node)]),
+
+ class => Some(
+ class
+ .definitions()
+ .into_iter()
+ .zip(iter::repeat(node))
+ .collect::<Vec<_>>(),
+ ),
}
- if let IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
- decl,
- ..
- }) = class
- {
- return Some(vec![(Definition::ExternCrateDecl(decl), node)]);
- }
- Some(
- class
- .definitions()
- .into_iter()
- .zip(iter::once(node).cycle())
- .collect::<Vec<_>>(),
- )
})
.flatten()
.unique_by(|&(def, _)| def)
@@ -300,11 +311,11 @@ pub(crate) fn hover_for_definition(
sema: &Semantics<'_, RootDatabase>,
file_id: FileId,
definition: Definition,
- node: &SyntaxNode,
+ scope_node: &SyntaxNode,
config: &HoverConfig,
) -> Option<HoverResult> {
let famous_defs = match &definition {
- Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+ Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
_ => None,
};
render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
@@ -332,22 +343,26 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<Hov
}
let adt = match def {
- Definition::Trait(it) => return it.try_to_nav(db).map(to_action),
+ Definition::Trait(it) => {
+ return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action)
+ }
Definition::Adt(it) => Some(it),
Definition::SelfType(it) => it.self_ty(db).as_adt(),
_ => None,
}?;
- adt.try_to_nav(db).map(to_action)
+ adt.try_to_nav(db).map(UpmappingResult::call_site).map(to_action)
}
fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
match def {
- Definition::Function(it) => it.try_to_nav(db).map(|nav_target| {
- HoverAction::Reference(FilePosition {
- file_id: nav_target.file_id,
- offset: nav_target.focus_or_full_range().start(),
+ Definition::Function(it) => {
+ it.try_to_nav(db).map(UpmappingResult::call_site).map(|nav_target| {
+ HoverAction::Reference(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
})
- }),
+ }
_ => None,
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index f72ce37d1..d0a02fd0d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -402,10 +402,9 @@ pub(super) fn definition(
|&it| it.layout(db),
|_| {
let var_def = it.parent_def(db);
- let id = it.index();
match var_def {
hir::VariantDef::Struct(s) => {
- Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(id))
+ Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(it))
}
_ => None,
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index 81d6db564..d5ec336fc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -1136,7 +1136,9 @@ impl Thing {
```
```rust
- struct Thing
+ struct Thing {
+ x: u32,
+ }
```
"#]],
);
@@ -1155,7 +1157,9 @@ impl Thing {
```
```rust
- struct Thing
+ struct Thing {
+ x: u32,
+ }
```
"#]],
);
@@ -1174,7 +1178,9 @@ impl Thing {
```
```rust
- enum Thing
+ enum Thing {
+ A,
+ }
```
"#]],
);
@@ -1193,7 +1199,9 @@ impl Thing {
```
```rust
- enum Thing
+ enum Thing {
+ A,
+ }
```
"#]],
);
@@ -2005,7 +2013,10 @@ fn test_hover_layout_of_enum() {
```
```rust
- enum Foo // size = 16 (0x10), align = 8, niches = 254
+ enum Foo {
+ Variant1(u8, u16),
+ Variant2(i32, u8, i64),
+ } // size = 16 (0x10), align = 8, niches = 254
```
"#]],
);
@@ -2346,7 +2357,7 @@ fn main() { let s$0t = S{ f1:0 }; }
focus_range: 7..8,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {\n f1: u32,\n}",
},
},
],
@@ -2379,7 +2390,7 @@ fn main() { let s$0t = S{ f1:Arg(0) }; }
focus_range: 24..25,
name: "S",
kind: Struct,
- description: "struct S<T>",
+ description: "struct S<T> {\n f1: T,\n}",
},
},
HoverGotoTypeData {
@@ -2392,7 +2403,7 @@ fn main() { let s$0t = S{ f1:Arg(0) }; }
focus_range: 7..10,
name: "Arg",
kind: Struct,
- description: "struct Arg",
+ description: "struct Arg(u32);",
},
},
],
@@ -2438,7 +2449,7 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
focus_range: 24..25,
name: "S",
kind: Struct,
- description: "struct S<T>",
+ description: "struct S<T> {\n f1: T,\n}",
},
},
HoverGotoTypeData {
@@ -2451,7 +2462,7 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
focus_range: 7..10,
name: "Arg",
kind: Struct,
- description: "struct Arg",
+ description: "struct Arg(u32);",
},
},
],
@@ -2487,7 +2498,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
focus_range: 7..8,
name: "A",
kind: Struct,
- description: "struct A",
+ description: "struct A(u32);",
},
},
HoverGotoTypeData {
@@ -2500,7 +2511,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
focus_range: 22..23,
name: "B",
kind: Struct,
- description: "struct B",
+ description: "struct B(u32);",
},
},
HoverGotoTypeData {
@@ -2514,7 +2525,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
name: "C",
kind: Struct,
container_name: "M",
- description: "pub struct C",
+ description: "pub struct C(u32);",
},
},
],
@@ -2704,7 +2715,7 @@ fn main() { let s$0t = foo(); }
focus_range: 39..41,
name: "S1",
kind: Struct,
- description: "struct S1",
+ description: "struct S1 {}",
},
},
HoverGotoTypeData {
@@ -2717,7 +2728,7 @@ fn main() { let s$0t = foo(); }
focus_range: 52..54,
name: "S2",
kind: Struct,
- description: "struct S2",
+ description: "struct S2 {}",
},
},
],
@@ -2808,7 +2819,7 @@ fn foo(ar$0g: &impl Foo + Bar<S>) {}
focus_range: 36..37,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -2908,7 +2919,7 @@ fn foo(ar$0g: &impl Foo<S>) {}
focus_range: 23..24,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -2945,7 +2956,7 @@ fn main() { let s$0t = foo(); }
focus_range: 49..50,
name: "B",
kind: Struct,
- description: "struct B<T>",
+ description: "struct B<T> {}",
},
},
HoverGotoTypeData {
@@ -3034,7 +3045,7 @@ fn foo(ar$0g: &dyn Foo<S>) {}
focus_range: 23..24,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -3082,7 +3093,7 @@ fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
focus_range: 50..51,
name: "B",
kind: Struct,
- description: "struct B<T>",
+ description: "struct B<T> {}",
},
},
HoverGotoTypeData {
@@ -3108,7 +3119,7 @@ fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
focus_range: 65..66,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -3335,7 +3346,7 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
```
```rust
- struct ST<const C: usize = 1, T = Foo>
+ struct ST<const C: usize = 1, T = Foo>(T);
```
"#]],
);
@@ -3356,7 +3367,7 @@ struct S$0T<const C: usize = {40 + 2}, T = Foo>(T);
```
```rust
- struct ST<const C: usize = {const}, T = Foo>
+ struct ST<const C: usize = {const}, T = Foo>(T);
```
"#]],
);
@@ -3378,7 +3389,7 @@ struct S$0T<const C: usize = VAL, T = Foo>(T);
```
```rust
- struct ST<const C: usize = VAL, T = Foo>
+ struct ST<const C: usize = VAL, T = Foo>(T);
```
"#]],
);
@@ -5266,38 +5277,46 @@ pub fn foo() {}
#[test]
fn hover_feature() {
check(
- r#"#![feature(box_syntax$0)]"#,
- expect![[r##"
- *box_syntax*
- ```
- box_syntax
- ```
- ___
+ r#"#![feature(intrinsics$0)]"#,
+ expect![[r#"
+ *intrinsics*
+ ```
+ intrinsics
+ ```
+ ___
- # `box_syntax`
+ # `intrinsics`
- The tracking issue for this feature is: [#49733]
+ The tracking issue for this feature is: None.
- [#49733]: https://github.com/rust-lang/rust/issues/49733
+ Intrinsics are never intended to be stable directly, but intrinsics are often
+ exported in some sort of stable manner. Prefer using the stable interfaces to
+ the intrinsic directly when you can.
- See also [`box_patterns`](box-patterns.md)
+ ------------------------
- ------------------------
- Currently the only stable way to create a `Box` is via the `Box::new` method.
- Also it is not possible in stable Rust to destructure a `Box` in a match
- pattern. The unstable `box` keyword can be used to create a `Box`. An example
- usage would be:
+ These are imported as if they were FFI functions, with the special
+ `rust-intrinsic` ABI. For example, if one was in a freestanding
+ context, but wished to be able to `transmute` between types, and
+ perform efficient pointer arithmetic, one would import those functions
+ via a declaration like
- ```rust
- #![feature(box_syntax)]
+ ```rust
+ #![feature(intrinsics)]
+ #![allow(internal_features)]
+ # fn main() {}
- fn main() {
- let b = box 5;
- }
- ```
+ extern "rust-intrinsic" {
+ fn transmute<T, U>(x: T) -> U;
- "##]],
+ fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
+ }
+ ```
+
+ As with any other FFI functions, these are always `unsafe` to call.
+
+ "#]],
)
}
@@ -5927,7 +5946,7 @@ pub struct Foo(i32);
```
```rust
- pub struct Foo // size = 4, align = 4
+ pub struct Foo(i32); // size = 4, align = 4
```
---
@@ -6594,3 +6613,115 @@ fn test() {
"#]],
);
}
+
+#[test]
+fn format_args_implicit() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+let aaaaa = "foo";
+format_args!("{aaaaa$0}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn format_args_implicit2() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+let aaaaa = "foo";
+format_args!("{$0aaaaa}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn format_args_implicit_raw() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+let aaaaa = "foo";
+format_args!(r"{$0aaaaa}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn format_args_implicit_nested() {
+ check(
+ r#"
+//- minicore: fmt
+macro_rules! foo {
+ ($($tt:tt)*) => {
+ format_args!($($tt)*)
+ }
+}
+fn test() {
+let aaaaa = "foo";
+foo!(r"{$0aaaaa}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn method_call_without_parens() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo<T>(&self, t: T) {}
+}
+
+fn main() {
+ S.foo$0;
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ fn foo<T>(&self, t: T)
+ ```
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index a5d070fe7..e82d730e4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -31,6 +31,7 @@ mod discriminant;
mod fn_lifetime_fn;
mod implicit_static;
mod param_name;
+mod implicit_drop;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InlayHintsConfig {
@@ -45,6 +46,7 @@ pub struct InlayHintsConfig {
pub closure_return_type_hints: ClosureReturnTypeHints,
pub closure_capture_hints: bool,
pub binding_mode_hints: bool,
+ pub implicit_drop_hints: bool,
pub lifetime_elision_hints: LifetimeElisionHints,
pub param_names_for_lifetime_elision_hints: bool,
pub hide_named_constructor_hints: bool,
@@ -124,6 +126,7 @@ pub enum InlayKind {
Lifetime,
Parameter,
Type,
+ Drop,
}
#[derive(Debug)]
@@ -312,6 +315,7 @@ impl HirWrite for InlayHintLabelBuilder<'_> {
}
self.make_new_part();
let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return };
+ let location = location.call_site();
let location =
FileRange { file_id: location.file_id, range: location.focus_or_full_range() };
self.location = Some(location);
@@ -418,6 +422,11 @@ fn ty_to_text_edit(
Some(builder.finish())
}
+pub enum RangeLimit {
+ Fixed(TextRange),
+ NearestParent(TextSize),
+}
+
// Feature: Inlay Hints
//
// rust-analyzer shows additional information inline with the source code.
@@ -439,7 +448,7 @@ fn ty_to_text_edit(
pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
- range_limit: Option<TextRange>,
+ range_limit: Option<RangeLimit>,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
let _p = profile::span("inlay_hints");
@@ -454,13 +463,31 @@ pub(crate) fn inlay_hints(
let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
match range_limit {
- Some(range) => match file.covering_element(range) {
+ Some(RangeLimit::Fixed(range)) => match file.covering_element(range) {
NodeOrToken::Token(_) => return acc,
NodeOrToken::Node(n) => n
.descendants()
.filter(|descendant| range.intersect(descendant.text_range()).is_some())
.for_each(hints),
},
+ Some(RangeLimit::NearestParent(position)) => {
+ match file.token_at_offset(position).left_biased() {
+ Some(token) => {
+ if let Some(parent_block) =
+ token.parent_ancestors().find_map(ast::BlockExpr::cast)
+ {
+ parent_block.syntax().descendants().for_each(hints)
+ } else if let Some(parent_item) =
+ token.parent_ancestors().find_map(ast::Item::cast)
+ {
+ parent_item.syntax().descendants().for_each(hints)
+ } else {
+ return acc;
+ }
+ }
+ None => return acc,
+ }
+ }
None => file.descendants().for_each(hints),
};
}
@@ -503,7 +530,10 @@ fn hints(
ast::Item(it) => match it {
// FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
ast::Item::Impl(_) => None,
- ast::Item::Fn(it) => fn_lifetime_fn::hints(hints, config, it),
+ ast::Item::Fn(it) => {
+ implicit_drop::hints(hints, sema, config, &it);
+ fn_lifetime_fn::hints(hints, config, it)
+ },
// static type elisions
ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)),
ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)),
@@ -563,6 +593,7 @@ mod tests {
use hir::ClosureStyle;
use itertools::Itertools;
use test_utils::extract_annotations;
+ use text_edit::{TextRange, TextSize};
use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
use crate::DiscriminantHints;
@@ -590,6 +621,7 @@ mod tests {
max_length: None,
closing_brace_hints_min_lines: None,
fields_to_resolve: InlayFieldsToResolve::empty(),
+ implicit_drop_hints: false,
};
pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
type_hints: true,
@@ -629,6 +661,22 @@ mod tests {
expect.assert_debug_eq(&inlay_hints)
}
+ #[track_caller]
+ pub(super) fn check_expect_clear_loc(
+ config: InlayHintsConfig,
+ ra_fixture: &str,
+ expect: Expect,
+ ) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| {
+ if let Some(loc) = &mut hint.linked_location {
+ loc.range = TextRange::empty(TextSize::from(0));
+ }
+ });
+ expect.assert_debug_eq(&inlay_hints)
+ }
+
/// Computes inlay hints for the fixture, applies all the provided text edits and then runs
/// expect test.
#[track_caller]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
index 680035c72..45b51e355 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
@@ -177,7 +177,11 @@ mod tests {
use syntax::{TextRange, TextSize};
use test_utils::extract_annotations;
- use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints};
+ use crate::{
+ fixture,
+ inlay_hints::{InlayHintsConfig, RangeLimit},
+ ClosureReturnTypeHints,
+ };
use crate::inlay_hints::tests::{
check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
@@ -400,7 +404,7 @@ fn main() {
.inlay_hints(
&InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
file_id,
- Some(TextRange::new(TextSize::from(500), TextSize::from(600))),
+ Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))),
)
.unwrap();
let actual =
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 12e46c0f8..c9e9a2237 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -78,7 +78,9 @@ mod tests {
use expect_test::expect;
use crate::{
- inlay_hints::tests::{check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG},
+ inlay_hints::tests::{
+ check_expect, check_expect_clear_loc, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
+ },
InlayHintsConfig,
};
@@ -444,7 +446,7 @@ fn main() {
#[test]
fn shorten_iterator_chaining_hints() {
- check_expect(
+ check_expect_clear_loc(
InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
r#"
//- minicore: iterators
@@ -484,7 +486,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10739..10747,
+ range: 0..0,
},
),
tooltip: "",
@@ -497,7 +499,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10771..10775,
+ range: 0..0,
},
),
tooltip: "",
@@ -522,7 +524,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10739..10747,
+ range: 0..0,
},
),
tooltip: "",
@@ -535,7 +537,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10771..10775,
+ range: 0..0,
},
),
tooltip: "",
@@ -560,7 +562,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10739..10747,
+ range: 0..0,
},
),
tooltip: "",
@@ -573,7 +575,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10771..10775,
+ range: 0..0,
},
),
tooltip: "",
@@ -598,7 +600,7 @@ fn main() {
file_id: FileId(
0,
),
- range: 24..30,
+ range: 0..0,
},
),
tooltip: "",
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
index d691303c1..2f8b95951 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
@@ -2,6 +2,7 @@
//!
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::{base_db::FileId, famous_defs::FamousDefs};
+use stdx::TupleExt;
use syntax::ast::{self, AstNode};
use text_edit::{TextRange, TextSize};
@@ -73,7 +74,9 @@ pub(super) fn hints(
capture.display_place(sema.db)
),
None,
- source.name().and_then(|name| name.syntax().original_file_range_opt(sema.db)),
+ source.name().and_then(|name| {
+ name.syntax().original_file_range_opt(sema.db).map(TupleExt::head)
+ }),
);
acc.push(InlayHint {
needs_resolve: label.needs_resolve(),
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
new file mode 100644
index 000000000..9cbaed090
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -0,0 +1,218 @@
+//! Implementation of "implicit drop" inlay hints:
+//! ```no_run
+//! fn main() {
+//! let x = vec![2];
+//! if some_condition() {
+//! /* drop(x) */return;
+//! }
+//! }
+//! ```
+use hir::{
+ db::{DefDatabase as _, HirDatabase as _},
+ mir::{MirSpan, TerminatorKind},
+ ChalkTyInterner, DefWithBody, Semantics,
+};
+use ide_db::{base_db::FileRange, RootDatabase};
+
+use syntax::{
+ ast::{self, AstNode},
+ match_ast,
+};
+
+use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
+
+pub(super) fn hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ def: &ast::Fn,
+) -> Option<()> {
+ if !config.implicit_drop_hints {
+ return None;
+ }
+
+ let def = sema.to_def(def)?;
+ let def: DefWithBody = def.into();
+
+ let source_map = sema.db.body_with_source_map(def.into()).1;
+
+ let hir = sema.db.body(def.into());
+ let mir = sema.db.mir_body(def.into()).ok()?;
+
+ let local_to_binding = mir.local_to_binding_map();
+
+ for (_, bb) in mir.basic_blocks.iter() {
+ let terminator = bb.terminator.as_ref()?;
+ if let TerminatorKind::Drop { place, .. } = terminator.kind {
+ if !place.projection.is_empty() {
+ continue; // Ignore complex cases for now
+ }
+ if mir.locals[place.local].ty.adt_id(ChalkTyInterner).is_none() {
+ continue; // Arguably only ADTs have significant drop impls
+ }
+ let Some(binding) = local_to_binding.get(place.local) else {
+ continue; // Ignore temporary values
+ };
+ let range = match terminator.span {
+ MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
+ Ok(s) => {
+ let root = &s.file_syntax(sema.db);
+ let expr = s.value.to_node(root);
+ let expr = expr.syntax();
+ match_ast! {
+ match expr {
+ ast::BlockExpr(x) => x.stmt_list().and_then(|x| x.r_curly_token()).map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()),
+ // make the inlay hint appear after the semicolon if there is
+ _ => {
+ let nearest_semicolon = nearest_token_after_node(expr, syntax::SyntaxKind::SEMICOLON);
+ nearest_semicolon.map(|x| x.text_range()).unwrap_or_else(|| expr.text_range())
+ },
+ }
+ }
+ }
+ Err(_) => continue,
+ },
+ MirSpan::PatId(p) => match source_map.pat_syntax(p) {
+ Ok(s) => s.value.text_range(),
+ Err(_) => continue,
+ },
+ MirSpan::Unknown => continue,
+ };
+ let binding = &hir.bindings[*binding];
+ let binding_source = binding
+ .definitions
+ .first()
+ .and_then(|d| source_map.pat_syntax(*d).ok())
+ .and_then(|d| {
+ Some(FileRange { file_id: d.file_id.file_id()?, range: d.value.text_range() })
+ });
+ let name = binding.name.to_smol_str();
+ if name.starts_with("<ra@") {
+ continue; // Ignore desugared variables
+ }
+ let mut label = InlayHintLabel::simple(
+ name,
+ Some(crate::InlayTooltip::String("moz".into())),
+ binding_source,
+ );
+ label.prepend_str("drop(");
+ label.append_str(")");
+ acc.push(InlayHint {
+ range,
+ position: InlayHintPosition::After,
+ pad_left: true,
+ pad_right: true,
+ kind: InlayKind::Drop,
+ needs_resolve: label.needs_resolve(),
+ label,
+ text_edit: None,
+ })
+ }
+ }
+
+ Some(())
+}
+
+fn nearest_token_after_node(
+ node: &syntax::SyntaxNode,
+ token_type: syntax::SyntaxKind,
+) -> Option<syntax::SyntaxToken> {
+ node.siblings_with_tokens(syntax::Direction::Next)
+ .filter_map(|it| it.as_token().map(|it| it.clone()))
+ .filter(|it| it.kind() == token_type)
+ .next()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
+ InlayHintsConfig,
+ };
+
+ const ONLY_DROP_CONFIG: InlayHintsConfig =
+ InlayHintsConfig { implicit_drop_hints: true, ..DISABLED_CONFIG };
+
+ #[test]
+ fn basic() {
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+ struct X;
+ fn f() {
+ let x = X;
+ if 2 == 5 {
+ return;
+ //^ drop(x)
+ }
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+
+ #[test]
+ fn no_hint_for_copy_types_and_mutable_references() {
+ // `T: Copy` and `T = &mut U` types do nothing on drop, so we should hide drop inlay hint for them.
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+//- minicore: copy, derive
+
+ struct X(i32, i32);
+ #[derive(Clone, Copy)]
+ struct Y(i32, i32);
+ fn f() {
+ let a = 2;
+ let b = a + 4;
+ let mut x = X(a, b);
+ let mut y = Y(a, b);
+ let mx = &mut x;
+ let my = &mut y;
+ let c = a + b;
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+
+ #[test]
+ fn try_operator() {
+    // We currently show drop inlay hint for every `?` operator that may potentially drop something. We probably need to
+ // make it configurable as it doesn't seem very useful.
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+//- minicore: copy, try, option
+
+ struct X;
+ fn f() -> Option<()> {
+ let x = X;
+ let t_opt = Some(2);
+ let t = t_opt?;
+ //^ drop(x)
+ Some(())
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+
+ #[test]
+ fn if_let() {
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+ struct X;
+ fn f() {
+ let x = X;
+ if let X = x {
+ let y = X;
+ }
+ //^ drop(y)
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
index d06ffd535..216974904 100644
--- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
@@ -1,10 +1,10 @@
use hir::Semantics;
-use ide_db::base_db::SourceDatabaseExt;
-use ide_db::RootDatabase;
-use ide_db::{base_db::FilePosition, LineIndexDatabase};
+use ide_db::{
+ base_db::{FilePosition, SourceDatabaseExt},
+ LineIndexDatabase, RootDatabase,
+};
use std::{fmt::Write, time::Instant};
-use syntax::TextRange;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
// Feature: Interpret Function
//
@@ -28,7 +28,9 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let item = ancestors_at_offset(source_file.syntax(), position.offset)
+ .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+ .find_map(ast::Item::cast)?;
let def = match item {
ast::Item::Fn(it) => sema.to_def(&it)?,
_ => return None,
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index aee03d218..a19952e4c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -8,8 +8,9 @@
//! in this crate.
// For proving that RootDatabase is RefUnwindSafe.
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "128"]
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#[allow(unused)]
macro_rules! eprintln {
@@ -93,13 +94,13 @@ pub use crate::{
inlay_hints::{
AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints,
InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition,
- InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints,
+ InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit,
},
join_lines::JoinLinesConfig,
markup::Markup,
moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
move_item::Direction,
- navigation_target::NavigationTarget,
+ navigation_target::{NavigationTarget, UpmappingResult},
prime_caches::ParallelPrimeCachesProgress,
references::ReferenceSearchResult,
rename::RenameError,
@@ -132,7 +133,9 @@ pub use ide_db::{
symbol_index::Query,
RootDatabase, SymbolKind,
};
-pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_diagnostics::{
+ Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity,
+};
pub use ide_ssr::SsrError;
pub use syntax::{TextRange, TextSize};
pub use text_edit::{Indel, TextEdit};
@@ -229,7 +232,7 @@ impl Analysis {
// `AnalysisHost` for creating a fully-featured analysis.
pub fn from_single_file(text: String) -> (Analysis, FileId) {
let mut host = AnalysisHost::default();
- let file_id = FileId(0);
+ let file_id = FileId::from_raw(0);
let mut file_set = FileSet::default();
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
let source_root = SourceRoot::new_local(file_set);
@@ -396,7 +399,7 @@ impl Analysis {
&self,
config: &InlayHintsConfig,
file_id: FileId,
- range: Option<TextRange>,
+ range: Option<RangeLimit>,
) -> Cancellable<Vec<InlayHint>> {
self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
}
@@ -412,6 +415,7 @@ impl Analysis {
symbol_index::world_symbols(db, query)
.into_iter() // xx: should we make this a par iter?
.filter_map(|s| s.try_to_nav(db))
+ .map(UpmappingResult::call_site)
.collect::<Vec<_>>()
})
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index 2ca2b5b1d..8e8bb5e01 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -1,7 +1,7 @@
//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
//! for LSIF and LSP.
-use hir::{AsAssocItem, AssocItemContainer, Crate, Semantics};
+use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics};
use ide_db::{
base_db::{CrateOrigin, FilePosition, LangCrateOrigin},
defs::{Definition, IdentClass},
@@ -99,7 +99,7 @@ pub(crate) fn moniker(
});
}
let navs = sema
- .descend_into_macros(original_token.clone(), offset)
+ .descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 32f211c6b..6cb7d7724 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -2,10 +2,11 @@
use std::fmt;
+use arrayvec::ArrayVec;
use either::Either;
use hir::{
- symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId,
- InFile, LocalSource, ModuleSource,
+ db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource,
+ HirDisplay, HirFileId, InFile, LocalSource, ModuleSource,
};
use ide_db::{
base_db::{FileId, FileRange},
@@ -40,6 +41,8 @@ pub struct NavigationTarget {
/// comments, and `focus_range` is the range of the identifier.
///
/// Clients should place the cursor on this range when navigating to this target.
+ ///
+ /// This range must be contained within [`Self::full_range`].
pub focus_range: Option<TextRange>,
pub name: SmolStr,
pub kind: Option<SymbolKind>,
@@ -70,15 +73,15 @@ impl fmt::Debug for NavigationTarget {
}
pub(crate) trait ToNav {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget;
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget>;
}
pub(crate) trait TryToNav {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget>;
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>>;
}
impl<T: TryToNav, U: TryToNav> TryToNav for Either<T, U> {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
Either::Left(it) => it.try_to_nav(db),
Either::Right(it) => it.try_to_nav(db),
@@ -91,23 +94,30 @@ impl NavigationTarget {
self.focus_range.unwrap_or(self.full_range)
}
- pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
+ pub(crate) fn from_module_to_decl(
+ db: &RootDatabase,
+ module: hir::Module,
+ ) -> UpmappingResult<NavigationTarget> {
let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
- if let Some(InFile { value, file_id }) = &module.declaration_source(db) {
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, *file_id, value.syntax(), value.name());
- let mut res = NavigationTarget::from_syntax(
- file_id,
- name,
- focus_range,
- full_range,
- SymbolKind::Module,
- );
- res.docs = module.docs(db);
- res.description = Some(module.display(db).to_string());
- return res;
+ match module.declaration_source(db) {
+ Some(InFile { value, file_id }) => {
+ orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ name.clone(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+ res.docs = module.docs(db);
+ res.description = Some(module.display(db).to_string());
+ res
+ },
+ )
+ }
+ _ => module.to_nav(db),
}
- module.to_nav(db)
}
#[cfg(test)]
@@ -133,13 +143,14 @@ impl NavigationTarget {
db: &RootDatabase,
InFile { file_id, value }: InFile<&dyn ast::HasName>,
kind: SymbolKind,
- ) -> NavigationTarget {
- let name = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
+ ) -> UpmappingResult<NavigationTarget> {
+ let name: SmolStr = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), value.name());
-
- NavigationTarget::from_syntax(file_id, name, focus_range, full_range, kind)
+ orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind)
+ },
+ )
}
fn from_syntax(
@@ -164,48 +175,51 @@ impl NavigationTarget {
}
impl TryToNav for FileSymbol {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
- let full_range = self.loc.original_range(db);
- let focus_range = self.loc.original_name_range(db).and_then(|it| {
- if it.file_id == full_range.file_id {
- Some(it.range)
- } else {
- None
- }
- });
-
- Some(NavigationTarget {
- file_id: full_range.file_id,
- name: self
- .is_alias
- .then(|| self.def.name(db))
- .flatten()
- .map_or_else(|| self.name.clone(), |it| it.to_smol_str()),
- alias: self.is_alias.then(|| self.name.clone()),
- kind: Some(hir::ModuleDefId::from(self.def).into()),
- full_range: full_range.range,
- focus_range,
- container_name: self.container_name.clone(),
- description: match self.def {
- hir::ModuleDef::Module(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Function(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Const(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Static(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::BuiltinType(_) => None,
- },
- docs: None,
- })
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
+ let root = db.parse_or_expand(self.loc.hir_file_id);
+ self.loc.ptr.to_node(&root);
+ Some(
+ orig_range_with_focus(
+ db,
+ self.loc.hir_file_id,
+ &self.loc.ptr.to_node(&root),
+ Some(self.loc.name_ptr.to_node(&root)),
+ )
+ .map(|(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget {
+ file_id,
+ name: self
+ .is_alias
+ .then(|| self.def.name(db))
+ .flatten()
+ .map_or_else(|| self.name.clone(), |it| it.to_smol_str()),
+ alias: self.is_alias.then(|| self.name.clone()),
+ kind: Some(hir::ModuleDefId::from(self.def).into()),
+ full_range,
+ focus_range,
+ container_name: self.container_name.clone(),
+ description: match self.def {
+ hir::ModuleDef::Module(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Function(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Const(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Static(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::BuiltinType(_) => None,
+ },
+ docs: None,
+ }
+ }),
+ )
}
}
impl TryToNav for Definition {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
Definition::Local(it) => Some(it.to_nav(db)),
Definition::Label(it) => Some(it.to_nav(db)),
@@ -233,7 +247,7 @@ impl TryToNav for Definition {
}
impl TryToNav for hir::ModuleDef {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
hir::ModuleDef::Module(it) => Some(it.to_nav(db)),
hir::ModuleDef::Function(it) => it.try_to_nav(db),
@@ -331,22 +345,26 @@ where
D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay,
D::Ast: ast::HasName,
{
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
- let mut res = NavigationTarget::from_named(
- db,
- src.as_ref().map(|it| it as &dyn ast::HasName),
- D::KIND,
- );
- res.docs = self.docs(db);
- res.description = Some(self.display(db).to_string());
- res.container_name = self.container_name(db);
- Some(res)
+ Some(
+ NavigationTarget::from_named(
+ db,
+ src.as_ref().map(|it| it as &dyn ast::HasName),
+ D::KIND,
+ )
+ .map(|mut res| {
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res.container_name = self.container_name(db);
+ res
+ }),
+ )
}
}
impl ToNav for hir::Module {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
let InFile { file_id, value } = self.definition_source(db);
let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
@@ -355,97 +373,125 @@ impl ToNav for hir::Module {
ModuleSource::Module(node) => (node.syntax(), node.name()),
ModuleSource::BlockExpr(node) => (node.syntax(), None),
};
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
- NavigationTarget::from_syntax(file_id, name, focus_range, full_range, SymbolKind::Module)
+
+ orig_range_with_focus(db, file_id, syntax, focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(
+ file_id,
+ name.clone(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ )
+ },
+ )
}
}
impl TryToNav for hir::Impl {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.source(db)?;
- let derive_attr = self.as_builtin_derive(db);
+ let derive_path = self.as_builtin_derive_path(db);
- let (focus, syntax) = match &derive_attr {
- Some(attr) => (None, attr.value.syntax()),
- None => (value.self_ty(), value.syntax()),
+ let (file_id, focus, syntax) = match &derive_path {
+ Some(attr) => (attr.file_id.into(), None, attr.value.syntax()),
+ None => (file_id, value.self_ty(), value.syntax()),
};
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
- Some(NavigationTarget::from_syntax(
- file_id,
- "impl".into(),
- focus_range,
- full_range,
- SymbolKind::Impl,
+ Some(orig_range_with_focus(db, file_id, syntax, focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(
+ file_id,
+ "impl".into(),
+ focus_range,
+ full_range,
+ SymbolKind::Impl,
+ )
+ },
))
}
}
impl TryToNav for hir::ExternCrateDecl {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
let InFile { file_id, value } = src;
let focus = value
.rename()
.map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right));
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), focus);
- let mut res = NavigationTarget::from_syntax(
- file_id,
- self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
- focus_range,
- full_range,
- SymbolKind::Module,
- );
- res.docs = self.docs(db);
- res.description = Some(self.display(db).to_string());
- res.container_name = container_name(db, *self);
- Some(res)
+ Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res.container_name = container_name(db, *self);
+ res
+ },
+ ))
}
}
impl TryToNav for hir::Field {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
let field_source = match &src.value {
FieldSource::Named(it) => {
- let mut res =
- NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field);
- res.docs = self.docs(db);
- res.description = Some(self.display(db).to_string());
- res
- }
- FieldSource::Pos(it) => {
- let FileRange { file_id, range } =
- src.with_value(it.syntax()).original_file_range(db);
- NavigationTarget::from_syntax(file_id, "".into(), None, range, SymbolKind::Field)
+ NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
+ |mut res| {
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res
+ },
+ )
}
+ FieldSource::Pos(it) => orig_range(db, src.file_id, it.syntax()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(
+ file_id,
+ format!("{}", self.index()).into(),
+ focus_range,
+ full_range,
+ SymbolKind::Field,
+ )
+ },
+ ),
};
Some(field_source)
}
}
impl TryToNav for hir::Macro {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
let name_owner: &dyn ast::HasName = match &src.value {
Either::Left(it) => it,
Either::Right(it) => it,
};
- let mut res = NavigationTarget::from_named(
- db,
- src.as_ref().with_value(name_owner),
- self.kind(db).into(),
- );
- res.docs = self.docs(db);
- Some(res)
+ Some(
+ NavigationTarget::from_named(
+ db,
+ src.as_ref().with_value(name_owner),
+ self.kind(db).into(),
+ )
+ .map(|mut res| {
+ res.docs = self.docs(db);
+ res
+ }),
+ )
}
}
impl TryToNav for hir::Adt {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
hir::Adt::Struct(it) => it.try_to_nav(db),
hir::Adt::Union(it) => it.try_to_nav(db),
@@ -455,7 +501,7 @@ impl TryToNav for hir::Adt {
}
impl TryToNav for hir::AssocItem {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
AssocItem::Function(it) => it.try_to_nav(db),
AssocItem::Const(it) => it.try_to_nav(db),
@@ -465,7 +511,7 @@ impl TryToNav for hir::AssocItem {
}
impl TryToNav for hir::GenericParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
hir::GenericParam::TypeParam(it) => it.try_to_nav(db),
hir::GenericParam::ConstParam(it) => it.try_to_nav(db),
@@ -475,7 +521,7 @@ impl TryToNav for hir::GenericParam {
}
impl ToNav for LocalSource {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
let InFile { file_id, value } = &self.source;
let file_id = *file_id;
let local = self.local;
@@ -484,60 +530,61 @@ impl ToNav for LocalSource {
Either::Right(it) => (it.syntax(), it.name()),
};
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, node, name);
-
- let name = local.name(db).to_smol_str();
- let kind = if local.is_self(db) {
- SymbolKind::SelfParam
- } else if local.is_param(db) {
- SymbolKind::ValueParam
- } else {
- SymbolKind::Local
- };
- NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(kind),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- }
+ orig_range_with_focus(db, file_id, node, name).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ let name = local.name(db).to_smol_str();
+ let kind = if local.is_self(db) {
+ SymbolKind::SelfParam
+ } else if local.is_param(db) {
+ SymbolKind::ValueParam
+ } else {
+ SymbolKind::Local
+ };
+ NavigationTarget {
+ file_id,
+ name,
+ alias: None,
+ kind: Some(kind),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ }
+ },
+ )
}
}
impl ToNav for hir::Local {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
self.primary_source(db).to_nav(db)
}
}
impl ToNav for hir::Label {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
let InFile { file_id, value } = self.source(db);
let name = self.name(db).to_smol_str();
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), value.lifetime());
-
- NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::Label),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- }
+ orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::Label),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ )
}
}
impl TryToNav for hir::TypeParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.merge().source(db)?;
let name = self.name(db).to_smol_str();
@@ -556,51 +603,51 @@ impl TryToNav for hir::TypeParam {
};
let focus = value.as_ref().either(|it| it.name(), |it| it.name());
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
-
- Some(NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::TypeParam),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- })
+ Some(orig_range_with_focus(db, file_id, syntax, focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::TypeParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ ))
}
}
impl TryToNav for hir::TypeOrConstParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
self.split(db).try_to_nav(db)
}
}
impl TryToNav for hir::LifetimeParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.source(db)?;
let name = self.name(db).to_smol_str();
- let FileRange { file_id, range } =
- InFile::new(file_id, value.syntax()).original_file_range(db);
- Some(NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::LifetimeParam),
- full_range: range,
- focus_range: Some(range),
- container_name: None,
- description: None,
- docs: None,
- })
+ Some(orig_range(db, file_id, value.syntax()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::LifetimeParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ ))
}
}
impl TryToNav for hir::ConstParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.merge().source(db)?;
let name = self.name(db).to_smol_str();
@@ -612,35 +659,178 @@ impl TryToNav for hir::ConstParam {
}
};
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), value.name());
- Some(NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::ConstParam),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- })
+ Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::ConstParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ ))
+ }
+}
+
+#[derive(Debug)]
+pub struct UpmappingResult<T> {
+ /// The macro call site.
+ pub call_site: T,
+ /// The macro definition site, if relevant.
+ pub def_site: Option<T>,
+}
+
+impl<T> UpmappingResult<T> {
+ pub fn call_site(self) -> T {
+ self.call_site
+ }
+
+ pub fn collect<FI: FromIterator<T>>(self) -> FI {
+ FI::from_iter(self.into_iter())
+ }
+}
+
+impl<T> IntoIterator for UpmappingResult<T> {
+ type Item = T;
+
+ type IntoIter = <ArrayVec<T, 2> as IntoIterator>::IntoIter;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.def_site
+ .into_iter()
+ .chain(Some(self.call_site))
+ .collect::<ArrayVec<_, 2>>()
+ .into_iter()
}
}
+impl<T> UpmappingResult<T> {
+ fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> {
+ UpmappingResult { call_site: f(self.call_site), def_site: self.def_site.map(f) }
+ }
+}
+
+/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions.
+/// May return two results if the mapped node originates from a macro definition, in which case the
+/// second result is the creating macro call.
fn orig_range_with_focus(
db: &RootDatabase,
hir_file: HirFileId,
value: &SyntaxNode,
name: Option<impl AstNode>,
-) -> (FileId, TextRange, Option<TextRange>) {
- let FileRange { file_id, range: full_range } =
- InFile::new(hir_file, value).original_file_range(db);
- let focus_range = name
- .and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db))
- .and_then(|range| if range.file_id == file_id { Some(range.range) } else { None });
-
- (file_id, full_range, focus_range)
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+ let Some(name) = name else { return orig_range(db, hir_file, value) };
+
+ let call_range = || {
+ db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+ .kind
+ .original_call_range(db)
+ };
+
+ let def_range = || {
+ db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+ .def
+ .definition_range(db)
+ };
+
+ let value_range = InFile::new(hir_file, value).original_file_range_opt(db);
+ let ((call_site_range, call_site_focus), def_site) =
+ match InFile::new(hir_file, name.syntax()).original_file_range_opt(db) {
+ // call site name
+ Some((focus_range, ctxt)) if ctxt.is_root() => {
+ // Try to upmap the node as well; if it ends up in the def site, go back to the call site
+ (
+ (
+ match value_range {
+ // name is in the node in the macro input so we can return it
+ Some((range, ctxt))
+ if ctxt.is_root()
+ && range.file_id == focus_range.file_id
+ && range.range.contains_range(focus_range.range) =>
+ {
+ range
+ }
+ // name lies outside the node, so instead point to the macro call which
+ // *should* contain the name
+ _ => call_range(),
+ },
+ Some(focus_range),
+ ),
+ // no def site relevant
+ None,
+ )
+ }
+
+ // def site name
+ // FIXME: This can be improved
+ Some((focus_range, _ctxt)) => {
+ match value_range {
+ // but overall node is in macro input
+ Some((range, ctxt)) if ctxt.is_root() => (
+ // node mapped up in call site, show the node
+ (range, None),
+ // def site, if the name is in the (possibly) upmapped def site range, show the
+ // def site
+ {
+ let (def_site, _) = def_range().original_node_file_range(db);
+ (def_site.file_id == focus_range.file_id
+ && def_site.range.contains_range(focus_range.range))
+ .then_some((def_site, Some(focus_range)))
+ },
+ ),
+ // node is in macro def, just show the focus
+ _ => (
+ // show the macro call
+ (call_range(), None),
+ Some((focus_range, Some(focus_range))),
+ ),
+ }
+ }
+ // lost name? can't happen for single tokens
+ None => return orig_range(db, hir_file, value),
+ };
+
+ UpmappingResult {
+ call_site: (
+ call_site_range,
+ call_site_focus.and_then(|FileRange { file_id, range }| {
+ if call_site_range.file_id == file_id && call_site_range.range.contains_range(range)
+ {
+ Some(range)
+ } else {
+ None
+ }
+ }),
+ ),
+ def_site: def_site.map(|(def_site_range, def_site_focus)| {
+ (
+ def_site_range,
+ def_site_focus.and_then(|FileRange { file_id, range }| {
+ if def_site_range.file_id == file_id
+ && def_site_range.range.contains_range(range)
+ {
+ Some(range)
+ } else {
+ None
+ }
+ }),
+ )
+ }),
+ }
+}
+
+fn orig_range(
+ db: &RootDatabase,
+ hir_file: HirFileId,
+ value: &SyntaxNode,
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+ UpmappingResult {
+ call_site: (InFile::new(hir_file, value).original_file_range(db), None),
+ def_site: None,
+ }
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
index 506f9452c..413dbf9c5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -45,11 +45,11 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
Some(module) => sema
.to_def(&module)
.into_iter()
- .map(|module| NavigationTarget::from_module_to_decl(db, module))
+ .flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
.collect(),
None => sema
.to_module_defs(position.file_id)
- .map(|module| NavigationTarget::from_module_to_decl(db, module))
+ .flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
.collect(),
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 2d0295692..6c0fb0baf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -9,7 +9,9 @@
//! at the index that the match starts at and its tree parent is
//! resolved to the search element definition, we get a reference.
-use hir::{PathResolution, Semantics};
+use std::collections::HashMap;
+
+use hir::{DescendPreference, PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},
@@ -60,19 +62,6 @@ pub(crate) fn find_all_refs(
let syntax = sema.parse(position.file_id).syntax().clone();
let make_searcher = |literal_search: bool| {
move |def: Definition| {
- let declaration = match def {
- Definition::Module(module) => {
- Some(NavigationTarget::from_module_to_decl(sema.db, module))
- }
- def => def.try_to_nav(sema.db),
- }
- .map(|nav| {
- let decl_range = nav.focus_or_full_range();
- Declaration {
- is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
- nav,
- }
- });
let mut usages =
def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all();
@@ -80,7 +69,7 @@ pub(crate) fn find_all_refs(
retain_adt_literal_usages(&mut usages, def, sema);
}
- let references = usages
+ let mut references = usages
.into_iter()
.map(|(file_id, refs)| {
(
@@ -91,8 +80,30 @@ pub(crate) fn find_all_refs(
.collect(),
)
})
- .collect();
-
+ .collect::<HashMap<_, Vec<_>, _>>();
+ let declaration = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .map(|nav| {
+ let (nav, extra_ref) = match nav.def_site {
+ Some(call) => (call, Some(nav.call_site)),
+ None => (nav.call_site, None),
+ };
+ if let Some(extra_ref) = extra_ref {
+ references
+ .entry(extra_ref.file_id)
+ .or_default()
+ .push((extra_ref.focus_or_full_range(), None));
+ }
+ let decl_range = nav.focus_or_full_range();
+ Declaration {
+ is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
+ nav,
+ }
+ });
ReferenceSearchResult { declaration, references }
}
};
@@ -109,7 +120,7 @@ pub(crate) fn find_all_refs(
}
None => {
let search = make_searcher(false);
- Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
+ Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect())
}
}
}
@@ -118,15 +129,27 @@ pub(crate) fn find_defs<'a>(
sema: &'a Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
offset: TextSize,
-) -> Option<impl Iterator<Item = Definition> + 'a> {
+) -> Option<impl IntoIterator<Item = Definition> + 'a> {
let token = syntax.token_at_offset(offset).find(|t| {
matches!(
t.kind(),
- IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | STRING
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
)
- });
- token.map(|token| {
- sema.descend_into_macros_with_same_text(token, offset)
+ })?;
+
+ if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
+ return resolution.map(Definition::from).map(|it| vec![it]);
+ }
+
+ Some(
+ sema.descend_into_macros(DescendPreference::SameText, token)
.into_iter()
.filter_map(|it| ast::NameLike::cast(it.parent()?))
.filter_map(move |name_like| {
@@ -162,7 +185,8 @@ pub(crate) fn find_defs<'a>(
};
Some(def)
})
- })
+ .collect(),
+ )
}
pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
@@ -684,6 +708,32 @@ enum Foo {
}
#[test]
+ fn test_self() {
+ check(
+ r#"
+struct S$0<T> {
+ t: PhantomData<T>,
+}
+
+impl<T> S<T> {
+ fn new() -> Self {
+ Self {
+ t: Default::default(),
+ }
+ }
+}
+"#,
+ expect![[r#"
+ S Struct FileId(0) 0..38 7..8
+
+ FileId(0) 48..49
+ FileId(0) 71..75
+ FileId(0) 86..90
+ "#]],
+ )
+ }
+
+ #[test]
fn test_find_all_refs_two_modules() {
check(
r#"
@@ -843,7 +893,7 @@ pub(super) struct Foo$0 {
check_with_scope(
code,
- Some(SearchScope::single_file(FileId(2))),
+ Some(SearchScope::single_file(FileId::from_raw(2))),
expect![[r#"
quux Function FileId(0) 19..35 26..30
@@ -1142,7 +1192,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
}
"#,
expect![[r#"
- 'a LifetimeParam FileId(0) 55..57 55..57
+ 'a LifetimeParam FileId(0) 55..57
FileId(0) 63..65
FileId(0) 71..73
@@ -1160,7 +1210,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
type Foo<'a, T> where T: 'a$0 = &'a T;
"#,
expect![[r#"
- 'a LifetimeParam FileId(0) 9..11 9..11
+ 'a LifetimeParam FileId(0) 9..11
FileId(0) 25..27
FileId(0) 31..33
@@ -1182,7 +1232,7 @@ impl<'a> Foo<'a> for &'a () {
}
"#,
expect![[r#"
- 'a LifetimeParam FileId(0) 47..49 47..49
+ 'a LifetimeParam FileId(0) 47..49
FileId(0) 55..57
FileId(0) 64..66
@@ -2066,4 +2116,27 @@ fn main() { r#fn(); }
"#]],
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+ let a = "foo";
+ format_args!("hello {a} {a$0} {}", a);
+ // ^
+ // ^
+ // ^
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 20..21 20..21
+
+ FileId(0) 56..57 Read
+ FileId(0) 60..61 Read
+ FileId(0) 68..69 Read
+ "#]],
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index ac9df5ed6..1febfabfc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -4,16 +4,18 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method).
-use hir::{AsAssocItem, InFile, Semantics};
+use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{
- base_db::FileId,
+ base_db::{FileId, FileRange},
defs::{Definition, NameClass, NameRefClass},
rename::{bail, format_err, source_edit_from_references, IdentifierKind},
RootDatabase,
};
use itertools::Itertools;
use stdx::{always, never};
-use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize};
+use syntax::{
+ ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
use text_edit::TextEdit;
@@ -34,23 +36,20 @@ pub(crate) fn prepare_rename(
let syntax = source_file.syntax();
let res = find_definitions(&sema, syntax, position)?
- .map(|(name_like, def)| {
+ .map(|(frange, kind, def)| {
// ensure all ranges are valid
if def.range_for_rename(&sema).is_none() {
bail!("No references found at position")
}
- let Some(frange) = sema.original_range_opt(name_like.syntax()) else {
- bail!("No references found at position");
- };
always!(
frange.range.contains_inclusive(position.offset)
&& frange.file_id == position.file_id
);
- Ok(match name_like {
- ast::NameLike::Lifetime(_) => {
+ Ok(match kind {
+ SyntaxKind::LIFETIME => {
TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end())
}
_ => frange.range,
@@ -93,7 +92,7 @@ pub(crate) fn rename(
let defs = find_definitions(&sema, syntax, position)?;
let ops: RenameResult<Vec<SourceChange>> = defs
- .map(|(_namelike, def)| {
+ .map(|(.., def)| {
if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) {
cov_mark::hit!(rename_self_to_param);
@@ -134,11 +133,27 @@ pub(crate) fn will_rename_file(
fn find_definitions(
sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
- position: FilePosition,
-) -> RenameResult<impl Iterator<Item = (ast::NameLike, Definition)>> {
- let symbols = sema
- .find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, position.offset)
- .map(|name_like| {
+ FilePosition { file_id, offset }: FilePosition,
+) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
+ let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
+
+ if let Some((range, Some(resolution))) =
+ token.and_then(|token| sema.check_for_format_args_template(token, offset))
+ {
+ return Ok(vec![(
+ FileRange { file_id, range },
+ SyntaxKind::STRING,
+ Definition::from(resolution),
+ )]
+ .into_iter());
+ }
+
+ let symbols =
+ sema.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, offset).map(|name_like| {
+ let kind = name_like.syntax().kind();
+ let range = sema
+ .original_range_opt(name_like.syntax())
+ .ok_or_else(|| format_err!("No references found at position"))?;
let res = match &name_like {
// renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet
ast::NameLike::Name(name)
@@ -163,7 +178,6 @@ fn find_definitions(
Definition::Local(local_def)
}
})
- .map(|def| (name_like.clone(), def))
.ok_or_else(|| format_err!("No references found at position")),
ast::NameLike::NameRef(name_ref) => {
NameRefClass::classify(sema, name_ref)
@@ -187,7 +201,7 @@ fn find_definitions(
{
Err(format_err!("Renaming aliases is currently unsupported"))
} else {
- Ok((name_like.clone(), def))
+ Ok(def)
}
})
}
@@ -203,11 +217,10 @@ fn find_definitions(
_ => None,
})
})
- .map(|def| (name_like, def))
.ok_or_else(|| format_err!("No references found at position"))
}
};
- res
+ res.map(|def| (range, kind, def))
});
let res: RenameResult<Vec<_>> = symbols.collect();
@@ -218,7 +231,7 @@ fn find_definitions(
Err(format_err!("No references found at position"))
} else {
// remove duplicates, comparing `Definition`s
- Ok(v.into_iter().unique_by(|t| t.1))
+ Ok(v.into_iter().unique_by(|&(.., def)| def).collect::<Vec<_>>().into_iter())
}
}
Err(e) => Err(e),
@@ -2663,4 +2676,44 @@ struct A;
"error: Cannot rename a non-local definition.",
)
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ "fbar",
+ r#"
+//- minicore: fmt
+fn test() {
+ let foo = "foo";
+ format_args!("hello {foo} {foo$0} {}", foo);
+}
+"#,
+ r#"
+fn test() {
+ let fbar = "foo";
+ format_args!("hello {fbar} {fbar} {}", fbar);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn implicit_format_args2() {
+ check(
+ "fo",
+ r#"
+//- minicore: fmt
+fn test() {
+ let foo = "foo";
+ format_args!("hello {foo} {foo$0} {}", foo);
+}
+"#,
+ r#"
+fn test() {
+ let fo = "foo";
+ format_args!("hello {fo} {fo} {}", fo);
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 2d528c642..d334e66d3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -2,14 +2,14 @@ use std::fmt;
use ast::HasName;
use cfg::CfgExpr;
-use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics};
+use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
defs::Definition,
documentation::docs_from_attrs,
helpers::visit_file_defs,
- search::SearchScope,
+ search::{FileReferenceNode, SearchScope},
FxHashMap, FxHashSet, RootDatabase, SymbolKind,
};
use itertools::Itertools;
@@ -142,7 +142,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
Definition::Function(it) => it.source(db).map(|src| src.file_id),
_ => None,
};
- if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) {
+ if let Some(file_id) = file_id.filter(|file| file.macro_file().is_some()) {
in_macro_expansion.entry(file_id).or_default().push(runnable);
return;
}
@@ -240,7 +240,7 @@ fn find_related_tests(
.flatten();
for ref_ in defs {
let name_ref = match ref_.name {
- ast::NameLike::NameRef(name_ref) => name_ref,
+ FileReferenceNode::NameRef(name_ref) => name_ref,
_ => continue,
};
if let Some(fn_def) =
@@ -308,11 +308,7 @@ pub(crate) fn runnable_fn(
sema: &Semantics<'_, RootDatabase>,
def: hir::Function,
) -> Option<Runnable> {
- let name = def.name(sema.db).to_smol_str();
-
- let root = def.module(sema.db).krate().root_module();
-
- let kind = if name == "main" && def.module(sema.db) == root {
+ let kind = if def.is_main(sema.db) {
RunnableKind::Bin
} else {
let test_id = || {
@@ -320,7 +316,9 @@ pub(crate) fn runnable_fn(
let def: hir::ModuleDef = def.into();
def.canonical_path(sema.db)
};
- canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
+ canonical_path
+ .map(TestId::Path)
+ .unwrap_or(TestId::Name(def.name(sema.db).to_smol_str()))
};
if def.is_test(sema.db) {
@@ -337,7 +335,8 @@ pub(crate) fn runnable_fn(
sema.db,
def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName),
SymbolKind::Function,
- );
+ )
+ .call_site();
let cfg = def.attrs(sema.db).cfg();
Some(Runnable { use_name_in_title: false, nav, kind, cfg })
}
@@ -359,7 +358,7 @@ pub(crate) fn runnable_mod(
let attrs = def.attrs(sema.db);
let cfg = attrs.cfg();
- let nav = NavigationTarget::from_module_to_decl(sema.db, def);
+ let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site();
Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg })
}
@@ -372,7 +371,7 @@ pub(crate) fn runnable_impl(
return None;
}
let cfg = attrs.cfg();
- let nav = def.try_to_nav(sema.db)?;
+ let nav = def.try_to_nav(sema.db)?.call_site();
let ty = def.self_ty(sema.db);
let adt_name = ty.as_adt()?.name(sema.db);
let mut ty_args = ty.generic_parameters(sema.db).peekable();
@@ -409,7 +408,7 @@ fn runnable_mod_outline_definition(
match def.definition_source(sema.db).value {
hir::ModuleSource::SourceFile(_) => Some(Runnable {
use_name_in_title: false,
- nav: def.to_nav(sema.db),
+ nav: def.to_nav(sema.db).call_site(),
kind: RunnableKind::TestMod { path },
cfg,
}),
@@ -467,7 +466,8 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
let mut nav = match def {
Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def),
def => def.try_to_nav(db)?,
- };
+ }
+ .call_site();
nav.focus_range = None;
nav.description = None;
nav.docs = None;
@@ -587,6 +587,9 @@ mod tests {
$0
fn main() {}
+#[export_name = "main"]
+fn __cortex_m_rt_main_trampoline() {}
+
#[test]
fn test_foo() {}
@@ -604,7 +607,7 @@ mod not_a_root {
fn main() {}
}
"#,
- &[TestMod, Bin, Test, Test, Test, Bench],
+ &[TestMod, Bin, Bin, Test, Test, Test, Bench],
expect![[r#"
[
Runnable {
@@ -613,7 +616,7 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 0..190,
+ full_range: 0..253,
name: "",
kind: Module,
},
@@ -642,8 +645,22 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 15..39,
- focus_range: 26..34,
+ full_range: 15..76,
+ focus_range: 42..71,
+ name: "__cortex_m_rt_main_trampoline",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 78..102,
+ focus_range: 89..97,
name: "test_foo",
kind: Function,
},
@@ -663,8 +680,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 41..92,
- focus_range: 73..87,
+ full_range: 104..155,
+ focus_range: 136..150,
name: "test_full_path",
kind: Function,
},
@@ -684,8 +701,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 94..128,
- focus_range: 115..123,
+ full_range: 157..191,
+ focus_range: 178..186,
name: "test_foo",
kind: Function,
},
@@ -705,8 +722,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 130..152,
- focus_range: 142..147,
+ full_range: 193..215,
+ focus_range: 205..210,
name: "bench",
kind: Function,
},
@@ -1655,12 +1672,18 @@ macro_rules! gen2 {
}
}
}
+macro_rules! gen_main {
+ () => {
+ fn main() {}
+ }
+}
mod tests {
gen!();
}
gen2!();
+gen_main!();
"#,
- &[TestMod, TestMod, Test, Test, TestMod],
+ &[TestMod, TestMod, Test, Test, TestMod, Bin],
expect![[r#"
[
Runnable {
@@ -1669,7 +1692,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 0..237,
+ full_range: 0..315,
name: "",
kind: Module,
},
@@ -1684,8 +1707,8 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 202..227,
- focus_range: 206..211,
+ full_range: 267..292,
+ focus_range: 271..276,
name: "tests",
kind: Module,
description: "mod tests",
@@ -1701,7 +1724,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 218..225,
+ full_range: 283..290,
name: "foo_test",
kind: Function,
},
@@ -1721,7 +1744,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 228..236,
+ full_range: 293..301,
name: "foo_test2",
kind: Function,
},
@@ -1741,7 +1764,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 228..236,
+ full_range: 293..301,
name: "tests2",
kind: Module,
description: "mod tests2",
@@ -1751,6 +1774,19 @@ gen2!();
},
cfg: None,
},
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 302..314,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
]
"#]],
);
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index e020b52e1..990376a49 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -4,7 +4,10 @@
use std::collections::BTreeSet;
use either::Either;
-use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait};
+use hir::{
+ AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics,
+ Trait,
+};
use ide_db::{
active_parameter::{callable_for_node, generic_def_for_node},
base_db::FilePosition,
@@ -79,7 +82,7 @@ pub(crate) fn signature_help(
// if the cursor is sandwiched between two space tokens and the call is unclosed
// this prevents us from leaving the CallExpression
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
- let token = sema.descend_into_macros_single(token, offset);
+ let token = sema.descend_into_macros_single(DescendPreference::None, token);
for node in token.parent_ancestors() {
match_ast! {
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index aabd26da2..3724dc282 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -3,7 +3,7 @@
use std::collections::HashMap;
-use hir::{db::HirDatabase, Crate, Module};
+use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide_db::helpers::get_definition;
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
@@ -13,6 +13,7 @@ use ide_db::{
use syntax::{AstNode, SyntaxKind::*, TextRange, T};
use crate::inlay_hints::InlayFieldsToResolve;
+use crate::navigation_target::UpmappingResult;
use crate::{
hover::hover_for_definition,
inlay_hints::AdjustmentHintsMode,
@@ -118,6 +119,7 @@ impl StaticIndex<'_> {
adjustment_hints: crate::AdjustmentHints::Never,
adjustment_hints_mode: AdjustmentHintsMode::Prefix,
adjustment_hints_hide_outside_unsafe: false,
+ implicit_drop_hints: false,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
closure_style: hir::ClosureStyle::ImplFn,
@@ -165,9 +167,8 @@ impl StaticIndex<'_> {
} else {
let it = self.tokens.insert(TokenStaticData {
hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
- definition: def.try_to_nav(self.db).map(|it| FileRange {
- file_id: it.file_id,
- range: it.focus_or_full_range(),
+ definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| {
+ FileRange { file_id: it.file_id, range: it.focus_or_full_range() }
}),
references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
@@ -178,7 +179,7 @@ impl StaticIndex<'_> {
let token = self.tokens.get_mut(id).unwrap();
token.references.push(ReferenceData {
range: FileRange { range, file_id },
- is_definition: match def.try_to_nav(self.db) {
+ is_definition: match def.try_to_nav(self.db).map(UpmappingResult::call_site) {
Some(it) => it.file_id == file_id && it.focus_or_full_range() == range,
None => false,
},
@@ -242,6 +243,7 @@ mod tests {
}
}
+ #[track_caller]
fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
index c9ee460a1..e7f97ebe6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/status.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData};
use hir::{
db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
- Attr, Attrs, ExpandResult, MacroFile, Module,
+ Attr, Attrs, ExpandResult, MacroFileId, Module,
};
use ide_db::{
base_db::{
@@ -199,8 +199,12 @@ impl StatCollect<FileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
}
}
-impl<M> StatCollect<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
- fn collect_entry(&mut self, _: MacroFile, value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>) {
+impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
+ fn collect_entry(
+ &mut self,
+ _: MacroFileId,
+ value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
+ ) {
self.total += 1;
self.retained += value.is_some() as usize;
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index bb01c81d6..307812156 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -13,7 +13,7 @@ mod html;
#[cfg(test)]
mod tests;
-use hir::{Name, Semantics};
+use hir::{DescendPreference, Name, Semantics};
use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{
ast::{self, IsString},
@@ -245,7 +245,7 @@ fn traverse(
let mut macro_highlighter = MacroHighlighter::default();
// FIXME: these are not perfectly accurate, we determine them by the real file's syntax tree
- // an an attribute nested in a macro call will not emit `inside_attribute`
+ // an attribute nested in a macro call will not emit `inside_attribute`
let mut inside_attribute = false;
let mut inside_macro_call = false;
@@ -393,13 +393,18 @@ fn traverse(
// Attempt to descend tokens into macro-calls.
let res = match element {
NodeOrToken::Token(token) if token.kind() != COMMENT => {
- let token = match attr_or_derive_item {
- Some(AttrOrDerive::Attr(_)) => {
- sema.descend_into_macros_with_kind_preference(token, 0.into())
- }
- Some(AttrOrDerive::Derive(_)) | None => {
- sema.descend_into_macros_single(token, 0.into())
- }
+ let token = if token.kind() == STRING {
+ // for strings, try to prefer a string that has not been lost in a token
+ // tree
+ // FIXME: This should be done for everything, but check perf first
+ sema.descend_into_macros(DescendPreference::SameKind, token)
+ .into_iter()
+ .max_by_key(|it| {
+ it.parent().map_or(false, |it| it.kind() != TOKEN_TREE)
+ })
+ .unwrap()
+ } else {
+ sema.descend_into_macros_single(DescendPreference::SameKind, token)
};
match token.parent().and_then(ast::NameLike::cast) {
// Remap the token into the wrapping single token nodes
@@ -441,7 +446,7 @@ fn traverse(
{
continue;
}
- highlight_format_string(hl, &string, &expanded_string, range);
+ highlight_format_string(hl, sema, krate, &string, &expanded_string, range);
if !string.is_raw() {
highlight_escape_string(hl, &string, range.start());
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
index 2ef131594..518e71454 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
@@ -1,14 +1,20 @@
//! Syntax highlighting for format macro strings.
use ide_db::{
+ defs::Definition,
syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
SymbolKind,
};
use syntax::{ast, TextRange};
-use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag};
+use crate::{
+ syntax_highlighting::{highlight::highlight_def, highlights::Highlights},
+ HlRange, HlTag,
+};
pub(super) fn highlight_format_string(
stack: &mut Highlights,
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ krate: hir::Crate,
string: &ast::String,
expanded_string: &ast::String,
range: TextRange,
@@ -27,6 +33,18 @@ pub(super) fn highlight_format_string(
});
}
});
+
+ if let Some(parts) = sema.as_format_args_parts(string) {
+ parts.into_iter().for_each(|(range, res)| {
+ if let Some(res) = res {
+ stack.add(HlRange {
+ range,
+ highlight: highlight_def(sema, krate, Definition::from(res)),
+ binding_hash: None,
+ })
+ }
+ })
+ }
}
fn highlight_format_specifier(kind: FormatSpecifier) -> Option<HlTag> {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 7d00282fc..0558f658f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -1,6 +1,6 @@
//! Computes color for a single element.
-use hir::{AsAssocItem, HasVisibility, Semantics};
+use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
use ide_db::{
defs::{Definition, IdentClass, NameClass, NameRefClass},
FxHashMap, RootDatabase, SymbolKind,
@@ -218,7 +218,10 @@ fn highlight_name_ref(
// We can fix this for derive attributes since derive helpers are recorded, but not for
// general attributes.
None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR)
- && !sema.hir_file_for(name_ref.syntax()).is_derive_attr_pseudo_expansion(sema.db) =>
+ && !sema
+ .hir_file_for(name_ref.syntax())
+ .macro_file()
+ .map_or(false, |it| it.is_derive_attr_pseudo_expansion(sema.db)) =>
{
return HlTag::Symbol(SymbolKind::Attribute).into();
}
@@ -348,7 +351,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
hash((name, shadow_count))
}
-fn highlight_def(
+pub(super) fn highlight_def(
sema: &Semantics<'_, RootDatabase>,
krate: hir::Crate,
def: Definition,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index 06b66b302..e8b3a38c9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -43,7 +43,9 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace macro">{</span>
+<pre><code><span class="keyword">use</span> <span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="brace">{</span><span class="function library">mirror</span><span class="comma">,</span> <span class="function library">identity</span><span class="comma">,</span> <span class="derive library">DeriveIdentity</span><span class="brace">}</span><span class="semicolon">;</span>
+
+<span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace macro">{</span>
<span class="brace macro">{</span>
<span class="comma macro">,</span><span class="builtin_type macro">i32</span> <span class="colon macro">:</span><span class="field declaration macro public">x</span> <span class="keyword macro">pub</span>
<span class="comma macro">,</span><span class="builtin_type macro">i32</span> <span class="colon macro">:</span><span class="field declaration macro public">y</span> <span class="keyword macro">pub</span>
@@ -90,17 +92,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">include</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="macro">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 64e614cec..84a823363 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -48,47 +48,38 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>io<span class="colon">:</span><span class="colon">:</span>_print<span class="parenthesis">(</span>format_args_nl<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span><span class="parenthesis">)</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args_nl</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">mod</span> <span class="module declaration">panic</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">macro</span> <span class="macro declaration">panic_2015</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
+ panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Use `panic_str` instead of `panic_display::&lt;&str&gt;` for non_fmt_panic lint.</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Special-case the single-argument case for const_panic.</span>
<span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="punctuation">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
+ panic_display<span class="parenthesis">(</span><span class="punctuation">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="colon">:</span>expr<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_fmt<span class="parenthesis">(</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ panic_fmt<span class="parenthesis">(</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="parenthesis attribute">(</span><span class="none attribute">std_panic</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">panic</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">assert</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">asm</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
-
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">toho</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented"</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented: {}"</span><span class="comma">,</span> format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">reuse_twice</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="colon">:</span>literal<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">{</span>stringify<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="semicolon">;</span> format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="brace">}</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\n</span><span class="char_literal">'</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\t</span><span class="char_literal">'</span><span class="semicolon">;</span>
@@ -165,20 +156,23 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">x</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> "</span><span class="comma macro">,</span> <span class="unresolved_reference macro">thingy</span><span class="comma macro">,</span> <span class="unresolved_reference macro">n2</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"more {}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"{} asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"more </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">i</span><span class="colon">:</span> <span class="builtin_type">u64</span> <span class="operator">=</span> <span class="numeric_literal">3</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">o</span><span class="colon">:</span> <span class="builtin_type">u64</span><span class="semicolon">;</span>
- <span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
- <span class="string_literal macro">"mov {0}, {1}"</span><span class="comma macro">,</span>
- <span class="string_literal macro">"add {0}, 5"</span><span class="comma macro">,</span>
+ <span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
+ <span class="string_literal macro">"mov </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">, </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span>
+ <span class="string_literal macro">"add </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">, 5"</span><span class="comma macro">,</span>
<span class="none macro">out</span><span class="parenthesis macro">(</span><span class="none macro">reg</span><span class="parenthesis macro">)</span> <span class="none macro">o</span><span class="comma macro">,</span>
<span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="none macro">reg</span><span class="parenthesis macro">)</span> <span class="none macro">i</span><span class="comma macro">,</span>
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="comma macro">,</span> <span class="macro default_library library macro">format_args</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="unresolved_reference macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="macro macro">toho</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="keyword">const</span> <span class="constant declaration">CONSTANT</span><span class="colon">:</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="colon">:</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">m</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable reference">backslash</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="constant">CONSTANT</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable mutable">m</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="comma macro">,</span> <span class="macro default_library library macro">format_args</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span 
class="unresolved_reference macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="macro macro">toho</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">reuse_twice</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable reference">backslash</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 542d89925..afb6c555b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -47,9 +47,12 @@ struct Foo;
fn macros() {
check_highlighting(
r#"
-//- proc_macros: mirror
+//- proc_macros: mirror, identity, derive_identity
+//- minicore: fmt, include, concat
//- /lib.rs crate:lib
-proc_macros::mirror! {
+use proc_macros::{mirror, identity, DeriveIdentity};
+
+mirror! {
{
,i32 :x pub
,i32 :y pub
@@ -96,12 +99,6 @@ macro without_args {
}
}
-#[rustc_builtin_macro]
-macro_rules! concat {}
-#[rustc_builtin_macro]
-macro_rules! include {}
-#[rustc_builtin_macro]
-macro_rules! format_args {}
include!(concat!("foo/", "foo.rs"));
@@ -401,53 +398,44 @@ fn test_string_highlighting() {
// thus, we have to copy the macro definition from `std`
check_highlighting(
r#"
-//- minicore: fmt
+//- minicore: fmt, assert, asm, concat, panic
macro_rules! println {
($($arg:tt)*) => ({
$crate::io::_print(format_args_nl!($($arg)*));
})
}
-#[rustc_builtin_macro]
-#[macro_export]
-macro_rules! format_args_nl {}
mod panic {
pub macro panic_2015 {
() => (
- $crate::panicking::panic("explicit panic")
+ panic("explicit panic")
),
($msg:literal $(,)?) => (
- $crate::panicking::panic($msg)
+ panic($msg)
),
// Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
($msg:expr $(,)?) => (
- $crate::panicking::panic_str($msg)
+ panic_str($msg)
),
// Special-case the single-argument case for const_panic.
("{}", $arg:expr $(,)?) => (
- $crate::panicking::panic_display(&$arg)
+ panic_display(&$arg)
),
($fmt:expr, $($arg:tt)+) => (
- $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+))
+ panic_fmt(const_format_args!($fmt, $($arg)+))
),
}
}
-#[rustc_builtin_macro(std_panic)]
-#[macro_export]
-macro_rules! panic {}
-#[rustc_builtin_macro]
-macro_rules! assert {}
-#[rustc_builtin_macro]
-macro_rules! asm {}
-#[rustc_builtin_macro]
-macro_rules! concat {}
-
macro_rules! toho {
() => ($crate::panic!("not yet implemented"));
($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+)));
}
+macro_rules! reuse_twice {
+ ($literal:literal) => {{stringify!($literal); format_args!($literal)}};
+}
+
fn main() {
let a = '\n';
let a = '\t';
@@ -538,8 +526,11 @@ fn main() {
in(reg) i,
);
+ const CONSTANT: () = ():
+ let mut m = ();
format_args!(concat!("{}"), "{}");
- format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
+ format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
+ reuse_twice!("{backslash}");
}"#,
expect_file!["./test_data/highlight_strings.html"],
false,
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index b40509715..d21850bcf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -47,7 +47,7 @@ struct ExtendedTextEdit {
// - typing `=` between two expressions adds `;` when in statement position
// - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
// - typing `.` in a chain method call auto-indents
-// - typing `{` in front of an expression inserts a closing `}` after the expression
+// - typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression
// - typing `{` in a use item adds a closing `}` in the right place
//
// VS Code::
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
index d2bbbf6d2..9abe54cd3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
@@ -1,7 +1,7 @@
use hir::{DefWithBody, Semantics};
use ide_db::base_db::FilePosition;
use ide_db::RootDatabase;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode};
// Feature: View Hir
//
@@ -19,7 +19,9 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let item = ancestors_at_offset(source_file.syntax(), position.offset)
+ .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+ .find_map(ast::Item::cast)?;
let def: DefWithBody = match item {
ast::Item::Fn(it) => sema.to_def(&it)?.into(),
ast::Item::Const(it) => sema.to_def(&it)?.into(),
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
index 2f6332abd..3802978f4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
@@ -55,6 +55,7 @@ impl fmt::Display for RecursiveMemoryLayout {
}
}
+#[derive(Copy, Clone)]
enum FieldOrTupleIdx {
Field(Field),
TupleIdx(usize),
@@ -71,13 +72,6 @@ impl FieldOrTupleIdx {
FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(),
}
}
-
- fn index(&self) -> usize {
- match *self {
- FieldOrTupleIdx::Field(f) => f.index(),
- FieldOrTupleIdx::TupleIdx(i) => i,
- }
- }
}
// Feature: View Memory Layout
@@ -138,7 +132,10 @@ pub(crate) fn view_memory_layout(
return;
}
- fields.sort_by_key(|(f, _)| layout.field_offset(f.index()).unwrap());
+ fields.sort_by_key(|&(f, _)| match f {
+ FieldOrTupleIdx::Field(f) => layout.field_offset(f).unwrap_or(0),
+ FieldOrTupleIdx::TupleIdx(f) => layout.tuple_field_offset(f).unwrap_or(0),
+ });
let children_start = nodes.len();
nodes[parent_idx].children_start = children_start as i64;
@@ -151,7 +148,10 @@ pub(crate) fn view_memory_layout(
typename: child_ty.display(db).to_string(),
size: child_layout.size(),
alignment: child_layout.align(),
- offset: layout.field_offset(field.index()).unwrap_or(0),
+ offset: match *field {
+ FieldOrTupleIdx::Field(f) => layout.field_offset(f).unwrap_or(0),
+ FieldOrTupleIdx::TupleIdx(f) => layout.tuple_field_offset(f).unwrap_or(0),
+ },
parent_idx: parent_idx as i64,
children_start: -1,
children_len: 0,
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
index a36aba58b..08d810c13 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
@@ -1,7 +1,7 @@
use hir::{DefWithBody, Semantics};
use ide_db::base_db::FilePosition;
use ide_db::RootDatabase;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode};
// Feature: View Mir
//
@@ -18,7 +18,9 @@ fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> {
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let item = ancestors_at_offset(source_file.syntax(), position.offset)
+ .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+ .find_map(ast::Item::cast)?;
let def: DefWithBody = match item {
ast::Item::Fn(it) => sema.to_def(&it)?.into(),
ast::Item::Const(it) => sema.to_def(&it)?.into(),
diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml
index 89b302c79..d9184b0fb 100644
--- a/src/tools/rust-analyzer/crates/intern/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.4.0", features = ["raw-api"] }
+dashmap.workspace = true
hashbrown.workspace = true
rustc-hash = "1.1.0"
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/intern/src/lib.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs
index 2934d2667..d784321c7 100644
--- a/src/tools/rust-analyzer/crates/intern/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs
@@ -33,13 +33,10 @@ impl<T: Internable> Interned<T> {
// - if not, box it up, insert it, and return a clone
// This needs to be atomic (locking the shard) to avoid races with other thread, which could
// insert the same object between us looking it up and inserting it.
- match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, &obj) {
+ match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
RawEntryMut::Vacant(vac) => Self {
- arc: vac
- .insert_hashed_nocheck(hash as u64, Arc::new(obj), SharedValue::new(()))
- .0
- .clone(),
+ arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
},
}
}
@@ -54,13 +51,10 @@ impl Interned<str> {
// - if not, box it up, insert it, and return a clone
// This needs to be atomic (locking the shard) to avoid races with other thread, which could
// insert the same object between us looking it up and inserting it.
- match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, s) {
+ match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
RawEntryMut::Vacant(vac) => Self {
- arc: vac
- .insert_hashed_nocheck(hash as u64, Arc::from(s), SharedValue::new(()))
- .0
- .clone(),
+ arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
},
}
}
diff --git a/src/tools/rust-analyzer/crates/limit/src/lib.rs b/src/tools/rust-analyzer/crates/limit/src/lib.rs
index 7fb4b513a..7f4b00df0 100644
--- a/src/tools/rust-analyzer/crates/limit/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/limit/src/lib.rs
@@ -1,6 +1,6 @@
//! limit defines a struct to enforce limits.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[cfg(feature = "tracking")]
use std::sync::atomic::AtomicUsize;
diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
index f041ca88a..31b9f6c76 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
@@ -11,13 +11,13 @@ authors.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-anyhow = "1.0.62"
+anyhow.workspace = true
crossbeam-channel = "0.5.5"
-itertools = "0.10.5"
-tracing = "0.1.35"
+itertools.workspace = true
+tracing.workspace = true
ide.workspace = true
-ide-db.workspace =true
+ide-db.workspace = true
proc-macro-api.workspace = true
project-model.workspace = true
tt.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index 7a795dd62..db9654220 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -4,19 +4,19 @@
// to run rust-analyzer as a library.
use std::{collections::hash_map::Entry, mem, path::Path, sync};
-use ::tt::token_id as tt;
use crossbeam_channel::{unbounded, Receiver};
use ide::{AnalysisHost, Change, SourceRoot};
use ide_db::{
base_db::{
- CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
- ProcMacroLoadResult, ProcMacros,
+ span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
+ ProcMacroKind, ProcMacroLoadResult, ProcMacros,
},
FxHashMap,
};
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
+use tt::DelimSpan;
use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
pub struct LoadCargoConfig {
@@ -208,6 +208,7 @@ impl ProjectFolders {
let entry = {
let mut dirs = vfs::loader::Directories::default();
dirs.extensions.push("rs".into());
+ dirs.extensions.push("toml".into());
dirs.include.extend(root.include);
dirs.exclude.extend(root.exclude);
for excl in global_excludes {
@@ -373,12 +374,15 @@ struct Expander(proc_macro_api::ProcMacro);
impl ProcMacroExpander for Expander {
fn expand(
&self,
- subtree: &tt::Subtree,
- attrs: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
- match self.0.expand(subtree, attrs, env) {
+ match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
@@ -393,10 +397,13 @@ struct IdentityExpander;
impl ProcMacroExpander for IdentityExpander {
fn expand(
&self,
- subtree: &tt::Subtree,
- _: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ _: Option<&tt::Subtree<SpanData>>,
_: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@@ -408,11 +415,14 @@ struct EmptyExpander;
impl ProcMacroExpander for EmptyExpander {
fn expand(
&self,
- _: &tt::Subtree,
- _: Option<&tt::Subtree>,
+ _: &tt::Subtree<SpanData>,
+ _: Option<&tt::Subtree<SpanData>>,
_: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- Ok(tt::Subtree::empty())
+ call_site: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
+ Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }))
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
index 82105522e..adab1003d 100644
--- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
cov-mark = "2.0.0-pre.1"
rustc-hash = "1.1.0"
smallvec.workspace = true
-tracing = "0.1.35"
+tracing.workspace = true
# local deps
syntax.workspace = true
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 9d43e1304..f503aecce 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
- syntax_node_to_token_tree, tt, DeclarativeMacro,
+ syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
};
#[test]
@@ -38,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
- let res = rules[&id].expand(tt);
+ let res = rules[&id].expand(&tt, |_| ());
assert!(res.err.is_none());
res.value.token_trees.len()
})
@@ -47,14 +47,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@@ -64,14 +64,17 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
.filter_map(ast::MacroRules::cast)
.map(|rule| {
let id = rule.name().unwrap().to_string();
- let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
+ let def_tt =
+ syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
(id, def_tt)
})
.collect()
}
/// Generate random invocation fixtures from rules
-fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
+fn invocation_fixtures(
+ rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
loop {
let mut subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
+ open: DUMMY,
+ close: DUMMY,
kind: tt::DelimiterKind::Invisible,
},
token_trees: vec![],
@@ -102,7 +105,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
- if it.expand(subtree.clone()).err.is_none() {
+ if it.expand(&subtree, |_| ()).err.is_none() {
res.push((name.clone(), subtree));
break;
}
@@ -116,7 +119,11 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
}
return res;
- fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+ fn collect_from_op(
+ op: &Op<DummyTestSpanData>,
+ parent: &mut tt::Subtree<DummyTestSpanData>,
+ seed: &mut usize,
+ ) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -202,38 +209,21 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
- fn make_ident(ident: &str) -> tt::TokenTree {
- tt::Leaf::Ident(tt::Ident {
- span: tt::TokenId::unspecified(),
- text: SmolStr::new(ident),
- })
- .into()
+ fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
}
- fn make_punct(char: char) -> tt::TokenTree {
- tt::Leaf::Punct(tt::Punct {
- span: tt::TokenId::unspecified(),
- char,
- spacing: tt::Spacing::Alone,
- })
- .into()
+ fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
}
- fn make_literal(lit: &str) -> tt::TokenTree {
- tt::Leaf::Literal(tt::Literal {
- span: tt::TokenId::unspecified(),
- text: SmolStr::new(lit),
- })
- .into()
+ fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
}
fn make_subtree(
kind: tt::DelimiterKind,
- token_trees: Option<Vec<tt::TokenTree>>,
- ) -> tt::TokenTree {
+ token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+ ) -> tt::TokenTree<DummyTestSpanData> {
tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
- kind,
- },
+ delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
token_trees: token_trees.unwrap_or_default(),
}
.into()
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index f2d89d3ef..0e755f69b 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -7,15 +7,17 @@ mod transcriber;
use rustc_hash::FxHashMap;
use syntax::SmolStr;
+use tt::Span;
-use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
+use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
-pub(crate) fn expand_rules(
- rules: &[crate::Rule],
- input: &tt::Subtree,
+pub(crate) fn expand_rules<S: Span>(
+ rules: &[crate::Rule<S>],
+ input: &tt::Subtree<S>,
+ marker: impl Fn(&mut S) + Copy,
is_2021: bool,
-) -> ExpandResult<tt::Subtree> {
- let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
+) -> ExpandResult<tt::Subtree<S>> {
+ let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
for rule in rules {
let new_match = matcher::match_(&rule.lhs, input, is_2021);
@@ -24,7 +26,7 @@ pub(crate) fn expand_rules(
// Unconditionally returning the transcription here makes the
// `test_repeat_bad_var` test fail.
let ExpandResult { value, err: transcribe_err } =
- transcriber::transcribe(&rule.rhs, &new_match.bindings);
+ transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
if transcribe_err.is_none() {
return ExpandResult::ok(value);
}
@@ -43,11 +45,11 @@ pub(crate) fn expand_rules(
if let Some((match_, rule)) = match_ {
// if we got here, there was no match without errors
let ExpandResult { value, err: transcribe_err } =
- transcriber::transcribe(&rule.rhs, &match_.bindings);
+ transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
ExpandResult { value, err: match_.err.or(transcribe_err) }
} else {
ExpandResult::new(
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
+ tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
ExpandError::NoMatchingRule,
)
}
@@ -98,31 +100,37 @@ pub(crate) fn expand_rules(
/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
/// many is not a plain `usize`, but a `&[usize]`.
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
-struct Bindings {
- inner: FxHashMap<SmolStr, Binding>,
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct Bindings<S> {
+ inner: FxHashMap<SmolStr, Binding<S>>,
+}
+
+impl<S> Default for Bindings<S> {
+ fn default() -> Self {
+ Self { inner: Default::default() }
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
-enum Binding {
- Fragment(Fragment),
- Nested(Vec<Binding>),
+enum Binding<S> {
+ Fragment(Fragment<S>),
+ Nested(Vec<Binding<S>>),
Empty,
Missing(MetaVarKind),
}
#[derive(Debug, Clone, PartialEq, Eq)]
-enum Fragment {
+enum Fragment<S> {
/// token fragments are just copy-pasted into the output
- Tokens(tt::TokenTree),
+ Tokens(tt::TokenTree<S>),
/// Expr ast fragments are surrounded with `()` on insertion to preserve
/// precedence. Note that this impl is different from the one currently in
/// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
///
- /// At one point in time, we tried to to use "fake" delimiters here a-la
+ /// At one point in time, we tried to use "fake" delimiters here à la
/// proc-macro delimiter=none. As we later discovered, "none" delimiters are
/// tricky to handle in the parser, and rustc doesn't handle those either.
- Expr(tt::TokenTree),
+ Expr(tt::Subtree<S>),
/// There are roughly two types of paths: paths in expression context, where a
/// separator `::` between an identifier and its following generic argument list
/// is mandatory, and paths in type context, where `::` can be omitted.
@@ -132,5 +140,5 @@ enum Fragment {
/// and is trasncribed as an expression-context path, verbatim transcription
/// would cause a syntax error. We need to fix it up just before transcribing;
/// see `transcriber::fix_up_and_push_path_tt()`.
- Path(tt::TokenTree),
+ Path(tt::Subtree<S>),
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 1471af98b..012b02a3f 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -63,21 +63,21 @@ use std::rc::Rc;
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
+use tt::Span;
use crate::{
expander::{Binding, Bindings, ExpandResult, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
- tt,
tt_iter::TtIter,
ExpandError, MetaTemplate, ValueResult,
};
-impl Bindings {
+impl<S: Span> Bindings<S> {
fn push_optional(&mut self, name: &SmolStr) {
// FIXME: Do we have a better way to represent an empty token ?
// Insert an empty subtree for empty token
let tt =
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into();
+ tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
}
@@ -85,14 +85,14 @@ impl Bindings {
self.inner.insert(name.clone(), Binding::Empty);
}
- fn bindings(&self) -> impl Iterator<Item = &Binding> {
+ fn bindings(&self) -> impl Iterator<Item = &Binding<S>> {
self.inner.values()
}
}
-#[derive(Clone, Debug, Default, PartialEq, Eq)]
-pub(super) struct Match {
- pub(super) bindings: Bindings,
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct Match<S> {
+ pub(super) bindings: Bindings<S>,
/// We currently just keep the first error and count the rest to compare matches.
pub(super) err: Option<ExpandError>,
pub(super) err_count: usize,
@@ -102,7 +102,19 @@ pub(super) struct Match {
pub(super) bound_count: usize,
}
-impl Match {
+impl<S> Default for Match<S> {
+ fn default() -> Self {
+ Self {
+ bindings: Default::default(),
+ err: Default::default(),
+ err_count: Default::default(),
+ unmatched_tts: Default::default(),
+ bound_count: Default::default(),
+ }
+ }
+}
+
+impl<S> Match<S> {
fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take();
self.err = prev_err.or(Some(err));
@@ -111,12 +123,16 @@ impl Match {
}
/// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match {
+pub(super) fn match_<S: Span>(
+ pattern: &MetaTemplate<S>,
+ input: &tt::Subtree<S>,
+ is_2021: bool,
+) -> Match<S> {
let mut res = match_loop(pattern, input, is_2021);
res.bound_count = count(res.bindings.bindings());
return res;
- fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+ fn count<'a, S: 'a>(bindings: impl Iterator<Item = &'a Binding<S>>) -> usize {
bindings
.map(|it| match it {
Binding::Fragment(_) => 1,
@@ -129,10 +145,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool)
}
#[derive(Debug, Clone)]
-enum BindingKind {
+enum BindingKind<S> {
Empty(SmolStr),
Optional(SmolStr),
- Fragment(SmolStr, Fragment),
+ Fragment(SmolStr, Fragment<S>),
Missing(SmolStr, MetaVarKind),
Nested(usize, usize),
}
@@ -146,13 +162,18 @@ enum LinkNode<T> {
Parent { idx: usize, len: usize },
}
-#[derive(Default)]
-struct BindingsBuilder {
- nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+struct BindingsBuilder<S> {
+ nodes: Vec<Vec<LinkNode<Rc<BindingKind<S>>>>>,
nested: Vec<Vec<LinkNode<usize>>>,
}
-impl BindingsBuilder {
+impl<S> Default for BindingsBuilder<S> {
+ fn default() -> Self {
+ Self { nodes: Default::default(), nested: Default::default() }
+ }
+}
+
+impl<S: Span> BindingsBuilder<S> {
fn alloc(&mut self) -> BindingsIdx {
let idx = self.nodes.len();
self.nodes.push(Vec::new());
@@ -189,7 +210,7 @@ impl BindingsBuilder {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
}
- fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
+ fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment<S>) {
self.nodes[idx.0]
.push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
}
@@ -210,11 +231,11 @@ impl BindingsBuilder {
idx.0 = new_idx;
}
- fn build(self, idx: &BindingsIdx) -> Bindings {
+ fn build(self, idx: &BindingsIdx) -> Bindings<S> {
self.build_inner(&self.nodes[idx.0])
}
- fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind>>]) -> Bindings {
+ fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<S>>>]) -> Bindings<S> {
let mut bindings = Bindings::default();
let mut nodes = Vec::new();
self.collect_nodes(link_nodes, &mut nodes);
@@ -264,7 +285,7 @@ impl BindingsBuilder {
&'a self,
id: usize,
len: usize,
- nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind>>]>,
+ nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind<S>>>]>,
) {
self.nested[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
@@ -272,7 +293,7 @@ impl BindingsBuilder {
});
}
- fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+ fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<S>>) {
let last = &self.nodes[idx];
let mut nested_refs: Vec<&[_]> = Vec::new();
self.nested[nested_idx].iter().for_each(|it| match *it {
@@ -283,7 +304,7 @@ impl BindingsBuilder {
nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
}
- fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) {
+ fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind<S>>) {
self.nodes[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
@@ -292,8 +313,8 @@ impl BindingsBuilder {
fn collect_nodes<'a>(
&'a self,
- link_nodes: &'a [LinkNode<Rc<BindingKind>>],
- nodes: &mut Vec<&'a BindingKind>,
+ link_nodes: &'a [LinkNode<Rc<BindingKind<S>>>],
+ nodes: &mut Vec<&'a BindingKind<S>>,
) {
link_nodes.iter().for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
@@ -303,22 +324,22 @@ impl BindingsBuilder {
}
#[derive(Debug, Clone)]
-struct MatchState<'t> {
+struct MatchState<'t, S> {
/// The position of the "dot" in this matcher
- dot: OpDelimitedIter<'t>,
+ dot: OpDelimitedIter<'t, S>,
/// Token subtree stack
/// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
/// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
/// that where the bottom of the stack is the outermost matcher.
- stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
+ stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>,
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
/// before we enter the repetition.
- up: Option<Box<MatchState<'t>>>,
+ up: Option<Box<MatchState<'t, S>>>,
/// The separator if we are in a repetition.
- sep: Option<Separator>,
+ sep: Option<Separator<S>>,
/// The KleeneOp of this sequence if we are in a repetition.
sep_kind: Option<RepeatKind>,
@@ -330,7 +351,7 @@ struct MatchState<'t> {
bindings: BindingsIdx,
/// Cached result of meta variable parsing
- meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
+ meta_result: Option<(TtIter<'t, S>, ExpandResult<Option<Fragment<S>>>)>,
/// Is error occurred in this state, will `poised` to "parent"
is_error: bool,
@@ -355,16 +376,16 @@ struct MatchState<'t> {
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `error_items`: the set of items in errors, used for error-resilient parsing
#[inline]
-fn match_loop_inner<'t>(
- src: TtIter<'t>,
- stack: &[TtIter<'t>],
- res: &mut Match,
- bindings_builder: &mut BindingsBuilder,
- cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
- bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
- next_items: &mut Vec<MatchState<'t>>,
- eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
- error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+fn match_loop_inner<'t, S: Span>(
+ src: TtIter<'t, S>,
+ stack: &[TtIter<'t, S>],
+ res: &mut Match<S>,
+ bindings_builder: &mut BindingsBuilder<S>,
+ cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ next_items: &mut Vec<MatchState<'t, S>>,
+ eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
is_2021: bool,
) {
macro_rules! try_push {
@@ -468,7 +489,7 @@ fn match_loop_inner<'t>(
if let Ok(subtree) = src.clone().expect_subtree() {
if subtree.delimiter.kind == delimiter.kind {
item.stack.push(item.dot);
- item.dot = tokens.iter_delimited(Some(delimiter));
+ item.dot = tokens.iter_delimited(Some(*delimiter));
cur_items.push(item);
}
}
@@ -587,9 +608,9 @@ fn match_loop_inner<'t>(
}
}
-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match {
+fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
let mut src = TtIter::new(src);
- let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
+ let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_recover_item = None;
@@ -736,16 +757,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match
}
}
-fn match_meta_var(
+fn match_meta_var<S: Span>(
kind: MetaVarKind,
- input: &mut TtIter<'_>,
+ input: &mut TtIter<'_, S>,
is_2021: bool,
-) -> ExpandResult<Option<Fragment>> {
+) -> ExpandResult<Option<Fragment<S>>> {
let fragment = match kind {
MetaVarKind::Path => {
return input
.expect_fragment(parser::PrefixEntryPoint::Path)
- .map(|it| it.map(Fragment::Path));
+ .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
}
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -771,9 +792,21 @@ fn match_meta_var(
}
_ => {}
};
- return input
- .expect_fragment(parser::PrefixEntryPoint::Expr)
- .map(|tt| tt.map(Fragment::Expr));
+ return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
+ tt.map(|tt| match tt {
+ tt::TokenTree::Leaf(leaf) => tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![leaf.into()],
+ },
+ tt::TokenTree::Subtree(mut s) => {
+ if s.delimiter.kind == tt::DelimiterKind::Invisible {
+ s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+ }
+ s
+ }
+ })
+ .map(Fragment::Expr)
+ });
}
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
let tt_result = match kind {
@@ -796,7 +829,7 @@ fn match_meta_var(
match neg {
None => lit.into(),
Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![neg, lit.into()],
}),
}
@@ -811,7 +844,7 @@ fn match_meta_var(
input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
}
-fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
+fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate<S>) {
for op in pattern.iter() {
match op {
Op::Var { name, .. } => collector_fun(name.clone()),
@@ -824,38 +857,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
}
}
}
-impl MetaTemplate {
- fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
+impl<S: Span> MetaTemplate<S> {
+ fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
OpDelimitedIter {
inner: &self.0,
idx: 0,
- delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED),
+ delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
}
}
}
#[derive(Debug, Clone, Copy)]
-enum OpDelimited<'a> {
- Op(&'a Op),
+enum OpDelimited<'a, S> {
+ Op(&'a Op<S>),
Open,
Close,
}
#[derive(Debug, Clone, Copy)]
-struct OpDelimitedIter<'a> {
- inner: &'a [Op],
- delimited: &'a tt::Delimiter,
+struct OpDelimitedIter<'a, S> {
+ inner: &'a [Op<S>],
+ delimited: tt::Delimiter<S>,
idx: usize,
}
-impl<'a> OpDelimitedIter<'a> {
+impl<'a, S: Span> OpDelimitedIter<'a, S> {
fn is_eof(&self) -> bool {
let len = self.inner.len()
+ if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
self.idx >= len
}
- fn peek(&self) -> Option<OpDelimited<'a>> {
+ fn peek(&self) -> Option<OpDelimited<'a, S>> {
match self.delimited.kind {
tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op),
_ => match self.idx {
@@ -871,8 +904,8 @@ impl<'a> OpDelimitedIter<'a> {
}
}
-impl<'a> Iterator for OpDelimitedIter<'a> {
- type Item = OpDelimited<'a>;
+impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> {
+ type Item = OpDelimited<'a, S>;
fn next(&mut self) -> Option<Self::Item> {
let res = self.peek();
@@ -888,8 +921,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
-impl TtIter<'_> {
- fn expect_separator(&mut self, separator: &Separator) -> bool {
+impl<S: Span> TtIter<'_, S> {
+ fn expect_separator(&mut self, separator: &Separator<S>) -> bool {
let mut fork = self.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@@ -919,7 +952,7 @@ impl TtIter<'_> {
ok
}
- fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+ fn expect_tt(&mut self) -> Result<tt::TokenTree<S>, ()> {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
if punct.char == '\'' {
self.expect_lifetime()
@@ -927,7 +960,7 @@ impl TtIter<'_> {
let puncts = self.expect_glued_punct()?;
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
Ok(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees,
}))
}
@@ -936,7 +969,7 @@ impl TtIter<'_> {
}
}
- fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+ fn expect_lifetime(&mut self) -> Result<tt::TokenTree<S>, ()> {
let punct = self.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
@@ -944,7 +977,7 @@ impl TtIter<'_> {
let ident = self.expect_ident_or_underscore()?;
Ok(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
@@ -953,7 +986,7 @@ impl TtIter<'_> {
.into())
}
- fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
+ fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<S>> {
let mut fork = self.clone();
match fork.expect_char(c) {
Ok(_) => {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index cdac2f1e3..7a3e8653c 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -2,31 +2,29 @@
//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
use syntax::SmolStr;
+use tt::{Delimiter, Span};
use crate::{
expander::{Binding, Bindings, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
- tt::{self, Delimiter},
CountError, ExpandError, ExpandResult, MetaTemplate,
};
-impl Bindings {
- fn contains(&self, name: &str) -> bool {
- self.inner.contains_key(name)
- }
-
- fn get(&self, name: &str) -> Result<&Binding, ExpandError> {
+impl<S: Span> Bindings<S> {
+ fn get(&self, name: &str) -> Result<&Binding<S>, ExpandError> {
match self.inner.get(name) {
Some(binding) => Ok(binding),
- None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))),
+ None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))),
}
}
fn get_fragment(
&self,
name: &str,
+ mut span: S,
nesting: &mut [NestingState],
- ) -> Result<Fragment, ExpandError> {
+ marker: impl Fn(&mut S),
+ ) -> Result<Fragment<S>, ExpandError> {
macro_rules! binding_err {
($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
}
@@ -48,54 +46,75 @@ impl Bindings {
};
}
match b {
- Binding::Fragment(it) => Ok(it.clone()),
- // emit some reasonable default expansion for missing bindings,
- // this gives better recovery than emitting the `$fragment-name` verbatim
- Binding::Missing(it) => Ok(match it {
- MetaVarKind::Stmt => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
- span: tt::TokenId::unspecified(),
- char: ';',
- spacing: tt::Spacing::Alone,
- })))
- }
- MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => {
+ let tt::Subtree { delimiter, token_trees } = sub;
+ marker(&mut span);
+ let subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
- kind: tt::DelimiterKind::Brace,
+ // FIXME split span
+ open: span,
+ close: span,
+ kind: delimiter.kind,
},
- token_trees: vec![],
- })),
- // FIXME: Meta and Item should get proper defaults
- MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
- Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: token_trees.clone(),
+ };
+ Ok(match f {
+ Fragment::Tokens(_) => unreachable!(),
+ Fragment::Expr(_) => Fragment::Expr,
+ Fragment::Path(_) => Fragment::Path,
+ }(subtree))
+ }
+ Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()),
+ // emit some reasonable default expansion for missing bindings,
+ // this gives better recovery than emitting the `$fragment-name` verbatim
+ Binding::Missing(it) => Ok({
+ marker(&mut span);
+ match it {
+ MetaVarKind::Stmt => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ span,
+ char: ';',
+ spacing: tt::Spacing::Alone,
+ })))
+ }
+ MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: span,
+ close: span,
+ kind: tt::DelimiterKind::Brace,
+ },
token_trees: vec![],
- }))
- }
- MetaVarKind::Path
- | MetaVarKind::Ty
- | MetaVarKind::Pat
- | MetaVarKind::PatParam
- | MetaVarKind::Expr
- | MetaVarKind::Ident => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("missing"),
- span: tt::TokenId::unspecified(),
- })))
- }
- MetaVarKind::Lifetime => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("'missing"),
- span: tt::TokenId::unspecified(),
- })))
- }
- MetaVarKind::Literal => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("\"missing\""),
- span: tt::TokenId::unspecified(),
- })))
+ })),
+ // FIXME: Meta and Item should get proper defaults
+ MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
+ Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: vec![],
+ }))
+ }
+ MetaVarKind::Path
+ | MetaVarKind::Ty
+ | MetaVarKind::Pat
+ | MetaVarKind::PatParam
+ | MetaVarKind::Expr
+ | MetaVarKind::Ident => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("missing"),
+ span,
+ })))
+ }
+ MetaVarKind::Lifetime => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("'missing"),
+ span,
+ })))
+ }
+ MetaVarKind::Literal => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("\"missing\""),
+ span,
+ })))
+ }
}
}),
Binding::Nested(_) => {
@@ -108,13 +127,14 @@ impl Bindings {
}
}
-pub(super) fn transcribe(
- template: &MetaTemplate,
- bindings: &Bindings,
-) -> ExpandResult<tt::Subtree> {
+pub(super) fn transcribe<S: Span>(
+ template: &MetaTemplate<S>,
+ bindings: &Bindings<S>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<tt::Subtree<S>> {
let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
- let mut arena: Vec<tt::TokenTree> = Vec::new();
- expand_subtree(&mut ctx, template, None, &mut arena)
+ let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
+ expand_subtree(&mut ctx, template, None, &mut arena, marker)
}
#[derive(Debug)]
@@ -129,50 +149,75 @@ struct NestingState {
}
#[derive(Debug)]
-struct ExpandCtx<'a> {
- bindings: &'a Bindings,
+struct ExpandCtx<'a, S> {
+ bindings: &'a Bindings<S>,
nesting: Vec<NestingState>,
}
-fn expand_subtree(
- ctx: &mut ExpandCtx<'_>,
- template: &MetaTemplate,
- delimiter: Option<Delimiter>,
- arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<tt::Subtree> {
+fn expand_subtree<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ template: &MetaTemplate<S>,
+ delimiter: Option<Delimiter<S>>,
+ arena: &mut Vec<tt::TokenTree<S>>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<tt::Subtree<S>> {
// remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
let start_elements = arena.len();
let mut err = None;
'ops: for op in template.iter() {
match op {
- Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()),
- Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()),
+ Op::Literal(it) => arena.push(
+ tt::Leaf::from({
+ let mut it = it.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ ),
+ Op::Ident(it) => arena.push(
+ tt::Leaf::from({
+ let mut it = it.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ ),
Op::Punct(puncts) => {
for punct in puncts {
- arena.push(tt::Leaf::from(*punct).into());
+ arena.push(
+ tt::Leaf::from({
+ let mut it = punct.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ );
}
}
Op::Subtree { tokens, delimiter } => {
+ let mut delimiter = *delimiter;
+ marker(&mut delimiter.open);
+ marker(&mut delimiter.close);
let ExpandResult { value: tt, err: e } =
- expand_subtree(ctx, tokens, Some(*delimiter), arena);
+ expand_subtree(ctx, tokens, Some(delimiter), arena, marker);
err = err.or(e);
arena.push(tt.into());
}
Op::Var { name, id, .. } => {
- let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
+ let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
err = err.or(e);
push_fragment(arena, fragment);
}
Op::Repeat { tokens: subtree, kind, separator } => {
let ExpandResult { value: fragment, err: e } =
- expand_repeat(ctx, subtree, *kind, separator, arena);
+ expand_repeat(ctx, subtree, *kind, separator, arena, marker);
err = err.or(e);
push_fragment(arena, fragment)
}
Op::Ignore { name, id } => {
// Expand the variable, but ignore the result. This registers the repetition count.
// FIXME: Any emitted errors are dropped.
- expand_var(ctx, name, *id);
+ expand_var(ctx, name, *id, marker);
}
Op::Index { depth } => {
let index =
@@ -180,7 +225,8 @@ fn expand_subtree(
arena.push(
tt::Leaf::Literal(tt::Literal {
text: index.to_string().into(),
- span: tt::TokenId::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -239,7 +285,8 @@ fn expand_subtree(
arena.push(
tt::Leaf::Literal(tt::Literal {
text: c.to_string().into(),
- span: tt::TokenId::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -250,60 +297,70 @@ fn expand_subtree(
let tts = arena.drain(start_elements..).collect();
ExpandResult {
value: tt::Subtree {
- delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified),
+ delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible),
token_trees: tts,
},
err,
}
}
-fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
+fn expand_var<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ v: &SmolStr,
+ id: S,
+ marker: impl Fn(&mut S),
+) -> ExpandResult<Fragment<S>> {
// We already handle $crate case in mbe parser
debug_assert!(v != "crate");
- if !ctx.bindings.contains(v) {
- // Note that it is possible to have a `$var` inside a macro which is not bound.
- // For example:
- // ```
- // macro_rules! foo {
- // ($a:ident, $b:ident, $c:tt) => {
- // macro_rules! bar {
- // ($bi:ident) => {
- // fn $bi() -> u8 {$c}
- // }
- // }
- // }
- // ```
- // We just treat it a normal tokens
- let tt = tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![
- tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
- .into(),
- tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
- ],
+ match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
+ Ok(it) => ExpandResult::ok(it),
+ Err(ExpandError::UnresolvedBinding(_)) => {
+ // Note that it is possible to have a `$var` inside a macro which is not bound.
+ // For example:
+ // ```
+ // macro_rules! foo {
+ // ($a:ident, $b:ident, $c:tt) => {
+ // macro_rules! bar {
+ // ($bi:ident) => {
+ // fn $bi() -> u8 {$c}
+ // }
+ // }
+ // }
+ // ```
+ // We just treat it a normal tokens
+ let tt = tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: vec![
+ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
+ .into(),
+ tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
+ ],
+ }
+ .into();
+ ExpandResult::ok(Fragment::Tokens(tt))
}
- .into();
- ExpandResult::ok(Fragment::Tokens(tt))
- } else {
- ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else(
- |e| ExpandResult {
- value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
- err: Some(e),
- },
- ExpandResult::ok,
- )
+ Err(e) => ExpandResult {
+ value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
+ // FIXME
+ open: S::DUMMY,
+ // FIXME
+ close: S::DUMMY,
+ }))),
+ err: Some(e),
+ },
}
}
-fn expand_repeat(
- ctx: &mut ExpandCtx<'_>,
- template: &MetaTemplate,
+fn expand_repeat<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ template: &MetaTemplate<S>,
kind: RepeatKind,
- separator: &Option<Separator>,
- arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<Fragment> {
- let mut buf: Vec<tt::TokenTree> = Vec::new();
+ separator: &Option<Separator<S>>,
+ arena: &mut Vec<tt::TokenTree<S>>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<Fragment<S>> {
+ let mut buf: Vec<tt::TokenTree<S>> = Vec::new();
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a proper macro-by-example implementation
@@ -313,7 +370,8 @@ fn expand_repeat(
let mut err = None;
loop {
- let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+ let ExpandResult { value: mut t, err: e } =
+ expand_subtree(ctx, template, None, arena, marker);
let nesting_state = ctx.nesting.last_mut().unwrap();
if nesting_state.at_end || !nesting_state.hit {
break;
@@ -330,8 +388,11 @@ fn expand_repeat(
);
return ExpandResult {
value: Fragment::Tokens(
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }
- .into(),
+ tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![],
+ }
+ .into(),
),
err: Some(ExpandError::LimitExceeded),
};
@@ -342,7 +403,7 @@ fn expand_repeat(
continue;
}
- t.delimiter = tt::Delimiter::unspecified();
+ t.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
push_subtree(&mut buf, t);
if let Some(sep) = separator {
@@ -376,7 +437,7 @@ fn expand_repeat(
// Check if it is a single token subtree without any delimiter
// e.g {Delimiter:None> ['>'] /Delimiter:None>}
- let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into();
+ let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into();
if RepeatKind::OneOrMore == kind && counter == 0 {
return ExpandResult {
@@ -387,25 +448,18 @@ fn expand_repeat(
ExpandResult { value: Fragment::Tokens(tt), err }
}
-fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
match fragment {
Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
- Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
- if tt.delimiter.kind == tt::DelimiterKind::Invisible {
- tt.delimiter = tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- kind: tt::DelimiterKind::Parenthesis,
- };
- }
- buf.push(tt.into())
+ Fragment::Expr(sub) => {
+ push_subtree(buf, sub);
}
- Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
- Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt),
+ Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
+ Fragment::Tokens(tt) => buf.push(tt),
}
}
-fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
+fn push_subtree<S>(buf: &mut Vec<tt::TokenTree<S>>, tt: tt::Subtree<S>) {
match tt.delimiter.kind {
tt::DelimiterKind::Invisible => buf.extend(tt.token_trees),
_ => buf.push(tt.into()),
@@ -415,7 +469,7 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
/// Inserts the path separator `::` between an identifier and its following generic
/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
/// we need this fixup.
-fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
+fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt::Subtree<S>) {
stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
let mut prev_was_ident = false;
// Note that we only need to fix up the top-level `TokenTree`s because the
@@ -432,7 +486,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
tt::Leaf::Punct(tt::Punct {
char: ':',
spacing: tt::Spacing::Joint,
- span: tt::Span::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -440,7 +495,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
tt::Leaf::Punct(tt::Punct {
char: ':',
spacing: tt::Spacing::Alone,
- span: tt::Span::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -453,9 +509,9 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
/// defined by the metavar expression.
-fn count(
- ctx: &ExpandCtx<'_>,
- binding: &Binding,
+fn count<S>(
+ ctx: &ExpandCtx<'_, S>,
+ binding: &Binding<S>,
our_depth: usize,
count_depth: Option<usize>,
) -> Result<usize, CountError> {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 9d886a1c9..933179858 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -3,10 +3,10 @@
//! interface, although it contains some code to bridge `SyntaxNode`s and
//! `TokenTree`s as well!
//!
-//! The tes for this functionality live in another crate:
+//! The tests for this functionality live in another crate:
//! `hir_def::macro_expansion_tests::mbe`.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod parser;
mod expander;
@@ -18,8 +18,8 @@ mod to_parser_input;
mod benchmark;
mod token_map;
-use ::tt::token_id as tt;
use stdx::impl_from;
+use tt::Span;
use std::fmt;
@@ -28,19 +28,21 @@ use crate::{
tt_iter::TtIter,
};
-pub use self::tt::{Delimiter, DelimiterKind, Punct};
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use ::parser::TopEntryPoint;
+pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext};
pub use crate::{
syntax_bridge::{
- parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map,
- syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree,
- syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
- SyntheticTokenId,
+ parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span,
+ syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node,
+ SpanMapper,
},
- token_map::TokenMap,
+ token_map::SpanMap,
};
+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ParseError {
UnexpectedToken(Box<str>),
@@ -73,6 +75,7 @@ impl fmt::Display for ParseError {
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
BindingError(Box<Box<str>>),
+ UnresolvedBinding(Box<Box<str>>),
LeftoverTokens,
ConversionError,
LimitExceeded,
@@ -95,6 +98,10 @@ impl fmt::Display for ExpandError {
ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
ExpandError::BindingError(e) => f.write_str(e),
+ ExpandError::UnresolvedBinding(binding) => {
+ f.write_str("could not find binding ")?;
+ f.write_str(binding)
+ }
ExpandError::ConversionError => f.write_str("could not convert tokens"),
ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
@@ -124,10 +131,8 @@ impl fmt::Display for CountError {
/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct DeclarativeMacro {
- rules: Box<[Rule]>,
- /// Highest id of the token we have in TokenMap
- shift: Shift,
+pub struct DeclarativeMacro<S> {
+ rules: Box<[Rule<S>]>,
// This is used for correctly determining the behavior of the pat fragment
// FIXME: This should be tracked by hygiene of the fragment identifier!
is_2021: bool,
@@ -135,96 +140,18 @@ pub struct DeclarativeMacro {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-struct Rule {
- lhs: MetaTemplate,
- rhs: MetaTemplate,
+struct Rule<S> {
+ lhs: MetaTemplate<S>,
+ rhs: MetaTemplate<S>,
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Shift(u32);
-
-impl Shift {
- pub fn new(tt: &tt::Subtree) -> Shift {
- // Note that TokenId is started from zero,
- // We have to add 1 to prevent duplication.
- let value = max_id(tt).map_or(0, |it| it + 1);
- return Shift(value);
-
- // Find the max token id inside a subtree
- fn max_id(subtree: &tt::Subtree) -> Option<u32> {
- let filter =
- |tt: &_| match tt {
- tt::TokenTree::Subtree(subtree) => {
- let tree_id = max_id(subtree);
- if subtree.delimiter.open != tt::TokenId::unspecified() {
- Some(tree_id.map_or(subtree.delimiter.open.0, |t| {
- t.max(subtree.delimiter.open.0)
- }))
- } else {
- tree_id
- }
- }
- tt::TokenTree::Leaf(leaf) => {
- let &(tt::Leaf::Ident(tt::Ident { span, .. })
- | tt::Leaf::Punct(tt::Punct { span, .. })
- | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf;
-
- (span != tt::TokenId::unspecified()).then_some(span.0)
- }
- };
- subtree.token_trees.iter().filter_map(filter).max()
- }
- }
-
- /// Shift given TokenTree token id
- pub fn shift_all(self, tt: &mut tt::Subtree) {
- for t in &mut tt.token_trees {
- match t {
- tt::TokenTree::Leaf(
- tt::Leaf::Ident(tt::Ident { span, .. })
- | tt::Leaf::Punct(tt::Punct { span, .. })
- | tt::Leaf::Literal(tt::Literal { span, .. }),
- ) => *span = self.shift(*span),
- tt::TokenTree::Subtree(tt) => {
- tt.delimiter.open = self.shift(tt.delimiter.open);
- tt.delimiter.close = self.shift(tt.delimiter.close);
- self.shift_all(tt)
- }
- }
- }
- }
-
- pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
- if id == tt::TokenId::unspecified() {
- id
- } else {
- tt::TokenId(id.0 + self.0)
- }
- }
-
- pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
- id.0.checked_sub(self.0).map(tt::TokenId)
- }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Origin {
- Def,
- Call,
-}
-
-impl DeclarativeMacro {
- pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
- DeclarativeMacro {
- rules: Box::default(),
- shift: Shift(0),
- is_2021,
- err: Some(Box::new(err)),
- }
+impl<S: Span> DeclarativeMacro<S> {
+ pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro<S> {
+ DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) }
}
/// The old, `macro_rules! m {}` flavor.
- pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
+ pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
@@ -256,11 +183,11 @@ impl DeclarativeMacro {
}
}
- DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
+ DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
}
/// The new, unstable `macro m {}` flavor.
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
+ pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
let mut err = None;
@@ -307,36 +234,24 @@ impl DeclarativeMacro {
}
}
- DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
- }
-
- pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
- self.shift.shift_all(&mut tt);
- expander::expand_rules(&self.rules, &tt, self.is_2021)
+ DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
}
pub fn err(&self) -> Option<&ParseError> {
self.err.as_deref()
}
- pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- self.shift.shift(id)
- }
-
- pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
- match self.shift.unshift(id) {
- Some(id) => (id, Origin::Call),
- None => (id, Origin::Def),
- }
- }
-
- pub fn shift(&self) -> Shift {
- self.shift
+ pub fn expand(
+ &self,
+ tt: &tt::Subtree<S>,
+ marker: impl Fn(&mut S) + Copy,
+ ) -> ExpandResult<tt::Subtree<S>> {
+ expander::expand_rules(&self.rules, &tt, marker, self.is_2021)
}
}
-impl Rule {
- fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result<Self, ParseError> {
+impl<S: Span> Rule<S> {
+ fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result<Self, ParseError> {
let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
if expect_arrow {
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@@ -351,7 +266,7 @@ impl Rule {
}
}
-fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
+fn validate<S: Span>(pattern: &MetaTemplate<S>) -> Result<(), ParseError> {
for op in pattern.iter() {
match op {
Op::Subtree { tokens, .. } => validate(tokens)?,
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
index 7a143e746..00ba35377 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -3,8 +3,9 @@
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
+use tt::Span;
-use crate::{tt, tt_iter::TtIter, ParseError};
+use crate::{tt_iter::TtIter, ParseError};
/// Consider
///
@@ -20,22 +21,22 @@ use crate::{tt, tt_iter::TtIter, ParseError};
/// Stuff to the right is a [`MetaTemplate`] template which is used to produce
/// output.
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
+pub(crate) struct MetaTemplate<S>(pub(crate) Box<[Op<S>]>);
-impl MetaTemplate {
- pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+impl<S: Span> MetaTemplate<S> {
+ pub(crate) fn parse_pattern(pattern: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(pattern, Mode::Pattern)
}
- pub(crate) fn parse_template(template: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ pub(crate) fn parse_template(template: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(template, Mode::Template)
}
- pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &Op<S>> {
self.0.iter()
}
- fn parse(tt: &tt::Subtree, mode: Mode) -> Result<MetaTemplate, ParseError> {
+ fn parse(tt: &tt::Subtree<S>, mode: Mode) -> Result<Self, ParseError> {
let mut src = TtIter::new(tt);
let mut res = Vec::new();
@@ -49,16 +50,16 @@ impl MetaTemplate {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) enum Op {
- Var { name: SmolStr, kind: Option<MetaVarKind>, id: tt::TokenId },
- Ignore { name: SmolStr, id: tt::TokenId },
+pub(crate) enum Op<S> {
+ Var { name: SmolStr, kind: Option<MetaVarKind>, id: S },
+ Ignore { name: SmolStr, id: S },
Index { depth: usize },
Count { name: SmolStr, depth: Option<usize> },
- Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
- Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter },
- Literal(tt::Literal),
- Punct(SmallVec<[tt::Punct; 3]>),
- Ident(tt::Ident),
+ Repeat { tokens: MetaTemplate<S>, kind: RepeatKind, separator: Option<Separator<S>> },
+ Subtree { tokens: MetaTemplate<S>, delimiter: tt::Delimiter<S> },
+ Literal(tt::Literal<S>),
+ Punct(SmallVec<[tt::Punct<S>; 3]>),
+ Ident(tt::Ident<S>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -87,15 +88,15 @@ pub(crate) enum MetaVarKind {
}
#[derive(Clone, Debug, Eq)]
-pub(crate) enum Separator {
- Literal(tt::Literal),
- Ident(tt::Ident),
- Puncts(SmallVec<[tt::Punct; 3]>),
+pub(crate) enum Separator<S> {
+ Literal(tt::Literal<S>),
+ Ident(tt::Ident<S>),
+ Puncts(SmallVec<[tt::Punct<S>; 3]>),
}
// Note that when we compare a Separator, we just care about its textual value.
-impl PartialEq for Separator {
- fn eq(&self, other: &Separator) -> bool {
+impl<S> PartialEq for Separator<S> {
+ fn eq(&self, other: &Separator<S>) -> bool {
use Separator::*;
match (self, other) {
@@ -117,11 +118,11 @@ enum Mode {
Template,
}
-fn next_op(
- first_peeked: &tt::TokenTree,
- src: &mut TtIter<'_>,
+fn next_op<S: Span>(
+ first_peeked: &tt::TokenTree<S>,
+ src: &mut TtIter<'_, S>,
mode: Mode,
-) -> Result<Op, ParseError> {
+) -> Result<Op<S>, ParseError> {
let res = match first_peeked {
tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
src.next().expect("first token already peeked");
@@ -212,7 +213,10 @@ fn next_op(
Ok(res)
}
-fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<MetaVarKind>, ParseError> {
+fn eat_fragment_kind<S: Span>(
+ src: &mut TtIter<'_, S>,
+ mode: Mode,
+) -> Result<Option<MetaVarKind>, ParseError> {
if let Mode::Pattern = mode {
src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
let ident = src
@@ -240,11 +244,13 @@ fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<MetaVarK
Ok(None)
}
-fn is_boolean_literal(lit: &tt::Literal) -> bool {
+fn is_boolean_literal<S>(lit: &tt::Literal<S>) -> bool {
matches!(lit.text.as_str(), "true" | "false")
}
-fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
+fn parse_repeat<S: Span>(
+ src: &mut TtIter<'_, S>,
+) -> Result<(Option<Separator<S>>, RepeatKind), ParseError> {
let mut separator = Separator::Puncts(SmallVec::new());
for tt in src {
let tt = match tt {
@@ -281,7 +287,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
Err(ParseError::InvalidRepeat)
}
-fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
+fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> {
let func = src.expect_ident()?;
let args = src.expect_subtree()?;
@@ -314,7 +320,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
Ok(op)
}
-fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
+fn parse_depth<S: Span>(src: &mut TtIter<'_, S>) -> Result<usize, ()> {
if src.len() == 0 {
Ok(0)
} else if let tt::Leaf::Literal(lit) = src.expect_literal()? {
@@ -325,7 +331,7 @@ fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
}
}
-fn try_eat_comma(src: &mut TtIter<'_>) -> bool {
+fn try_eat_comma<S: Span>(src: &mut TtIter<'_, S>) -> bool {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) {
let _ = src.next();
return true;
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 7b9bb61e6..b89bfd74a 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -1,98 +1,102 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
-use rustc_hash::FxHashMap;
-use stdx::{always, non_empty_vec::NonEmptyVec};
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::{never, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
-
-use crate::{
- to_parser_input::to_parser_input,
- tt::{
- self,
- buffer::{Cursor, TokenBuffer},
- },
- tt_iter::TtIter,
- TokenMap,
+use tt::{
+ buffer::{Cursor, TokenBuffer},
+ Span, SpanData, SyntaxContext,
};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap};
+
#[cfg(test)]
mod tests;
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
- let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
- node,
- Default::default(),
- 0,
- Default::default(),
- Default::default(),
- );
- (subtree, token_map)
+pub trait SpanMapper<S: Span> {
+ fn span_for(&self, range: TextRange) -> S;
}
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_tree_with_modifications(
- node: &SyntaxNode,
- existing_token_map: TokenMap,
- next_id: u32,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (tt::Subtree, TokenMap, u32) {
- let global_offset = node.text_range().start();
- let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
- let subtree = convert_tokens(&mut c);
- c.id_alloc.map.shrink_to_fit();
- always!(c.replace.is_empty(), "replace: {:?}", c.replace);
- always!(c.append.is_empty(), "append: {:?}", c.append);
- (subtree, c.id_alloc.map, c.id_alloc.next_id)
+impl<S: Span> SpanMapper<S> for SpanMap<S> {
+ fn span_for(&self, range: TextRange) -> S {
+ self.span_at(range.start())
+ }
}
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap {
- syntax_node_to_token_map_with_modifications(
- node,
- Default::default(),
- 0,
- Default::default(),
- Default::default(),
- )
- .0
+impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
+ fn span_for(&self, range: TextRange) -> S {
+ SM::span_for(self, range)
+ }
}
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_map_with_modifications(
- node: &SyntaxNode,
- existing_token_map: TokenMap,
- next_id: u32,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (TokenMap, u32) {
- let global_offset = node.text_range().start();
- let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
- collect_tokens(&mut c);
- c.id_alloc.map.shrink_to_fit();
- always!(c.replace.is_empty(), "replace: {:?}", c.replace);
- always!(c.append.is_empty(), "append: {:?}", c.append);
- (c.id_alloc.map, c.id_alloc.next_id)
+/// Dummy things for testing where spans don't matter.
+pub(crate) mod dummy_test_span_utils {
+ use super::*;
+
+ pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+ pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY;
+
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub struct DummyTestSpanAnchor;
+ impl tt::SpanAnchor for DummyTestSpanAnchor {
+ const DUMMY: Self = DummyTestSpanAnchor;
+ }
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+ pub struct DummyTestSyntaxContext;
+ impl SyntaxContext for DummyTestSyntaxContext {
+ const DUMMY: Self = DummyTestSyntaxContext;
+ }
+
+ pub struct DummyTestSpanMap;
+
+ impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+ fn span_for(
+ &self,
+ range: syntax::TextRange,
+ ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+ tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+ }
+ }
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct SyntheticTokenId(pub u32);
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans.
+pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
+ node: &SyntaxNode,
+ map: SpanMap,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+ SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+{
+ let mut c = Converter::new(node, map, Default::default(), Default::default());
+ convert_tokens(&mut c)
+}
-#[derive(Debug, Clone)]
-pub struct SyntheticToken {
- pub kind: SyntaxKind,
- pub text: SmolStr,
- pub range: TextRange,
- pub id: SyntheticTokenId,
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
+/// be injected or hidden from the output.
+pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
+ node: &SyntaxNode,
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+ remove: FxHashSet<SyntaxNode>,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+ SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
+ let mut c = Converter::new(node, map, append, remove);
+ convert_tokens(&mut c)
}
// The following items are what `rustc` macro can be parsed into :
@@ -107,10 +111,17 @@ pub struct SyntheticToken {
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
-pub fn token_tree_to_syntax_node(
- tt: &tt::Subtree,
+/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
+/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
+pub fn token_tree_to_syntax_node<Anchor, Ctx>(
+ tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap) {
+) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>)
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
let buffer = match tt {
tt::Subtree {
delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. },
@@ -137,29 +148,41 @@ pub fn token_tree_to_syntax_node(
tree_sink.finish()
}
-/// Convert a string to a `TokenTree`
-pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
+/// anchor with the given context.
+pub fn parse_to_token_tree<Anchor, Ctx>(
+ anchor: Anchor,
+ ctx: Ctx,
+ text: &str,
+) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
let lexed = parser::LexedStr::new(text);
if lexed.errors().next().is_some() {
return None;
}
+ let mut conv = RawConverter { lexed, pos: 0, anchor, ctx };
+ Some(convert_tokens(&mut conv))
+}
- let mut conv = RawConverter {
- lexed,
- pos: 0,
- id_alloc: TokenIdAlloc {
- map: Default::default(),
- global_offset: TextSize::default(),
- next_id: 0,
- },
- };
-
- let subtree = convert_tokens(&mut conv);
- Some((subtree, conv.id_alloc.map))
+/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
+pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+where
+ S: Span,
+{
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+ let mut conv = StaticRawConverter { lexed, pos: 0, span };
+ Some(convert_tokens(&mut conv))
}
/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
if tt.token_trees.is_empty() {
return Vec::new();
}
@@ -172,10 +195,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res.push(match expanded.value {
None => break,
- Some(tt @ tt::TokenTree::Leaf(_)) => {
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] }
- }
- Some(tt::TokenTree::Subtree(tt)) => tt,
+ Some(tt) => tt.subtree_or_wrap(),
});
let mut fork = iter.clone();
@@ -187,7 +207,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
if iter.peek_n(0).is_some() {
res.push(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: iter.cloned().collect(),
});
}
@@ -195,136 +215,118 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res
}
-fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
- struct StackEntry {
- subtree: tt::Subtree,
- idx: usize,
- open_range: TextRange,
- }
-
- let entry = StackEntry {
- subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
- // never used (delimiter is `None`)
- idx: !0,
- open_range: TextRange::empty(TextSize::of('.')),
- };
+fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
+where
+ C: TokenConverter<S>,
+ S: Span,
+{
+ let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
let mut stack = NonEmptyVec::new(entry);
- loop {
- let StackEntry { subtree, .. } = stack.last_mut();
- let result = &mut subtree.token_trees;
- let (token, range) = match conv.bump() {
- Some(it) => it,
- None => break,
- };
- let synth_id = token.synthetic_id(conv);
-
- let kind = token.kind(conv);
- if kind == COMMENT {
- // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
- // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id();
- if let Some(tokens) = conv.convert_doc_comment(&token, next_id) {
- let id = conv.id_alloc().alloc(range, synth_id);
- debug_assert_eq!(id, next_id);
- result.extend(tokens);
- }
- continue;
- }
- let tt = if kind.is_punct() && kind != UNDERSCORE {
- if synth_id.is_none() {
- assert_eq!(range.len(), TextSize::of('.'));
- }
-
- let expected = match subtree.delimiter.kind {
- tt::DelimiterKind::Parenthesis => Some(T![')']),
- tt::DelimiterKind::Brace => Some(T!['}']),
- tt::DelimiterKind::Bracket => Some(T![']']),
- tt::DelimiterKind::Invisible => None,
- };
-
- if let Some(expected) = expected {
- if kind == expected {
- if let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, Some(range));
- stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+ while let Some((token, abs_range)) = conv.bump() {
+ let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
+
+ let tt = match token.as_leaf() {
+ Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+ None => match token.kind(conv) {
+ // Desugar doc comments into doc attributes
+ COMMENT => {
+ let span = conv.span_for(abs_range);
+ if let Some(tokens) = conv.convert_doc_comment(&token, span) {
+ result.extend(tokens);
}
continue;
}
- }
-
- let delim = match kind {
- T!['('] => Some(tt::DelimiterKind::Parenthesis),
- T!['{'] => Some(tt::DelimiterKind::Brace),
- T!['['] => Some(tt::DelimiterKind::Bracket),
- _ => None,
- };
+ kind if kind.is_punct() && kind != UNDERSCORE => {
+ let expected = match delimiter.kind {
+ tt::DelimiterKind::Parenthesis => Some(T![')']),
+ tt::DelimiterKind::Brace => Some(T!['}']),
+ tt::DelimiterKind::Bracket => Some(T![']']),
+ tt::DelimiterKind::Invisible => None,
+ };
+
+ // Current token is a closing delimiter that we expect, fix up the closing span
+ // and end the subtree here
+ if matches!(expected, Some(expected) if expected == kind) {
+ if let Some(mut subtree) = stack.pop() {
+ subtree.delimiter.close = conv.span_for(abs_range);
+ stack.last_mut().token_trees.push(subtree.into());
+ }
+ continue;
+ }
- if let Some(kind) = delim {
- let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
- let subtree = tt::Subtree {
- delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind },
- token_trees: vec![],
- };
- stack.push(StackEntry { subtree, idx, open_range: range });
- continue;
- }
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ // Start a new subtree
+ if let Some(kind) = delim {
+ let open = conv.span_for(abs_range);
+ stack.push(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open,
+ // will be overwritten on subtree close above
+ close: open,
+ kind,
+ },
+ token_trees: vec![],
+ });
+ continue;
+ }
- let spacing = match conv.peek().map(|next| next.kind(conv)) {
- Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
- _ => tt::Spacing::Alone,
- };
- let char = match token.to_char(conv) {
- Some(c) => c,
- None => {
- panic!("Token from lexer must be single char: token = {token:#?}");
+ let spacing = match conv.peek().map(|next| next.kind(conv)) {
+ Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
+ _ => tt::Spacing::Alone,
+ };
+ let Some(char) = token.to_char(conv) else {
+ panic!("Token from lexer must be single char: token = {token:#?}")
+ };
+ tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
+ .into()
}
- };
- tt::Leaf::from(tt::Punct {
- char,
- spacing,
- span: conv.id_alloc().alloc(range, synth_id),
- })
- .into()
- } else {
- macro_rules! make_leaf {
- ($i:ident) => {
- tt::$i {
- span: conv.id_alloc().alloc(range, synth_id),
- text: token.to_text(conv),
+ kind => {
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }
+ .into()
+ };
}
- .into()
- };
- }
- let leaf: tt::Leaf = match kind {
- T![true] | T![false] => make_leaf!(Ident),
- IDENT => make_leaf!(Ident),
- UNDERSCORE => make_leaf!(Ident),
- k if k.is_keyword() => make_leaf!(Ident),
- k if k.is_literal() => make_leaf!(Literal),
- LIFETIME_IDENT => {
- let char_unit = TextSize::of('\'');
- let r = TextRange::at(range.start(), char_unit);
- let apostrophe = tt::Leaf::from(tt::Punct {
- char: '\'',
- spacing: tt::Spacing::Joint,
- span: conv.id_alloc().alloc(r, synth_id),
- });
- result.push(apostrophe.into());
-
- let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
- let ident = tt::Leaf::from(tt::Ident {
- text: SmolStr::new(&token.to_text(conv)[1..]),
- span: conv.id_alloc().alloc(r, synth_id),
- });
- result.push(ident.into());
- continue;
- }
- _ => continue,
- };
+ let leaf: tt::Leaf<_> = match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let apostrophe = tt::Leaf::from(tt::Punct {
+ char: '\'',
+ spacing: tt::Spacing::Joint,
+ span: conv
+ .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
+ });
+ result.push(apostrophe.into());
+
+ let ident = tt::Leaf::from(tt::Ident {
+ text: SmolStr::new(&token.to_text(conv)[1..]),
+ span: conv.span_for(TextRange::new(
+ abs_range.start() + TextSize::of('\''),
+ abs_range.end(),
+ )),
+ });
+ result.push(ident.into());
+ continue;
+ }
+ _ => continue,
+ };
- leaf.into()
+ leaf.into()
+ }
+ },
};
+
result.push(tt);
}
@@ -334,10 +336,9 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
while let Some(entry) = stack.pop() {
let parent = stack.last_mut();
- conv.id_alloc().close_delim(entry.idx, None);
- let leaf: tt::Leaf = tt::Punct {
- span: conv.id_alloc().alloc(entry.open_range, None),
- char: match entry.subtree.delimiter.kind {
+ let leaf: tt::Leaf<_> = tt::Punct {
+ span: entry.delimiter.open,
+ char: match entry.delimiter.kind {
tt::DelimiterKind::Parenthesis => '(',
tt::DelimiterKind::Brace => '{',
tt::DelimiterKind::Bracket => '[',
@@ -346,11 +347,11 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
spacing: tt::Spacing::Alone,
}
.into();
- parent.subtree.token_trees.push(leaf.into());
- parent.subtree.token_trees.extend(entry.subtree.token_trees);
+ parent.token_trees.push(leaf.into());
+ parent.token_trees.extend(entry.token_trees);
}
- let subtree = stack.into_last().subtree;
+ let subtree = stack.into_last();
if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
first.clone()
} else {
@@ -358,111 +359,6 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
}
}
-fn collect_tokens<C: TokenConverter>(conv: &mut C) {
- struct StackEntry {
- idx: usize,
- open_range: TextRange,
- delimiter: tt::DelimiterKind,
- }
-
- let entry = StackEntry {
- delimiter: tt::DelimiterKind::Invisible,
- // never used (delimiter is `None`)
- idx: !0,
- open_range: TextRange::empty(TextSize::of('.')),
- };
- let mut stack = NonEmptyVec::new(entry);
-
- loop {
- let StackEntry { delimiter, .. } = stack.last_mut();
- let (token, range) = match conv.bump() {
- Some(it) => it,
- None => break,
- };
- let synth_id = token.synthetic_id(conv);
-
- let kind = token.kind(conv);
- if kind == COMMENT {
- // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
- // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id();
- if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) {
- let id = conv.id_alloc().alloc(range, synth_id);
- debug_assert_eq!(id, next_id);
- }
- continue;
- }
- if kind.is_punct() && kind != UNDERSCORE {
- if synth_id.is_none() {
- assert_eq!(range.len(), TextSize::of('.'));
- }
-
- let expected = match delimiter {
- tt::DelimiterKind::Parenthesis => Some(T![')']),
- tt::DelimiterKind::Brace => Some(T!['}']),
- tt::DelimiterKind::Bracket => Some(T![']']),
- tt::DelimiterKind::Invisible => None,
- };
-
- if let Some(expected) = expected {
- if kind == expected {
- if let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, Some(range));
- }
- continue;
- }
- }
-
- let delim = match kind {
- T!['('] => Some(tt::DelimiterKind::Parenthesis),
- T!['{'] => Some(tt::DelimiterKind::Brace),
- T!['['] => Some(tt::DelimiterKind::Bracket),
- _ => None,
- };
-
- if let Some(kind) = delim {
- let (_id, idx) = conv.id_alloc().open_delim(range, synth_id);
-
- stack.push(StackEntry { idx, open_range: range, delimiter: kind });
- continue;
- }
-
- conv.id_alloc().alloc(range, synth_id);
- } else {
- macro_rules! make_leaf {
- ($i:ident) => {{
- conv.id_alloc().alloc(range, synth_id);
- }};
- }
- match kind {
- T![true] | T![false] => make_leaf!(Ident),
- IDENT => make_leaf!(Ident),
- UNDERSCORE => make_leaf!(Ident),
- k if k.is_keyword() => make_leaf!(Ident),
- k if k.is_literal() => make_leaf!(Literal),
- LIFETIME_IDENT => {
- let char_unit = TextSize::of('\'');
- let r = TextRange::at(range.start(), char_unit);
- conv.id_alloc().alloc(r, synth_id);
-
- let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
- conv.id_alloc().alloc(r, synth_id);
- continue;
- }
- _ => continue,
- };
- };
-
- // If we get here, we've consumed all input tokens.
- // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
- // Merge them so we're left with one.
- while let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, None);
- conv.id_alloc().alloc(entry.open_range, None);
- }
- }
-}
-
fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!(
kind,
@@ -511,162 +407,126 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
text.into()
}
-fn convert_doc_comment(
+fn convert_doc_comment<S: Copy>(
token: &syntax::SyntaxToken,
- span: tt::TokenId,
-) -> Option<Vec<tt::TokenTree>> {
+ span: S,
+) -> Option<Vec<tt::TokenTree<S>>> {
cov_mark::hit!(test_meta_doc_comments);
let comment = ast::Comment::cast(token.clone())?;
let doc = comment.kind().doc?;
- // Make `doc="\" Comments\""
- let meta_tkns =
- vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)];
+ let mk_ident =
+ |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }));
- // Make `#![]`
- let mut token_trees = Vec::with_capacity(3);
- token_trees.push(mk_punct('#', span));
- if let ast::CommentPlacement::Inner = doc {
- token_trees.push(mk_punct('!', span));
- }
- token_trees.push(tt::TokenTree::from(tt::Subtree {
- delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
- token_trees: meta_tkns,
- }));
-
- return Some(token_trees);
-
- // Helper functions
- fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree {
- tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }))
- }
-
- fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree {
+ let mk_punct = |c: char| {
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
char: c,
spacing: tt::Spacing::Alone,
span,
}))
- }
+ };
- fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree {
+ let mk_doc_literal = |comment: &ast::Comment| {
let lit = tt::Literal { text: doc_comment_text(comment), span };
tt::TokenTree::from(tt::Leaf::from(lit))
- }
-}
-
-struct TokenIdAlloc {
- map: TokenMap,
- global_offset: TextSize,
- next_id: u32,
-}
-
-impl TokenIdAlloc {
- fn alloc(
- &mut self,
- absolute_range: TextRange,
- synthetic_id: Option<SyntheticTokenId>,
- ) -> tt::TokenId {
- let relative_range = absolute_range - self.global_offset;
- let token_id = tt::TokenId(self.next_id);
- self.next_id += 1;
- self.map.insert(token_id, relative_range);
- if let Some(id) = synthetic_id {
- self.map.insert_synthetic(token_id, id);
- }
- token_id
- }
+ };
- fn open_delim(
- &mut self,
- open_abs_range: TextRange,
- synthetic_id: Option<SyntheticTokenId>,
- ) -> (tt::TokenId, usize) {
- let token_id = tt::TokenId(self.next_id);
- self.next_id += 1;
- let idx = self.map.insert_delim(
- token_id,
- open_abs_range - self.global_offset,
- open_abs_range - self.global_offset,
- );
- if let Some(id) = synthetic_id {
- self.map.insert_synthetic(token_id, id);
- }
- (token_id, idx)
- }
+ // Make `doc="\" Comments\""
+ let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
- fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
- match close_abs_range {
- None => {
- self.map.remove_delim(idx);
- }
- Some(close) => {
- self.map.update_close_delim(idx, close - self.global_offset);
- }
- }
+ // Make `#![]`
+ let mut token_trees = Vec::with_capacity(3);
+ token_trees.push(mk_punct('#'));
+ if let ast::CommentPlacement::Inner = doc {
+ token_trees.push(mk_punct('!'));
}
+ token_trees.push(tt::TokenTree::from(tt::Subtree {
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+ token_trees: meta_tkns,
+ }));
- fn peek_next_id(&self) -> tt::TokenId {
- tt::TokenId(self.next_id)
- }
+ Some(token_trees)
}
/// A raw token (straight from lexer) converter
-struct RawConverter<'a> {
+struct RawConverter<'a, Anchor, Ctx> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ anchor: Anchor,
+ ctx: Ctx,
+}
+/// A raw token (straight from lexer) converter that gives every token the same span.
+struct StaticRawConverter<'a, S> {
lexed: parser::LexedStr<'a>,
pos: usize,
- id_alloc: TokenIdAlloc,
+ span: S,
}
-trait SrcToken<Ctx>: std::fmt::Debug {
+trait SrcToken<Ctx, S>: std::fmt::Debug {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
fn to_char(&self, ctx: &Ctx) -> Option<char>;
fn to_text(&self, ctx: &Ctx) -> SmolStr;
- fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
+ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+ None
+ }
}
-trait TokenConverter: Sized {
- type Token: SrcToken<Self>;
+trait TokenConverter<S>: Sized {
+ type Token: SrcToken<Self, S>;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: tt::TokenId,
- ) -> Option<Vec<tt::TokenTree>>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
- fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+ fn span_for(&self, range: TextRange) -> S;
}
-impl SrcToken<RawConverter<'_>> for usize {
- fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
+impl<Anchor, S, Ctx> SrcToken<RawConverter<'_, Anchor, Ctx>, S> for usize {
+ fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
- fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
+ fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr {
ctx.lexed.text(*self).into()
}
+}
- fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option<SyntheticTokenId> {
- None
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
+ fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
+ fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
+ fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
+ ctx.lexed.text(*self).into()
}
}
-impl TokenConverter for RawConverter<'_> {
+impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>>
+ for RawConverter<'_, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
type Token = usize;
- fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
+ fn convert_doc_comment(
+ &self,
+ &token: &usize,
+ span: SpanData<Anchor, Ctx>,
+ ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span)
}
@@ -678,7 +538,7 @@ impl TokenConverter for RawConverter<'_> {
let token = self.pos;
self.pos += 1;
let range = self.lexed.text_range(token);
- let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
Some((token, range))
}
@@ -690,137 +550,165 @@ impl TokenConverter for RawConverter<'_> {
Some(self.pos)
}
- fn id_alloc(&mut self) -> &mut TokenIdAlloc {
- &mut self.id_alloc
+ fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> {
+ SpanData { range, anchor: self.anchor, ctx: self.ctx }
}
}
-struct Converter {
- id_alloc: TokenIdAlloc,
+impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
+where
+ S: Span,
+{
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text), span)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn span_for(&self, _: TextRange) -> S {
+ self.span
+ }
+}
+
+struct Converter<SpanMap, S> {
current: Option<SyntaxToken>,
- current_synthetic: Vec<SyntheticToken>,
+ current_leafs: Vec<tt::Leaf<S>>,
preorder: PreorderWithTokens,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
+ /// Used to make the emitted text ranges in the spans relative to the span anchor.
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ remove: FxHashSet<SyntaxNode>,
}
-impl Converter {
+impl<SpanMap, S> Converter<SpanMap, S> {
fn new(
node: &SyntaxNode,
- global_offset: TextSize,
- existing_token_map: TokenMap,
- next_id: u32,
- mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- ) -> Converter {
- let range = node.text_range();
- let mut preorder = node.preorder_with_tokens();
- let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
- Converter {
- id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
- current: first,
- current_synthetic: synthetic,
- preorder,
- range,
- replace,
- append,
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ remove: FxHashSet<SyntaxNode>,
+ ) -> Self {
+ let mut this = Converter {
+ current: None,
+ preorder: node.preorder_with_tokens(),
+ range: node.text_range(),
punct_offset: None,
- }
- }
-
- fn next_token(
- preorder: &mut PreorderWithTokens,
- replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
- while let Some(ev) = preorder.next() {
- let ele = match ev {
- WalkEvent::Enter(ele) => ele,
- WalkEvent::Leave(ele) => {
- if let Some(mut v) = append.remove(&ele) {
- if !v.is_empty() {
- v.reverse();
- return (None, v);
- }
+ map,
+ append,
+ remove,
+ current_leafs: vec![],
+ };
+ let first = this.next_token();
+ this.current = first;
+ this
+ }
+
+ fn next_token(&mut self) -> Option<SyntaxToken> {
+ while let Some(ev) = self.preorder.next() {
+ match ev {
+ WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
+ WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+ self.preorder.skip_subtree();
+ if let Some(mut v) = self.append.remove(&n.into()) {
+ v.reverse();
+ self.current_leafs.extend(v);
+ return None;
}
- continue;
}
- };
- if let Some(mut v) = replace.remove(&ele) {
- preorder.skip_subtree();
- if !v.is_empty() {
- v.reverse();
- return (None, v);
+ WalkEvent::Enter(SyntaxElement::Node(_)) => (),
+ WalkEvent::Leave(ele) => {
+ if let Some(mut v) = self.append.remove(&ele) {
+ v.reverse();
+ self.current_leafs.extend(v);
+ return None;
+ }
}
}
- match ele {
- SyntaxElement::Token(t) => return (Some(t), Vec::new()),
- _ => {}
- }
}
- (None, Vec::new())
+ None
}
}
#[derive(Debug)]
-enum SynToken {
+enum SynToken<S> {
Ordinary(SyntaxToken),
- // FIXME is this supposed to be `Punct`?
- Punch(SyntaxToken, TextSize),
- Synthetic(SyntheticToken),
+ Punct { token: SyntaxToken, offset: usize },
+ Leaf(tt::Leaf<S>),
}
-impl SynToken {
- fn token(&self) -> Option<&SyntaxToken> {
+impl<S> SynToken<S> {
+ fn token(&self) -> &SyntaxToken {
match self {
- SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
- SynToken::Synthetic(_) => None,
+ SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
+ SynToken::Leaf(_) => unreachable!(),
}
}
}
-impl SrcToken<Converter> for SynToken {
- fn kind(&self, ctx: &Converter) -> SyntaxKind {
+impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
+ fn kind(&self, ctx: &Converter<SpanMap, S>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
- SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
- SynToken::Synthetic(token) => token.kind,
+ SynToken::Punct { .. } => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+ SynToken::Leaf(_) => {
+ never!();
+ SyntaxKind::ERROR
+ }
}
}
- fn to_char(&self, _ctx: &Converter) -> Option<char> {
+ fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
- SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
- SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
- SynToken::Synthetic(_) => None,
+ SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
+ SynToken::Leaf(_) => None,
}
}
- fn to_text(&self, _ctx: &Converter) -> SmolStr {
+ fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
match self {
- SynToken::Ordinary(token) => token.text().into(),
- SynToken::Punch(token, _) => token.text().into(),
- SynToken::Synthetic(token) => token.text.clone(),
+ SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
+ SynToken::Leaf(_) => {
+ never!();
+ "".into()
+ }
}
}
-
- fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
+ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
match self {
- SynToken::Synthetic(token) => Some(token.id),
- _ => None,
+ SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
+ SynToken::Leaf(it) => Some(it),
}
}
}
-impl TokenConverter for Converter {
- type Token = SynToken;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: tt::TokenId,
- ) -> Option<Vec<tt::TokenTree>> {
- convert_doc_comment(token.token()?, span)
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
+where
+ S: Span,
+ SpanMap: SpanMapper<S>,
+{
+ type Token = SynToken<S>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ convert_doc_comment(token.token(), span)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -830,34 +718,31 @@ impl TokenConverter for Converter {
let range = punct.text_range();
self.punct_offset = Some((punct.clone(), offset));
let range = TextRange::at(range.start() + offset, TextSize::of('.'));
- return Some((SynToken::Punch(punct, offset), range));
+ return Some((
+ SynToken::Punct { token: punct, offset: u32::from(offset) as usize },
+ range,
+ ));
}
}
- if let Some(synth_token) = self.current_synthetic.pop() {
- if self.current_synthetic.is_empty() {
- let (new_current, new_synth) =
- Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
- self.current = new_current;
- self.current_synthetic = new_synth;
+ if let Some(leaf) = self.current_leafs.pop() {
+ if self.current_leafs.is_empty() {
+ self.current = self.next_token();
}
- let range = synth_token.range;
- return Some((SynToken::Synthetic(synth_token), range));
+ return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
}
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
}
- let (new_current, new_synth) =
- Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
- self.current = new_current;
- self.current_synthetic = new_synth;
+
+ self.current = self.next_token();
let token = if curr.kind().is_punct() {
self.punct_offset = Some((curr.clone(), 0.into()));
let range = curr.text_range();
let range = TextRange::at(range.start(), TextSize::of('.'));
- (SynToken::Punch(curr, 0.into()), range)
+ (SynToken::Punct { token: curr, offset: 0 as usize }, range)
} else {
self.punct_offset = None;
let range = curr.text_range();
@@ -871,55 +756,55 @@ impl TokenConverter for Converter {
if let Some((punct, mut offset)) = self.punct_offset.clone() {
offset += TextSize::of('.');
if usize::from(offset) < punct.text().len() {
- return Some(SynToken::Punch(punct, offset));
+ return Some(SynToken::Punct { token: punct, offset: usize::from(offset) });
}
}
- if let Some(synth_token) = self.current_synthetic.last() {
- return Some(SynToken::Synthetic(synth_token.clone()));
- }
-
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
}
let token = if curr.kind().is_punct() {
- SynToken::Punch(curr, 0.into())
+ SynToken::Punct { token: curr, offset: 0 as usize }
} else {
SynToken::Ordinary(curr)
};
Some(token)
}
- fn id_alloc(&mut self) -> &mut TokenIdAlloc {
- &mut self.id_alloc
+ fn span_for(&self, range: TextRange) -> S {
+ self.map.span_for(range)
}
}
-struct TtTreeSink<'a> {
+struct TtTreeSink<'a, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
buf: String,
- cursor: Cursor<'a>,
- open_delims: FxHashMap<tt::TokenId, TextSize>,
+ cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
- token_map: TokenMap,
+ token_map: SpanMap<SpanData<Anchor, Ctx>>,
}
-impl<'a> TtTreeSink<'a> {
- fn new(cursor: Cursor<'a>) -> Self {
+impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
+ fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
- open_delims: FxHashMap::default(),
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
- token_map: TokenMap::default(),
+ token_map: SpanMap::empty(),
}
}
- fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
- self.token_map.shrink_to_fit();
+ fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) {
+ self.token_map.finish();
(self.inner.finish(), self.token_map)
}
}
@@ -936,27 +821,34 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl TtTreeSink<'_> {
+impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
- let (text, _span) = match self.cursor.token_tree() {
+ let (text, span) = match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
(lit.text.as_str(), lit.span)
}
_ => unreachable!(),
};
+ // FIXME: Span splitting
match text.split_once('.') {
Some((left, right)) => {
assert!(!left.is_empty());
+
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, left);
self.inner.finish_node();
+ self.token_map.push(self.text_pos + TextSize::of(left), span);
// here we move the exit up, the original exit has been deleted in process
self.inner.finish_node();
self.inner.token(SyntaxKind::DOT, ".");
+ self.token_map.push(self.text_pos + TextSize::of(left) + TextSize::of("."), span);
if has_pseudo_dot {
assert!(right.is_empty(), "{left}.{right}");
@@ -964,11 +856,13 @@ impl TtTreeSink<'_> {
assert!(!right.is_empty(), "{left}.{right}");
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, right);
+ self.token_map.push(self.text_pos + TextSize::of(text), span);
self.inner.finish_node();
// the parser creates an unbalanced start node, we are required to close it here
self.inner.finish_node();
}
+ self.text_pos += TextSize::of(text);
}
None => unreachable!(),
}
@@ -987,11 +881,11 @@ impl TtTreeSink<'_> {
break;
}
last = self.cursor;
- let text: &str = loop {
+ let (text, span) = loop {
break match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
// Mark the range if needed
- let (text, id) = match leaf {
+ let (text, span) = match leaf {
tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span),
tt::Leaf::Punct(punct) => {
assert!(punct.char.is_ascii());
@@ -1003,18 +897,13 @@ impl TtTreeSink<'_> {
}
tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span),
};
- let range = TextRange::at(self.text_pos, TextSize::of(text));
- self.token_map.insert(id, range);
self.cursor = self.cursor.bump();
- text
+ (text, span)
}
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
self.cursor = self.cursor.subtree().unwrap();
match delim_to_str(subtree.delimiter.kind, false) {
- Some(it) => {
- self.open_delims.insert(subtree.delimiter.open, self.text_pos);
- it
- }
+ Some(it) => (it, subtree.delimiter.open),
None => continue,
}
}
@@ -1022,21 +911,7 @@ impl TtTreeSink<'_> {
let parent = self.cursor.end().unwrap();
self.cursor = self.cursor.bump();
match delim_to_str(parent.delimiter.kind, true) {
- Some(it) => {
- if let Some(open_delim) =
- self.open_delims.get(&parent.delimiter.open)
- {
- let open_range = TextRange::at(*open_delim, TextSize::of('('));
- let close_range =
- TextRange::at(self.text_pos, TextSize::of('('));
- self.token_map.insert_delim(
- parent.delimiter.open,
- open_range,
- close_range,
- );
- }
- it
- }
+ Some(it) => (it, parent.delimiter.close),
None => continue,
}
}
@@ -1044,10 +919,12 @@ impl TtTreeSink<'_> {
};
self.buf += text;
self.text_pos += TextSize::of(text);
+ self.token_map.push(self.text_pos, span);
}
self.inner.token(kind, self.buf.as_str());
self.buf.clear();
+ // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
// Add whitespace between adjoint puncts
let next = last.bump();
if let (
@@ -1063,6 +940,7 @@ impl TtTreeSink<'_> {
if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' {
self.inner.token(WHITESPACE, " ");
self.text_pos += TextSize::of(' ');
+ self.token_map.push(self.text_pos, curr.span);
}
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
index fa0125f3e..bd8187a14 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
@@ -7,21 +7,20 @@ use tt::{
Leaf, Punct, Spacing,
};
-use super::syntax_node_to_token_tree;
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
fn check_punct_spacing(fixture: &str) {
let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
- let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax());
+ let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
let mut annotations: HashMap<_, _> = extract_annotations(fixture)
.into_iter()
.map(|(range, annotation)| {
- let token = token_map.token_by_range(range).expect("no token found");
let spacing = match annotation.as_str() {
"Alone" => Spacing::Alone,
"Joint" => Spacing::Joint,
a => panic!("unknown annotation: {a}"),
};
- (token, spacing)
+ (range, spacing)
})
.collect();
@@ -29,8 +28,12 @@ fn check_punct_spacing(fixture: &str) {
let mut cursor = buf.begin();
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
- if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree {
- if let Some(expected) = annotations.remove(span) {
+ if let TokenTreeRef::Leaf(
+ Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
+ _,
+ ) = token_tree
+ {
+ if let Some(expected) = annotations.remove(range) {
assert_eq!(expected, *spacing);
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
index 051e20b3a..00a14f046 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
@@ -3,9 +3,9 @@
use syntax::{SyntaxKind, SyntaxKind::*, T};
-use crate::tt::buffer::TokenBuffer;
+use tt::{buffer::TokenBuffer, Span};
-pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
+pub(crate) fn to_parser_input<S: Span>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
let mut res = parser::Input::default();
let mut current = buffer.begin();
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
index 73a27df5d..7d15812f8 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -2,123 +2,75 @@
use std::hash::Hash;
-use parser::{SyntaxKind, T};
+use stdx::{always, itertools::Itertools};
use syntax::{TextRange, TextSize};
+use tt::Span;
-use crate::syntax_bridge::SyntheticTokenId;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-enum TokenTextRange {
- Token(TextRange),
- Delimiter(TextRange),
+/// Maps absolute text ranges for the corresponding file to the relevant span data.
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct SpanMap<S: Span> {
+ spans: Vec<(TextSize, S)>,
}
-impl TokenTextRange {
- fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
- match self {
- TokenTextRange::Token(it) => Some(it),
- TokenTextRange::Delimiter(it) => match kind {
- T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
- T!['}'] | T![')'] | T![']'] => {
- Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
- }
- _ => None,
- },
- }
+impl<S: Span> SpanMap<S> {
+ /// Creates a new empty [`SpanMap`].
+ pub fn empty() -> Self {
+ Self { spans: Vec::new() }
}
-}
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
-pub struct TokenMap {
- /// Maps `tt::TokenId` to the *relative* source range.
- entries: Vec<(tt::TokenId, TokenTextRange)>,
- pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
-}
-
-impl TokenMap {
- pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
- let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
- TokenTextRange::Token(it) => *it == relative_range,
- TokenTextRange::Delimiter(it) => {
- let open = TextRange::at(it.start(), 1.into());
- let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
- open == relative_range || close == relative_range
- }
- })?;
- Some(token_id)
+ /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
+ /// in order.
+ pub fn finish(&mut self) {
+ always!(
+ self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0),
+ "spans are not in order"
+ );
+ self.spans.shrink_to_fit();
}
- pub fn ranges_by_token(
- &self,
- token_id: tt::TokenId,
- kind: SyntaxKind,
- ) -> impl Iterator<Item = TextRange> + '_ {
- self.entries
- .iter()
- .filter(move |&&(tid, _)| tid == token_id)
- .filter_map(move |(_, range)| range.by_kind(kind))
- }
-
- pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
- self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
- }
-
- pub fn first_range_by_token(
- &self,
- token_id: tt::TokenId,
- kind: SyntaxKind,
- ) -> Option<TextRange> {
- self.ranges_by_token(token_id, kind).next()
- }
-
- pub(crate) fn shrink_to_fit(&mut self) {
- self.entries.shrink_to_fit();
- self.synthetic_entries.shrink_to_fit();
- }
-
- pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
- self.entries.push((token_id, TokenTextRange::Token(relative_range)));
- }
-
- pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
- self.synthetic_entries.push((token_id, id));
- }
-
- pub(crate) fn insert_delim(
- &mut self,
- token_id: tt::TokenId,
- open_relative_range: TextRange,
- close_relative_range: TextRange,
- ) -> usize {
- let res = self.entries.len();
- let cover = open_relative_range.cover(close_relative_range);
-
- self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
- res
+ /// Pushes a new span onto the [`SpanMap`].
+ pub fn push(&mut self, offset: TextSize, span: S) {
+ if cfg!(debug_assertions) {
+ if let Some(&(last_offset, _)) = self.spans.last() {
+ assert!(
+ last_offset < offset,
+ "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
+ );
+ }
+ }
+ self.spans.push((offset, span));
}
- pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
- let (_, token_text_range) = &mut self.entries[idx];
- if let TokenTextRange::Delimiter(dim) = token_text_range {
- let cover = dim.cover(close_relative_range);
- *token_text_range = TokenTextRange::Delimiter(cover);
- }
+ /// Returns all [`TextRange`]s that correspond to the given span.
+ ///
+ /// Note this does a linear search through the entire backing vector.
+ pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
+ // FIXME: This should ignore the syntax context!
+ self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
+ if s != span {
+ return None;
+ }
+ let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
+ Some(TextRange::new(start, end))
+ })
}
- pub(crate) fn remove_delim(&mut self, idx: usize) {
- // FIXME: This could be accidentally quadratic
- self.entries.remove(idx);
+ /// Returns the span at the given position.
+ pub fn span_at(&self, offset: TextSize) -> S {
+ let entry = self.spans.partition_point(|&(it, _)| it <= offset);
+ self.spans[entry].1
}
- pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
- self.entries.iter().filter_map(|&(tid, tr)| match tr {
- TokenTextRange::Token(range) => Some((tid, range)),
- TokenTextRange::Delimiter(_) => None,
- })
+ /// Returns the spans associated with the given range.
+ /// In other words, this will return all spans that correspond to all offsets within the given range.
+ pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ {
+ let (start, end) = (range.start(), range.end());
+ let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
+ let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
+ (&self.spans[start_entry..][..end_entry]).iter().map(|&(_, s)| s)
}
- pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
- self.entries.retain(|&(tid, _)| id(tid));
+ pub fn iter(&self) -> impl Iterator<Item = (TextSize, S)> + '_ {
+ self.spans.iter().copied()
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index 79ff8ca28..40e8a2385 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -3,16 +3,17 @@
use smallvec::{smallvec, SmallVec};
use syntax::SyntaxKind;
+use tt::Span;
-use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult};
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
#[derive(Debug, Clone)]
-pub(crate) struct TtIter<'a> {
- pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
+pub(crate) struct TtIter<'a, S> {
+ pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
}
-impl<'a> TtIter<'a> {
- pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
+impl<'a, S: Span> TtIter<'a, S> {
+ pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
TtIter { inner: subtree.token_trees.iter() }
}
@@ -36,35 +37,35 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
+ pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree<S>, ()> {
match self.next() {
Some(tt::TokenTree::Subtree(it)) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
+ pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
match self.next() {
Some(tt::TokenTree::Leaf(it)) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
+ pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> {
+ pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
+ pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
let it = self.expect_leaf()?;
match it {
tt::Leaf::Literal(_) => Ok(it),
@@ -73,7 +74,7 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> {
+ pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Punct(it) => Ok(it),
_ => Err(()),
@@ -84,7 +85,7 @@ impl<'a> TtIter<'a> {
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
- pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct; 3]>, ()> {
+ pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct<S>; 3]>, ()> {
let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
return Err(());
};
@@ -126,11 +127,10 @@ impl<'a> TtIter<'a> {
pub(crate) fn expect_fragment(
&mut self,
entry_point: parser::PrefixEntryPoint,
- ) -> ExpandResult<Option<tt::TokenTree>> {
+ ) -> ExpandResult<Option<tt::TokenTree<S>>> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input);
-
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
@@ -162,32 +162,30 @@ impl<'a> TtIter<'a> {
let mut curr = buffer.begin();
let mut res = vec![];
- if cursor.is_root() {
- while curr != cursor {
- let Some(token) = curr.token_tree() else { break };
- res.push(token.cloned());
- curr = curr.bump();
- }
+ while curr != cursor {
+ let Some(token) = curr.token_tree() else { break };
+ res.push(token.cloned());
+ curr = curr.bump();
}
self.inner = self.inner.as_slice()[res.len()..].iter();
let res = match res.len() {
0 | 1 => res.pop(),
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: res,
})),
};
ExpandResult { value: res, err }
}
- pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> {
+ pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
self.inner.as_slice().get(n)
}
}
-impl<'a> Iterator for TtIter<'a> {
- type Item = &'a tt::TokenTree;
+impl<'a, S> Iterator for TtIter<'a, S> {
+ type Item = &'a tt::TokenTree<S>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next()
}
@@ -197,4 +195,4 @@ impl<'a> Iterator for TtIter<'a> {
}
}
-impl std::iter::ExactSizeIterator for TtIter<'_> {}
+impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}
diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml
index 09e62c352..efb326323 100644
--- a/src/tools/rust-analyzer/crates/parser/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
drop_bomb = "0.1.5"
-rustc_lexer.workspace = true
+rustc-dependencies.workspace = true
limit.workspace = true
@@ -22,3 +22,6 @@ expect-test = "1.4.0"
stdx.workspace = true
sourcegen.workspace = true
+
+[features]
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs
index 577eb0967..e38571dd3 100644
--- a/src/tools/rust-analyzer/crates/parser/src/event.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/event.rs
@@ -2,11 +2,6 @@
//! It is intended to be completely decoupled from the
//! parser, so as to allow to evolve the tree representation
//! and the parser algorithm independently.
-//!
-//! The `TreeSink` trait is the bridge between the parser and the
-//! tree builder: the parser produces a stream of events like
-//! `start node`, `finish node`, and `FileBuilder` converts
-//! this stream to a real tree.
use std::mem;
use crate::{
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index 6a2a9adce..19da297b5 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -376,6 +376,16 @@ fn error_block(p: &mut Parser<'_>, message: &str) {
m.complete(p, ERROR);
}
+// test_err top_level_let
+// let ref foo: fn() = 1 + 3;
+fn error_let_stmt(p: &mut Parser<'_>, message: &str) {
+ assert!(p.at(T![let]));
+ let m = p.start();
+ p.error(message);
+ expressions::let_stmt(p, expressions::Semicolon::Optional);
+ m.complete(p, ERROR);
+}
+
/// The `parser` passed this is required to at least consume one token if it returns `true`.
/// If the `parser` returns false, parsing will stop.
fn delimited(
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
index 1cbd16632..e346ece2f 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -59,7 +59,8 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
attributes::outer_attrs(p);
if p.at(T![let]) {
- let_stmt(p, m, semicolon);
+ let_stmt(p, semicolon);
+ m.complete(p, LET_STMT);
return;
}
@@ -109,54 +110,53 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
m.complete(p, EXPR_STMT);
}
}
+}
- // test let_stmt
- // fn f() { let x: i32 = 92; }
- fn let_stmt(p: &mut Parser<'_>, m: Marker, with_semi: Semicolon) {
- p.bump(T![let]);
- patterns::pattern(p);
- if p.at(T![:]) {
- // test let_stmt_ascription
- // fn f() { let x: i32; }
- types::ascription(p);
- }
+// test let_stmt
+// fn f() { let x: i32 = 92; }
+pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) {
+ p.bump(T![let]);
+ patterns::pattern(p);
+ if p.at(T![:]) {
+ // test let_stmt_ascription
+ // fn f() { let x: i32; }
+ types::ascription(p);
+ }
- let mut expr_after_eq: Option<CompletedMarker> = None;
- if p.eat(T![=]) {
- // test let_stmt_init
- // fn f() { let x = 92; }
- expr_after_eq = expressions::expr(p);
- }
+ let mut expr_after_eq: Option<CompletedMarker> = None;
+ if p.eat(T![=]) {
+ // test let_stmt_init
+ // fn f() { let x = 92; }
+ expr_after_eq = expressions::expr(p);
+ }
- if p.at(T![else]) {
- // test_err let_else_right_curly_brace
- // fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
- if let Some(expr) = expr_after_eq {
- if BlockLike::is_blocklike(expr.kind()) {
- p.error(
- "right curly brace `}` before `else` in a `let...else` statement not allowed",
- )
- }
+ if p.at(T![else]) {
+ // test_err let_else_right_curly_brace
+ // fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
+ if let Some(expr) = expr_after_eq {
+ if BlockLike::is_blocklike(expr.kind()) {
+ p.error(
+ "right curly brace `}` before `else` in a `let...else` statement not allowed",
+ )
}
-
- // test let_else
- // fn f() { let Some(x) = opt else { return }; }
- let m = p.start();
- p.bump(T![else]);
- block_expr(p);
- m.complete(p, LET_ELSE);
}
- match with_semi {
- Semicolon::Forbidden => (),
- Semicolon::Optional => {
- p.eat(T![;]);
- }
- Semicolon::Required => {
- p.expect(T![;]);
- }
+ // test let_else
+ // fn f() { let Some(x) = opt else { return }; }
+ let m = p.start();
+ p.bump(T![else]);
+ block_expr(p);
+ m.complete(p, LET_ELSE);
+ }
+
+ match with_semi {
+ Semicolon::Forbidden => (),
+ Semicolon::Optional => {
+ p.eat(T![;]);
+ }
+ Semicolon::Required => {
+ p.expect(T![;]);
}
- m.complete(p, LET_STMT);
}
}
@@ -693,6 +693,17 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// We permit `.. }` on the left-hand side of a destructuring assignment.
if !p.at(T!['}']) {
expr(p);
+
+ if p.at(T![,]) {
+ // test_err comma_after_functional_update_syntax
+ // fn foo() {
+ // S { ..x, };
+ // S { ..x, a: 0 }
+ // }
+
+ // Do not bump, so we can support additional fields after this comma.
+ p.error("cannot use a comma after the base struct");
+ }
}
}
T!['{'] => {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
index 4e850b1f7..34fd3420f 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -79,6 +79,7 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
e.complete(p, ERROR);
}
EOF | T!['}'] => p.error("expected an item"),
+ T![let] => error_let_stmt(p, "expected an item"),
_ => p.err_and_bump("expected an item"),
}
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
index 74eae9151..846da28cb 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
@@ -7,6 +7,9 @@ use super::*;
// fn b(x: i32) {}
// fn c(x: i32, ) {}
// fn d(x: i32, y: ()) {}
+
+// test_err empty_param_slot
+// fn f(y: i32, ,t: i32) {}
pub(super) fn param_list_fn_def(p: &mut Parser<'_>) {
list_(p, Flavor::FnDef);
}
@@ -71,7 +74,11 @@ fn list_(p: &mut Parser<'_>, flavor: Flavor) {
if !p.at_ts(PARAM_FIRST.union(ATTRIBUTE_FIRST)) {
p.error("expected value parameter");
m.abandon(p);
- break;
+ if p.eat(T![,]) {
+ continue;
+ } else {
+ break;
+ }
}
param(p, m, flavor);
if !p.at(T![,]) {
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
index 36c52953a..b9e7566fd 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -8,8 +8,12 @@
//! Note that these tokens, unlike the tokens we feed into the parser, do
//! include info about comments and whitespace.
+use rustc_dependencies::lexer as rustc_lexer;
+
use std::ops;
+use rustc_lexer::unescape::{EscapeError, Mode};
+
use crate::{
SyntaxKind::{self, *},
T,
@@ -253,30 +257,60 @@ impl<'a> Converter<'a> {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
err = "Missing trailing `'` symbol to terminate the character literal";
+ } else {
+ let text = &self.res.text[self.offset + 1..][..len - 1];
+ let i = text.rfind('\'').unwrap();
+ let text = &text[..i];
+ if let Err(e) = rustc_lexer::unescape::unescape_char(text) {
+ err = error_to_diagnostic_message(e, Mode::Char);
+ }
}
CHAR
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
err = "Missing trailing `'` symbol to terminate the byte literal";
+ } else {
+ let text = &self.res.text[self.offset + 2..][..len - 2];
+ let i = text.rfind('\'').unwrap();
+ let text = &text[..i];
+ if let Err(e) = rustc_lexer::unescape::unescape_byte(text) {
+ err = error_to_diagnostic_message(e, Mode::Byte);
+ }
}
+
BYTE
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the string literal";
+ } else {
+ let text = &self.res.text[self.offset + 1..][..len - 1];
+ let i = text.rfind('"').unwrap();
+ let text = &text[..i];
+ err = unescape_string_error_message(text, Mode::Str);
}
STRING
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the byte string literal";
+ } else {
+ let text = &self.res.text[self.offset + 2..][..len - 2];
+ let i = text.rfind('"').unwrap();
+ let text = &text[..i];
+ err = unescape_string_error_message(text, Mode::ByteStr);
}
BYTE_STRING
}
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the string literal";
+ } else {
+ let text = &self.res.text[self.offset + 2..][..len - 2];
+ let i = text.rfind('"').unwrap();
+ let text = &text[..i];
+ err = unescape_string_error_message(text, Mode::CStr);
}
C_STRING
}
@@ -304,3 +338,64 @@ impl<'a> Converter<'a> {
self.push(syntax_kind, len, err);
}
}
+
+fn error_to_diagnostic_message(error: EscapeError, mode: Mode) -> &'static str {
+ match error {
+ EscapeError::ZeroChars => "empty character literal",
+ EscapeError::MoreThanOneChar => "character literal may only contain one codepoint",
+ EscapeError::LoneSlash => "",
+ EscapeError::InvalidEscape if mode == Mode::Byte || mode == Mode::ByteStr => {
+ "unknown byte escape"
+ }
+ EscapeError::InvalidEscape => "unknown character escape",
+ EscapeError::BareCarriageReturn => "",
+ EscapeError::BareCarriageReturnInRawString => "",
+ EscapeError::EscapeOnlyChar if mode == Mode::Byte => "byte constant must be escaped",
+ EscapeError::EscapeOnlyChar => "character constant must be escaped",
+ EscapeError::TooShortHexEscape => "numeric character escape is too short",
+ EscapeError::InvalidCharInHexEscape => "invalid character in numeric character escape",
+ EscapeError::OutOfRangeHexEscape => "out of range hex escape",
+ EscapeError::NoBraceInUnicodeEscape => "incorrect unicode escape sequence",
+ EscapeError::InvalidCharInUnicodeEscape => "invalid character in unicode escape",
+ EscapeError::EmptyUnicodeEscape => "empty unicode escape",
+ EscapeError::UnclosedUnicodeEscape => "unterminated unicode escape",
+ EscapeError::LeadingUnderscoreUnicodeEscape => "invalid start of unicode escape",
+ EscapeError::OverlongUnicodeEscape => "overlong unicode escape",
+ EscapeError::LoneSurrogateUnicodeEscape => "invalid unicode character escape",
+ EscapeError::OutOfRangeUnicodeEscape => "invalid unicode character escape",
+ EscapeError::UnicodeEscapeInByte => "unicode escape in byte string",
+ EscapeError::NonAsciiCharInByte if mode == Mode::Byte => {
+ "non-ASCII character in byte literal"
+ }
+ EscapeError::NonAsciiCharInByte if mode == Mode::ByteStr => {
+ "non-ASCII character in byte string literal"
+ }
+ EscapeError::NonAsciiCharInByte => "non-ASCII character in raw byte string literal",
+ EscapeError::UnskippedWhitespaceWarning => "",
+ EscapeError::MultipleSkippedLinesWarning => "",
+ }
+}
+
+fn unescape_string_error_message(text: &str, mode: Mode) -> &'static str {
+ let mut error_message = "";
+ match mode {
+ Mode::CStr => {
+ rustc_lexer::unescape::unescape_c_string(text, mode, &mut |_, res| {
+ if let Err(e) = res {
+ error_message = error_to_diagnostic_message(e, mode);
+ }
+ });
+ }
+ Mode::ByteStr | Mode::Str => {
+ rustc_lexer::unescape::unescape_literal(text, mode, &mut |_, res| {
+ if let Err(e) = res {
+ error_message = error_to_diagnostic_message(e, mode);
+ }
+ });
+ }
+ _ => {
+ // Other Modes are not supported yet or do not apply
+ }
+ }
+ error_message
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index c155e8aaf..d9b3f46f2 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -17,8 +17,9 @@
//!
//! [`Parser`]: crate::parser::Parser
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(rustdoc::private_intra_doc_links)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
mod lexed_str;
mod token_set;
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index 2c47e3d08..57005a683 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -32,29 +32,27 @@ impl LexedStr<'_> {
let kind = self.kind(i);
if kind.is_trivia() {
was_joint = false
+ } else if kind == SyntaxKind::IDENT {
+ let token_text = self.text(i);
+ let contextual_kw =
+ SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+ res.push_ident(contextual_kw);
} else {
- if kind == SyntaxKind::IDENT {
- let token_text = self.text(i);
- let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
- .unwrap_or(SyntaxKind::IDENT);
- res.push_ident(contextual_kw);
- } else {
- if was_joint {
+ if was_joint {
+ res.was_joint();
+ }
+ res.push(kind);
+ // Tag the token as joint if it is float with a fractional part
+ // we use this jointness to inform the parser about what token split
+ // event to emit when we encounter a float literal in a field access
+ if kind == SyntaxKind::FLOAT_NUMBER {
+ if !self.text(i).ends_with('.') {
res.was_joint();
- }
- res.push(kind);
- // Tag the token as joint if it is float with a fractional part
- // we use this jointness to inform the parser about what token split
- // event to emit when we encounter a float literal in a field access
- if kind == SyntaxKind::FLOAT_NUMBER {
- if !self.text(i).ends_with('.') {
- res.was_joint();
- } else {
- was_joint = false;
- }
} else {
- was_joint = true;
+ was_joint = false;
}
+ } else {
+ was_joint = true;
}
}
}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast
new file mode 100644
index 000000000..7603c9099
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast
@@ -0,0 +1,86 @@
+BYTE "b''" error: empty character literal
+WHITESPACE "\n"
+BYTE "b'\\'" error: Missing trailing `'` symbol to terminate the byte literal
+WHITESPACE "\n"
+BYTE "b'\n'" error: byte constant must be escaped
+WHITESPACE "\n"
+BYTE "b'spam'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\x0ff'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\\"a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\na'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\ra'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\ta'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\\\a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\'a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\0a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\u{0}x'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{1F63b}}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\v'" error: unknown byte escape
+WHITESPACE "\n"
+BYTE "b'\\💩'" error: unknown byte escape
+WHITESPACE "\n"
+BYTE "b'\\●'" error: unknown byte escape
+WHITESPACE "\n"
+BYTE "b'\\\\\\r'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\x'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\x0'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\xf'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\xa'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\xx'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\xы'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\x🦀'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\xtt'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\u'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+BYTE "b'\\u[0123]'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+BYTE "b'\\u{0x}'" error: invalid character in unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{'" error: unterminated unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{0000'" error: unterminated unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{}'" error: empty unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{_0000}'" error: invalid start of unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{0000000}'" error: overlong unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{FFFFFF}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{ffffff}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{ffffff}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DC00}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DDDD}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DFFF}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{D800}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DAAA}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DBFF}'" error: unicode escape in byte string
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs
new file mode 100644
index 000000000..b2d06e490
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs
@@ -0,0 +1,44 @@
+b''
+b'\'
+b'
+'
+b'spam'
+b'\x0ff'
+b'\"a'
+b'\na'
+b'\ra'
+b'\ta'
+b'\\a'
+b'\'a'
+b'\0a'
+b'\u{0}x'
+b'\u{1F63b}}'
+b'\v'
+b'\💩'
+b'\●'
+b'\\\r'
+b'\x'
+b'\x0'
+b'\xf'
+b'\xa'
+b'\xx'
+b'\xы'
+b'\x🦀'
+b'\xtt'
+b'\u'
+b'\u[0123]'
+b'\u{0x}'
+b'\u{'
+b'\u{0000'
+b'\u{}'
+b'\u{_0000}'
+b'\u{0000000}'
+b'\u{FFFFFF}'
+b'\u{ffffff}'
+b'\u{ffffff}'
+b'\u{DC00}'
+b'\u{DDDD}'
+b'\u{DFFF}'
+b'\u{D800}'
+b'\u{DAAA}'
+b'\u{DBFF}'
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast
new file mode 100644
index 000000000..e8d8ff8ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast
@@ -0,0 +1,28 @@
+BYTE_STRING "b\"\\💩\"" error: unknown byte escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\●\"" error: unknown byte escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{_0000}\"" error: invalid start of unicode escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{0000000}\"" error: overlong unicode escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{FFFFFF}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{ffffff}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{ffffff}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DC00}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DDDD}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DFFF}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{D800}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DAAA}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DBFF}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\xы\"" error: invalid character in numeric character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs
new file mode 100644
index 000000000..e74847137
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs
@@ -0,0 +1,14 @@
+b"\💩"
+b"\●"
+b"\u{_0000}"
+b"\u{0000000}"
+b"\u{FFFFFF}"
+b"\u{ffffff}"
+b"\u{ffffff}"
+b"\u{DC00}"
+b"\u{DDDD}"
+b"\u{DFFF}"
+b"\u{D800}"
+b"\u{DAAA}"
+b"\u{DBFF}"
+b"\xы"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast
new file mode 100644
index 000000000..1b4424ba5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast
@@ -0,0 +1,28 @@
+C_STRING "c\"\\💩\"" error: unknown character escape
+WHITESPACE "\n"
+C_STRING "c\"\\●\"" error: unknown character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{_0000}\"" error: invalid start of unicode escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{0000000}\"" error: overlong unicode escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{FFFFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DC00}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DDDD}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{D800}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DAAA}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DBFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\xы\"" error: invalid character in numeric character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs
new file mode 100644
index 000000000..1b78ffc28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs
@@ -0,0 +1,14 @@
+c"\💩"
+c"\●"
+c"\u{_0000}"
+c"\u{0000000}"
+c"\u{FFFFFF}"
+c"\u{ffffff}"
+c"\u{ffffff}"
+c"\u{DC00}"
+c"\u{DDDD}"
+c"\u{DFFF}"
+c"\u{D800}"
+c"\u{DAAA}"
+c"\u{DBFF}"
+c"\xы"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast
new file mode 100644
index 000000000..b1e1364d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast
@@ -0,0 +1,92 @@
+CHAR "'hello'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "''" error: empty character literal
+WHITESPACE "\n"
+CHAR "'\n'" error: character constant must be escaped
+WHITESPACE "\n"
+CHAR "'spam'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\x0ff'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\\"a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\na'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\ra'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\ta'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\\\a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\'a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\0a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\u{0}x'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\u{1F63b}}'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\v'" error: unknown character escape
+WHITESPACE "\n"
+CHAR "'\\💩'" error: unknown character escape
+WHITESPACE "\n"
+CHAR "'\\●'" error: unknown character escape
+WHITESPACE "\n"
+CHAR "'\\\\\\r'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\x'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\x0'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\xf'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\xa'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\xx'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\xы'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\x🦀'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\xtt'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\xff'" error: out of range hex escape
+WHITESPACE "\n"
+CHAR "'\\xFF'" error: out of range hex escape
+WHITESPACE "\n"
+CHAR "'\\x80'" error: out of range hex escape
+WHITESPACE "\n"
+CHAR "'\\u'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+CHAR "'\\u[0123]'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+CHAR "'\\u{0x}'" error: invalid character in unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{'" error: unterminated unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{0000'" error: unterminated unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{}'" error: empty unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{_0000}'" error: invalid start of unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{0000000}'" error: overlong unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{FFFFFF}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{ffffff}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{ffffff}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DC00}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DDDD}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DFFF}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{D800}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DAAA}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DBFF}'" error: invalid unicode character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs
new file mode 100644
index 000000000..291f99d80
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs
@@ -0,0 +1,47 @@
+'hello'
+''
+'
+'
+'spam'
+'\x0ff'
+'\"a'
+'\na'
+'\ra'
+'\ta'
+'\\a'
+'\'a'
+'\0a'
+'\u{0}x'
+'\u{1F63b}}'
+'\v'
+'\💩'
+'\●'
+'\\\r'
+'\x'
+'\x0'
+'\xf'
+'\xa'
+'\xx'
+'\xы'
+'\x🦀'
+'\xtt'
+'\xff'
+'\xFF'
+'\x80'
+'\u'
+'\u[0123]'
+'\u{0x}'
+'\u{'
+'\u{0000'
+'\u{}'
+'\u{_0000}'
+'\u{0000000}'
+'\u{FFFFFF}'
+'\u{ffffff}'
+'\u{ffffff}'
+'\u{DC00}'
+'\u{DDDD}'
+'\u{DFFF}'
+'\u{D800}'
+'\u{DAAA}'
+'\u{DBFF}'
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast
new file mode 100644
index 000000000..0cd174720
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast
@@ -0,0 +1,28 @@
+STRING "\"\\💩\"" error: unknown character escape
+WHITESPACE "\n"
+STRING "\"\\●\"" error: unknown character escape
+WHITESPACE "\n"
+STRING "\"\\u{_0000}\"" error: invalid start of unicode escape
+WHITESPACE "\n"
+STRING "\"\\u{0000000}\"" error: overlong unicode escape
+WHITESPACE "\n"
+STRING "\"\\u{FFFFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DC00}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DDDD}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{D800}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DAAA}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DBFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\xы\"" error: invalid character in numeric character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs
new file mode 100644
index 000000000..2499516d3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs
@@ -0,0 +1,14 @@
+"\💩"
+"\●"
+"\u{_0000}"
+"\u{0000000}"
+"\u{FFFFFF}"
+"\u{ffffff}"
+"\u{ffffff}"
+"\u{DC00}"
+"\u{DDDD}"
+"\u{DFFF}"
+"\u{D800}"
+"\u{DAAA}"
+"\u{DBFF}"
+"\xы"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
index c848ac368..fd20ca57a 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
@@ -1,13 +1,9 @@
-BYTE "b''"
-WHITESPACE " "
BYTE "b'x'"
WHITESPACE " "
BYTE_STRING "b\"foo\""
WHITESPACE " "
BYTE_STRING "br\"\""
WHITESPACE "\n"
-BYTE "b''suf"
-WHITESPACE " "
BYTE_STRING "b\"\"ix"
WHITESPACE " "
BYTE_STRING "br\"\"br"
@@ -17,6 +13,4 @@ WHITESPACE " "
BYTE "b'\\\\'"
WHITESPACE " "
BYTE "b'\\''"
-WHITESPACE " "
-BYTE "b'hello'"
WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
index b54930f5e..65460d02c 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
@@ -1,3 +1,3 @@
-b'' b'x' b"foo" br""
-b''suf b""ix br""br
-b'\n' b'\\' b'\'' b'hello'
+b'x' b"foo" br""
+b""ix br""br
+b'\n' b'\\' b'\''
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
index 66e58cc29..07172a4ec 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
@@ -4,8 +4,6 @@ CHAR "' '"
WHITESPACE " "
CHAR "'0'"
WHITESPACE " "
-CHAR "'hello'"
-WHITESPACE " "
CHAR "'\\x7f'"
WHITESPACE " "
CHAR "'\\n'"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
index 454ee0a5f..15f52c113 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
@@ -1 +1 @@
-'x' ' ' '0' 'hello' '\x7f' '\n' '\\' '\''
+'x' ' ' '0' '\x7f' '\n' '\\' '\''
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast
new file mode 100644
index 000000000..39e35a81e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ COMMA ","
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "t"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 12: expected value parameter
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs
new file mode 100644
index 000000000..0adf7b8d2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs
@@ -0,0 +1 @@
+fn f(y: i32, ,t: i32) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast
new file mode 100644
index 000000000..0e2fe5988
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast
@@ -0,0 +1,66 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 22: cannot use a comma after the base struct
+error 38: cannot use a comma after the base struct
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs
new file mode 100644
index 000000000..14cf96719
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ S { ..x, };
+ S { ..x, a: 0 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast
new file mode 100644
index 000000000..5ddef5f3f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ ERROR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ COLON ":"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 0: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs
new file mode 100644
index 000000000..3d3e7dd56
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs
@@ -0,0 +1 @@
+let ref foo: fn() = 1 + 3;
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
index 88b8d0aee..db705a7b6 100644
--- a/src/tools/rust-analyzer/crates/paths/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -1,7 +1,7 @@
//! Thin wrappers around `std::path`, distinguishing between absolute and
//! relative paths.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
borrow::Borrow,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index 4229f2891..2cbbc9489 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -21,15 +21,19 @@ object = { version = "0.32.0", default-features = false, features = [
] }
serde.workspace = true
serde_json = { workspace = true, features = ["unbounded_depth"] }
-tracing = "0.1.37"
+tracing.workspace = true
triomphe.workspace = true
memmap2 = "0.5.4"
snap = "1.1.0"
+indexmap = "2.1.0"
# local deps
paths.workspace = true
tt.workspace = true
stdx.workspace = true
profile.workspace = true
-# Intentionally *not* depend on anything salsa-related
-# base-db.workspace = true
+text-size.workspace = true
+# Ideally this crate would not depend on salsa things, but we need span information here which wraps
+# InternIds for the syntax context
+base-db.workspace = true
+la-arena.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 1603458f7..f697ecd35 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -5,22 +5,22 @@
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod msg;
mod process;
mod version;
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use paths::AbsPathBuf;
use std::{fmt, io, sync::Mutex};
use triomphe::Arc;
use serde::{Deserialize, Serialize};
-use ::tt::token_id as tt;
-
use crate::{
- msg::{ExpandMacro, FlatTree, PanicMessage},
+ msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
process::ProcMacroProcessSrv,
};
@@ -136,30 +136,47 @@ impl ProcMacro {
pub fn expand(
&self,
- subtree: &tt::Subtree,
- attr: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attr: Option<&tt::Subtree<SpanData>>,
env: Vec<(String, String)>,
- ) -> Result<Result<tt::Subtree, PanicMessage>, ServerError> {
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
let current_dir = env
.iter()
.find(|(name, _)| name == "CARGO_MANIFEST_DIR")
.map(|(_, value)| value.clone());
+ let mut span_data_table = IndexSet::default();
+ let def_site = span_data_table.insert_full(def_site).0;
+ let call_site = span_data_table.insert_full(call_site).0;
+ let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
- macro_body: FlatTree::new(subtree, version),
+ macro_body: FlatTree::new(subtree, version, &mut span_data_table),
macro_name: self.name.to_string(),
- attributes: attr.map(|subtree| FlatTree::new(subtree, version)),
+ attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
env,
current_dir,
+ has_global_spans: ExpnGlobals {
+ serialize: version >= HAS_GLOBAL_SPANS,
+ def_site,
+ call_site,
+ mixed_site,
+ },
};
- let request = msg::Request::ExpandMacro(task);
- let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+ let response = self
+ .process
+ .lock()
+ .unwrap_or_else(|e| e.into_inner())
+ .send_task(msg::Request::ExpandMacro(task))?;
+
match response {
msg::Response::ExpandMacro(it) => {
- Ok(it.map(|tree| FlatTree::to_subtree(tree, version)))
+ Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
Err(ServerError { message: "unexpected response".to_string(), io: None })
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
index 4b01643c2..1d3e45aff 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
@@ -10,14 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::ProcMacroKind;
-pub use crate::msg::flat::FlatTree;
+pub use crate::msg::flat::{FlatTree, TokenId};
// The versions of the server protocol
pub const NO_VERSION_CHECK_VERSION: u32 = 0;
pub const VERSION_CHECK_VERSION: u32 = 1;
pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
+pub const HAS_GLOBAL_SPANS: u32 = 3;
-pub const CURRENT_API_VERSION: u32 = ENCODE_CLOSE_SPAN_VERSION;
+pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
@@ -59,6 +60,26 @@ pub struct ExpandMacro {
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
+ /// marker for serde skip stuff
+ #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
+ #[serde(default)]
+ pub has_global_spans: ExpnGlobals,
+}
+
+#[derive(Default, Debug, Serialize, Deserialize)]
+pub struct ExpnGlobals {
+ #[serde(skip_serializing)]
+ #[serde(default)]
+ pub serialize: bool,
+ pub def_site: usize,
+ pub call_site: usize,
+ pub mixed_site: usize,
+}
+
+impl ExpnGlobals {
+ fn skip_serializing_if(&self) -> bool {
+ !self.serialize
+ }
}
pub trait Message: Serialize + DeserializeOwned {
@@ -115,30 +136,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
#[cfg(test)]
mod tests {
+ use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+ FileId,
+ };
+ use la_arena::RawIdx;
+ use text_size::{TextRange, TextSize};
+ use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
+
use super::*;
- use crate::tt::*;
- fn fixture_token_tree() -> Subtree {
- let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
+ fn fixture_token_tree() -> Subtree<SpanData> {
+ let anchor = SpanAnchor {
+ file_id: FileId::from_raw(0),
+ ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)),
+ };
+ let mut subtree = Subtree {
+ delimiter: Delimiter {
+ open: SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::empty(TextSize::new(13)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ kind: DelimiterKind::Invisible,
+ },
+ token_trees: Vec::new(),
+ };
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "struct".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "Foo".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(5), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(),
- span: TokenId::unspecified(),
+
+ span: SpanData {
+ range: TextRange::at(TextSize::new(8), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
})));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
char: '@',
- span: TokenId::unspecified(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(11), TextSize::of('@')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
spacing: Spacing::Joint,
})));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Delimiter {
- open: TokenId(2),
- close: TokenId::UNSPECIFIED,
+ open: SpanData {
+ range: TextRange::at(TextSize::new(12), TextSize::of('{')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::at(TextSize::new(13), TextSize::of('}')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
kind: DelimiterKind::Brace,
},
token_trees: vec![],
@@ -149,19 +229,26 @@ mod tests {
#[test]
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
+ let mut span_data_table = Default::default();
let task = ExpandMacro {
- macro_body: FlatTree::new(&tt, CURRENT_API_VERSION),
+ macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
lib: std::env::current_dir().unwrap(),
env: Default::default(),
current_dir: Default::default(),
+ has_global_spans: ExpnGlobals {
+ serialize: true,
+ def_site: 0,
+ call_site: 0,
+ mixed_site: 0,
+ },
};
let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
- assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
+ assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
}
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
index 44245336f..583571862 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
@@ -37,12 +37,26 @@
use std::collections::{HashMap, VecDeque};
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use serde::{Deserialize, Serialize};
-use crate::{
- msg::ENCODE_CLOSE_SPAN_VERSION,
- tt::{self, TokenId},
-};
+use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
+
+type SpanDataIndexMap = IndexSet<SpanData>;
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl std::fmt::Debug for TokenId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl tt::Span for TokenId {
+ const DUMMY: Self = TokenId(!0);
+}
#[derive(Serialize, Deserialize, Debug)]
pub struct FlatTree {
@@ -55,33 +69,38 @@ pub struct FlatTree {
}
struct SubtreeRepr {
- open: tt::TokenId,
- close: tt::TokenId,
+ open: TokenId,
+ close: TokenId,
kind: tt::DelimiterKind,
tt: [u32; 2],
}
struct LiteralRepr {
- id: tt::TokenId,
+ id: TokenId,
text: u32,
}
struct PunctRepr {
- id: tt::TokenId,
+ id: TokenId,
char: char,
spacing: tt::Spacing,
}
struct IdentRepr {
- id: tt::TokenId,
+ id: TokenId,
text: u32,
}
impl FlatTree {
- pub fn new(subtree: &tt::Subtree, version: u32) -> FlatTree {
+ pub fn new(
+ subtree: &tt::Subtree<SpanData>,
+ version: u32,
+ span_data_table: &mut SpanDataIndexMap,
+ ) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
work: VecDeque::new(),
+ span_data_table,
subtree: Vec::new(),
literal: Vec::new(),
@@ -92,7 +111,7 @@ impl FlatTree {
};
w.write(subtree);
- return FlatTree {
+ FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
@@ -103,15 +122,44 @@ impl FlatTree {
ident: write_vec(w.ident, IdentRepr::write),
token_tree: w.token_tree,
text: w.text,
+ }
+ }
+
+ pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
+ let mut w = Writer {
+ string_table: HashMap::new(),
+ work: VecDeque::new(),
+ span_data_table: &mut (),
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
};
+ w.write(subtree);
- fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
- xs.into_iter().flat_map(f).collect()
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: write_vec(w.literal, LiteralRepr::write),
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: write_vec(w.ident, IdentRepr::write),
+ token_tree: w.token_tree,
+ text: w.text,
}
}
- pub fn to_subtree(self, version: u32) -> tt::Subtree {
- return Reader {
+ pub fn to_subtree_resolved(
+ self,
+ version: u32,
+ span_data_table: &SpanDataIndexMap,
+ ) -> tt::Subtree<SpanData> {
+ Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
@@ -122,18 +170,40 @@ impl FlatTree {
ident: read_vec(self.ident, IdentRepr::read),
token_tree: self.token_tree,
text: self.text,
+ span_data_table,
}
- .read();
+ .read()
+ }
- fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
- let mut chunks = xs.chunks_exact(N);
- let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
- assert!(chunks.remainder().is_empty());
- res
+ pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
+ Reader {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
+ literal: read_vec(self.literal, LiteralRepr::read),
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: read_vec(self.ident, IdentRepr::read),
+ token_tree: self.token_tree,
+ text: self.text,
+ span_data_table: &(),
}
+ .read()
}
}
+fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
+ let mut chunks = xs.chunks_exact(N);
+ let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
+ assert!(chunks.remainder().is_empty());
+ res
+}
+
+fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
+ xs.into_iter().flat_map(f).collect()
+}
+
impl SubtreeRepr {
fn write(self) -> [u32; 4] {
let kind = match self.kind {
@@ -152,7 +222,7 @@ impl SubtreeRepr {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
- SubtreeRepr { open: TokenId(open), close: TokenId::UNSPECIFIED, kind, tt: [lo, len] }
+ SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
}
fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
@@ -211,9 +281,36 @@ impl IdentRepr {
}
}
-struct Writer<'a> {
- work: VecDeque<(usize, &'a tt::Subtree)>,
+trait Span: Copy {
+ type Table;
+ fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId;
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
+}
+
+impl Span for TokenId {
+ type Table = ();
+ fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId {
+ token_id
+ }
+
+ fn span_for_token_id((): &Self::Table, id: TokenId) -> Self {
+ id
+ }
+}
+impl Span for SpanData {
+ type Table = IndexSet<SpanData>;
+ fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
+ TokenId(table.insert_full(span).0 as u32)
+ }
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
+ *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0])
+ }
+}
+
+struct Writer<'a, 'span, S: Span> {
+ work: VecDeque<(usize, &'a tt::Subtree<S>)>,
string_table: HashMap<&'a str, u32>,
+ span_data_table: &'span mut S::Table,
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
@@ -223,15 +320,19 @@ struct Writer<'a> {
text: Vec<String>,
}
-impl<'a> Writer<'a> {
- fn write(&mut self, root: &'a tt::Subtree) {
+impl<'a, 'span, S: Span> Writer<'a, 'span, S> {
+ fn write(&mut self, root: &'a tt::Subtree<S>) {
self.enqueue(root);
while let Some((idx, subtree)) = self.work.pop_front() {
self.subtree(idx, subtree);
}
}
- fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
+ fn token_id_of(&mut self, span: S) -> TokenId {
+ S::token_id_of(self.span_data_table, span)
+ }
+
+ fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.token_trees.len();
self.token_tree.resize(first_tt + n_tt, !0);
@@ -248,22 +349,21 @@ impl<'a> Writer<'a> {
tt::Leaf::Literal(lit) => {
let idx = self.literal.len() as u32;
let text = self.intern(&lit.text);
- self.literal.push(LiteralRepr { id: lit.span, text });
+ let id = self.token_id_of(lit.span);
+ self.literal.push(LiteralRepr { id, text });
idx << 2 | 0b01
}
tt::Leaf::Punct(punct) => {
let idx = self.punct.len() as u32;
- self.punct.push(PunctRepr {
- char: punct.char,
- spacing: punct.spacing,
- id: punct.span,
- });
+ let id = self.token_id_of(punct.span);
+ self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id });
idx << 2 | 0b10
}
tt::Leaf::Ident(ident) => {
let idx = self.ident.len() as u32;
let text = self.intern(&ident.text);
- self.ident.push(IdentRepr { id: ident.span, text });
+ let id = self.token_id_of(ident.span);
+ self.ident.push(IdentRepr { id, text });
idx << 2 | 0b11
}
},
@@ -273,10 +373,10 @@ impl<'a> Writer<'a> {
}
}
- fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
+ fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
let idx = self.subtree.len();
- let open = subtree.delimiter.open;
- let close = subtree.delimiter.close;
+ let open = self.token_id_of(subtree.delimiter.open);
+ let close = self.token_id_of(subtree.delimiter.close);
let delimiter_kind = subtree.delimiter.kind;
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
self.work.push_back((idx, subtree));
@@ -293,23 +393,29 @@ impl<'a> Writer<'a> {
}
}
-struct Reader {
+struct Reader<'span, S: Span> {
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
punct: Vec<PunctRepr>,
ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
+ span_data_table: &'span S::Table,
}
-impl Reader {
- pub(crate) fn read(self) -> tt::Subtree {
- let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
+impl<'span, S: Span> Reader<'span, S> {
+ pub(crate) fn read(self) -> tt::Subtree<S> {
+ let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
+ let read_span = |id| S::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
let repr = &self.subtree[i];
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
let s = tt::Subtree {
- delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind },
+ delimiter: tt::Delimiter {
+ open: read_span(repr.open),
+ close: read_span(repr.close),
+ kind: repr.kind,
+ },
token_trees: token_trees
.iter()
.copied()
@@ -324,7 +430,7 @@ impl Reader {
let repr = &self.literal[idx];
tt::Leaf::Literal(tt::Literal {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -333,7 +439,7 @@ impl Reader {
tt::Leaf::Punct(tt::Punct {
char: repr.char,
spacing: repr.spacing,
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -341,7 +447,7 @@ impl Reader {
let repr = &self.ident[idx];
tt::Leaf::Ident(tt::Ident {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
index 48efbf589..5ff1f36c5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -85,8 +85,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
}
/// Check the version of rustc that was used to compile a proc macro crate's
-///
/// binary file.
+///
/// A proc macro crate binary's ".rustc" section has following byte layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
/// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
@@ -96,8 +96,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
/// The bytes you get after decompressing the snappy format portion has
/// following layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again)
-/// * [crate root bytes] next 4 bytes is to store crate root position,
-/// according to rustc's source code comment
+/// * [crate root bytes] next 8 bytes (4 in old versions) is to store
+/// crate root position, according to rustc's source code comment
/// * [length byte] next 1 byte tells us how many bytes we should read next
/// for the version string's utf8 bytes
/// * [version string bytes encoded in utf8] <- GET THIS BOI
@@ -119,13 +119,18 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
}
let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
// Last supported version is:
- // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
- let snappy_portion = match version {
- 5 | 6 => &dot_rustc[8..],
+ // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318
+ let (snappy_portion, bytes_before_version) = match version {
+ 5 | 6 => (&dot_rustc[8..], 13),
7 | 8 => {
let len_bytes = &dot_rustc[8..12];
let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize;
- &dot_rustc[12..data_len + 12]
+ (&dot_rustc[12..data_len + 12], 13)
+ }
+ 9 => {
+ let len_bytes = &dot_rustc[8..16];
+ let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
+ (&dot_rustc[16..data_len + 12], 17)
}
_ => {
return Err(io::Error::new(
@@ -142,15 +147,15 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
Box::new(SnapDecoder::new(snappy_portion))
};
- // the bytes before version string bytes, so this basically is:
+ // We're going to skip over the bytes before the version string, so basically:
// 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
- // 4 bytes for [crate root bytes]
+ // 4 or 8 bytes for [crate root bytes]
// 1 byte for length of version string
- // so 13 bytes in total, and we should check the 13th byte
+ // so 13 or 17 bytes in total, and we should check the last of those bytes
// to know the length
- let mut bytes_before_version = [0u8; 13];
- uncompressed.read_exact(&mut bytes_before_version)?;
- let length = bytes_before_version[12];
+ let mut bytes = [0u8; 17];
+ uncompressed.read_exact(&mut bytes[..bytes_before_version])?;
+ let length = bytes[bytes_before_version - 1];
let mut version_string_utf8 = vec![0u8; length as usize];
uncompressed.read_exact(&mut version_string_utf8)?;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
index bece19518..50ce586fc 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
@@ -18,12 +18,12 @@ fn main() -> std::io::Result<()> {
run()
}
-#[cfg(not(feature = "sysroot-abi"))]
+#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
fn run() -> io::Result<()> {
panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
}
-#[cfg(feature = "sysroot-abi")]
+#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
fn run() -> io::Result<()> {
use proc_macro_api::msg::{self, Message};
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
index dd05e250c..f20e6832f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -11,7 +11,7 @@ use libloading::Library;
use memmap2::Mmap;
use object::Object;
use paths::AbsPath;
-use proc_macro_api::{read_dylib_info, ProcMacroKind};
+use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind};
const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
@@ -152,9 +152,15 @@ impl Expander {
macro_name: &str,
macro_body: &crate::tt::Subtree,
attributes: Option<&crate::tt::Subtree>,
+ def_site: TokenId,
+ call_site: TokenId,
+ mixed_site: TokenId,
) -> Result<crate::tt::Subtree, String> {
- let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes);
- result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+ let result = self
+ .inner
+ .proc_macros
+ .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site);
+ result.map_err(|e| e.into_string().unwrap_or_default())
}
pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
index 84bd15efb..56529f71d 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -10,10 +10,10 @@
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
-#![cfg(feature = "sysroot-abi")]
+#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
-#![allow(unreachable_pub)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(unreachable_pub, internal_features)]
extern crate proc_macro;
@@ -32,11 +32,23 @@ use std::{
};
use proc_macro_api::{
- msg::{self, CURRENT_API_VERSION},
+ msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION},
ProcMacroKind,
};
-use ::tt::token_id as tt;
+mod tt {
+ pub use proc_macro_api::msg::TokenId;
+
+ pub use ::tt::*;
+
+ pub type Subtree = ::tt::Subtree<TokenId>;
+ pub type TokenTree = ::tt::TokenTree<TokenId>;
+ pub type Delimiter = ::tt::Delimiter<TokenId>;
+ pub type Leaf = ::tt::Leaf<TokenId>;
+ pub type Literal = ::tt::Literal<TokenId>;
+ pub type Punct = ::tt::Punct<TokenId>;
+ pub type Ident = ::tt::Ident<TokenId>;
+}
// see `build.rs`
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
@@ -70,16 +82,28 @@ impl ProcMacroSrv {
None => None,
};
- let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION);
- let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION));
+ let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
+ let def_site = TokenId(def_site as u32);
+ let call_site = TokenId(call_site as u32);
+ let mixed_site = TokenId(mixed_site as u32);
+
+ let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
+ let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
let result = thread::scope(|s| {
let thread = thread::Builder::new()
.stack_size(EXPANDER_STACK_SIZE)
.name(task.macro_name.clone())
.spawn_scoped(s, || {
expander
- .expand(&task.macro_name, &macro_body, attributes.as_ref())
- .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION))
+ .expand(
+ &task.macro_name,
+ &macro_body,
+ attributes.as_ref(),
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
});
let res = match thread {
Ok(handle) => handle.join(),
@@ -136,8 +160,8 @@ pub struct PanicMessage {
}
impl PanicMessage {
- pub fn as_str(&self) -> Option<String> {
- self.message.clone()
+ pub fn into_string(self) -> Option<String> {
+ self.message
}
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
index 3c6f32033..716b85d09 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
@@ -1,16 +1,17 @@
//! Proc macro ABI
use libloading::Library;
-use proc_macro_api::{ProcMacroKind, RustCInfo};
+use proc_macro::bridge;
+use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo};
use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt};
pub(crate) struct ProcMacros {
- exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+ exported_macros: Vec<bridge::client::ProcMacro>,
}
-impl From<proc_macro::bridge::PanicMessage> for crate::PanicMessage {
- fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+impl From<bridge::PanicMessage> for crate::PanicMessage {
+ fn from(p: bridge::PanicMessage) -> Self {
Self { message: p.as_str().map(|s| s.to_string()) }
}
}
@@ -31,9 +32,8 @@ impl ProcMacros {
info: RustCInfo,
) -> Result<ProcMacros, LoadProcMacroDylibError> {
if info.version_string == crate::RUSTC_VERSION_STRING {
- let macros = unsafe {
- lib.get::<&&[proc_macro::bridge::client::ProcMacro]>(symbol_name.as_bytes())
- }?;
+ let macros =
+ unsafe { lib.get::<&&[bridge::client::ProcMacro]>(symbol_name.as_bytes()) }?;
return Ok(Self { exported_macros: macros.to_vec() });
}
@@ -45,6 +45,9 @@ impl ProcMacros {
macro_name: &str,
macro_body: &tt::Subtree,
attributes: Option<&tt::Subtree>,
+ def_site: TokenId,
+ call_site: TokenId,
+ mixed_site: TokenId,
) -> Result<tt::Subtree, crate::PanicMessage> {
let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone());
@@ -54,58 +57,76 @@ impl ProcMacros {
for proc_macro in &self.exported_macros {
match proc_macro {
- proc_macro::bridge::client::ProcMacro::CustomDerive {
- trait_name, client, ..
- } if *trait_name == macro_name => {
+ bridge::client::ProcMacro::CustomDerive { trait_name, client, .. }
+ if *trait_name == macro_name =>
+ {
let res = client.run(
- &proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ &bridge::server::SameThread,
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_body,
- true,
+ false,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
- proc_macro::bridge::client::ProcMacro::Bang { name, client }
- if *name == macro_name =>
- {
+ bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
- &proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ &bridge::server::SameThread,
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_body,
- true,
+ false,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
- proc_macro::bridge::client::ProcMacro::Attr { name, client }
- if *name == macro_name =>
- {
+ bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
- &proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ &bridge::server::SameThread,
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_attributes,
parsed_body,
- true,
+ false,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
_ => continue,
}
}
- Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into())
}
pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
self.exported_macros
.iter()
.map(|proc_macro| match proc_macro {
- proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
(trait_name.to_string(), ProcMacroKind::CustomDerive)
}
- proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ bridge::client::ProcMacro::Bang { name, .. } => {
(name.to_string(), ProcMacroKind::FuncLike)
}
- proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ bridge::client::ProcMacro::Attr { name, .. } => {
(name.to_string(), ProcMacroKind::Attr)
}
})
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
index fe18451d3..917d8a6e2 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
@@ -11,6 +11,7 @@
use proc_macro::bridge::{self, server};
mod token_stream;
+use proc_macro_api::msg::TokenId;
pub use token_stream::TokenStream;
use token_stream::TokenStreamBuilder;
@@ -43,6 +44,9 @@ pub struct FreeFunctions;
pub struct RustAnalyzer {
// FIXME: store span information here.
pub(crate) interner: SymbolInternerRef,
+ pub call_site: TokenId,
+ pub def_site: TokenId,
+ pub mixed_site: TokenId,
}
impl server::Types for RustAnalyzer {
@@ -54,6 +58,10 @@ impl server::Types for RustAnalyzer {
}
impl server::FreeFunctions for RustAnalyzer {
+ fn injected_env_var(&mut self, _var: &str) -> Option<String> {
+ None
+ }
+
fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
// FIXME: track env var accesses
// https://github.com/rust-lang/rust/pull/71858
@@ -69,7 +77,7 @@ impl server::FreeFunctions for RustAnalyzer {
kind: bridge::LitKind::Err,
symbol: Symbol::intern(self.interner, s),
suffix: None,
- span: tt::TokenId::unspecified(),
+ span: self.call_site,
})
}
@@ -83,7 +91,7 @@ impl server::TokenStream for RustAnalyzer {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
- src.parse().expect("cannot parse string")
+ Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
@@ -280,7 +288,7 @@ impl server::Span for RustAnalyzer {
}
fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
// FIXME stub
- tt::TokenId::unspecified()
+ self.call_site
}
/// Recent feature, not yet in the proc_macro
///
@@ -317,15 +325,15 @@ impl server::Span for RustAnalyzer {
}
fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
// FIXME handle span
- tt::TokenId::unspecified()
+ self.call_site
}
fn end(&mut self, _self_: Self::Span) -> Self::Span {
- tt::TokenId::unspecified()
+ self.call_site
}
fn start(&mut self, _self_: Self::Span) -> Self::Span {
- tt::TokenId::unspecified()
+ self.call_site
}
fn line(&mut self, _span: Self::Span) -> usize {
@@ -349,9 +357,9 @@ impl server::Symbol for RustAnalyzer {
impl server::Server for RustAnalyzer {
fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
bridge::ExpnGlobals {
- def_site: Span::unspecified(),
- call_site: Span::unspecified(),
- mixed_site: Span::unspecified(),
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
}
}
@@ -430,16 +438,16 @@ mod tests {
token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "struct".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
})),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "T".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
})),
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
+ open: tt::TokenId(0),
+ close: tt::TokenId(0),
kind: tt::DelimiterKind::Brace,
},
token_trees: vec![],
@@ -452,33 +460,32 @@ mod tests {
#[test]
fn test_ra_server_from_str() {
- use std::str::FromStr;
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
+ open: tt::TokenId(0),
+ close: tt::TokenId(0),
kind: tt::DelimiterKind::Parenthesis,
},
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "a".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
}))],
});
- let t1 = TokenStream::from_str("(a)").unwrap();
+ let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
assert_eq!(t1.token_trees.len(), 1);
assert_eq!(t1.token_trees[0], subtree_paren_a);
- let t2 = TokenStream::from_str("(a);").unwrap();
+ let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
assert_eq!(t2.token_trees.len(), 2);
assert_eq!(t2.token_trees[0], subtree_paren_a);
- let underscore = TokenStream::from_str("_").unwrap();
+ let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
assert_eq!(
underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "_".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
}))
);
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs
index 2589d8b64..36be88250 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs
@@ -1,5 +1,7 @@
//! TokenStream implementation used by sysroot ABI
+use proc_macro_api::msg::TokenId;
+
use crate::tt::{self, TokenTree};
#[derive(Debug, Default, Clone)]
@@ -20,8 +22,15 @@ impl TokenStream {
}
}
- pub(crate) fn into_subtree(self) -> tt::Subtree {
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
+ pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: call_site,
+ close: call_site,
+ kind: tt::DelimiterKind::Invisible,
+ },
+ token_trees: self.token_trees,
+ }
}
pub(super) fn is_empty(&self) -> bool {
@@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder {
/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
pub(super) mod token_stream {
- use std::str::FromStr;
+ use proc_macro_api::msg::TokenId;
use super::{tt, TokenStream, TokenTree};
@@ -109,14 +118,15 @@ pub(super) mod token_stream {
///
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later.
- impl FromStr for TokenStream {
- type Err = LexError;
+ #[rustfmt::skip]
+ impl /*FromStr for*/ TokenStream {
+ // type Err = LexError;
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- let (subtree, _token_map) =
- mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+ pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> {
+ let subtree =
+ mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
- let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site);
Ok(TokenStream::with_subtree(subtree))
}
}
@@ -127,43 +137,39 @@ pub(super) mod token_stream {
}
}
- fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ fn subtree_replace_token_ids_with_call_site(
+ subtree: tt::Subtree,
+ call_site: TokenId,
+ ) -> tt::Subtree {
tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- ..subtree.delimiter
- },
+ delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter },
token_trees: subtree
.token_trees
.into_iter()
- .map(token_tree_replace_token_ids_with_unspecified)
+ .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site))
.collect(),
}
}
- fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ fn token_tree_replace_token_ids_with_call_site(
+ tt: tt::TokenTree,
+ call_site: TokenId,
+ ) -> tt::TokenTree {
match tt {
tt::TokenTree::Leaf(leaf) => {
- tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site))
}
tt::TokenTree::Subtree(subtree) => {
- tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
}
}
}
- fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
match leaf {
- tt::Leaf::Literal(lit) => {
- tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
- }
- tt::Leaf::Punct(punct) => {
- tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
- }
- tt::Leaf::Ident(ident) => {
- tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
- }
+ tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
+ tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
+ tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
index 04a0ae7bc..b04e3ca19 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -8,7 +8,7 @@ use expect_test::expect;
#[test]
fn test_derive_empty() {
- assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]);
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]);
}
#[test]
@@ -17,12 +17,12 @@ fn test_derive_error() {
"DeriveError",
r#"struct S;"#,
expect![[r##"
- SUBTREE $$ 4294967295 4294967295
- IDENT compile_error 4294967295
- PUNCH ! [alone] 4294967295
- SUBTREE () 4294967295 4294967295
- LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
- PUNCH ; [alone] 4294967295"##]],
+ SUBTREE $$ 1 1
+ IDENT compile_error 1
+ PUNCH ! [alone] 1
+ SUBTREE () 1 1
+ LITERAL "#[derive(DeriveError)] struct S ;" 1
+ PUNCH ; [alone] 1"##]],
);
}
@@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() {
"fn_like_noop",
r#"ident, 0, 1, []"#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT ident 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 0 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 1 4294967295
- PUNCH , [alone] 4294967295
- SUBTREE [] 4294967295 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT ident 1
+ PUNCH , [alone] 1
+ LITERAL 0 1
+ PUNCH , [alone] 1
+ LITERAL 1 1
+ PUNCH , [alone] 1
+ SUBTREE [] 1 1"#]],
);
}
@@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
"fn_like_clone_tokens",
r#"ident, []"#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT ident 4294967295
- PUNCH , [alone] 4294967295
- SUBTREE [] 4294967295 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT ident 1
+ PUNCH , [alone] 1
+ SUBTREE [] 1 1"#]],
);
}
@@ -62,8 +62,8 @@ fn test_fn_like_macro_clone_raw_ident() {
"fn_like_clone_tokens",
"r#async",
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT r#async 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT r#async 1"#]],
);
}
@@ -73,14 +73,14 @@ fn test_fn_like_mk_literals() {
"fn_like_mk_literals",
r#""#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- LITERAL b"byte_string" 4294967295
- LITERAL 'c' 4294967295
- LITERAL "string" 4294967295
- LITERAL 3.14f64 4294967295
- LITERAL 3.14 4294967295
- LITERAL 123i64 4294967295
- LITERAL 123 4294967295"#]],
+ SUBTREE $$ 1 1
+ LITERAL b"byte_string" 1
+ LITERAL 'c' 1
+ LITERAL "string" 1
+ LITERAL 3.14f64 1
+ LITERAL 3.14 1
+ LITERAL 123i64 1
+ LITERAL 123 1"#]],
);
}
@@ -90,9 +90,9 @@ fn test_fn_like_mk_idents() {
"fn_like_mk_idents",
r#""#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT standard 4294967295
- IDENT r#raw 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT standard 1
+ IDENT r#raw 1"#]],
);
}
@@ -102,17 +102,17 @@ fn test_fn_like_macro_clone_literals() {
"fn_like_clone_tokens",
r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- LITERAL 1u16 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 2_u32 4294967295
- PUNCH , [alone] 4294967295
- PUNCH - [alone] 4294967295
- LITERAL 4i64 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 3.14f32 4294967295
- PUNCH , [alone] 4294967295
- LITERAL "hello bridge" 4294967295"#]],
+ SUBTREE $$ 1 1
+ LITERAL 1u16 1
+ PUNCH , [alone] 1
+ LITERAL 2_u32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL 4i64 1
+ PUNCH , [alone] 1
+ LITERAL 3.14f32 1
+ PUNCH , [alone] 1
+ LITERAL "hello bridge" 1"#]],
);
}
@@ -126,12 +126,12 @@ fn test_attr_macro() {
r#"mod m {}"#,
r#"some arguments"#,
expect![[r##"
- SUBTREE $$ 4294967295 4294967295
- IDENT compile_error 4294967295
- PUNCH ! [alone] 4294967295
- SUBTREE () 4294967295 4294967295
- LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
- PUNCH ; [alone] 4294967295"##]],
+ SUBTREE $$ 1 1
+ IDENT compile_error 1
+ PUNCH ! [alone] 1
+ SUBTREE () 1 1
+ LITERAL "#[attr_error(some arguments)] mod m {}" 1
+ PUNCH ; [alone] 1"##]],
);
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
index 49b4d973b..c12096d14 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,18 +1,18 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use std::str::FromStr;
+use proc_macro_api::msg::TokenId;
use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
-fn parse_string(code: &str) -> Option<crate::server::TokenStream> {
+fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
// This is a bit strange. We need to parse a string into a token stream into
// order to create a tt::SubTree from it in fixtures. `into_subtree` is
// implemented by all the ABIs we have so we arbitrarily choose one ABI to
// write a `parse_string` function for and use that. The tests don't really
// care which ABI we're using as the `into_subtree` function isn't part of
// the ABI and shouldn't change between ABI versions.
- crate::server::TokenStream::from_str(code).ok()
+ crate::server::TokenStream::from_str(code, call_site).ok()
}
pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
@@ -24,12 +24,24 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e
}
fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+ let def_site = TokenId(0);
+ let call_site = TokenId(1);
+ let mixed_site = TokenId(2);
let path = proc_macro_test_dylib_path();
let expander = dylib::Expander::new(&path).unwrap();
- let fixture = parse_string(input).unwrap();
- let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
-
- let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+ let fixture = parse_string(input, call_site).unwrap();
+ let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
+
+ let res = expander
+ .expand(
+ macro_name,
+ &fixture.into_subtree(call_site),
+ attr.as_ref(),
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .unwrap();
expect.assert_eq(&format!("{res:?}"));
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
index 77b4afd7d..12d7c07d3 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
doctest = false
[build-dependencies]
-cargo_metadata = "0.15.0"
+cargo_metadata.workspace = true
proc-macro-test-impl = { path = "imp", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
index feeacdb64..32510fba2 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
@@ -1,6 +1,6 @@
//! Exports a few trivial procedural macros for testing.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
index 6d57bc81e..739c6ec6f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
@@ -1,6 +1,6 @@
//! Exports a few trivial procedural macros for testing.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub static PROC_MACRO_TEST_LOCATION: &str =
include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt"));
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
index 937834a82..56ce9d11c 100644
--- a/src/tools/rust-analyzer/crates/profile/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -14,8 +14,8 @@ doctest = false
[dependencies]
once_cell = "1.17.0"
cfg-if = "1.0.0"
-libc = "0.2.135"
la-arena.workspace = true
+libc.workspace = true
countme = { version = "3.0.1", features = ["enable"] }
jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs
index e7fc3d970..fdd724e2a 100644
--- a/src/tools/rust-analyzer/crates/profile/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs
@@ -1,6 +1,6 @@
//! A collection of tools for profiling rust-analyzer.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod stop_watch;
mod memory_usage;
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
index 75977fc5b..3e48de645 100644
--- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -12,16 +12,16 @@ rust-version.workspace = true
doctest = false
[dependencies]
-tracing = "0.1.35"
+anyhow.workspace = true
+cargo_metadata.workspace = true
rustc-hash = "1.1.0"
-cargo_metadata = "0.15.0"
semver = "1.0.14"
serde_json.workspace = true
serde.workspace = true
+tracing.workspace = true
triomphe.workspace = true
-anyhow = "1.0.62"
la-arena.workspace = true
-itertools = "0.10.5"
+itertools.workspace = true
# local deps
base-db.workspace = true
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
index fb0f3ab7d..68cd40c04 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -73,6 +73,10 @@ impl WorkspaceBuildScripts {
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
cmd.args(&config.extra_args);
+ if let Some(target_dir) = &config.target_dir {
+ cmd.arg("--target-dir").arg(target_dir);
+ }
+
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --target
// flag below.
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index e47808a2c..ca3d6e059 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -96,6 +96,8 @@ pub struct CargoConfig {
pub extra_env: FxHashMap<String, String>,
pub invocation_strategy: InvocationStrategy,
pub invocation_location: InvocationLocation,
+ /// Optional path to use instead of `target` when building
+ pub target_dir: Option<PathBuf>,
}
pub type Package = Idx<PackageData>;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index 901dcfd2b..5f9b70828 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -15,7 +15,7 @@
//! procedural macros).
//! * Lowering of concrete model to a [`base_db::CrateGraph`]
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod manifest_path;
mod cargo_workspace;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
index 80897f747..931eba115 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
@@ -49,7 +49,7 @@
//! user explores them belongs to that extension (it's totally valid to change
//! rust-project.json over time via configuration request!)
-use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition};
+use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, DependencyKind, Edition};
use la_arena::RawIdx;
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
@@ -135,6 +135,7 @@ impl ProjectJson {
Dependency::new(
dep_data.name,
CrateId::from_raw(RawIdx::from(dep_data.krate as u32)),
+ DependencyKind::Normal,
)
})
.collect::<Vec<_>>(),
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
index 7815b9dda..4887b2981 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -58,7 +58,7 @@ fn load_cargo_with_sysroot(
&mut {
|path| {
let len = file_map.len();
- Some(*file_map.entry(path.to_path_buf()).or_insert(FileId(len as u32)))
+ Some(*file_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
}
},
&Default::default(),
@@ -142,7 +142,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacro
let mut counter = 0;
move |_path| {
counter += 1;
- Some(FileId(counter))
+ Some(FileId::from_raw(counter))
}
},
&Default::default(),
@@ -249,3 +249,55 @@ fn crate_graph_dedup() {
crate_graph.extend(regex_crate_graph, &mut regex_proc_macros);
assert_eq!(crate_graph.iter().count(), 118);
}
+
+#[test]
+fn test_deduplicate_origin_dev() {
+ let path_map = &mut Default::default();
+ let (mut crate_graph, _proc_macros) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_A.json");
+ crate_graph.sort_deps();
+ let (crate_graph_1, mut _proc_macros_2) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_B.json");
+
+ crate_graph.extend(crate_graph_1, &mut _proc_macros_2);
+
+ let mut crates_named_p2 = vec![];
+ for id in crate_graph.iter() {
+ let krate = &crate_graph[id];
+ if let Some(name) = krate.display_name.as_ref() {
+ if name.to_string() == "p2" {
+ crates_named_p2.push(krate);
+ }
+ }
+ }
+
+ assert!(crates_named_p2.len() == 1);
+ let p2 = crates_named_p2[0];
+ assert!(p2.origin.is_local());
+}
+
+#[test]
+fn test_deduplicate_origin_dev_rev() {
+ let path_map = &mut Default::default();
+ let (mut crate_graph, _proc_macros) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_B.json");
+ crate_graph.sort_deps();
+ let (crate_graph_1, mut _proc_macros_2) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_A.json");
+
+ crate_graph.extend(crate_graph_1, &mut _proc_macros_2);
+
+ let mut crates_named_p2 = vec![];
+ for id in crate_graph.iter() {
+ let krate = &crate_graph[id];
+ if let Some(name) = krate.display_name.as_ref() {
+ if name.to_string() == "p2" {
+ crates_named_p2.push(krate);
+ }
+ }
+ }
+
+ assert!(crates_named_p2.len() == 1);
+ let p2 = crates_named_p2[0];
+ assert!(p2.origin.is_local());
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index e0209ca15..933357035 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -6,8 +6,8 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr,
use anyhow::{format_err, Context};
use base_db::{
- CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
- FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
+ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind,
+ Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
};
use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf};
@@ -834,7 +834,7 @@ fn project_json_to_crate_graph(
for dep in &krate.deps {
if let Some(&to) = crates.get(&dep.crate_id) {
- add_dep(crate_graph, from, dep.name.clone(), to)
+ add_dep(crate_graph, from, dep.name.clone(), to, dep.kind().to_owned())
}
}
}
@@ -979,7 +979,7 @@ fn cargo_to_crate_graph(
// cargo metadata does not do any normalization,
// so we do it ourselves currently
let name = CrateName::normalize_dashes(&name);
- add_dep(crate_graph, from, name, to);
+ add_dep(crate_graph, from, name, to, DependencyKind::Normal);
}
}
}
@@ -999,7 +999,17 @@ fn cargo_to_crate_graph(
continue;
}
- add_dep(crate_graph, from, name.clone(), to)
+ add_dep(
+ crate_graph,
+ from,
+ name.clone(),
+ to,
+ match dep.kind {
+ DepKind::Normal => DependencyKind::Normal,
+ DepKind::Dev => DependencyKind::Dev,
+ DepKind::Build => DependencyKind::Build,
+ },
+ )
}
}
}
@@ -1187,7 +1197,17 @@ fn handle_rustc_crates(
let name = CrateName::new(&dep.name).unwrap();
if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
- add_dep(crate_graph, from, name.clone(), to);
+ add_dep(
+ crate_graph,
+ from,
+ name.clone(),
+ to,
+ match dep.kind {
+ DepKind::Normal => DependencyKind::Normal,
+ DepKind::Dev => DependencyKind::Dev,
+ DepKind::Build => DependencyKind::Build,
+ },
+ );
}
}
}
@@ -1209,7 +1229,7 @@ fn handle_rustc_crates(
// `rust_analyzer` thinks that it should use the one from the `rustc_source`
// instead of the one from `crates.io`
if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
- add_dep(crate_graph, *from, name.clone(), to);
+ add_dep(crate_graph, *from, name.clone(), to, DependencyKind::Normal);
}
}
}
@@ -1308,7 +1328,14 @@ impl SysrootPublicDeps {
/// Makes `from` depend on the public sysroot crates.
fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) {
for (name, krate, prelude) in &self.deps {
- add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
+ add_dep_with_prelude(
+ crate_graph,
+ from,
+ name.clone(),
+ *krate,
+ *prelude,
+ DependencyKind::Normal,
+ );
}
}
}
@@ -1363,7 +1390,7 @@ fn sysroot_to_crate_graph(
for &to in sysroot[from].deps.iter() {
let name = CrateName::new(&sysroot[to].name).unwrap();
if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) {
- add_dep(crate_graph, from, name, to);
+ add_dep(crate_graph, from, name, to, DependencyKind::Normal);
}
}
}
@@ -1442,8 +1469,14 @@ fn handle_hack_cargo_workspace(
.collect()
}
-fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
- add_dep_inner(graph, from, Dependency::new(name, to))
+fn add_dep(
+ graph: &mut CrateGraph,
+ from: CrateId,
+ name: CrateName,
+ to: CrateId,
+ kind: DependencyKind,
+) {
+ add_dep_inner(graph, from, Dependency::new(name, to, kind))
}
fn add_dep_with_prelude(
@@ -1452,12 +1485,20 @@ fn add_dep_with_prelude(
name: CrateName,
to: CrateId,
prelude: bool,
+ kind: DependencyKind,
) {
- add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
+ add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, kind))
}
fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) {
- add_dep_with_prelude(crate_graph, from, CrateName::new("proc_macro").unwrap(), to, prelude);
+ add_dep_with_prelude(
+ crate_graph,
+ from,
+ CrateName::new("proc_macro").unwrap(),
+ to,
+ prelude,
+ DependencyKind::Normal,
+ );
}
fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json
new file mode 100644
index 000000000..b0fb5845c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json
@@ -0,0 +1,140 @@
+{
+ "packages": [
+ {
+ "name": "p1",
+ "version": "0.1.0",
+ "id": "p1 0.1.0 (path+file:///example_project/p1)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "p2",
+ "source": null,
+ "req": "*",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null,
+ "path": "$ROOT$example_project/p2"
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "p1",
+ "src_path": "$ROOT$example_project/p1/src/lib.rs",
+ "edition": "2021",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$example_project/p1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2021",
+ "links": null,
+ "default_run": null,
+ "rust_version": null
+ },
+ {
+ "name": "p2",
+ "version": "0.1.0",
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "p2",
+ "src_path": "$ROOT$example_project/p2/src/lib.rs",
+ "edition": "2021",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$example_project/p2/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2021",
+ "links": null,
+ "default_run": null,
+ "rust_version": null
+ }
+ ],
+ "workspace_members": [
+ "p1 0.1.0 (path+file:///example_project/p1)"
+ ],
+ "workspace_default_members": [
+ "p1 0.1.0 (path+file:///example_project/p1)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "p1 0.1.0 (path+file:///example_project/p1)",
+ "dependencies": [
+ "p2 0.1.0 (path+file:///example_project/p2)"
+ ],
+ "deps": [
+ {
+ "name": "p2",
+ "pkg": "p2 0.1.0 (path+file:///example_project/p2)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "p1 0.1.0 (path+file:///example_project/p1)"
+ },
+ "target_directory": "$ROOT$example_project/p1/target",
+ "version": 1,
+ "workspace_root": "$ROOT$example_project/p1",
+ "metadata": null
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json
new file mode 100644
index 000000000..b5d1e16e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json
@@ -0,0 +1,66 @@
+{
+ "packages": [
+ {
+ "name": "p2",
+ "version": "0.1.0",
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "p2",
+ "src_path": "$ROOT$example_project/p2/src/lib.rs",
+ "edition": "2021",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$example_project/p2/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2021",
+ "links": null,
+ "default_run": null,
+ "rust_version": null
+ }
+ ],
+ "workspace_members": [
+ "p2 0.1.0 (path+file:///example_project/p2)"
+ ],
+ "workspace_default_members": [
+ "p2 0.1.0 (path+file:///example_project/p2)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "p2 0.1.0 (path+file:///example_project/p2)"
+ },
+ "target_directory": "$ROOT$example_project/p2/target",
+ "version": 1,
+ "workspace_root": "$ROOT$example_project/p2",
+ "metadata": null
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index 727d39a30..e98f016ca 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -48,6 +48,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -112,6 +113,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -119,6 +121,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -183,6 +186,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -190,6 +194,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -254,6 +259,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -261,6 +267,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index 727d39a30..e98f016ca 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -48,6 +48,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -112,6 +113,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -119,6 +121,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -183,6 +186,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -190,6 +194,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -254,6 +259,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -261,6 +267,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index 89728babd..7ecd53572 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -47,6 +47,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -110,6 +111,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -117,6 +119,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -180,6 +183,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -187,6 +191,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -250,6 +255,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -257,6 +263,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
index b7bf6cb27..581a6afc1 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
@@ -28,6 +28,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
],
@@ -168,6 +169,7 @@
name: CrateName(
"std",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -175,6 +177,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
],
@@ -249,6 +252,7 @@
name: CrateName(
"alloc",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -256,6 +260,7 @@
name: CrateName(
"panic_unwind",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -263,6 +268,7 @@
name: CrateName(
"panic_abort",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -270,6 +276,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -277,6 +284,7 @@
name: CrateName(
"profiler_builtins",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -284,6 +292,7 @@
name: CrateName(
"unwind",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -291,6 +300,7 @@
name: CrateName(
"std_detect",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -298,6 +308,7 @@
name: CrateName(
"test",
),
+ kind: Normal,
prelude: true,
},
],
@@ -438,6 +449,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -445,6 +457,7 @@
name: CrateName(
"alloc",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -452,6 +465,7 @@
name: CrateName(
"std",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -459,6 +473,7 @@
name: CrateName(
"test",
),
+ kind: Normal,
prelude: false,
},
Dependency {
@@ -466,6 +481,7 @@
name: CrateName(
"proc_macro",
),
+ kind: Normal,
prelude: false,
},
],
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
index 7410f0a3a..39ac338aa 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -19,44 +19,37 @@ name = "rust-analyzer"
path = "src/bin/main.rs"
[dependencies]
-anyhow = "1.0.62"
+anyhow.workspace = true
crossbeam-channel = "0.5.5"
-dissimilar = "1.0.4"
-itertools = "0.10.5"
-scip = "0.1.1"
+dissimilar.workspace = true
+itertools.workspace = true
+scip = "0.3.1"
lsp-types = { version = "=0.94.0", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.3.0"
oorandom = "11.1.3"
+rayon.workspace = true
rustc-hash = "1.1.0"
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
-rayon = "1.6.1"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
lsp-server.workspace = true
-tracing = "0.1.35"
-tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "registry",
- "fmt",
- "tracing-log",
-] }
-tracing-log = "0.1.3"
-tracing-tree = "0.2.1"
+tracing.workspace = true
+tracing-subscriber.workspace = true
+tracing-log = "0.2.0"
+tracing-tree.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
always-assert = "0.1.2"
-
-# These 3 deps are not used by r-a directly, but we list them here to lock in their versions
-# in our transitive deps to prevent them from pulling in windows-sys 0.45.0
-mio = "=0.8.5"
-parking_lot_core = "=0.9.6"
+walkdir = "2.3.2"
cfg.workspace = true
flycheck.workspace = true
hir-def.workspace = true
hir-ty.workspace = true
hir.workspace = true
+rustc-dependencies.workspace = true
ide-db.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
@@ -67,6 +60,7 @@ profile.workspace = true
project-model.workspace = true
stdx.workspace = true
syntax.workspace = true
+parser.workspace = true
toolchain.workspace = true
vfs-notify.workspace = true
vfs.workspace = true
@@ -79,7 +73,7 @@ jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = tr
[dev-dependencies]
expect-test = "1.4.0"
-xshell = "0.2.2"
+xshell.workspace = true
test-utils.workspace = true
sourcegen.workspace = true
@@ -89,4 +83,13 @@ mbe.workspace = true
jemalloc = ["jemallocator", "profile/jemalloc"]
force-always-assert = ["always-assert/force"]
sysroot-abi = []
-in-rust-tree = ["sysroot-abi", "ide/in-rust-tree", "syntax/in-rust-tree"]
+in-rust-tree = [
+ "sysroot-abi",
+ "ide/in-rust-tree",
+ "syntax/in-rust-tree",
+ "parser/in-rust-tree",
+ "rustc-dependencies/in-rust-tree",
+ "hir/in-rust-tree",
+ "hir-def/in-rust-tree",
+ "hir-ty/in-rust-tree",
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 2fa14fc7e..8472e49de 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -2,7 +2,11 @@
//!
//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#[cfg(feature = "in-rust-tree")]
+#[allow(unused_extern_crates)]
+extern crate rustc_driver;
mod logger;
mod rustc_wrapper;
@@ -83,6 +87,7 @@ fn main() -> anyhow::Result<()> {
flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?,
}
Ok(())
}
@@ -190,6 +195,12 @@ fn run_server() -> anyhow::Result<()> {
}
};
+ let mut is_visual_studio_code = false;
+ if let Some(client_info) = client_info {
+ tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
+ is_visual_studio_code = client_info.name.starts_with("Visual Studio Code");
+ }
+
let workspace_roots = workspace_folders
.map(|workspaces| {
workspaces
@@ -201,7 +212,7 @@ fn run_server() -> anyhow::Result<()> {
})
.filter(|workspaces| !workspaces.is_empty())
.unwrap_or_else(|| vec![root_path.clone()]);
- let mut config = Config::new(root_path, capabilities, workspace_roots);
+ let mut config = Config::new(root_path, capabilities, workspace_roots, is_visual_studio_code);
if let Some(json) = initialization_options {
if let Err(e) = config.update(json) {
use lsp_types::{
@@ -231,10 +242,6 @@ fn run_server() -> anyhow::Result<()> {
connection.initialize_finish(initialize_id, initialize_result)?;
- if let Some(client_info) = client_info {
- tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
- }
-
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
index c7b84c41b..728bade0d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -209,7 +209,7 @@ mod tests {
use super::*;
use cfg::CfgExpr;
- use mbe::syntax_node_to_token_tree;
+ use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{
ast::{self, AstNode},
SmolStr,
@@ -219,7 +219,7 @@ mod tests {
let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let (tt, _) = syntax_node_to_token_tree(tt.syntax());
+ let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
CfgExpr::parse(&tt)
};
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
index 64646b33a..de00c4192 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -10,6 +10,7 @@ mod ssr;
mod lsif;
mod scip;
mod run_tests;
+mod rustc_tests;
mod progress_report;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index dcb3ca658..1908c73b3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -8,7 +8,7 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
- Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name,
+ Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
@@ -762,7 +762,8 @@ impl flags::AnalysisStats {
group: true,
skip_glob_imports: true,
},
- prefer_no_std: Default::default(),
+ prefer_no_std: false,
+ prefer_prelude: true,
},
ide::AssistResolveStrategy::All,
file_id,
@@ -782,6 +783,7 @@ impl flags::AnalysisStats {
closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
closure_capture_hints: true,
binding_mode_hints: true,
+ implicit_drop_hints: true,
lifetime_elision_hints: ide::LifetimeElisionHints::Always,
param_names_for_lifetime_elision_hints: true,
hide_named_constructor_hints: false,
@@ -846,9 +848,7 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa
Err(SyntheticSyntax) => return "synthetic,,".to_string(),
};
let root = db.parse_or_expand(src.file_id);
- let node = src.map(|e| {
- e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
- });
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id);
@@ -888,12 +888,7 @@ fn pat_syntax_range(
let src = sm.pat_syntax(pat_id);
if let Ok(src) = src {
let root = db.parse_or_expand(src.file_id);
- let node = src.map(|e| {
- e.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- });
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 8541be715..abec26794 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -4,7 +4,7 @@
use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
-use hir::{db::HirDatabase, Crate, Module};
+use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
use ide_db::base_db::SourceDatabaseExt;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
index 419440b6d..5633c0c48 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -98,6 +98,15 @@ xflags::xflags! {
required path: PathBuf
}
+ /// Run unit tests of the project using mir interpreter
+ cmd rustc-tests {
+ /// Directory with Cargo.toml.
+ required rustc_repo: PathBuf
+
+ /// Only run tests with filter as substring
+ optional --filter path: String
+ }
+
cmd diagnostics {
/// Directory with Cargo.toml.
required path: PathBuf
@@ -131,6 +140,9 @@ xflags::xflags! {
/// The output path where the SCIP file will be written to. Defaults to `index.scip`.
optional --output path: PathBuf
+
+ /// A path to an json configuration file that can be used to customize cargo behavior.
+ optional --config-path config_path: PathBuf
}
}
}
@@ -156,6 +168,7 @@ pub enum RustAnalyzerCmd {
Highlight(Highlight),
AnalysisStats(AnalysisStats),
RunTests(RunTests),
+ RustcTests(RustcTests),
Diagnostics(Diagnostics),
Ssr(Ssr),
Search(Search),
@@ -209,6 +222,12 @@ pub struct RunTests {
}
#[derive(Debug)]
+pub struct RustcTests {
+ pub rustc_repo: PathBuf,
+ pub filter: Option<String>,
+}
+
+#[derive(Debug)]
pub struct Diagnostics {
pub path: PathBuf,
@@ -239,6 +258,7 @@ pub struct Scip {
pub path: PathBuf,
pub output: Option<PathBuf>,
+ pub config_path: Option<PathBuf>,
}
impl RustAnalyzer {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
new file mode 100644
index 000000000..c89b88ac0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -0,0 +1,236 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use std::{
+ cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
+};
+
+use hir::Crate;
+use ide::{AnalysisHost, Change, DiagnosticCode, DiagnosticsConfig};
+use profile::StopWatch;
+use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
+
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use triomphe::Arc;
+use vfs::{AbsPathBuf, FileId};
+use walkdir::WalkDir;
+
+use crate::cli::{flags, report_metric, Result};
+
+struct Tester {
+ host: AnalysisHost,
+ root_file: FileId,
+ pass_count: u64,
+ ignore_count: u64,
+ fail_count: u64,
+ stopwatch: StopWatch,
+}
+
+fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
+ thread_local! {
+ static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+ }
+ LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
+ Some(c) => *c,
+ None => {
+ let v = DiagnosticCode::RustcHardError(format!("E{code}").leak());
+ s.insert(code.to_owned(), v);
+ v
+ }
+ })
+}
+
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+ let text = read_to_string(p).unwrap();
+ let mut result = HashMap::new();
+ {
+ let mut text = &*text;
+ while let Some(p) = text.find("error[E") {
+ text = &text[p + 7..];
+ let code = string_to_diagnostic_code_leaky(&text[..4]);
+ *result.entry(code).or_insert(0) += 1;
+ }
+ }
+ result
+}
+
+impl Tester {
+ fn new() -> Result<Self> {
+ let tmp_file = AbsPathBuf::assert("/tmp/ra-rustc-test.rs".into());
+ std::fs::write(&tmp_file, "")?;
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
+ let workspace = ProjectWorkspace::DetachedFiles {
+ files: vec![tmp_file.clone()],
+ sysroot: Ok(
+ Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env).unwrap()
+ ),
+ rustc_cfg: vec![],
+ };
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ let db = host.raw_database();
+ let krates = Crate::all(db);
+ let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap();
+ let root_file = root_crate.root_file(db);
+ Ok(Self {
+ host,
+ root_file,
+ pass_count: 0,
+ ignore_count: 0,
+ fail_count: 0,
+ stopwatch: StopWatch::start(),
+ })
+ }
+
+ fn test(&mut self, p: PathBuf) {
+ if p.parent().unwrap().file_name().unwrap() == "auxiliary" {
+ // These are not tests
+ return;
+ }
+ if IGNORED_TESTS.iter().any(|ig| p.file_name().is_some_and(|x| x == *ig)) {
+ println!("{p:?} IGNORE");
+ self.ignore_count += 1;
+ return;
+ }
+ let stderr_path = p.with_extension("stderr");
+ let expected = if stderr_path.exists() {
+ detect_errors_from_rustc_stderr_file(stderr_path)
+ } else {
+ HashMap::new()
+ };
+ let text = read_to_string(&p).unwrap();
+ let mut change = Change::new();
+ // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
+ let mut ignore_test = text.contains("#![feature");
+ // Ignore test with extern crates, as this infra don't support them yet.
+ ignore_test |= text.contains("// aux-build:") || text.contains("// aux-crate:");
+ // Ignore test with extern modules similarly.
+ ignore_test |= text.contains("mod ");
+ // These should work, but they don't, and I don't know why, so ignore them.
+ ignore_test |= text.contains("extern crate proc_macro");
+ let should_have_no_error = text.contains("// check-pass")
+ || text.contains("// build-pass")
+ || text.contains("// run-pass");
+ change.change_file(self.root_file, Some(Arc::from(text)));
+ self.host.apply_change(change);
+ let diagnostic_config = DiagnosticsConfig::test_sample();
+ let diags = self
+ .host
+ .analysis()
+ .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
+ .unwrap();
+ let mut actual = HashMap::new();
+ for diag in diags {
+ if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
+ continue;
+ }
+ if !should_have_no_error && !SUPPORTED_DIAGNOSTICS.contains(&diag.code) {
+ continue;
+ }
+ *actual.entry(diag.code).or_insert(0) += 1;
+ }
+ // Ignore tests with diagnostics that we don't emit.
+ ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k));
+ if ignore_test {
+ println!("{p:?} IGNORE");
+ self.ignore_count += 1;
+ } else if actual == expected {
+ println!("{p:?} PASS");
+ self.pass_count += 1;
+ } else {
+ println!("{p:?} FAIL");
+ println!("actual (r-a) = {:?}", actual);
+ println!("expected (rustc) = {:?}", expected);
+ self.fail_count += 1;
+ }
+ }
+
+ fn report(&mut self) {
+ println!(
+ "Pass count = {}, Fail count = {}, Ignore count = {}",
+ self.pass_count, self.fail_count, self.ignore_count
+ );
+ println!("Testing time and memory = {}", self.stopwatch.elapsed());
+ report_metric("rustc failed tests", self.fail_count, "#");
+ report_metric("rustc testing time", self.stopwatch.elapsed().time.as_millis() as u64, "ms");
+ }
+}
+
+/// These tests break rust-analyzer (either by panicking or hanging) so we should ignore them.
+const IGNORED_TESTS: &[&str] = &[
+ "trait-with-missing-associated-type-restriction.rs", // #15646
+ "trait-with-missing-associated-type-restriction-fixable.rs", // #15646
+ "resolve-self-in-impl.rs",
+ "basic.rs", // ../rust/tests/ui/associated-type-bounds/return-type-notation/basic.rs
+ "issue-26056.rs",
+ "float-field.rs",
+ "invalid_operator_trait.rs",
+ "type-alias-impl-trait-assoc-dyn.rs",
+ "deeply-nested_closures.rs", // exponential time
+ "hang-on-deeply-nested-dyn.rs", // exponential time
+ "dyn-rpit-and-let.rs", // unexpected free variable with depth `^1.0` with outer binder ^0
+ "issue-16098.rs", // Huge recursion limit for macros?
+ "issue-83471.rs", // crates/hir-ty/src/builder.rs:78:9: assertion failed: self.remaining() > 0
+];
+
+const SUPPORTED_DIAGNOSTICS: &[DiagnosticCode] = &[
+ DiagnosticCode::RustcHardError("E0023"),
+ DiagnosticCode::RustcHardError("E0046"),
+ DiagnosticCode::RustcHardError("E0063"),
+ DiagnosticCode::RustcHardError("E0107"),
+ DiagnosticCode::RustcHardError("E0117"),
+ DiagnosticCode::RustcHardError("E0133"),
+ DiagnosticCode::RustcHardError("E0210"),
+ DiagnosticCode::RustcHardError("E0268"),
+ DiagnosticCode::RustcHardError("E0308"),
+ DiagnosticCode::RustcHardError("E0384"),
+ DiagnosticCode::RustcHardError("E0407"),
+ DiagnosticCode::RustcHardError("E0432"),
+ DiagnosticCode::RustcHardError("E0451"),
+ DiagnosticCode::RustcHardError("E0507"),
+ DiagnosticCode::RustcHardError("E0583"),
+ DiagnosticCode::RustcHardError("E0559"),
+ DiagnosticCode::RustcHardError("E0616"),
+ DiagnosticCode::RustcHardError("E0618"),
+ DiagnosticCode::RustcHardError("E0624"),
+ DiagnosticCode::RustcHardError("E0774"),
+ DiagnosticCode::RustcHardError("E0767"),
+ DiagnosticCode::RustcHardError("E0777"),
+];
+
+impl flags::RustcTests {
+ pub fn run(self) -> Result<()> {
+ let mut tester = Tester::new()?;
+ let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui"));
+ for i in walk_dir {
+ let i = i?;
+ let p = i.into_path();
+ if let Some(f) = &self.filter {
+ if !p.as_os_str().to_string_lossy().contains(f) {
+ continue;
+ }
+ }
+ if p.extension().map_or(true, |x| x != "rs") {
+ continue;
+ }
+ if let Err(e) = std::panic::catch_unwind({
+ let tester = AssertUnwindSafe(&mut tester);
+ let p = p.clone();
+ move || {
+ let tester = tester;
+ tester.0.test(p);
+ }
+ }) {
+ println!("panic detected at test {:?}", p);
+ std::panic::resume_unwind(e);
+ }
+ }
+ tester.report();
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 8c056fff0..30e11402c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -11,10 +11,8 @@ use ide::{
TokenStaticData,
};
use ide_db::LineIndexDatabase;
-use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use scip::types as scip_types;
-use std::env;
use crate::{
cli::flags,
@@ -25,8 +23,6 @@ impl flags::Scip {
pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating SCIP start...");
let now = Instant::now();
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {
@@ -34,14 +30,27 @@ impl flags::Scip {
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
};
- let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
- let rootpath = path.normalize();
- let manifest = ProjectManifest::discover_single(&path)?;
+ let root = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(&self.path)).normalize();
- let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+ let mut config = crate::config::Config::new(
+ root.clone(),
+ lsp_types::ClientCapabilities::default(),
+ /* workspace_roots = */ vec![],
+ /* is_visual_studio_code = */ false,
+ );
- let (host, vfs, _) =
- load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ if let Some(p) = self.config_path {
+ let mut file = std::io::BufReader::new(std::fs::File::open(p)?);
+ let json = serde_json::from_reader(&mut file)?;
+ config.update(json)?;
+ }
+ let cargo_config = config.cargo();
+ let (host, vfs, _) = load_workspace_at(
+ root.as_path().as_ref(),
+ &cargo_config,
+ &load_cargo_config,
+ &no_progress,
+ )?;
let db = host.raw_database();
let analysis = host.analysis();
@@ -58,8 +67,7 @@ impl flags::Scip {
.into(),
project_root: format!(
"file://{}",
- path.normalize()
- .as_os_str()
+ root.as_os_str()
.to_str()
.ok_or(anyhow::format_err!("Unable to normalize project_root path"))?
),
@@ -80,7 +88,7 @@ impl flags::Scip {
new_symbol
};
- let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
+ let relative_path = match get_relative_filepath(&vfs, &root, file_id) {
Some(relative_path) => relative_path,
None => continue,
};
@@ -125,6 +133,10 @@ impl flags::Scip {
documentation: documentation.unwrap_or_default(),
relationships: Vec::new(),
special_fields: Default::default(),
+ kind: Default::default(),
+ display_name: String::new(),
+ signature_documentation: Default::default(),
+ enclosing_symbol: String::new(),
};
symbols.push(symbol_info)
@@ -139,6 +151,7 @@ impl flags::Scip {
syntax_kind: Default::default(),
diagnostics: Vec::new(),
special_fields: Default::default(),
+ enclosing_range: Vec::new(),
});
});
@@ -152,6 +165,7 @@ impl flags::Scip {
occurrences,
symbols,
special_fields: Default::default(),
+ text: String::new(),
});
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index ea3a21241..258f74106 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -91,6 +91,12 @@ config_data! {
/// and should therefore include `--message-format=json` or a similar
/// option.
///
+ /// If there are multiple linked projects/workspaces, this command is invoked for
+ /// each of them, with the working directory being the workspace root
+ /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten
+ /// by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and
+ /// `#rust-analyzer.cargo.buildScripts.invocationLocation#`.
+ ///
/// By default, a cargo invocation will be constructed for the configured
/// targets and features, with the following base command line:
///
@@ -182,9 +188,11 @@ config_data! {
/// Cargo, you might also want to change
/// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
///
- /// If there are multiple linked projects, this command is invoked for
- /// each of them, with the working directory being the project root
- /// (i.e., the folder containing the `Cargo.toml`).
+ /// If there are multiple linked projects/workspaces, this command is invoked for
+ /// each of them, with the working directory being the workspace root
+ /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten
+ /// by changing `#rust-analyzer.cargo.check.invocationStrategy#` and
+ /// `#rust-analyzer.cargo.check.invocationLocation#`.
///
/// An example command would be:
///
@@ -209,6 +217,8 @@ config_data! {
completion_autoself_enable: bool = "true",
/// Whether to add parenthesis and argument snippets when completing function.
completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"",
+ /// Whether to show full function/method signatures in completion docs.
+ completion_fullFunctionSignatures_enable: bool = "false",
/// Maximum number of completions to return. If `None`, the limit is infinite.
completion_limit: Option<usize> = "null",
/// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
@@ -342,7 +352,9 @@ config_data! {
/// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
imports_merge_glob: bool = "true",
/// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
- imports_prefer_no_std: bool = "false",
+ imports_preferNoStd | imports_prefer_no_std: bool = "false",
+ /// Whether to prefer import paths containing a `prelude` module.
+ imports_preferPrelude: bool = "false",
/// The path structure for newly inserted paths to use.
imports_prefix: ImportPrefixDef = "\"plain\"",
@@ -369,6 +381,8 @@ config_data! {
inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false",
/// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"",
+ /// Whether to show implicit drop hints.
+ inlayHints_implicitDrops_enable: bool = "false",
/// Whether to show inlay type hints for elided lifetimes in function signatures.
inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
/// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
@@ -470,6 +484,14 @@ config_data! {
/// tests or binaries. For example, it may be `--release`.
runnables_extraArgs: Vec<String> = "[]",
+ /// Optional path to a rust-analyzer specific target directory.
+ /// This prevents rust-analyzer's `cargo check` from locking the `Cargo.lock`
+ /// at the expense of duplicating build artifacts.
+ ///
+ /// Set to `true` to use a subdirectory of the existing target directory or
+ /// set to a path relative to the workspace to use that path.
+ rust_analyzerTargetDir: Option<TargetDirectory> = "null",
+
/// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
/// projects, or "discover" to try to automatically find it if the `rustc-dev` component
/// is installed.
@@ -565,6 +587,7 @@ pub struct Config {
data: ConfigData,
detached_files: Vec<AbsPathBuf>,
snippets: Vec<Snippet>,
+ is_visual_studio_code: bool,
}
type ParallelCachePrimingNumThreads = u8;
@@ -755,11 +778,14 @@ impl fmt::Display for ConfigError {
}
}
+impl std::error::Error for ConfigError {}
+
impl Config {
pub fn new(
root_path: AbsPathBuf,
caps: ClientCapabilities,
workspace_roots: Vec<AbsPathBuf>,
+ is_visual_studio_code: bool,
) -> Self {
Config {
caps,
@@ -769,6 +795,7 @@ impl Config {
root_path,
snippets: Default::default(),
workspace_roots,
+ is_visual_studio_code,
}
}
@@ -1094,7 +1121,8 @@ impl Config {
ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
},
insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_prefer_no_std,
+ prefer_no_std: self.data.imports_preferNoStd,
+ prefer_prelude: self.data.imports_preferPrelude,
}
}
@@ -1248,6 +1276,7 @@ impl Config {
run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
extra_args: self.data.cargo_extraArgs.clone(),
extra_env: self.data.cargo_extraEnv.clone(),
+ target_dir: self.target_dir_from_config(),
}
}
@@ -1320,10 +1349,22 @@ impl Config {
extra_args: self.check_extra_args(),
extra_env: self.check_extra_env(),
ansi_color_output: self.color_diagnostic_output(),
+ target_dir: self.target_dir_from_config(),
},
}
}
+ // FIXME: This should be an AbsolutePathBuf
+ fn target_dir_from_config(&self) -> Option<PathBuf> {
+ self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir {
+ TargetDirectory::UseSubdirectory(yes) if *yes => {
+ Some(PathBuf::from("target/rust-analyzer"))
+ }
+ TargetDirectory::UseSubdirectory(_) => None,
+ TargetDirectory::Directory(dir) => Some(dir.clone()),
+ })
+ }
+
pub fn check_on_save(&self) -> bool {
self.data.checkOnSave
}
@@ -1353,6 +1394,7 @@ impl Config {
type_hints: self.data.inlayHints_typeHints_enable,
parameter_hints: self.data.inlayHints_parameterHints_enable,
chaining_hints: self.data.inlayHints_chainingHints_enable,
+ implicit_drop_hints: self.data.inlayHints_implicitDrops_enable,
discriminant_hints: match self.data.inlayHints_discriminantHints_enable {
DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,
@@ -1444,13 +1486,15 @@ impl Config {
&& completion_item_edit_resolve(&self.caps),
enable_self_on_the_fly: self.data.completion_autoself_enable,
enable_private_editable: self.data.completion_privateEditable_enable,
+ full_function_signatures: self.data.completion_fullFunctionSignatures_enable,
callable: match self.data.completion_callable_snippets {
CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
CallableCompletionDef::None => None,
},
insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_prefer_no_std,
+ prefer_no_std: self.data.imports_preferNoStd,
+ prefer_prelude: self.data.imports_preferPrelude,
snippet_cap: SnippetCap::new(try_or_def!(
self.caps
.text_document
@@ -1479,7 +1523,8 @@ impl Config {
snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
allowed: None,
insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_prefer_no_std,
+ prefer_no_std: self.data.imports_preferNoStd,
+ prefer_prelude: self.data.imports_preferPrelude,
assist_emit_must_use: self.data.assist_emitMustUse,
}
}
@@ -1667,6 +1712,12 @@ impl Config {
pub fn typing_autoclose_angle(&self) -> bool {
self.data.typing_autoClosingAngleBrackets_enable
}
+
+ // FIXME: VSCode seems to work wrong sometimes, see https://github.com/microsoft/vscode/issues/193124
+ // hence, distinguish it for now.
+ pub fn is_visual_studio_code(&self) -> bool {
+ self.is_visual_studio_code
+ }
}
// Deserialization definitions
@@ -2015,6 +2066,14 @@ pub enum MemoryLayoutHoverRenderKindDef {
Both,
}
+#[derive(Deserialize, Debug, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+#[serde(untagged)]
+pub enum TargetDirectory {
+ UseSubdirectory(bool),
+ Directory(PathBuf),
+}
+
macro_rules! _config_data {
(struct $name:ident {
$(
@@ -2443,6 +2502,19 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
},
],
},
+ "Option<TargetDirectory>" => set! {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "string"
+ },
+ ],
+ },
_ => panic!("missing entry for {ty}: {default}"),
}
@@ -2555,8 +2627,12 @@ mod tests {
#[test]
fn proc_macro_srv_null() {
- let mut config =
- Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
config
.update(serde_json::json!({
"procMacro_server": null,
@@ -2567,8 +2643,12 @@ mod tests {
#[test]
fn proc_macro_srv_abs() {
- let mut config =
- Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
config
.update(serde_json::json!({
"procMacro": {"server": project_root().display().to_string()}
@@ -2579,8 +2659,12 @@ mod tests {
#[test]
fn proc_macro_srv_rel() {
- let mut config =
- Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
config
.update(serde_json::json!({
"procMacro": {"server": "./server"}
@@ -2591,4 +2675,67 @@ mod tests {
Some(AbsPathBuf::try_from(project_root().join("./server")).unwrap())
);
}
+
+ #[test]
+ fn cargo_target_dir_unset() {
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
+ config
+ .update(serde_json::json!({
+ "rust": { "analyzerTargetDir": null }
+ }))
+ .unwrap();
+ assert_eq!(config.data.rust_analyzerTargetDir, None);
+ assert!(
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == None)
+ );
+ }
+
+ #[test]
+ fn cargo_target_dir_subdir() {
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
+ config
+ .update(serde_json::json!({
+ "rust": { "analyzerTargetDir": true }
+ }))
+ .unwrap();
+ assert_eq!(
+ config.data.rust_analyzerTargetDir,
+ Some(TargetDirectory::UseSubdirectory(true))
+ );
+ assert!(
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(PathBuf::from("target/rust-analyzer")))
+ );
+ }
+
+ #[test]
+ fn cargo_target_dir_relative_dir() {
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
+ config
+ .update(serde_json::json!({
+ "rust": { "analyzerTargetDir": "other_folder" }
+ }))
+ .unwrap();
+ assert_eq!(
+ config.data.rust_analyzerTargetDir,
+ Some(TargetDirectory::Directory(PathBuf::from("other_folder")))
+ );
+ assert!(
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(PathBuf::from("other_folder")))
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
index 71701ef16..f80beb9ca 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
@@ -5,6 +5,7 @@ use std::mem;
use ide::FileId;
use ide_db::FxHashMap;
+use itertools::Itertools;
use nohash_hasher::{IntMap, IntSet};
use rustc_hash::FxHashSet;
use triomphe::Arc;
@@ -129,8 +130,28 @@ pub(crate) fn fetch_native_diagnostics(
) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
let _p = profile::span("fetch_native_diagnostics");
let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
- subscriptions
- .into_iter()
+
+ let convert_diagnostic =
+ |line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic {
+ range: lsp::to_proto::range(&line_index, d.range.range),
+ severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
+ code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
+ code_description: Some(lsp_types::CodeDescription {
+ href: lsp_types::Url::parse(&d.code.url()).unwrap(),
+ }),
+ source: Some("rust-analyzer".to_string()),
+ message: d.message,
+ related_information: None,
+ tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
+ data: None,
+ };
+
+ // the diagnostics produced may point to different files not requested by the concrete request,
+ // put those into here and filter later
+ let mut odd_ones = Vec::new();
+ let mut diagnostics = subscriptions
+ .iter()
+ .copied()
.filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?;
let diagnostics = snapshot
@@ -142,21 +163,39 @@ pub(crate) fn fetch_native_diagnostics(
)
.ok()?
.into_iter()
- .map(move |d| lsp_types::Diagnostic {
- range: lsp::to_proto::range(&line_index, d.range),
- severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
- code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
- code_description: Some(lsp_types::CodeDescription {
- href: lsp_types::Url::parse(&d.code.url()).unwrap(),
- }),
- source: Some("rust-analyzer".to_string()),
- message: d.message,
- related_information: None,
- tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
- data: None,
+ .filter_map(|d| {
+ if d.range.file_id == file_id {
+ Some(convert_diagnostic(&line_index, d))
+ } else {
+ odd_ones.push(d);
+ None
+ }
})
.collect::<Vec<_>>();
Some((file_id, diagnostics))
})
- .collect()
+ .collect::<Vec<_>>();
+
+ // Add back any diagnostics that point to files we are subscribed to
+ for (file_id, group) in odd_ones
+ .into_iter()
+ .sorted_by_key(|it| it.range.file_id)
+ .group_by(|it| it.range.file_id)
+ .into_iter()
+ {
+ if !subscriptions.contains(&file_id) {
+ continue;
+ }
+ let Some((_, diagnostics)) = diagnostics.iter_mut().find(|&&mut (id, _)| id == file_id)
+ else {
+ continue;
+ };
+ let Some(line_index) = snapshot.file_line_index(file_id).ok() else {
+ break;
+ };
+ for diagnostic in group {
+ diagnostics.push(convert_diagnostic(&line_index, diagnostic));
+ }
+ }
+ diagnostics
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
index 731580557..f8bc66ff8 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -538,7 +538,12 @@ mod tests {
let (sender, _) = crossbeam_channel::unbounded();
let state = GlobalState::new(
sender,
- Config::new(workspace_root.to_path_buf(), ClientCapabilities::default(), Vec::new()),
+ Config::new(
+ workspace_root.to_path_buf(),
+ ClientCapabilities::default(),
+ Vec::new(),
+ false,
+ ),
);
let snap = state.snapshot();
let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index c09f57252..0f31fe160 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -187,11 +187,9 @@ impl GlobalState {
config_errors: Default::default(),
proc_macro_changed: false,
- // FIXME: use `Arc::from_iter` when it becomes available
- proc_macro_clients: Arc::from(Vec::new()),
+ proc_macro_clients: Arc::from_iter([]),
- // FIXME: use `Arc::from_iter` when it becomes available
- flycheck: Arc::from(Vec::new()),
+ flycheck: Arc::from_iter([]),
flycheck_sender,
flycheck_receiver,
last_flycheck_error: None,
@@ -202,7 +200,7 @@ impl GlobalState {
vfs_progress_n_total: 0,
vfs_progress_n_done: 0,
- workspaces: Arc::new(Vec::new()),
+ workspaces: Arc::from(Vec::new()),
crate_graph_file_dependencies: FxHashSet::default(),
fetch_workspaces_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index b8a1a39be..d8a590c80 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -4,6 +4,7 @@
use std::{
fs,
io::Write as _,
+ path::PathBuf,
process::{self, Stdio},
};
@@ -11,8 +12,8 @@ use anyhow::Context;
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
- HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
- Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+ HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit,
+ ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
};
use ide_db::SymbolKind;
use lsp_server::ErrorCode;
@@ -50,8 +51,7 @@ use crate::{
};
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- // FIXME: use `Arc::from_iter` when it becomes available
- state.proc_macro_clients = Arc::from(Vec::new());
+ state.proc_macro_clients = Arc::from_iter([]);
state.proc_macro_changed = false;
state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), false);
@@ -59,8 +59,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow:
}
pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- // FIXME: use `Arc::from_iter` when it becomes available
- state.proc_macro_clients = Arc::from(Vec::new());
+ state.proc_macro_clients = Arc::from_iter([]);
state.proc_macro_changed = false;
state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ());
@@ -1410,7 +1409,7 @@ pub(crate) fn handle_inlay_hints(
let inlay_hints_config = snap.config.inlay_hints();
Ok(Some(
snap.analysis
- .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+ .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))?
.into_iter()
.map(|it| {
to_proto::inlay_hint(
@@ -1437,26 +1436,17 @@ pub(crate) fn handle_inlay_hints_resolve(
};
let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
- let file_id = FileId(resolve_data.file_id);
+ let file_id = FileId::from_raw(resolve_data.file_id);
anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
let line_index = snap.file_line_index(file_id)?;
- let range = from_proto::text_range(
- &line_index,
- lsp_types::Range { start: original_hint.position, end: original_hint.position },
- )?;
- let range_start = range.start();
- let range_end = range.end();
- let large_range = TextRange::new(
- range_start.checked_sub(1.into()).unwrap_or(range_start),
- range_end.checked_add(1.into()).unwrap_or(range_end),
- );
+ let hint_position = from_proto::offset(&line_index, original_hint.position)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints(
&forced_resolve_inlay_hints_config,
file_id,
- Some(large_range),
+ Some(RangeLimit::NearestParent(hint_position)),
)?;
let mut resolved_hints = resolve_hints
@@ -1995,7 +1985,25 @@ fn run_rustfmt(
cmd
}
RustfmtConfig::CustomCommand { command, args } => {
- let mut cmd = process::Command::new(command);
+ let cmd = PathBuf::from(&command);
+ let workspace = CargoTargetSpec::for_file(&snap, file_id)?;
+ let mut cmd = match workspace {
+ Some(spec) => {
+ // approach: if the command name contains a path separator, join it with the workspace root.
+ // however, if the path is absolute, joining will result in the absolute path being preserved.
+ // as a fallback, rely on $PATH-based discovery.
+ let cmd_path =
+ if cfg!(windows) && command.contains(&[std::path::MAIN_SEPARATOR, '/']) {
+ spec.workspace_root.join(cmd).into()
+ } else if command.contains(std::path::MAIN_SEPARATOR) {
+ spec.workspace_root.join(cmd).into()
+ } else {
+ cmd
+ };
+ process::Command::new(cmd_path)
+ }
+ None => process::Command::new(cmd),
+ };
cmd.envs(snap.config.extra_env());
cmd.args(args);
@@ -2003,6 +2011,8 @@ fn run_rustfmt(
}
};
+ tracing::debug!(?command, "created format command");
+
// try to chdir to the file so we can respect `rustfmt.toml`
// FIXME: use `rustfmt --config-path` once
// https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index 5a11012b9..41ff17f5e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -30,9 +30,12 @@ fn integrated_highlighting_benchmark() {
// Load rust-analyzer itself.
let workspace_to_load = project_root();
- let file = "./crates/ide-db/src/apply_change.rs";
+ let file = "./crates/rust-analyzer/src/config.rs";
- let cargo_config = CargoConfig::default();
+ let cargo_config = CargoConfig {
+ sysroot: Some(project_model::RustLibSource::Discover),
+ ..CargoConfig::default()
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::None,
@@ -57,7 +60,6 @@ fn integrated_highlighting_benchmark() {
}
profile::init_from("*>100");
- // let _s = profile::heartbeat_span();
{
let _it = stdx::timeit("change");
@@ -86,7 +88,10 @@ fn integrated_completion_benchmark() {
let workspace_to_load = project_root();
let file = "./crates/hir/src/lib.rs";
- let cargo_config = CargoConfig::default();
+ let cargo_config = CargoConfig {
+ sysroot: Some(project_model::RustLibSource::Discover),
+ ..CargoConfig::default()
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::None,
@@ -104,10 +109,46 @@ fn integrated_completion_benchmark() {
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
+ // kick off parsing and index population
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ + "sel".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::from(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
{
- let _it = stdx::timeit("initial");
+ let _span = profile::cpu_span();
let analysis = host.analysis();
- analysis.highlight_as_html(file_id, false).unwrap();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ full_function_signatures: false,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ prefer_no_std: false,
+ prefer_prelude: true,
+ limit: None,
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
}
profile::init_from("*>5");
@@ -117,8 +158,8 @@ fn integrated_completion_benchmark() {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
- patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
- + "sel".len();
+ patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
+ + ";sel".len();
let mut change = Change::new();
change.change_file(file_id, Some(Arc::from(text)));
host.apply_change(change);
@@ -134,6 +175,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
insert_use: InsertUseConfig {
@@ -145,6 +187,7 @@ fn integrated_completion_benchmark() {
},
snippets: Vec::new(),
prefer_no_std: false,
+ prefer_prelude: true,
limit: None,
};
let position =
@@ -173,6 +216,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
insert_use: InsertUseConfig {
@@ -184,6 +228,7 @@ fn integrated_completion_benchmark() {
},
snippets: Vec::new(),
prefer_no_std: false,
+ prefer_prelude: true,
limit: None,
};
let position =
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
index 6c62577f6..29bc0b80d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -9,7 +9,7 @@
//! The `cli` submodule implements some batch-processing analysis, primarily as
//! a debugging aid.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod cli;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 23074493a..dae560c5d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -1,7 +1,7 @@
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
use std::{
iter::once,
- path,
+ mem, path,
sync::atomic::{AtomicU32, Ordering},
};
@@ -301,9 +301,11 @@ fn completion_item(
if config.completion_label_details_support() {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
- detail: None,
+ detail: item.label_detail.as_ref().map(ToString::to_string),
description: lsp_item.detail.clone(),
});
+ } else if let Some(label_detail) = item.label_detail {
+ lsp_item.label.push_str(label_detail.as_str());
}
set_score(&mut lsp_item, max_relevance, item.relevance);
@@ -443,17 +445,19 @@ pub(crate) fn inlay_hint(
file_id: FileId,
inlay_hint: InlayHint,
) -> Cancellable<lsp_types::InlayHint> {
+ let is_visual_studio_code = snap.config.is_visual_studio_code();
let needs_resolve = inlay_hint.needs_resolve;
let (label, tooltip, mut something_to_resolve) =
inlay_hint_label(snap, fields_to_resolve, needs_resolve, inlay_hint.label)?;
- let text_edits = if needs_resolve && fields_to_resolve.resolve_text_edits {
- something_to_resolve |= inlay_hint.text_edit.is_some();
- None
- } else {
- inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it))
- };
+ let text_edits =
+ if !is_visual_studio_code && needs_resolve && fields_to_resolve.resolve_text_edits {
+ something_to_resolve |= inlay_hint.text_edit.is_some();
+ None
+ } else {
+ inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it))
+ };
let data = if needs_resolve && something_to_resolve {
- Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.0 }).unwrap())
+ Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index() }).unwrap())
} else {
None
};
@@ -1121,13 +1125,20 @@ pub(crate) fn snippet_text_document_ops(
pub(crate) fn snippet_workspace_edit(
snap: &GlobalStateSnapshot,
- source_change: SourceChange,
+ mut source_change: SourceChange,
) -> Cancellable<lsp_ext::SnippetWorkspaceEdit> {
let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
- for op in source_change.file_system_edits {
- let ops = snippet_text_document_ops(snap, op)?;
- document_changes.extend_from_slice(&ops);
+ for op in &mut source_change.file_system_edits {
+ if let FileSystemEdit::CreateFile { dst, initial_contents } = op {
+ // replace with a placeholder to avoid cloning the edit
+ let op = FileSystemEdit::CreateFile {
+ dst: dst.clone(),
+ initial_contents: mem::take(initial_contents),
+ };
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
}
for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
let edit = snippet_text_document_edit(
@@ -1139,6 +1150,12 @@ pub(crate) fn snippet_workspace_edit(
)?;
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
}
+ for op in source_change.file_system_edits {
+ if !matches!(op, FileSystemEdit::CreateFile { .. }) {
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
+ }
let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
changes: None,
document_changes: Some(document_changes),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index 3fae08b82..7ab528f49 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -22,6 +22,7 @@ use ide_db::{
base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros},
FxHashMap,
};
+use itertools::Itertools;
use load_cargo::{load_proc_macro, ProjectFolders};
use proc_macro_api::ProcMacroServer;
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
@@ -227,16 +228,12 @@ impl GlobalState {
let mut i = 0;
while i < workspaces.len() {
if let Ok(w) = &workspaces[i] {
- let dupes: Vec<_> = workspaces
+ let dupes: Vec<_> = workspaces[i + 1..]
.iter()
- .enumerate()
- .skip(i + 1)
- .filter_map(|(i, it)| {
- it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i)
- })
+ .positions(|it| it.as_ref().is_ok_and(|ws| ws.eq_ignore_build_data(w)))
.collect();
dupes.into_iter().rev().for_each(|d| {
- _ = workspaces.remove(d);
+ _ = workspaces.remove(d + i + 1);
});
}
i += 1;
@@ -380,7 +377,6 @@ impl GlobalState {
ws
})
.collect::<Vec<_>>();
-
// Workspaces are the same, but we've updated build data.
self.workspaces = Arc::new(workspaces);
} else {
@@ -441,28 +437,22 @@ impl GlobalState {
if self.config.expand_proc_macros() {
tracing::info!("Spawning proc-macro servers");
- // FIXME: use `Arc::from_iter` when it becomes available
- self.proc_macro_clients = Arc::from(
- self.workspaces
- .iter()
- .map(|ws| {
- let path = match self.config.proc_macro_srv() {
- Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
- };
-
- tracing::info!("Using proc-macro server at {path}");
- ProcMacroServer::spawn(path.clone()).map_err(|err| {
- tracing::error!(
- "Failed to run proc-macro server from path {path}, error: {err:?}",
- );
- anyhow::format_err!(
- "Failed to run proc-macro server from path {path}, error: {err:?}",
- )
- })
- })
- .collect::<Vec<_>>(),
- )
+ self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
+ let path = match self.config.proc_macro_srv() {
+ Some(path) => path,
+ None => ws.find_sysroot_proc_macro_srv()?,
+ };
+
+ tracing::info!("Using proc-macro server at {path}");
+ ProcMacroServer::spawn(path.clone()).map_err(|err| {
+ tracing::error!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
+ );
+ anyhow::format_err!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
+ )
+ })
+ }))
};
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
index d59914298..ec8e5c6dd 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -8,7 +8,7 @@
//! specific JSON shapes here -- there's little value in such tests, as we can't
//! be sure without a real client anyway.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[cfg(not(feature = "in-rust-tree"))]
mod sourcegen;
@@ -984,6 +984,11 @@ fn main() {}
//- /src/old_file.rs
//- /src/old_folder/mod.rs
+mod nested;
+
+//- /src/old_folder/nested.rs
+struct foo;
+use crate::old_folder::nested::foo as bar;
//- /src/from_mod/mod.rs
@@ -1080,6 +1085,27 @@ fn main() {}
"newText": "new_folder"
}
]
+ },
+ {
+ "textDocument": {
+ "uri": format!("file://{}", tmp_dir_path.join("src").join("old_folder").join("nested.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
+ "version": null
+ },
+ "edits": [
+ {
+ "range": {
+ "start": {
+ "line": 1,
+ "character": 11
+ },
+ "end": {
+ "line": 1,
+ "character": 21
+ }
+ },
+ "newText": "new_folder"
+ }
+ ]
}
]
}),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index e49b5768f..106b99cb9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -150,6 +150,7 @@ impl Project<'_> {
..Default::default()
},
roots,
+ false,
);
config.update(self.config).expect("invalid config");
config.rediscover_workspaces();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 8b5c92c66..dba336ea7 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -157,7 +157,6 @@ Apache-2.0 OR MIT
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
-BlueOak-1.0.0 OR MIT OR Apache-2.0
CC0-1.0
ISC
MIT
@@ -251,6 +250,7 @@ fn check_dbg(path: &Path, text: &str) {
// We have .dbg postfix
"ide-completion/src/completions/postfix.rs",
"ide-completion/src/completions/keyword.rs",
+ "ide-completion/src/tests/expression.rs",
"ide-completion/src/tests/proc_macros.rs",
// The documentation in string literals may contain anything for its own purposes
"ide-completion/src/lib.rs",
@@ -300,6 +300,8 @@ fn check_test_attrs(path: &Path, text: &str) {
// This file.
"slow-tests/tidy.rs",
"test-utils/src/fixture.rs",
+ // Generated code from lints contains doc tests in string literals.
+ "ide-db/src/generated/lints.rs",
];
if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
panic!(
@@ -315,7 +317,7 @@ fn check_trailing_ws(path: &Path, text: &str) {
return;
}
for (line_number, line) in text.lines().enumerate() {
- if line.chars().last().map(char::is_whitespace) == Some(true) {
+ if line.chars().last().is_some_and(char::is_whitespace) {
panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
}
}
diff --git a/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml b/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml
new file mode 100644
index 000000000..1b3b6ec73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "rustc-dependencies"
+version = "0.0.0"
+description = "TBD"
+
+rust-version.workspace = true
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+ra-ap-rustc_lexer = { version = "0.21.0" }
+ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false }
+ra-ap-rustc_index = { version = "0.21.0", default-features = false }
+ra-ap-rustc_abi = { version = "0.21.0", default-features = false }
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs b/src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs
new file mode 100644
index 000000000..13fcbc491
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs
@@ -0,0 +1,48 @@
+//! A wrapper around rustc internal crates, which enables switching between compiler provided
+//! ones and stable ones published in crates.io
+
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_lexer;
+
+pub mod lexer {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_lexer::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_lexer::*;
+}
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_parse_format;
+
+pub mod parse_format {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_parse_format::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_parse_format::*;
+}
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_abi;
+
+pub mod abi {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_abi::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_abi::*;
+}
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_index;
+
+pub mod index {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_index::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_index::*;
+}
diff --git a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
index fb2b9ebef..0514af8e7 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
@@ -12,4 +12,4 @@ rust-version.workspace = true
doctest = false
[dependencies]
-xshell = "0.2.2"
+xshell.workspace = true
diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
index 1514c6c7d..18fa77fd9 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
@@ -6,7 +6,7 @@
//!
//! This crate contains utilities to make this kind of source-gen easy.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
fmt, fs, mem,
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
index 536f000a4..c914ae214 100644
--- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -12,15 +12,16 @@ rust-version.workspace = true
doctest = false
[dependencies]
-libc = "0.2.135"
backtrace = { version = "0.3.67", optional = true }
always-assert = { version = "0.1.2", features = ["log"] }
jod-thread = "0.1.2"
+libc.workspace = true
crossbeam-channel = "0.5.5"
+itertools.workspace = true
# Think twice before adding anything here
[target.'cfg(windows)'.dependencies]
-miow = "0.5.0"
+miow = "0.6.0"
winapi = { version = "0.3.9", features = ["winerror"] }
[features]
diff --git a/src/tools/rust-analyzer/crates/stdx/src/anymap.rs b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
new file mode 100644
index 000000000..9990f8b08
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
@@ -0,0 +1,379 @@
+//! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer.
+//! Copyright © 2014–2022 Chris Morgan.
+//! COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING
+//! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
+//!
+//! This implementation provides a safe and convenient store for one value of each type.
+//!
+//! Your starting point is [`Map`]. It has an example.
+//!
+//! # Cargo features
+//!
+//! This implementation has two independent features, each of which provides an implementation providing
+//! types `Map`, `AnyMap`, `OccupiedEntry`, `VacantEntry`, `Entry` and `RawMap`:
+//!
+//! - **std** (default, *enabled* in this build):
+//! an implementation using `std::collections::hash_map`, placed in the crate root
+//! (e.g. `anymap::AnyMap`).
+
+#![warn(missing_docs, unused_results)]
+
+use core::hash::Hasher;
+
+/// A hasher designed to eke a little more speed out, given `TypeId`’s known characteristics.
+///
+/// Specifically, this is a no-op hasher that expects to be fed a u64’s worth of
+/// randomly-distributed bits. It works well for `TypeId` (eliminating start-up time, so that my
+/// get_missing benchmark is ~30ns rather than ~900ns, and being a good deal faster after that, so
+/// that my insert_and_get_on_260_types benchmark is ~12μs instead of ~21.5μs), but will
+/// panic in debug mode and always emit zeros in release mode for any other sorts of inputs, so
+/// yeah, don’t use it! 😀
+#[derive(Default)]
+pub struct TypeIdHasher {
+ value: u64,
+}
+
+impl Hasher for TypeIdHasher {
+ #[inline]
+ fn write(&mut self, bytes: &[u8]) {
+ // This expects to receive exactly one 64-bit value, and there’s no realistic chance of
+ // that changing, but I don’t want to depend on something that isn’t expressly part of the
+ // contract for safety. But I’m OK with release builds putting everything in one bucket
+ // if it *did* change (and debug builds panicking).
+ debug_assert_eq!(bytes.len(), 8);
+ let _ = bytes.try_into().map(|array| self.value = u64::from_ne_bytes(array));
+ }
+
+ #[inline]
+ fn finish(&self) -> u64 {
+ self.value
+ }
+}
+
+use core::any::{Any, TypeId};
+use core::hash::BuildHasherDefault;
+use core::marker::PhantomData;
+
+use ::std::collections::hash_map::{self, HashMap};
+
+/// Raw access to the underlying `HashMap`.
+///
+/// This alias is provided for convenience because of the ugly third generic parameter.
+pub type RawMap<A> = HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
+
+/// A collection containing zero or one values for any given type and allowing convenient,
+/// type-safe access to those values.
+///
+/// The type parameter `A` allows you to use a different value type; normally you will want
+/// it to be `core::any::Any` (also known as `std::any::Any`), but there are other choices:
+///
+/// - If you want the entire map to be cloneable, use `CloneAny` instead of `Any`; with
+/// that, you can only add types that implement `Clone` to the map.
+/// - You can add on `+ Send` or `+ Send + Sync` (e.g. `Map<dyn Any + Send>`) to add those
+/// auto traits.
+///
+/// Cumulatively, there are thus six forms of map:
+///
+/// - <code>[Map]&lt;dyn [core::any::Any]&gt;</code>,
+/// also spelled [`AnyMap`] for convenience.
+/// - <code>[Map]&lt;dyn [core::any::Any] + Send&gt;</code>
+/// - <code>[Map]&lt;dyn [core::any::Any] + Send + Sync&gt;</code>
+/// - <code>[Map]&lt;dyn [CloneAny]&gt;</code>
+/// - <code>[Map]&lt;dyn [CloneAny] + Send&gt;</code>
+/// - <code>[Map]&lt;dyn [CloneAny] + Send + Sync&gt;</code>
+///
+/// ## Example
+///
+/// (Here using the [`AnyMap`] convenience alias; the first line could use
+/// <code>[anymap::Map][Map]::&lt;[core::any::Any]&gt;::new()</code> instead if desired.)
+///
+/// ```rust
+#[doc = "let mut data = anymap::AnyMap::new();"]
+/// assert_eq!(data.get(), None::<&i32>);
+/// ```
+///
+/// Values containing non-static references are not permitted.
+#[derive(Debug)]
+pub struct Map<A: ?Sized + Downcast = dyn Any> {
+ raw: RawMap<A>,
+}
+
+/// The most common type of `Map`: just using `Any`; <code>[Map]&lt;dyn [Any]&gt;</code>.
+///
+/// Why is this a separate type alias rather than a default value for `Map<A>`?
+/// `Map::new()` doesn’t seem to be happy to infer that it should go with the default
+/// value. It’s a bit sad, really. Ah well, I guess this approach will do.
+pub type AnyMap = Map<dyn Any>;
+impl<A: ?Sized + Downcast> Default for Map<A> {
+ #[inline]
+ fn default() -> Map<A> {
+ Map::new()
+ }
+}
+
+impl<A: ?Sized + Downcast> Map<A> {
+ /// Create an empty collection.
+ #[inline]
+ pub fn new() -> Map<A> {
+ Map { raw: RawMap::with_hasher(Default::default()) }
+ }
+
+ /// Returns a reference to the value stored in the collection for the type `T`,
+ /// if it exists.
+ #[inline]
+ pub fn get<T: IntoBox<A>>(&self) -> Option<&T> {
+ self.raw.get(&TypeId::of::<T>()).map(|any| unsafe { any.downcast_ref_unchecked::<T>() })
+ }
+
+ /// Gets the entry for the given type in the collection for in-place manipulation
+ #[inline]
+ pub fn entry<T: IntoBox<A>>(&mut self) -> Entry<'_, A, T> {
+ match self.raw.entry(TypeId::of::<T>()) {
+ hash_map::Entry::Occupied(e) => {
+ Entry::Occupied(OccupiedEntry { inner: e, type_: PhantomData })
+ }
+ hash_map::Entry::Vacant(e) => {
+ Entry::Vacant(VacantEntry { inner: e, type_: PhantomData })
+ }
+ }
+ }
+}
+
+/// A view into a single occupied location in an `Map`.
+pub struct OccupiedEntry<'a, A: ?Sized + Downcast, V: 'a> {
+ inner: hash_map::OccupiedEntry<'a, TypeId, Box<A>>,
+ type_: PhantomData<V>,
+}
+
+/// A view into a single empty location in an `Map`.
+pub struct VacantEntry<'a, A: ?Sized + Downcast, V: 'a> {
+ inner: hash_map::VacantEntry<'a, TypeId, Box<A>>,
+ type_: PhantomData<V>,
+}
+
+/// A view into a single location in an `Map`, which may be vacant or occupied.
+pub enum Entry<'a, A: ?Sized + Downcast, V> {
+ /// An occupied Entry
+ Occupied(OccupiedEntry<'a, A, V>),
+ /// A vacant Entry
+ Vacant(VacantEntry<'a, A, V>),
+}
+
+impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'a, A, V> {
+ /// Ensures a value is in the entry by inserting the result of the default function if
+ /// empty, and returns a mutable reference to the value in the entry.
+ #[inline]
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Entry::Occupied(inner) => inner.into_mut(),
+ Entry::Vacant(inner) => inner.insert(default()),
+ }
+ }
+}
+
+impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> OccupiedEntry<'a, A, V> {
+ /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+ /// with a lifetime bound to the collection itself
+ #[inline]
+ pub fn into_mut(self) -> &'a mut V {
+ unsafe { self.inner.into_mut().downcast_mut_unchecked() }
+ }
+}
+
+impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'a, A, V> {
+ /// Sets the value of the entry with the VacantEntry's key,
+ /// and returns a mutable reference to it
+ #[inline]
+ pub fn insert(self, value: V) -> &'a mut V {
+ unsafe { self.inner.insert(value.into_box()).downcast_mut_unchecked() }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[derive(Clone, Debug, PartialEq)]
+ struct A(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct B(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct C(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct D(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct E(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct F(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct J(i32);
+
+ #[test]
+ fn test_varieties() {
+ fn assert_send<T: Send>() {}
+ fn assert_sync<T: Sync>() {}
+ fn assert_debug<T: ::core::fmt::Debug>() {}
+ assert_send::<Map<dyn Any + Send>>();
+ assert_send::<Map<dyn Any + Send + Sync>>();
+ assert_sync::<Map<dyn Any + Send + Sync>>();
+ assert_debug::<Map<dyn Any>>();
+ assert_debug::<Map<dyn Any + Send>>();
+ assert_debug::<Map<dyn Any + Send + Sync>>();
+ assert_send::<Map<dyn CloneAny + Send>>();
+ assert_send::<Map<dyn CloneAny + Send + Sync>>();
+ assert_sync::<Map<dyn CloneAny + Send + Sync>>();
+ assert_debug::<Map<dyn CloneAny>>();
+ assert_debug::<Map<dyn CloneAny + Send>>();
+ assert_debug::<Map<dyn CloneAny + Send + Sync>>();
+ }
+
+ #[test]
+ fn type_id_hasher() {
+ use core::any::TypeId;
+ use core::hash::Hash;
+ fn verify_hashing_with(type_id: TypeId) {
+ let mut hasher = TypeIdHasher::default();
+ type_id.hash(&mut hasher);
+ // SAFETY: u64 is valid for all bit patterns.
+ let _ = hasher.finish();
+ }
+ // Pick a variety of types, just to demonstrate it’s all sane. Normal, zero-sized, unsized, &c.
+ verify_hashing_with(TypeId::of::<usize>());
+ verify_hashing_with(TypeId::of::<()>());
+ verify_hashing_with(TypeId::of::<str>());
+ verify_hashing_with(TypeId::of::<&str>());
+ verify_hashing_with(TypeId::of::<Vec<u8>>());
+ }
+}
+
+// impl some traits for dyn Any
+use core::fmt;
+
+#[doc(hidden)]
+pub trait CloneToAny {
+ /// Clone `self` into a new `Box<dyn CloneAny>` object.
+ fn clone_to_any(&self) -> Box<dyn CloneAny>;
+}
+
+impl<T: Any + Clone> CloneToAny for T {
+ #[inline]
+ fn clone_to_any(&self) -> Box<dyn CloneAny> {
+ Box::new(self.clone())
+ }
+}
+
+macro_rules! impl_clone {
+ ($t:ty) => {
+ impl Clone for Box<$t> {
+ #[inline]
+ fn clone(&self) -> Box<$t> {
+ // SAFETY: this dance is to reapply any Send/Sync marker. I’m not happy about this
+ // approach, given that I used to do it in safe code, but then came a dodgy
+ // future-compatibility warning where_clauses_object_safety, which is spurious for
+ // auto traits but still super annoying (future-compatibility lints seem to mean
+ // your bin crate needs a corresponding allow!). Although I explained my plight¹
+ // and it was all explained and agreed upon, no action has been taken. So I finally
+ // caved and worked around it by doing it this way, which matches what’s done for
+ // core::any², so it’s probably not *too* bad.
+ //
+ // ¹ https://github.com/rust-lang/rust/issues/51443#issuecomment-421988013
+ // ² https://github.com/rust-lang/rust/blob/e7825f2b690c9a0d21b6f6d84c404bb53b151b38/library/alloc/src/boxed.rs#L1613-L1616
+ let clone: Box<dyn CloneAny> = (**self).clone_to_any();
+ let raw: *mut dyn CloneAny = Box::into_raw(clone);
+ unsafe { Box::from_raw(raw as *mut $t) }
+ }
+ }
+
+ impl fmt::Debug for $t {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad(stringify!($t))
+ }
+ }
+ };
+}
+
+/// Methods for downcasting from an `Any`-like trait object.
+///
+/// This should only be implemented on trait objects for subtraits of `Any`, though you can
+/// implement it for other types and it’ll work fine, so long as your implementation is correct.
+pub trait Downcast {
+ /// Gets the `TypeId` of `self`.
+ fn type_id(&self) -> TypeId;
+
+ // Note the bound through these downcast methods is 'static, rather than the inexpressible
+ // concept of Self-but-as-a-trait (where Self is `dyn Trait`). This is sufficient, exceeding
+ // TypeId’s requirements. Sure, you *can* do CloneAny.downcast_unchecked::<NotClone>() and the
+ // type system won’t protect you, but that doesn’t introduce any unsafety: the method is
+ // already unsafe because you can specify the wrong type, and if this were exposing safe
+ // downcasting, CloneAny.downcast::<NotClone>() would just return an error, which is just as
+ // correct.
+ //
+ // Now in theory we could also add T: ?Sized, but that doesn’t play nicely with the common
+ // implementation, so I’m doing without it.
+
+ /// Downcast from `&Any` to `&T`, without checking the type matches.
+ ///
+ /// # Safety
+ ///
+ /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+ unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T;
+
+ /// Downcast from `&mut Any` to `&mut T`, without checking the type matches.
+ ///
+ /// # Safety
+ ///
+ /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+ unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T;
+}
+
+/// A trait for the conversion of an object into a boxed trait object.
+pub trait IntoBox<A: ?Sized + Downcast>: Any {
+ /// Convert self into the appropriate boxed form.
+ fn into_box(self) -> Box<A>;
+}
+
+macro_rules! implement {
+ ($any_trait:ident $(+ $auto_traits:ident)*) => {
+ impl Downcast for dyn $any_trait $(+ $auto_traits)* {
+ #[inline]
+ fn type_id(&self) -> TypeId {
+ self.type_id()
+ }
+
+ #[inline]
+ unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T {
+ &*(self as *const Self as *const T)
+ }
+
+ #[inline]
+ unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T {
+ &mut *(self as *mut Self as *mut T)
+ }
+ }
+
+ impl<T: $any_trait $(+ $auto_traits)*> IntoBox<dyn $any_trait $(+ $auto_traits)*> for T {
+ #[inline]
+ fn into_box(self) -> Box<dyn $any_trait $(+ $auto_traits)*> {
+ Box::new(self)
+ }
+ }
+ }
+}
+
+implement!(Any);
+implement!(Any + Send);
+implement!(Any + Send + Sync);
+
+/// [`Any`], but with cloning.
+///
+/// Every type with no non-`'static` references that implements `Clone` implements `CloneAny`.
+/// See [`core::any`] for more details on `Any` in general.
+pub trait CloneAny: Any + CloneToAny {}
+impl<T: Any + Clone> CloneAny for T {}
+implement!(CloneAny);
+implement!(CloneAny + Send);
+implement!(CloneAny + Send + Sync);
+impl_clone!(dyn CloneAny);
+impl_clone!(dyn CloneAny + Send);
+impl_clone!(dyn CloneAny + Send + Sync);
diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
index 24990d6a0..71e269f74 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
@@ -1,6 +1,6 @@
//! Missing batteries for standard libraries.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::io as sio;
use std::process::Command;
@@ -12,8 +12,10 @@ pub mod panic_context;
pub mod non_empty_vec;
pub mod rand;
pub mod thread;
+pub mod anymap;
pub use always_assert::{always, never};
+pub use itertools;
#[inline(always)]
pub fn is_ci() -> bool {
@@ -39,6 +41,24 @@ Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`.
);
}
+pub trait TupleExt {
+ type Head;
+ type Tail;
+ fn head(self) -> Self::Head;
+ fn tail(self) -> Self::Tail;
+}
+
+impl<T, U> TupleExt for (T, U) {
+ type Head = T;
+ type Tail = U;
+ fn head(self) -> Self::Head {
+ self.0
+ }
+ fn tail(self) -> Self::Tail {
+ self.1
+ }
+}
+
pub fn to_lower_snake_case(s: &str) -> String {
to_snake_case(s, char::to_lowercase)
}
@@ -89,6 +109,57 @@ where
words.join("_")
}
+// Taken from rustc.
+pub fn to_camel_case(ident: &str) -> String {
+ ident
+ .trim_matches('_')
+ .split('_')
+ .filter(|component| !component.is_empty())
+ .map(|component| {
+ let mut camel_cased_component = String::with_capacity(component.len());
+
+ let mut new_word = true;
+ let mut prev_is_lower_case = true;
+
+ for c in component.chars() {
+ // Preserve the case if an uppercase letter follows a lowercase letter, so that
+ // `camelCase` is converted to `CamelCase`.
+ if prev_is_lower_case && c.is_uppercase() {
+ new_word = true;
+ }
+
+ if new_word {
+ camel_cased_component.extend(c.to_uppercase());
+ } else {
+ camel_cased_component.extend(c.to_lowercase());
+ }
+
+ prev_is_lower_case = c.is_lowercase();
+ new_word = false;
+ }
+
+ camel_cased_component
+ })
+ .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
+ // separate two components with an underscore if their boundary cannot
+ // be distinguished using an uppercase/lowercase case distinction
+ let join = prev
+ .and_then(|prev| {
+ let f = next.chars().next()?;
+ let l = prev.chars().last()?;
+ Some(!char_has_case(l) && !char_has_case(f))
+ })
+ .unwrap_or(false);
+ (acc + if join { "_" } else { "" } + &next, Some(next))
+ })
+ .0
+}
+
+// Taken from rustc.
+pub fn char_has_case(c: char) -> bool {
+ c.is_lowercase() || c.is_uppercase()
+}
+
pub fn replace(buf: &mut String, from: char, to: &str) {
if !buf.contains(from) {
return;
diff --git a/src/tools/rust-analyzer/crates/stdx/src/macros.rs b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
index 1a9982fa8..d71e418c8 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/macros.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
@@ -15,7 +15,12 @@ macro_rules! eprintln {
macro_rules! format_to {
($buf:expr) => ();
($buf:expr, $lit:literal $($arg:tt)*) => {
- { use ::std::fmt::Write as _; let _ = ::std::write!($buf, $lit $($arg)*); }
+ {
+ use ::std::fmt::Write as _;
+ // We can't do ::std::fmt::Write::write_fmt($buf, format_args!($lit $($arg)*))
+ // unfortunately, as that loses out on autoref behavior.
+ _ = $buf.write_fmt(format_args!($lit $($arg)*))
+ }
};
}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs
index e5aa34365..bca0cbc36 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/process.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs
@@ -23,7 +23,7 @@ pub fn streaming_output(
let idx = if eof {
data.len()
} else {
- match data.iter().rposition(|b| *b == b'\n') {
+ match data.iter().rposition(|&b| b == b'\n') {
Some(i) => i + 1,
None => return,
}
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index 5ee0c4792..7a7c0d267 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -14,16 +14,16 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-either = "1.7.0"
-itertools = "0.10.5"
-rowan = "0.15.11"
+either.workspace = true
+itertools.workspace = true
+rowan = "0.15.15"
rustc-hash = "1.1.0"
once_cell = "1.17.0"
-indexmap = "2.0.0"
+indexmap.workspace = true
smol_str.workspace = true
triomphe.workspace = true
-rustc_lexer.workspace = true
+rustc-dependencies.workspace = true
parser.workspace = true
profile.workspace = true
@@ -31,7 +31,7 @@ stdx.workspace = true
text-edit.workspace = true
[dev-dependencies]
-rayon = "1.6.1"
+rayon.workspace = true
expect-test = "1.4.0"
proc-macro2 = "1.0.47"
quote = "1.0.20"
@@ -41,4 +41,4 @@ test-utils.workspace = true
sourcegen.workspace = true
[features]
-in-rust-tree = []
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 3603560d3..c3010d090 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -36,7 +36,7 @@ PathSegment =
'::'? NameRef
| NameRef GenericArgList?
| NameRef ParamList RetType?
-| '<' PathType ('as' PathType)? '>'
+| '<' Type ('as' PathType)? '>'
GenericArgList =
'::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
index a150d9e6c..37d821204 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -3,18 +3,17 @@
use std::iter::{empty, successors};
use parser::{SyntaxKind, T};
-use rowan::SyntaxElement;
use crate::{
algo::{self, neighbor},
ast::{self, edit::IndentLevel, make, HasGenericParams},
ted::{self, Position},
- AstNode, AstToken, Direction,
+ AstNode, AstToken, Direction, SyntaxElement,
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode, SyntaxToken,
};
-use super::HasName;
+use super::{HasArgList, HasName};
pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
@@ -224,7 +223,7 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
let after_attrs_and_comments = node
.children_with_tokens()
.find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
- .map_or(Position::first_child_of(node), |it| Position::before(it));
+ .map_or(Position::first_child_of(node), Position::before);
ted::insert_all(
after_attrs_and_comments,
@@ -362,6 +361,24 @@ impl ast::PathSegment {
}
}
+impl ast::MethodCallExpr {
+ pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+ if self.generic_arg_list().is_none() {
+ let generic_arg_list = make::turbofish_generic_arg_list(empty()).clone_for_update();
+
+ if let Some(arg_list) = self.arg_list() {
+ ted::insert_raw(
+ ted::Position::before(arg_list.syntax()),
+ generic_arg_list.syntax(),
+ );
+ } else {
+ ted::append_child(self.syntax(), generic_arg_list.syntax());
+ }
+ }
+ self.generic_arg_list().unwrap()
+ }
+}
+
impl Removable for ast::UseTree {
fn remove(&self) {
for dir in [Direction::Next, Direction::Prev] {
@@ -433,7 +450,9 @@ impl ast::UseTree {
if &path == prefix && self.use_tree_list().is_none() {
if self.star_token().is_some() {
// path$0::* -> *
- self.coloncolon_token().map(ted::remove);
+ if let Some(a) = self.coloncolon_token() {
+ ted::remove(a)
+ }
ted::remove(prefix.syntax());
} else {
// path$0 -> self
@@ -460,7 +479,9 @@ impl ast::UseTree {
for p in successors(parent.parent_path(), |it| it.parent_path()) {
p.segment()?;
}
- prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
+ if let Some(a) = prefix.parent_path().and_then(|p| p.coloncolon_token()) {
+ ted::remove(a)
+ }
ted::remove(prefix.syntax());
Some(())
}
@@ -555,7 +576,7 @@ impl ast::AssocItemList {
None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
},
};
- let elements: Vec<SyntaxElement<_>> = vec![
+ let elements: Vec<SyntaxElement> = vec![
make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
item.syntax().clone().into(),
];
@@ -625,6 +646,50 @@ impl ast::MatchArmList {
}
}
+impl ast::LetStmt {
+ pub fn set_ty(&self, ty: Option<ast::Type>) {
+ match ty {
+ None => {
+ if let Some(colon_token) = self.colon_token() {
+ ted::remove(colon_token);
+ }
+
+ if let Some(existing_ty) = self.ty() {
+ if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() {
+ if sibling.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(sibling);
+ }
+ }
+
+ ted::remove(existing_ty.syntax());
+ }
+
+ // Remove any trailing ws
+ if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+ {
+ last.detach();
+ }
+ }
+ Some(new_ty) => {
+ if self.colon_token().is_none() {
+ ted::insert_raw(
+ Position::after(
+ self.pat().expect("let stmt should have a pattern").syntax(),
+ ),
+ make::token(T![:]),
+ );
+ }
+
+ if let Some(old_ty) = self.ty() {
+ ted::replace(old_ty.syntax(), new_ty.syntax());
+ } else {
+ ted::insert(Position::after(self.colon_token().unwrap()), new_ty.syntax());
+ }
+ }
+ }
+ }
+}
+
impl ast::RecordExprFieldList {
pub fn add_field(&self, field: ast::RecordExprField) {
let is_multiline = self.syntax().text().contains_char('\n');
@@ -749,7 +814,7 @@ impl ast::VariantList {
None => (IndentLevel::single(), Position::last_child_of(self.syntax())),
},
};
- let elements: Vec<SyntaxElement<_>> = vec![
+ let elements: Vec<SyntaxElement> = vec![
make::tokens::whitespace(&format!("{}{indent}", "\n")).into(),
variant.syntax().clone().into(),
ast::make::token(T![,]).into(),
@@ -784,6 +849,53 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
Some(())
}
+impl ast::IdentPat {
+ pub fn set_pat(&self, pat: Option<ast::Pat>) {
+ match pat {
+ None => {
+ if let Some(at_token) = self.at_token() {
+ // Remove `@ Pat`
+ let start = at_token.clone().into();
+ let end = self
+ .pat()
+ .map(|it| it.syntax().clone().into())
+ .unwrap_or_else(|| at_token.into());
+
+ ted::remove_all(start..=end);
+
+ // Remove any trailing ws
+ if let Some(last) =
+ self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+ {
+ last.detach();
+ }
+ }
+ }
+ Some(pat) => {
+ if let Some(old_pat) = self.pat() {
+ // Replace existing pattern
+ ted::replace(old_pat.syntax(), pat.syntax())
+ } else if let Some(at_token) = self.at_token() {
+ // Have an `@` token but not a pattern yet
+ ted::insert(ted::Position::after(at_token), pat.syntax());
+ } else {
+ // Don't have an `@`, should have a name
+ let name = self.name().unwrap();
+
+ ted::insert_all(
+ ted::Position::after(name.syntax()),
+ vec![
+ make::token(T![@]).into(),
+ make::tokens::single_space().into(),
+ pat.syntax().clone().into(),
+ ],
+ )
+ }
+ }
+ }
+ }
+}
+
pub trait HasVisibilityEdit: ast::HasVisibility {
fn set_visibility(&self, visbility: ast::Visibility) {
match self.visibility() {
@@ -886,6 +998,65 @@ mod tests {
}
#[test]
+ fn test_ident_pat_set_pat() {
+ #[track_caller]
+ fn check(before: &str, expected: &str, pat: Option<ast::Pat>) {
+ let pat = pat.map(|it| it.clone_for_update());
+
+ let ident_pat = ast_mut_from_text::<ast::IdentPat>(&format!("fn f() {{ {before} }}"));
+ ident_pat.set_pat(pat);
+
+ let after = ast_mut_from_text::<ast::IdentPat>(&format!("fn f() {{ {expected} }}"));
+ assert_eq!(ident_pat.to_string(), after.to_string());
+ }
+
+ // replacing
+ check("let a @ _;", "let a @ ();", Some(make::tuple_pat([]).into()));
+
+ // note: no trailing semicolon is added for the below tests since it
+ // seems to be picked up by the ident pat during error recovery?
+
+ // adding
+ check("let a ", "let a @ ()", Some(make::tuple_pat([]).into()));
+ check("let a @ ", "let a @ ()", Some(make::tuple_pat([]).into()));
+
+ // removing
+ check("let a @ ()", "let a", None);
+ check("let a @ ", "let a", None);
+ }
+
+ #[test]
+ fn test_let_stmt_set_ty() {
+ #[track_caller]
+ fn check(before: &str, expected: &str, ty: Option<ast::Type>) {
+ let ty = ty.map(|it| it.clone_for_update());
+
+ let let_stmt = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {before} }}"));
+ let_stmt.set_ty(ty);
+
+ let after = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {expected} }}"));
+ assert_eq!(let_stmt.to_string(), after.to_string(), "{let_stmt:#?}\n!=\n{after:#?}");
+ }
+
+ // adding
+ check("let a;", "let a: ();", Some(make::ty_tuple([])));
+ // no semicolon due to it being eaten during error recovery
+ check("let a:", "let a: ()", Some(make::ty_tuple([])));
+
+ // replacing
+ check("let a: u8;", "let a: ();", Some(make::ty_tuple([])));
+ check("let a: u8 = 3;", "let a: () = 3;", Some(make::ty_tuple([])));
+ check("let a: = 3;", "let a: () = 3;", Some(make::ty_tuple([])));
+
+ // removing
+ check("let a: u8;", "let a;", None);
+ check("let a:;", "let a;", None);
+
+ check("let a: u8 = 3;", "let a = 3;", None);
+ check("let a: = 3;", "let a = 3;", None);
+ }
+
+ #[test]
fn add_variant_to_empty_enum() {
let variant = make::variant(make::name("Bar"), None).clone_for_update();
@@ -976,7 +1147,9 @@ enum Foo {
fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
let enum_ = ast_mut_from_text::<ast::Enum>(before);
- enum_.variant_list().map(|it| it.add_variant(variant));
+ if let Some(it) = enum_.variant_list() {
+ it.add_variant(variant)
+ }
let after = enum_.to_string();
assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(after.trim()));
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index 7ba0d4dc6..6c86e5910 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -59,8 +59,9 @@ impl PathSegment {
pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
- pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
}
@@ -1577,14 +1578,6 @@ impl RecordPatField {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum GenericArg {
- TypeArg(TypeArg),
- AssocTypeArg(AssocTypeArg),
- LifetimeArg(LifetimeArg),
- ConstArg(ConstArg),
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Type {
ArrayType(ArrayType),
DynTraitType(DynTraitType),
@@ -1603,6 +1596,14 @@ pub enum Type {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ TypeArg(TypeArg),
+ AssocTypeArg(AssocTypeArg),
+ LifetimeArg(LifetimeArg),
+ ConstArg(ConstArg),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Expr {
ArrayExpr(ArrayExpr),
AsmExpr(AsmExpr),
@@ -3319,41 +3320,6 @@ impl AstNode for RecordPatField {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
-impl From<TypeArg> for GenericArg {
- fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
-}
-impl From<AssocTypeArg> for GenericArg {
- fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
-}
-impl From<LifetimeArg> for GenericArg {
- fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
-}
-impl From<ConstArg> for GenericArg {
- fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
-}
-impl AstNode for GenericArg {
- fn can_cast(kind: SyntaxKind) -> bool {
- matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG)
- }
- fn cast(syntax: SyntaxNode) -> Option<Self> {
- let res = match syntax.kind() {
- TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
- ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
- LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
- CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
- _ => return None,
- };
- Some(res)
- }
- fn syntax(&self) -> &SyntaxNode {
- match self {
- GenericArg::TypeArg(it) => &it.syntax,
- GenericArg::AssocTypeArg(it) => &it.syntax,
- GenericArg::LifetimeArg(it) => &it.syntax,
- GenericArg::ConstArg(it) => &it.syntax,
- }
- }
-}
impl From<ArrayType> for Type {
fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
}
@@ -3455,6 +3421,41 @@ impl AstNode for Type {
}
}
}
+impl From<TypeArg> for GenericArg {
+ fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+ fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+ fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+ fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+impl AstNode for GenericArg {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+ ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+ LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+ CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericArg::TypeArg(it) => &it.syntax,
+ GenericArg::AssocTypeArg(it) => &it.syntax,
+ GenericArg::LifetimeArg(it) => &it.syntax,
+ GenericArg::ConstArg(it) => &it.syntax,
+ }
+ }
+}
impl From<ArrayExpr> for Expr {
fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
}
@@ -4340,12 +4341,12 @@ impl AstNode for AnyHasVisibility {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
-impl std::fmt::Display for GenericArg {
+impl std::fmt::Display for Type {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
-impl std::fmt::Display for Type {
+impl std::fmt::Display for GenericArg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
index 17e311c0c..ad63cc558 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -433,7 +433,6 @@ pub fn record_field(
ast_from_text(&format!("struct S {{ {visibility}{name}: {ty}, }}"))
}
-// TODO
pub fn block_expr(
stmts: impl IntoIterator<Item = ast::Stmt>,
tail_expr: Option<ast::Expr>,
@@ -853,6 +852,10 @@ pub fn self_param() -> ast::SelfParam {
ast_from_text("fn f(&self) { }")
}
+pub fn mut_self_param() -> ast::SelfParam {
+ ast_from_text("fn f(&mut self) { }")
+}
+
pub fn ret_type(ty: ast::Type) -> ast::RetType {
ast_from_text(&format!("fn f() -> {ty} {{ }}"))
}
@@ -938,6 +941,13 @@ pub fn lifetime_arg(lifetime: ast::Lifetime) -> ast::LifetimeArg {
ast_from_text(&format!("const S: T<{lifetime}> = ();"))
}
+pub fn turbofish_generic_arg_list(
+ args: impl IntoIterator<Item = ast::GenericArg>,
+) -> ast::GenericArgList {
+ let args = args.into_iter().join(", ");
+ ast_from_text(&format!("const S: T::<{args}> = ();"))
+}
+
pub(crate) fn generic_arg_list(
args: impl IntoIterator<Item = ast::GenericArg>,
) -> ast::GenericArgList {
@@ -973,6 +983,11 @@ pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::T
ast_from_text(&format!("struct f({visibility}{ty});"))
}
+pub fn variant_list(variants: impl IntoIterator<Item = ast::Variant>) -> ast::VariantList {
+ let variants = variants.into_iter().join(", ");
+ ast_from_text(&format!("enum f {{ {variants} }}"))
+}
+
pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
let field_list = match field_list {
None => String::new(),
@@ -1037,6 +1052,19 @@ pub fn struct_(
ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",))
}
+pub fn enum_(
+ visibility: Option<ast::Visibility>,
+ enum_name: ast::Name,
+ variant_list: ast::VariantList,
+) -> ast::Enum {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{it} "),
+ };
+
+ ast_from_text(&format!("{visibility}enum {enum_name} {variant_list}"))
+}
+
pub fn attr_outer(meta: ast::Meta) -> ast::Attr {
ast_from_text(&format!("#[{meta}]"))
}
@@ -1105,7 +1133,7 @@ pub mod tokens {
pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
SourceFile::parse(
- "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p)\n;\n\n",
+ "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n",
)
});
@@ -1149,6 +1177,16 @@ pub mod tokens {
lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
}
+ pub fn ident(text: &str) -> SyntaxToken {
+ assert_eq!(text.trim(), text);
+ let path: ast::Path = super::ext::ident_path(text);
+ path.syntax()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == IDENT)
+ .unwrap()
+ }
+
pub fn single_newline() -> SyntaxToken {
let res = SOURCE_FILE
.tree()
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index 691d0c618..f81dff884 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -275,10 +275,19 @@ impl ast::Path {
successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
}
+ pub fn first_qualifier(&self) -> Option<ast::Path> {
+ successors(self.qualifier(), ast::Path::qualifier).last()
+ }
+
pub fn first_segment(&self) -> Option<ast::PathSegment> {
self.first_qualifier_or_self().segment()
}
+    // FIXME: Check usages of Self::segments, they might be wrong because of the logic of the function below
+ pub fn segments_of_this_path_only_rev(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+ self.qualifiers_and_self().filter_map(|it| it.segment())
+ }
+
pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
successors(self.first_segment(), |p| {
p.parent_path().parent_path().and_then(|p| p.segment())
@@ -289,6 +298,10 @@ impl ast::Path {
successors(self.qualifier(), |p| p.qualifier())
}
+ pub fn qualifiers_and_self(&self) -> impl Iterator<Item = ast::Path> + Clone {
+ successors(Some(self.clone()), |p| p.qualifier())
+ }
+
pub fn top_path(&self) -> ast::Path {
let mut this = self.clone();
while let Some(path) = this.parent_path() {
@@ -361,6 +374,15 @@ impl ast::Impl {
}
}
+// [#15778](https://github.com/rust-lang/rust-analyzer/issues/15778)
+impl ast::PathSegment {
+ pub fn qualifying_trait(&self) -> Option<ast::PathType> {
+ let mut path_types = support::children(self.syntax());
+ let first = path_types.next()?;
+ path_types.next().or(Some(first))
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructKind {
Record(ast::RecordFieldList),
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index 87fd51d70..d5d565a01 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -2,6 +2,8 @@
use std::borrow::Cow;
+use rustc_dependencies::lexer as rustc_lexer;
+
use rustc_lexer::unescape::{
unescape_byte, unescape_c_string, unescape_char, unescape_literal, CStrUnit, Mode,
};
@@ -119,6 +121,7 @@ impl ast::Whitespace {
}
}
+#[derive(Debug)]
pub struct QuoteOffsets {
pub quotes: (TextRange, TextRange),
pub contents: TextRange,
@@ -165,6 +168,11 @@ pub trait IsString: AstToken {
fn text_range_between_quotes(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.contents)
}
+ fn text_without_quotes(&self) -> &str {
+ let text = self.text();
+ let Some(offsets) = self.text_range_between_quotes() else { return text };
+ &text[offsets - self.syntax().text_range().start()]
+ }
fn open_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.0)
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index 3e43df2d0..16f7356b1 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -76,9 +76,6 @@ pub trait HasDocComments: HasAttrs {
fn doc_comments(&self) -> DocCommentIter {
DocCommentIter { iter: self.syntax().children_with_tokens() }
}
- fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
- AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
- }
}
impl DocCommentIter {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
index 27c8a13e5..d60069804 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -19,7 +19,8 @@
//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[allow(unused)]
macro_rules! eprintln {
@@ -74,7 +75,7 @@ pub use smol_str::SmolStr;
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T> {
green: GreenNode,
- errors: Arc<Vec<SyntaxError>>,
+ errors: Option<Arc<[SyntaxError]>>,
_ty: PhantomData<fn() -> T>,
}
@@ -86,14 +87,18 @@ impl<T> Clone for Parse<T> {
impl<T> Parse<T> {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
- Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ Parse {
+ green,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ }
}
pub fn syntax_node(&self) -> SyntaxNode {
SyntaxNode::new_root(self.green.clone())
}
pub fn errors(&self) -> &[SyntaxError] {
- &self.errors
+ self.errors.as_deref().unwrap_or_default()
}
}
@@ -106,11 +111,10 @@ impl<T: AstNode> Parse<T> {
T::cast(self.syntax_node()).unwrap()
}
- pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
- if self.errors.is_empty() {
- Ok(self.tree())
- } else {
- Err(self.errors)
+ pub fn ok(self) -> Result<T, Arc<[SyntaxError]>> {
+ match self.errors {
+ Some(e) => Err(e),
+ None => Ok(self.tree()),
}
}
}
@@ -128,7 +132,7 @@ impl Parse<SyntaxNode> {
impl Parse<SourceFile> {
pub fn debug_dump(&self) -> String {
let mut buf = format!("{:#?}", self.tree().syntax());
- for err in self.errors.iter() {
+ for err in self.errors.as_deref().into_iter().flat_map(<[_]>::iter) {
format_to!(buf, "error {:?}: {}\n", err.range(), err);
}
buf
@@ -140,13 +144,16 @@ impl Parse<SourceFile> {
fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
// FIXME: validation errors are not handled here
- parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
- |(green_node, errors, _reparsed_range)| Parse {
- green: green_node,
- errors: Arc::new(errors),
- _ty: PhantomData,
- },
+ parsing::incremental_reparse(
+ self.tree().syntax(),
+ indel,
+ self.errors.as_deref().unwrap_or_default().iter().cloned(),
)
+ .map(|(green_node, errors, _reparsed_range)| Parse {
+ green: green_node,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ })
}
fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
@@ -167,7 +174,11 @@ impl SourceFile {
errors.extend(validation::validate(&root));
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
- Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ Parse {
+ green,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ }
}
}
@@ -181,29 +192,27 @@ impl ast::TokenTree {
let kind = t.kind();
if kind.is_trivia() {
was_joint = false
+ } else if kind == SyntaxKind::IDENT {
+ let token_text = t.text();
+ let contextual_kw =
+ SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+ parser_input.push_ident(contextual_kw);
} else {
- if kind == SyntaxKind::IDENT {
- let token_text = t.text();
- let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
- .unwrap_or(SyntaxKind::IDENT);
- parser_input.push_ident(contextual_kw);
- } else {
- if was_joint {
+ if was_joint {
+ parser_input.was_joint();
+ }
+ parser_input.push(kind);
+ // Tag the token as joint if it is float with a fractional part
+ // we use this jointness to inform the parser about what token split
+ // event to emit when we encounter a float literal in a field access
+ if kind == SyntaxKind::FLOAT_NUMBER {
+ if !t.text().ends_with('.') {
parser_input.was_joint();
- }
- parser_input.push(kind);
- // Tag the token as joint if it is float with a fractional part
- // we use this jointness to inform the parser about what token split
- // event to emit when we encounter a float literal in a field access
- if kind == SyntaxKind::FLOAT_NUMBER {
- if !t.text().ends_with('.') {
- parser_input.was_joint();
- } else {
- was_joint = false;
- }
} else {
- was_joint = true;
+ was_joint = false;
}
+ } else {
+ was_joint = true;
}
}
}
@@ -276,7 +285,11 @@ impl ast::TokenTree {
let (green, errors) = builder.finish_raw();
- Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ Parse {
+ green,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
index 45e591609..0ddc64171 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -20,7 +20,7 @@ use crate::{
pub(crate) fn incremental_reparse(
node: &SyntaxNode,
edit: &Indel,
- errors: Vec<SyntaxError>,
+ errors: impl IntoIterator<Item = SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
@@ -147,7 +147,7 @@ fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool {
}
fn merge_errors(
- old_errors: Vec<SyntaxError>,
+ old_errors: impl IntoIterator<Item = SyntaxError>,
new_errors: Vec<SyntaxError>,
range_before_reparse: TextRange,
edit: &Indel,
@@ -191,8 +191,12 @@ mod tests {
let fully_reparsed = SourceFile::parse(&after);
let incrementally_reparsed: Parse<SourceFile> = {
let before = SourceFile::parse(&before);
- let (green, new_errors, range) =
- incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+ let (green, new_errors, range) = incremental_reparse(
+ before.tree().syntax(),
+ &edit,
+ before.errors.as_deref().unwrap_or_default().iter().cloned(),
+ )
+ .unwrap();
assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
Parse::new(green, new_errors)
};
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
index 1d4a89201..8750147ee 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -22,12 +22,18 @@ use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode};
pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr<RustLanguage>;
/// Like `SyntaxNodePtr`, but remembers the type of node.
-#[derive(Debug)]
pub struct AstPtr<N: AstNode> {
raw: SyntaxNodePtr,
_ty: PhantomData<fn() -> N>,
}
+impl<N: AstNode + std::fmt::Debug> std::fmt::Debug for AstPtr<N> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_tuple("AstPtr").field(&self.raw).finish()
+ }
+}
+
+impl<N: AstNode> Copy for AstPtr<N> {}
impl<N: AstNode> Clone for AstPtr<N> {
fn clone(&self) -> AstPtr<N> {
AstPtr { raw: self.raw.clone(), _ty: PhantomData }
@@ -73,6 +79,10 @@ impl<N: AstNode> AstPtr<N> {
Some(AstPtr { raw: self.raw, _ty: PhantomData })
}
+ pub fn kind(&self) -> parser::SyntaxKind {
+ self.raw.kind()
+ }
+
pub fn upcast<M: AstNode>(self) -> AstPtr<M>
where
N: Into<M>,
@@ -84,6 +94,20 @@ impl<N: AstNode> AstPtr<N> {
pub fn try_from_raw(raw: SyntaxNodePtr) -> Option<AstPtr<N>> {
N::can_cast(raw.kind()).then_some(AstPtr { raw, _ty: PhantomData })
}
+
+ pub fn wrap_left<R>(self) -> AstPtr<either::Either<N, R>>
+ where
+ either::Either<N, R>: AstNode,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
+
+ pub fn wrap_right<L>(self) -> AstPtr<either::Either<L, N>>
+ where
+ either::Either<L, N>: AstNode,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
}
impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
index 168439053..8ae1242cf 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -17,11 +17,11 @@ use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
#[test]
fn parse_smoke_test() {
- let code = r##"
+ let code = r#"
fn main() {
println!("Hello, world!")
}
- "##;
+ "#;
let parse = SourceFile::parse(code);
// eprintln!("{:#?}", parse.syntax_node());
@@ -38,7 +38,7 @@ fn benchmark_parser() {
let tree = {
let _b = bench("parsing");
let p = SourceFile::parse(&data);
- assert!(p.errors.is_empty());
+ assert!(p.errors.is_none());
assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
p.tree()
};
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
index dc6c96343..c2e921e4b 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
@@ -622,7 +622,7 @@ fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
}
fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
- if lower_seperated_list(acc, grammar, label, rule) {
+ if lower_separated_list(acc, grammar, label, rule) {
return;
}
@@ -688,7 +688,7 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
}
// (T (',' T)* ','?)
-fn lower_seperated_list(
+fn lower_separated_list(
acc: &mut Vec<Field>,
grammar: &Grammar,
label: Option<&String>,
diff --git a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
index 09c080c0c..e69deb49c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
@@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> {
}
impl<'a> TokenText<'a> {
- pub(crate) fn borrowed(text: &'a str) -> Self {
+ pub fn borrowed(text: &'a str) -> Self {
TokenText(Repr::Borrowed(text))
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
index 25f34ea9d..a38f8b2b5 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
@@ -1,48 +1,8 @@
//! A set of utils methods to reuse on other abstraction levels
-use itertools::Itertools;
-
-use crate::{ast, match_ast, AstNode, SyntaxKind};
-
-pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
- path.syntax()
- .children()
- .filter_map(|node| {
- match_ast! {
- match node {
- ast::PathSegment(it) => {
- Some(it.name_ref()?.to_string())
- },
- ast::Path(it) => {
- Some(path_to_string_stripping_turbo_fish(&it))
- },
- _ => None,
- }
- }
- })
- .join("::")
-}
+use crate::SyntaxKind;
pub fn is_raw_identifier(name: &str) -> bool {
let is_keyword = SyntaxKind::from_keyword(name).is_some();
is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
}
-
-#[cfg(test)]
-mod tests {
- use super::path_to_string_stripping_turbo_fish;
- use crate::ast::make;
-
- #[test]
- fn turbofishes_are_stripped() {
- assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
- assert_eq!(
- "Vec::new",
- path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
- );
- assert_eq!(
- "Vec::new",
- path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
- );
- }
-}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
index e0ec6a242..2b1bbac08 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/validation.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
@@ -5,7 +5,7 @@
mod block;
use rowan::Direction;
-use rustc_lexer::unescape::{self, unescape_literal, Mode};
+use rustc_dependencies::lexer::unescape::{self, unescape_literal, Mode};
use crate::{
algo,
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
index 2b5b6f495..438b599ff 100644
--- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
# Avoid adding deps here, this crate is widely used in tests it should compile fast!
-dissimilar = "1.0.4"
+dissimilar = "1.0.7"
text-size.workspace = true
rustc-hash = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
index fd3e68e2d..e48b27313 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
@@ -6,7 +6,7 @@
//! * Extracting markup (mainly, `$0` markers) out of fixture strings.
//! * marks (see the eponymous module).
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod assert_linear;
pub mod bench_fixture;
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 573f56b00..f766747d7 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -9,12 +9,15 @@
//!
//! Available flags:
//! add:
+//! asm:
+//! assert:
//! as_ref: sized
//! bool_impl: option, fn
//! builtin_impls:
//! cell: copy, drop
//! clone: sized
//! coerce_unsized: unsize
+//! concat:
//! copy: clone
//! default: sized
//! deref_mut: deref
@@ -44,7 +47,7 @@
//! panic: fmt
//! phantom_data:
//! pin:
-//! pointee:
+//! pointee: copy, send, sync, ord, hash, unpin
//! range:
//! result:
//! send: sized
@@ -54,6 +57,7 @@
//! sync: sized
//! transmute:
//! try: infallible
+//! unpin: sized
//! unsize: sized
#![rustc_coherence_is_core]
@@ -89,6 +93,11 @@ pub mod marker {
pub trait Unsize<T: ?Sized> {}
// endregion:unsize
+ // region:unpin
+ #[lang = "unpin"]
+ pub auto trait Unpin {}
+ // endregion:unpin
+
// region:copy
#[lang = "copy"]
pub trait Copy: Clone {}
@@ -387,9 +396,10 @@ pub mod ptr {
// region:pointee
#[lang = "pointee_trait"]
+ #[rustc_deny_explicit_impl(implement_via_object = false)]
pub trait Pointee {
#[lang = "metadata_type"]
- type Metadata;
+ type Metadata: Copy + Send + Sync + Ord + Hash + Unpin;
}
// endregion:pointee
// region:non_null
@@ -489,7 +499,7 @@ pub mod ops {
I: SliceIndex<[T]>,
{
type Output = I::Output;
- fn index(&self, index: I) -> &I::Output {
+ fn index(&self, _index: I) -> &I::Output {
loop {}
}
}
@@ -497,7 +507,7 @@ pub mod ops {
where
I: SliceIndex<[T]>,
{
- fn index_mut(&mut self, index: I) -> &mut I::Output {
+ fn index_mut(&mut self, _index: I) -> &mut I::Output {
loop {}
}
}
@@ -507,7 +517,7 @@ pub mod ops {
I: SliceIndex<[T]>,
{
type Output = I::Output;
- fn index(&self, index: I) -> &I::Output {
+ fn index(&self, _index: I) -> &I::Output {
loop {}
}
}
@@ -515,7 +525,7 @@ pub mod ops {
where
I: SliceIndex<[T]>,
{
- fn index_mut(&mut self, index: I) -> &mut I::Output {
+ fn index_mut(&mut self, _index: I) -> &mut I::Output {
loop {}
}
}
@@ -863,17 +873,17 @@ pub mod fmt {
pub struct DebugTuple;
pub struct DebugStruct;
impl Formatter<'_> {
- pub fn debug_tuple(&mut self, name: &str) -> DebugTuple {
+ pub fn debug_tuple(&mut self, _name: &str) -> DebugTuple {
DebugTuple
}
- pub fn debug_struct(&mut self, name: &str) -> DebugStruct {
+ pub fn debug_struct(&mut self, _name: &str) -> DebugStruct {
DebugStruct
}
}
impl DebugTuple {
- pub fn field(&mut self, value: &dyn Debug) -> &mut Self {
+ pub fn field(&mut self, _value: &dyn Debug) -> &mut Self {
self
}
@@ -883,7 +893,7 @@ pub mod fmt {
}
impl DebugStruct {
- pub fn field(&mut self, name: &str, value: &dyn Debug) -> &mut Self {
+ pub fn field(&mut self, _name: &str, _value: &dyn Debug) -> &mut Self {
self
}
@@ -996,7 +1006,7 @@ pub mod fmt {
($($t:ty)*) => {
$(
impl const Debug for $t {
- fn fmt(&self, f: &mut Formatter<'_>) -> Result {
+ fn fmt(&self, _f: &mut Formatter<'_>) -> Result {
Ok(())
}
}
@@ -1012,7 +1022,7 @@ pub mod fmt {
}
impl<T: Debug> Debug for [T] {
- fn fmt(&self, f: &mut Formatter<'_>) -> Result {
+ fn fmt(&self, _f: &mut Formatter<'_>) -> Result {
Ok(())
}
}
@@ -1047,6 +1057,10 @@ pub mod option {
Some(T),
}
+ // region:copy
+ impl<T: Copy> Copy for Option<T> {}
+ // endregion:copy
+
impl<T> Option<T> {
pub const fn unwrap(self) -> T {
match self {
@@ -1062,7 +1076,7 @@ pub mod option {
}
}
- pub fn and<U>(self, optb: Option<U>) -> Option<U> {
+ pub fn and<U>(self, _optb: Option<U>) -> Option<U> {
loop {}
}
pub fn unwrap_or(self, default: T) -> T {
@@ -1080,25 +1094,25 @@ pub mod option {
}
// endregion:result
// region:fn
- pub fn and_then<U, F>(self, f: F) -> Option<U>
+ pub fn and_then<U, F>(self, _f: F) -> Option<U>
where
F: FnOnce(T) -> Option<U>,
{
loop {}
}
- pub fn unwrap_or_else<F>(self, f: F) -> T
+ pub fn unwrap_or_else<F>(self, _f: F) -> T
where
F: FnOnce() -> T,
{
loop {}
}
- pub fn map_or<U, F>(self, default: U, f: F) -> U
+ pub fn map_or<U, F>(self, _default: U, _f: F) -> U
where
F: FnOnce(T) -> U,
{
loop {}
}
- pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
+ pub fn map_or_else<U, D, F>(self, _default: D, _f: F) -> U
where
D: FnOnce() -> U,
F: FnOnce(T) -> U,
@@ -1129,7 +1143,7 @@ pub mod pin {
pointer: P,
}
impl<P> Pin<P> {
- pub fn new(pointer: P) -> Pin<P> {
+ pub fn new(_pointer: P) -> Pin<P> {
loop {}
}
}
@@ -1226,7 +1240,7 @@ pub mod iter {
mod sources {
mod repeat {
- pub fn repeat<T>(elt: T) -> Repeat<T> {
+ pub fn repeat<T>(_elt: T) -> Repeat<T> {
loop {}
}
@@ -1266,7 +1280,7 @@ pub mod iter {
fn take(self, n: usize) -> crate::iter::Take<Self> {
loop {}
}
- fn filter_map<B, F>(self, f: F) -> crate::iter::FilterMap<Self, F>
+ fn filter_map<B, F>(self, _f: F) -> crate::iter::FilterMap<Self, F>
where
Self: Sized,
F: FnMut(Self::Item) -> Option<B>,
@@ -1337,7 +1351,7 @@ mod panic {
mod panicking {
#[lang = "panic_fmt"]
- pub const fn panic_fmt(fmt: crate::fmt::Arguments<'_>) -> ! {
+ pub const fn panic_fmt(_fmt: crate::fmt::Arguments<'_>) -> ! {
loop {}
}
}
@@ -1346,7 +1360,7 @@ mod panicking {
mod macros {
// region:panic
#[macro_export]
- #[rustc_builtin_macro(std_panic)]
+ #[rustc_builtin_macro(core_panic)]
macro_rules! panic {
($($arg:tt)*) => {
/* compiler built-in */
@@ -1354,6 +1368,26 @@ mod macros {
}
// endregion:panic
+ // region:asm
+ #[macro_export]
+ #[rustc_builtin_macro]
+ macro_rules! asm {
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+ }
+ // endregion:asm
+
+ // region:assert
+ #[macro_export]
+ #[rustc_builtin_macro]
+ macro_rules! assert {
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+ }
+ // endregion:assert
+
// region:fmt
#[macro_export]
#[rustc_builtin_macro]
@@ -1370,6 +1404,13 @@ mod macros {
}
#[macro_export]
+ #[rustc_builtin_macro]
+ macro_rules! format_args_nl {
+ ($fmt:expr) => {{ /* compiler built-in */ }};
+ ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }};
+ }
+
+ #[macro_export]
macro_rules! print {
($($arg:tt)*) => {{
$crate::io::_print($crate::format_args!($($arg)*));
@@ -1399,6 +1440,12 @@ mod macros {
($file:expr $(,)?) => {{ /* compiler built-in */ }};
}
// endregion:include
+
+ // region:concat
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat {}
+ // endregion:concat
}
// region:non_zero
diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
index 76d0ca5cc..4620cc72d 100644
--- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
@@ -12,5 +12,5 @@ rust-version.workspace = true
doctest = false
[dependencies]
-itertools = "0.10.5"
+itertools.workspace = true
text-size.workspace = true
diff --git a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
index 4705d1818..fb52a50f0 100644
--- a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
@@ -4,7 +4,7 @@
//! so `TextEdit` is the ultimate representation of the work done by
//! rust-analyzer.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use itertools::Itertools;
use std::cmp::max;
diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
index 729f84a81..997f339ed 100644
--- a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
@@ -1,6 +1,6 @@
//! Discovery of `cargo` & `rustc` executables.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{env, iter, path::PathBuf};
diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml
index a28ee5f1c..572224497 100644
--- a/src/tools/rust-analyzer/crates/tt/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml
@@ -13,5 +13,6 @@ doctest = false
[dependencies]
smol_str.workspace = true
+text-size.workspace = true
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
index b5a72bec0..481d57540 100644
--- a/src/tools/rust-analyzer/crates/tt/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -2,115 +2,98 @@
//! input and output) of macros. It closely mirrors `proc_macro` crate's
//! `TokenTree`.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::fmt;
use stdx::impl_from;
pub use smol_str::SmolStr;
+pub use text_size::{TextRange, TextSize};
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SpanData<Anchor, Ctx> {
+ /// The text range of this span, relative to the anchor.
+ /// We need the anchor for incrementality, as storing absolute ranges will require
+ /// recomputation on every change in a file at all times.
+ pub range: TextRange,
+ pub anchor: Anchor,
+ /// The syntax context of the span.
+ pub ctx: Ctx,
+}
-/// Represents identity of the token.
-///
-/// For hygiene purposes, we need to track which expanded tokens originated from
-/// which source tokens. We do it by assigning an distinct identity to each
-/// source token and making sure that identities are preserved during macro
-/// expansion.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(pub u32);
+impl<Anchor: SpanAnchor, Ctx: SyntaxContext> Span for SpanData<Anchor, Ctx> {
+ #[allow(deprecated)]
+ const DUMMY: Self = SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: Anchor::DUMMY,
+ ctx: Ctx::DUMMY,
+ };
+}
-impl fmt::Debug for TokenId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
- }
+pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
+ // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead,
+ // the call site or def site spans should be used in relevant places, its just that we don't
+ // expose those everywhere in the yet.
+ const DUMMY: Self;
}
-impl TokenId {
- pub const UNSPECIFIED: TokenId = TokenId(!0);
- pub const fn unspecified() -> TokenId {
- Self::UNSPECIFIED
- }
+// FIXME: Should not exist
+pub trait SpanAnchor:
+ std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash
+{
+ #[deprecated(note = "this should not exist")]
+ const DUMMY: Self;
}
-pub mod token_id {
- pub use crate::{DelimiterKind, Spacing, TokenId};
- pub type Span = crate::TokenId;
- pub type Subtree = crate::Subtree<Span>;
- pub type Punct = crate::Punct<Span>;
- pub type Delimiter = crate::Delimiter<Span>;
- pub type Leaf = crate::Leaf<Span>;
- pub type Ident = crate::Ident<Span>;
- pub type Literal = crate::Literal<Span>;
- pub type TokenTree = crate::TokenTree<Span>;
- pub mod buffer {
- pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>;
- pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>;
- pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>;
- }
+// FIXME: Should not exist
+pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq {
+ #[deprecated(note = "this should not exist")]
+ const DUMMY: Self;
+}
- impl Delimiter {
- pub const UNSPECIFIED: Self = Self {
- open: TokenId::UNSPECIFIED,
- close: TokenId::UNSPECIFIED,
- kind: DelimiterKind::Invisible,
- };
- pub const fn unspecified() -> Self {
- Self::UNSPECIFIED
- }
- }
- impl Subtree {
- pub const fn empty() -> Self {
- Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }
- }
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TokenTree<S> {
+ Leaf(Leaf<S>),
+ Subtree(Subtree<S>),
+}
+impl_from!(Leaf<S>, Subtree<S> for TokenTree);
+impl<S: Span> TokenTree<S> {
+ pub const fn empty(span: S) -> Self {
+ Self::Subtree(Subtree {
+ delimiter: Delimiter::invisible_spanned(span),
+ token_trees: vec![],
+ })
}
- impl TokenTree {
- pub const fn empty() -> Self {
- Self::Subtree(Subtree::empty())
+
+ pub fn subtree_or_wrap(self) -> Subtree<S> {
+ match self {
+ TokenTree::Leaf(_) => {
+ Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] }
+ }
+ TokenTree::Subtree(s) => s,
}
}
-
- impl Subtree {
- pub fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) {
- self.delimiter.open = f(self.delimiter.open);
- self.delimiter.close = f(self.delimiter.close);
- self.token_trees.iter_mut().for_each(|tt| match tt {
- crate::TokenTree::Leaf(leaf) => match leaf {
- crate::Leaf::Literal(it) => it.span = f(it.span),
- crate::Leaf::Punct(it) => it.span = f(it.span),
- crate::Leaf::Ident(it) => it.span = f(it.span),
- },
- crate::TokenTree::Subtree(s) => s.visit_ids(f),
- })
+ pub fn subtree_or_wrap2(self, span: DelimSpan<S>) -> Subtree<S> {
+ match self {
+ TokenTree::Leaf(_) => Subtree {
+ delimiter: Delimiter::invisible_delim_spanned(span),
+ token_trees: vec![self],
+ },
+ TokenTree::Subtree(s) => s,
}
}
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SyntaxContext(pub u32);
-
-// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-// pub struct Span {
-// pub id: TokenId,
-// pub ctx: SyntaxContext,
-// }
-// pub type Span = (TokenId, SyntaxContext);
-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum TokenTree<Span> {
- Leaf(Leaf<Span>),
- Subtree(Subtree<Span>),
+pub enum Leaf<S> {
+ Literal(Literal<S>),
+ Punct(Punct<S>),
+ Ident(Ident<S>),
}
-impl_from!(Leaf<Span>, Subtree<Span> for TokenTree);
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum Leaf<Span> {
- Literal(Literal<Span>),
- Punct(Punct<Span>),
- Ident(Ident<Span>),
-}
-
-impl<Span> Leaf<Span> {
- pub fn span(&self) -> &Span {
+impl<S> Leaf<S> {
+ pub fn span(&self) -> &S {
match self {
Leaf::Literal(it) => &it.span,
Leaf::Punct(it) => &it.span,
@@ -118,21 +101,74 @@ impl<Span> Leaf<Span> {
}
}
}
-impl_from!(Literal<Span>, Punct<Span>, Ident<Span> for Leaf);
+impl_from!(Literal<S>, Punct<S>, Ident<S> for Leaf);
#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct Subtree<Span> {
- pub delimiter: Delimiter<Span>,
- pub token_trees: Vec<TokenTree<Span>>,
+pub struct Subtree<S> {
+ pub delimiter: Delimiter<S>,
+ pub token_trees: Vec<TokenTree<S>>,
+}
+
+impl<S: Span> Subtree<S> {
+ pub const fn empty(span: DelimSpan<S>) -> Self {
+ Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: vec![] }
+ }
+
+ pub fn visit_ids(&mut self, f: &mut impl FnMut(S) -> S) {
+ self.delimiter.open = f(self.delimiter.open);
+ self.delimiter.close = f(self.delimiter.close);
+ self.token_trees.iter_mut().for_each(|tt| match tt {
+ crate::TokenTree::Leaf(leaf) => match leaf {
+ crate::Leaf::Literal(it) => it.span = f(it.span),
+ crate::Leaf::Punct(it) => it.span = f(it.span),
+ crate::Leaf::Ident(it) => it.span = f(it.span),
+ },
+ crate::TokenTree::Subtree(s) => s.visit_ids(f),
+ })
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct DelimSpan<S> {
+ pub open: S,
+ pub close: S,
+}
+
+impl<S: Span> DelimSpan<S> {
+ // FIXME should not exist
+ pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY };
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Delimiter<Span> {
- pub open: Span,
- pub close: Span,
+pub struct Delimiter<S> {
+ pub open: S,
+ pub close: S,
pub kind: DelimiterKind,
}
+impl<S: Span> Delimiter<S> {
+ // FIXME should not exist
+ pub const DUMMY_INVISIBLE: Self =
+ Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible };
+
+ // FIXME should not exist
+ pub const fn dummy_invisible() -> Self {
+ Self::DUMMY_INVISIBLE
+ }
+
+ pub const fn invisible_spanned(span: S) -> Self {
+ Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
+ }
+
+ pub const fn invisible_delim_spanned(span: DelimSpan<S>) -> Self {
+ Delimiter { open: span.open, close: span.close, kind: DelimiterKind::Invisible }
+ }
+
+ pub fn delim_span(&self) -> DelimSpan<S> {
+ DelimSpan { open: self.open, close: self.close }
+ }
+}
+
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DelimiterKind {
Parenthesis,
@@ -142,16 +178,16 @@ pub enum DelimiterKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Literal<Span> {
+pub struct Literal<S> {
pub text: SmolStr,
- pub span: Span,
+ pub span: S,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Punct<Span> {
+pub struct Punct<S> {
pub char: char,
pub spacing: Spacing,
- pub span: Span,
+ pub span: S,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -162,9 +198,9 @@ pub enum Spacing {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
/// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier.
-pub struct Ident<Span> {
+pub struct Ident<S> {
pub text: SmolStr,
- pub span: Span,
+ pub span: S,
}
impl<S> Ident<S> {
@@ -173,9 +209,9 @@ impl<S> Ident<S> {
}
}
-fn print_debug_subtree<Span: fmt::Debug>(
+fn print_debug_subtree<S: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
- subtree: &Subtree<Span>,
+ subtree: &Subtree<S>,
level: usize,
) -> fmt::Result {
let align = " ".repeat(level);
@@ -203,9 +239,9 @@ fn print_debug_subtree<Span: fmt::Debug>(
Ok(())
}
-fn print_debug_token<Span: fmt::Debug>(
+fn print_debug_token<S: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
- tkn: &TokenTree<Span>,
+ tkn: &TokenTree<S>,
level: usize,
) -> fmt::Result {
let align = " ".repeat(level);
@@ -231,13 +267,13 @@ fn print_debug_token<Span: fmt::Debug>(
Ok(())
}
-impl<Span: fmt::Debug> fmt::Debug for Subtree<Span> {
+impl<S: fmt::Debug> fmt::Debug for Subtree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
print_debug_subtree(f, self, 0)
}
}
-impl<Span> fmt::Display for TokenTree<Span> {
+impl<S> fmt::Display for TokenTree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
@@ -246,7 +282,7 @@ impl<Span> fmt::Display for TokenTree<Span> {
}
}
-impl<Span> fmt::Display for Subtree<Span> {
+impl<S> fmt::Display for Subtree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (l, r) = match self.delimiter.kind {
DelimiterKind::Parenthesis => ("(", ")"),
@@ -274,7 +310,7 @@ impl<Span> fmt::Display for Subtree<Span> {
}
}
-impl<Span> fmt::Display for Leaf<Span> {
+impl<S> fmt::Display for Leaf<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Leaf::Ident(it) => fmt::Display::fmt(it, f),
@@ -284,25 +320,25 @@ impl<Span> fmt::Display for Leaf<Span> {
}
}
-impl<Span> fmt::Display for Ident<Span> {
+impl<S> fmt::Display for Ident<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.text, f)
}
}
-impl<Span> fmt::Display for Literal<Span> {
+impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.text, f)
}
}
-impl<Span> fmt::Display for Punct<Span> {
+impl<S> fmt::Display for Punct<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.char, f)
}
}
-impl<Span> Subtree<Span> {
+impl<S> Subtree<S> {
/// Count the number of tokens recursively
pub fn count(&self) -> usize {
let children_count = self
@@ -318,7 +354,7 @@ impl<Span> Subtree<Span> {
}
}
-impl<Span> Subtree<Span> {
+impl<S> Subtree<S> {
/// A simple line string used for debugging
pub fn as_debug_string(&self) -> String {
let delim = match self.delimiter.kind {
@@ -366,8 +402,8 @@ impl<Span> Subtree<Span> {
pub mod buffer;
-pub fn pretty<Span>(tkns: &[TokenTree<Span>]) -> String {
- fn tokentree_to_text<Span>(tkn: &TokenTree<Span>) -> String {
+pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
+ fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String {
match tkn {
TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(),
TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(),
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index 11055f028..fe6cb0a2c 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -12,10 +12,9 @@ rust-version.workspace = true
doctest = false
[dependencies]
-tracing = "0.1.35"
+tracing.workspace = true
walkdir = "2.3.2"
crossbeam-channel = "0.5.5"
-# We demand 5.1.0 as any higher version pulls in a new windows-sys dupe
notify = "6.1.1"
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
index abfc51dfe..030650437 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
@@ -7,7 +7,7 @@
//! Hopefully, one day a reliable file watching/walking crate appears on
//! crates.io, and we can reduce this to trivial glue code.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::fs;
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
index c35785cf9..11409f2eb 100644
--- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
-indexmap = "2.0.0"
+indexmap.workspace = true
nohash-hasher.workspace = true
paths.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
index 06004adad..ef5b10ee9 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
@@ -38,7 +38,7 @@
//! [`Handle`]: loader::Handle
//! [`Entries`]: loader::Entry
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod anchored_path;
pub mod file_set;
@@ -60,7 +60,26 @@ pub use paths::{AbsPath, AbsPathBuf};
///
/// Most functions in rust-analyzer use this when they need to refer to a file.
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
-pub struct FileId(pub u32);
+pub struct FileId(u32);
+// pub struct FileId(NonMaxU32);
+
+impl FileId {
+ /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
+ // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
+ pub const BOGUS: FileId = FileId(0xe4e4e);
+ pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
+
+ #[inline]
+ pub const fn from_raw(raw: u32) -> FileId {
+ assert!(raw <= Self::MAX_FILE_ID);
+ FileId(raw)
+ }
+
+ #[inline]
+ pub fn index(self) -> u32 {
+ self.0
+ }
+}
/// safe because `FileId` is a newtype of `u32`
impl nohash_hasher::IsEnabled for FileId {}
diff --git a/src/tools/rust-analyzer/docs/dev/guide.md b/src/tools/rust-analyzer/docs/dev/guide.md
index 56a68ef04..a5f1811bf 100644
--- a/src/tools/rust-analyzer/docs/dev/guide.md
+++ b/src/tools/rust-analyzer/docs/dev/guide.md
@@ -272,7 +272,7 @@ several times, with different sets of `cfg`s enabled. The IDE-specific task of
mapping source code into a semantic model is inherently imprecise for
this reason and gets handled by the [`source_binder`].
-[`source_binder`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/source_binder.rs
+[`source_binder`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/source_binder.rs
The semantic interface is declared in the [`code_model_api`] module. Each entity is
identified by an integer ID and has a bunch of methods which take a salsa database
@@ -280,8 +280,8 @@ as an argument and returns other entities (which are also IDs). Internally, thes
methods invoke various queries on the database to build the model on demand.
Here's [the list of queries].
-[`code_model_api`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/code_model_api.rs
-[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/hir/src/db.rs#L20-L106
+[`code_model_api`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/code_model_api.rs
+[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/ra_hir/src/db.rs#L20-L106
The first step of building the model is parsing the source code.
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
index 0801e988f..b66c9c943 100644
--- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -57,8 +57,9 @@ export interface TextDocumentEdit {
}
```
-When applying such code action or text edit, the editor should insert snippet, with tab stops and placeholder.
-At the moment, rust-analyzer guarantees that only a single edit will have `InsertTextFormat.Snippet`.
+When applying such code action or text edit, the editor should insert snippet, with tab stops and placeholders.
+At the moment, rust-analyzer guarantees that only a single `TextDocumentEdit` will have edits which can be `InsertTextFormat.Snippet`.
+Any additional `TextDocumentEdit`s will only have edits which are `InsertTextFormat.PlainText`.
### Example
diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc
index 71feed0f7..8a2d08084 100644
--- a/src/tools/rust-analyzer/docs/user/generated_config.adoc
+++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc
@@ -57,6 +57,12 @@ build procedural macros. The command is required to output json
and should therefore include `--message-format=json` or a similar
option.
+If there are multiple linked projects/workspaces, this command is invoked for
+each of them, with the working directory being the workspace root
+(i.e., the folder containing the `Cargo.toml`). This can be overwritten
+by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and
+`#rust-analyzer.cargo.buildScripts.invocationLocation#`.
+
By default, a cargo invocation will be constructed for the configured
targets and features, with the following base command line:
@@ -206,9 +212,11 @@ If you're changing this because you're using some tool wrapping
Cargo, you might also want to change
`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
-If there are multiple linked projects, this command is invoked for
-each of them, with the working directory being the project root
-(i.e., the folder containing the `Cargo.toml`).
+If there are multiple linked projects/workspaces, this command is invoked for
+each of them, with the working directory being the workspace root
+(i.e., the folder containing the `Cargo.toml`). This can be overwritten
+by changing `#rust-analyzer.cargo.check.invocationStrategy#` and
+`#rust-analyzer.cargo.check.invocationLocation#`.
An example command would be:
@@ -244,6 +252,11 @@ with `self` prefixed to them when inside a method.
--
Whether to add parenthesis and argument snippets when completing function.
--
+[[rust-analyzer.completion.fullFunctionSignatures.enable]]rust-analyzer.completion.fullFunctionSignatures.enable (default: `false`)::
++
+--
+Whether to show full function/method signatures in completion docs.
+--
[[rust-analyzer.completion.limit]]rust-analyzer.completion.limit (default: `null`)::
+
--
@@ -480,11 +493,16 @@ Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-i
--
Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
--
-[[rust-analyzer.imports.prefer.no.std]]rust-analyzer.imports.prefer.no.std (default: `false`)::
+[[rust-analyzer.imports.preferNoStd]]rust-analyzer.imports.preferNoStd (default: `false`)::
+
--
Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
--
+[[rust-analyzer.imports.preferPrelude]]rust-analyzer.imports.preferPrelude (default: `false`)::
++
+--
+Whether to prefer import paths containing a `prelude` module.
+--
[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
+
--
@@ -546,6 +564,11 @@ Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
--
Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
--
+[[rust-analyzer.inlayHints.implicitDrops.enable]]rust-analyzer.inlayHints.implicitDrops.enable (default: `false`)::
++
+--
+Whether to show implicit drop hints.
+--
[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
+
--
@@ -744,6 +767,16 @@ Command to be executed instead of 'cargo' for runnables.
Additional arguments to be passed to cargo for runnables such as
tests or binaries. For example, it may be `--release`.
--
+[[rust-analyzer.rust.analyzerTargetDir]]rust-analyzer.rust.analyzerTargetDir (default: `null`)::
++
+--
+Optional path to a rust-analyzer specific target directory.
+This prevents rust-analyzer's `cargo check` from locking the `Cargo.lock`
+at the expense of duplicating build artifacts.
+
+Set to `true` to use a subdirectory of the existing target directory or
+set to a path relative to the workspace to use that path.
+--
[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
+
--
diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc
index 5dafd1a4c..9fc19a7d0 100644
--- a/src/tools/rust-analyzer/docs/user/manual.adoc
+++ b/src/tools/rust-analyzer/docs/user/manual.adoc
@@ -174,7 +174,7 @@ $ rustup component add rust-analyzer
The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
-- https://www.archlinux.org/packages/community/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
+- https://www.archlinux.org/packages/extra/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
Install it with pacman, for example:
@@ -237,7 +237,7 @@ To use `rust-analyzer`, you need to install and enable one of the two popular LS
==== Eglot
-Eglot is the more minimalistic and lightweight LSP client for Emacs, integrates well with existing Emacs functionality and will be built into Emacs starting from release 29.
+Eglot is the more minimalistic and lightweight LSP client for Emacs, integrates well with existing Emacs functionality and is built into Emacs starting from release 29.
After installing Eglot, e.g. via `M-x package-install` (not needed from Emacs 29), you can enable it via the `M-x eglot` command or load it automatically in `rust-mode` via
@@ -246,6 +246,15 @@ After installing Eglot, e.g. via `M-x package-install` (not needed from Emacs 29
(add-hook 'rust-mode-hook 'eglot-ensure)
----
+To enable clippy, you will need to configure the initialization options to pass the `check.command` setting.
+
+[source,emacs-lisp]
+----
+(add-to-list 'eglot-server-programs
+ '((rust-ts-mode rust-mode) .
+ ("rust-analyzer" :initializationOptions (:check (:command "clippy")))))
+----
+
For more detailed instructions and options see the https://joaotavora.github.io/eglot[Eglot manual] (also available from Emacs via `M-x info`) and the
https://github.com/joaotavora/eglot/blob/master/README.md[Eglot readme].
@@ -555,6 +564,11 @@ There is a package named `ra_ap_rust_analyzer` available on https://crates.io/cr
For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow].
+=== Zed
+
+https://zed.dev[Zed] has native `rust-analyzer` support.
+If the LSP binary is not available, Zed can install it when opening a Rust file.
+
== Troubleshooting
Start with looking at the rust-analyzer version.
@@ -932,7 +946,7 @@ Or it is possible to specify vars more granularly:
"rust-analyzer.runnables.extraEnv": [
{
// "mask": null, // null mask means that this rule will be applied for all runnables
- env: {
+ "env": {
"APP_ID": "1",
"APP_DATA": "asdf"
}
@@ -954,7 +968,7 @@ If needed, you can set different values for different platforms:
"rust-analyzer.runnables.extraEnv": [
{
"platform": "win32", // windows only
- env: {
+ "env": {
"APP_DATA": "windows specific data"
}
},
diff --git a/src/tools/rust-analyzer/lib/la-arena/src/lib.rs b/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
index f39c3a3e4..d195bdd15 100644
--- a/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
+++ b/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
@@ -1,6 +1,6 @@
//! Yet another index-based arena.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#![warn(missing_docs)]
use std::{
diff --git a/src/tools/rust-analyzer/lib/line-index/Cargo.toml b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
index 6c0d06f47..494a7fa97 100644
--- a/src/tools/rust-analyzer/lib/line-index/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
@@ -1,11 +1,11 @@
[package]
name = "line-index"
-version = "0.1.0"
+version = "0.1.1"
description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
edition = "2021"
[dependencies]
-text-size = "1.1.0"
+text-size = "1.1.1"
nohash-hasher = "0.2.0"
diff --git a/src/tools/rust-analyzer/lib/line-index/src/lib.rs b/src/tools/rust-analyzer/lib/line-index/src/lib.rs
index 03371c9c8..58f266d67 100644
--- a/src/tools/rust-analyzer/lib/line-index/src/lib.rs
+++ b/src/tools/rust-analyzer/lib/line-index/src/lib.rs
@@ -363,7 +363,10 @@ fn analyze_source_file_generic(
let c = src[i..].chars().next().unwrap();
char_len = c.len_utf8();
- let pos = TextSize::from(i as u32) + output_offset;
+        // The last element of `lines` holds the offset of the start of the
+        // current line. To get the offset inside the line, we subtract it.
+ let pos = TextSize::from(i as u32) + output_offset
+ - lines.last().unwrap_or(&TextSize::default());
if char_len > 1 {
assert!((2..=4).contains(&char_len));
diff --git a/src/tools/rust-analyzer/lib/line-index/src/tests.rs b/src/tools/rust-analyzer/lib/line-index/src/tests.rs
index 8f3762d19..981008e34 100644
--- a/src/tools/rust-analyzer/lib/line-index/src/tests.rs
+++ b/src/tools/rust-analyzer/lib/line-index/src/tests.rs
@@ -1,4 +1,4 @@
-use crate::{LineIndex, TextSize, WideChar};
+use crate::{LineCol, LineIndex, TextSize, WideChar, WideEncoding, WideLineCol};
macro_rules! test {
(
@@ -102,7 +102,7 @@ test!(
case: multi_byte_with_new_lines,
text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
lines: vec![7, 27],
- multi_byte_chars: vec![(1, (13, 15)), (2, (29, 31))],
+ multi_byte_chars: vec![(1, (6, 8)), (2, (2, 4))],
);
test!(
@@ -118,3 +118,27 @@ test!(
lines: vec![16],
multi_byte_chars: vec![],
);
+
+#[test]
+fn test_try_line_col() {
+ let text = "\n\n\n\n\n宽3456";
+ assert_eq!(&text[5..8], "宽");
+ assert_eq!(&text[11..12], "6");
+ let line_index = LineIndex::new(text);
+ let before_6 = TextSize::from(11);
+ let line_col = line_index.try_line_col(before_6);
+ assert_eq!(line_col, Some(LineCol { line: 5, col: 6 }));
+}
+
+#[test]
+fn test_to_wide() {
+ let text = "\n\n\n\n\n宽3456";
+ assert_eq!(&text[5..8], "宽");
+ assert_eq!(&text[11..12], "6");
+ let line_index = LineIndex::new(text);
+ let before_6 = TextSize::from(11);
+ let line_col = line_index.try_line_col(before_6);
+ assert_eq!(line_col, Some(LineCol { line: 5, col: 6 }));
+ let wide_line_col = line_index.to_wide(WideEncoding::Utf16, line_col.unwrap());
+ assert_eq!(wide_line_col, Some(WideLineCol { line: 5, col: 4 }));
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
index 7ec3247e9..2a70aedbe 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lsp-server"
-version = "0.7.4"
+version = "0.7.5"
description = "Generic LSP server scaffold."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -8,9 +8,10 @@ edition = "2021"
[dependencies]
log = "0.4.17"
-serde_json = "1.0.96"
-serde = { version = "1.0.156", features = ["derive"] }
+serde_json = "1.0.108"
+serde = { version = "1.0.192", features = ["derive"] }
crossbeam-channel = "0.5.6"
[dev-dependencies]
lsp-types = "=0.94"
+ctrlc = "3.4.1"
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
index affab60a2..2797a6b60 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
@@ -4,7 +4,7 @@
//!
//! Run with `RUST_LOG=lsp_server=debug` to see all the messages.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod msg;
mod stdio;
@@ -17,7 +17,7 @@ use std::{
net::{TcpListener, TcpStream, ToSocketAddrs},
};
-use crossbeam_channel::{Receiver, Sender};
+use crossbeam_channel::{Receiver, RecvTimeoutError, Sender};
pub use crate::{
error::{ExtractError, ProtocolError},
@@ -113,11 +113,62 @@ impl Connection {
/// }
/// ```
pub fn initialize_start(&self) -> Result<(RequestId, serde_json::Value), ProtocolError> {
- loop {
- break match self.receiver.recv() {
- Ok(Message::Request(req)) if req.is_initialize() => Ok((req.id, req.params)),
+ self.initialize_start_while(|| true)
+ }
+
+    /// Starts the initialization process by waiting for an initialize request, as described in
+    /// [`Self::initialize_start`], but only for as long as `running` returns `true`. The value
+    /// returned by `running` is typically flipped by a signal handler, e.g. on `CTRL + C`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use std::sync::atomic::{AtomicBool, Ordering};
+ /// use std::sync::Arc;
+ /// # use std::error::Error;
+ /// # use lsp_types::{ClientCapabilities, InitializeParams, ServerCapabilities};
+ /// # use lsp_server::{Connection, Message, Request, RequestId, Response};
+ /// # fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+ /// let running = Arc::new(AtomicBool::new(true));
+ /// # running.store(true, Ordering::SeqCst);
+ /// let r = running.clone();
+ ///
+ /// ctrlc::set_handler(move || {
+ /// r.store(false, Ordering::SeqCst);
+ /// }).expect("Error setting Ctrl-C handler");
+ ///
+ /// let (connection, io_threads) = Connection::stdio();
+ ///
+ /// let res = connection.initialize_start_while(|| running.load(Ordering::SeqCst));
+ /// # assert!(res.is_err());
+ ///
+ /// # Ok(())
+ /// # }
+ /// ```
+ pub fn initialize_start_while<C>(
+ &self,
+ running: C,
+ ) -> Result<(RequestId, serde_json::Value), ProtocolError>
+ where
+ C: Fn() -> bool,
+ {
+ while running() {
+ let msg = match self.receiver.recv_timeout(std::time::Duration::from_secs(1)) {
+ Ok(msg) => msg,
+ Err(RecvTimeoutError::Timeout) => {
+ continue;
+ }
+ Err(e) => {
+ return Err(ProtocolError(format!(
+ "expected initialize request, got error: {e}"
+ )))
+ }
+ };
+
+ match msg {
+ Message::Request(req) if req.is_initialize() => return Ok((req.id, req.params)),
// Respond to non-initialize requests with ServerNotInitialized
- Ok(Message::Request(req)) => {
+ Message::Request(req) => {
let resp = Response::new_err(
req.id.clone(),
ErrorCode::ServerNotInitialized as i32,
@@ -126,15 +177,18 @@ impl Connection {
self.sender.send(resp.into()).unwrap();
continue;
}
- Ok(Message::Notification(n)) if !n.is_exit() => {
+ Message::Notification(n) if !n.is_exit() => {
continue;
}
- Ok(msg) => Err(ProtocolError(format!("expected initialize request, got {msg:?}"))),
- Err(e) => {
- Err(ProtocolError(format!("expected initialize request, got error: {e}")))
+ msg => {
+ return Err(ProtocolError(format!("expected initialize request, got {msg:?}")));
}
};
}
+
+ return Err(ProtocolError(String::from(
+ "Initialization has been aborted during initialization",
+ )));
}
/// Finishes the initialization process by sending an `InitializeResult` to the client
@@ -156,6 +210,51 @@ impl Connection {
}
}
+    /// Finishes the initialization process as described in [`Self::initialize_finish`], but only
+    /// for as long as `running` returns `true`. The value returned by `running` is typically
+    /// flipped by a signal handler, e.g. on `CTRL + C`.
+ pub fn initialize_finish_while<C>(
+ &self,
+ initialize_id: RequestId,
+ initialize_result: serde_json::Value,
+ running: C,
+ ) -> Result<(), ProtocolError>
+ where
+ C: Fn() -> bool,
+ {
+ let resp = Response::new_ok(initialize_id, initialize_result);
+ self.sender.send(resp.into()).unwrap();
+
+ while running() {
+ let msg = match self.receiver.recv_timeout(std::time::Duration::from_secs(1)) {
+ Ok(msg) => msg,
+ Err(RecvTimeoutError::Timeout) => {
+ continue;
+ }
+ Err(e) => {
+ return Err(ProtocolError(format!(
+ "expected initialized notification, got error: {e}",
+ )));
+ }
+ };
+
+ match msg {
+ Message::Notification(n) if n.is_initialized() => {
+ return Ok(());
+ }
+ msg => {
+ return Err(ProtocolError(format!(
+ r#"expected initialized notification, got: {msg:?}"#
+ )));
+ }
+ }
+ }
+
+ return Err(ProtocolError(String::from(
+ "Initialization has been aborted during initialization",
+ )));
+ }
+
/// Initialize the connection. Sends the server capabilities
/// to the client and returns the serialized client capabilities
/// on success. If more fine-grained initialization is required use
@@ -198,6 +297,58 @@ impl Connection {
Ok(params)
}
+    /// Initialize the connection as described in [`Self::initialize`], but only for as long as
+    /// `running` returns `true`. The value returned by `running` is typically flipped by a
+    /// signal handler, e.g. on `CTRL + C`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use std::sync::atomic::{AtomicBool, Ordering};
+ /// use std::sync::Arc;
+ /// # use std::error::Error;
+ /// # use lsp_types::ServerCapabilities;
+ /// # use lsp_server::{Connection, Message, Request, RequestId, Response};
+ ///
+ /// # fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+ /// let running = Arc::new(AtomicBool::new(true));
+ /// # running.store(true, Ordering::SeqCst);
+ /// let r = running.clone();
+ ///
+ /// ctrlc::set_handler(move || {
+ /// r.store(false, Ordering::SeqCst);
+ /// }).expect("Error setting Ctrl-C handler");
+ ///
+ /// let (connection, io_threads) = Connection::stdio();
+ ///
+ /// let server_capabilities = serde_json::to_value(&ServerCapabilities::default()).unwrap();
+ /// let initialization_params = connection.initialize_while(
+ /// server_capabilities,
+ /// || running.load(Ordering::SeqCst)
+ /// );
+ ///
+ /// # assert!(initialization_params.is_err());
+ /// # Ok(())
+ /// # }
+ /// ```
+ pub fn initialize_while<C>(
+ &self,
+ server_capabilities: serde_json::Value,
+ running: C,
+ ) -> Result<serde_json::Value, ProtocolError>
+ where
+ C: Fn() -> bool,
+ {
+ let (id, params) = self.initialize_start_while(&running)?;
+
+ let initialize_data = serde_json::json!({
+ "capabilities": server_capabilities,
+ });
+
+ self.initialize_finish_while(id, initialize_data, running)?;
+
+ Ok(params)
+ }
+
/// If `req` is `Shutdown`, respond to it and return `true`, otherwise return `false`
pub fn handle_shutdown(&self, req: &Request) -> Result<bool, ProtocolError> {
if !req.is_shutdown() {
diff --git a/src/tools/rust-analyzer/rust-bors.toml b/src/tools/rust-analyzer/rust-bors.toml
new file mode 100644
index 000000000..c31ba66c5
--- /dev/null
+++ b/src/tools/rust-analyzer/rust-bors.toml
@@ -0,0 +1 @@
+timeout = 3600
diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml
index f0cd35399..95eed3ee1 100644
--- a/src/tools/rust-analyzer/triagebot.toml
+++ b/src/tools/rust-analyzer/triagebot.toml
@@ -11,5 +11,10 @@ allow-unauthenticated = [
new_pr = true
[no-merges]
-exclude_labels = ["sync"]
+exclude_titles = [ # exclude syncs from subtree in rust-lang/rust
+ "Sync from downstream",
+ "sync from downstream",
+ "Sync from rust",
+ "sync from rust",
+]
labels = ["has-merge-commits", "S-waiting-on-author"]
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
index 7a34617e2..1c785b60a 100644
--- a/src/tools/rust-analyzer/xtask/Cargo.toml
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -7,10 +7,10 @@ edition = "2021"
rust-version.workspace = true
[dependencies]
-anyhow = "1.0.62"
+anyhow.workspace = true
flate2 = "1.0.24"
write-json = "0.1.2"
-xshell = "0.2.2"
+xshell.workspace = true
xflags = "0.3.0"
time = { version = "0.3", default-features = false }
zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
index e52cbfca3..092ab8c59 100644
--- a/src/tools/rust-analyzer/xtask/src/flags.rs
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -110,6 +110,7 @@ pub struct PublishReleaseNotes {
#[derive(Debug)]
pub enum MeasurementType {
Build,
+ RustcTests,
AnalyzeSelf,
AnalyzeRipgrep,
AnalyzeWebRender,
@@ -122,6 +123,7 @@ impl FromStr for MeasurementType {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"build" => Ok(Self::Build),
+ "rustc_tests" => Ok(Self::RustcTests),
"self" => Ok(Self::AnalyzeSelf),
"ripgrep-13.0.0" => Ok(Self::AnalyzeRipgrep),
"webrender-2022" => Ok(Self::AnalyzeWebRender),
@@ -135,6 +137,7 @@ impl AsRef<str> for MeasurementType {
fn as_ref(&self) -> &str {
match self {
Self::Build => "build",
+ Self::RustcTests => "rustc_tests",
Self::AnalyzeSelf => "self",
Self::AnalyzeRipgrep => "ripgrep-13.0.0",
Self::AnalyzeWebRender => "webrender-2022",
diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs
index 6a45033ad..49f8ae79b 100644
--- a/src/tools/rust-analyzer/xtask/src/main.rs
+++ b/src/tools/rust-analyzer/xtask/src/main.rs
@@ -8,7 +8,7 @@
//! This binary is integrated into the `cargo` command line by using an alias in
//! `.cargo/config`.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod flags;
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
index 59d41d8e4..845928432 100644
--- a/src/tools/rust-analyzer/xtask/src/metrics.rs
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -36,6 +36,9 @@ impl flags::Metrics {
MeasurementType::Build => {
metrics.measure_build(sh)?;
}
+ MeasurementType::RustcTests => {
+ metrics.measure_rustc_tests(sh)?;
+ }
MeasurementType::AnalyzeSelf => {
metrics.measure_analysis_stats_self(sh)?;
}
@@ -50,6 +53,7 @@ impl flags::Metrics {
}
None => {
metrics.measure_build(sh)?;
+ metrics.measure_rustc_tests(sh)?;
metrics.measure_analysis_stats_self(sh)?;
metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeRipgrep.as_ref())?;
metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeWebRender.as_ref())?;
@@ -78,6 +82,19 @@ impl Metrics {
self.report("build", time.as_millis() as u64, "ms".into());
Ok(())
}
+
+ fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> {
+ eprintln!("\nMeasuring rustc tests");
+
+ cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?;
+
+ let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?;
+ for (metric, value, unit) in parse_metrics(&output) {
+ self.report(metric, value, unit.into());
+ }
+ Ok(())
+ }
+
fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> {
self.measure_analysis_stats_path(sh, "self", ".")
}
diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js
index c7e6dd361..86881ef36 100644
--- a/src/tools/rustdoc-js/tester.js
+++ b/src/tools/rustdoc-js/tester.js
@@ -122,7 +122,31 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
}
function valueCheck(fullPath, expected, result, error_text, queryName) {
- if (Array.isArray(expected)) {
+ if (Array.isArray(expected) && result instanceof Map) {
+ const expected_set = new Set();
+ for (const [key, expected_value] of expected) {
+ expected_set.add(key);
+ checkNeededFields(fullPath, expected_value, error_text, queryName, key);
+ if (result.has(key)) {
+ valueCheck(
+ fullPath + "[" + key + "]",
+ expected_value,
+ result.get(key),
+ error_text,
+ queryName
+ );
+ } else {
+ error_text.push(`${queryName}==> EXPECTED has extra key in map from field ` +
+ `\`${fullPath}\` (key ${key}): \`${JSON.stringify(expected_value)}\``);
+ }
+ }
+ for (const [key, result_value] of result.entries()) {
+ if (!expected_set.has(key)) {
+ error_text.push(`${queryName}==> EXPECTED missing key in map from field ` +
+ `\`${fullPath}\` (key ${key}): \`${JSON.stringify(result_value)}\``);
+ }
+ }
+ } else if (Array.isArray(expected)) {
let i;
for (i = 0; i < expected.length; ++i) {
checkNeededFields(fullPath, expected[i], error_text, queryName, i);
@@ -153,6 +177,9 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
}
let result_v = result[key];
if (result_v !== null && key === "error") {
+ if (!result_v.forEach) {
+ throw result_v;
+ }
result_v.forEach((value, index) => {
value = value.split("&nbsp;").join(" ");
if (index % 2 === 1) {
@@ -369,16 +396,16 @@ function loadSearchJS(doc_folder, resource_suffix) {
const staticFiles = path.join(doc_folder, "static.files");
const searchJs = fs.readdirSync(staticFiles).find(f => f.match(/search.*\.js$/));
const searchModule = require(path.join(staticFiles, searchJs));
- const searchWords = searchModule.initSearch(searchIndex.searchIndex);
+ searchModule.initSearch(searchIndex.searchIndex);
return {
doSearch: function(queryStr, filterCrate, currentCrate) {
- return searchModule.execQuery(searchModule.parseQuery(queryStr), searchWords,
+ return searchModule.execQuery(searchModule.parseQuery(queryStr),
filterCrate, currentCrate);
},
getCorrections: function(queryStr, filterCrate, currentCrate) {
const parsedQuery = searchModule.parseQuery(queryStr);
- searchModule.execQuery(parsedQuery, searchWords, filterCrate, currentCrate);
+ searchModule.execQuery(parsedQuery, filterCrate, currentCrate);
return parsedQuery.correction;
},
parseQuery: searchModule.parseQuery,
diff --git a/src/tools/rustfmt/Cargo.toml b/src/tools/rustfmt/Cargo.toml
index 00e0ed37a..032b9b548 100644
--- a/src/tools/rustfmt/Cargo.toml
+++ b/src/tools/rustfmt/Cargo.toml
@@ -43,7 +43,7 @@ diff = "0.1"
dirs = "4.0"
getopts = "0.2"
ignore = "0.4"
-itertools = "0.10"
+itertools = "0.11"
lazy_static = "1.4"
regex = "1.7"
serde = { version = "1.0.160", features = ["derive"] }
diff --git a/src/tools/rustfmt/src/closures.rs b/src/tools/rustfmt/src/closures.rs
index 8a4089a56..f698f494a 100644
--- a/src/tools/rustfmt/src/closures.rs
+++ b/src/tools/rustfmt/src/closures.rs
@@ -29,7 +29,7 @@ pub(crate) fn rewrite_closure(
binder: &ast::ClosureBinder,
constness: ast::Const,
capture: ast::CaptureBy,
- is_async: &ast::Async,
+ coroutine_kind: &Option<ast::CoroutineKind>,
movability: ast::Movability,
fn_decl: &ast::FnDecl,
body: &ast::Expr,
@@ -40,7 +40,16 @@ pub(crate) fn rewrite_closure(
debug!("rewrite_closure {:?}", body);
let (prefix, extra_offset) = rewrite_closure_fn_decl(
- binder, constness, capture, is_async, movability, fn_decl, body, span, context, shape,
+ binder,
+ constness,
+ capture,
+ coroutine_kind,
+ movability,
+ fn_decl,
+ body,
+ span,
+ context,
+ shape,
)?;
// 1 = space between `|...|` and body.
let body_shape = shape.offset_left(extra_offset)?;
@@ -233,7 +242,7 @@ fn rewrite_closure_fn_decl(
binder: &ast::ClosureBinder,
constness: ast::Const,
capture: ast::CaptureBy,
- asyncness: &ast::Async,
+ coroutine_kind: &Option<ast::CoroutineKind>,
movability: ast::Movability,
fn_decl: &ast::FnDecl,
body: &ast::Expr,
@@ -263,7 +272,12 @@ fn rewrite_closure_fn_decl(
} else {
""
};
- let is_async = if asyncness.is_async() { "async " } else { "" };
+ let coro = match coroutine_kind {
+ Some(ast::CoroutineKind::Async { .. }) => "async ",
+ Some(ast::CoroutineKind::Gen { .. }) => "gen ",
+ Some(ast::CoroutineKind::AsyncGen { .. }) => "async gen ",
+ None => "",
+ };
let mover = if matches!(capture, ast::CaptureBy::Value { .. }) {
"move "
} else {
@@ -272,7 +286,7 @@ fn rewrite_closure_fn_decl(
// 4 = "|| {".len(), which is overconservative when the closure consists of
// a single expression.
let nested_shape = shape
- .shrink_left(binder.len() + const_.len() + immovable.len() + is_async.len() + mover.len())?
+ .shrink_left(binder.len() + const_.len() + immovable.len() + coro.len() + mover.len())?
.sub_width(4)?;
// 1 = |
@@ -310,7 +324,7 @@ fn rewrite_closure_fn_decl(
.tactic(tactic)
.preserve_newline(true);
let list_str = write_list(&item_vec, &fmt)?;
- let mut prefix = format!("{binder}{const_}{immovable}{is_async}{mover}|{list_str}|");
+ let mut prefix = format!("{binder}{const_}{immovable}{coro}{mover}|{list_str}|");
if !ret_str.is_empty() {
if prefix.contains('\n') {
@@ -339,7 +353,7 @@ pub(crate) fn rewrite_last_closure(
ref binder,
constness,
capture_clause,
- ref asyncness,
+ ref coroutine_kind,
movability,
ref fn_decl,
ref body,
@@ -360,7 +374,7 @@ pub(crate) fn rewrite_last_closure(
binder,
constness,
capture_clause,
- asyncness,
+ coroutine_kind,
movability,
fn_decl,
body,
diff --git a/src/tools/rustfmt/src/comment.rs b/src/tools/rustfmt/src/comment.rs
index 7da0f79bd..f7cd7cefb 100644
--- a/src/tools/rustfmt/src/comment.rs
+++ b/src/tools/rustfmt/src/comment.rs
@@ -1,6 +1,6 @@
// Formatting and tools for comments.
-use std::{self, borrow::Cow, iter};
+use std::{borrow::Cow, iter};
use itertools::{multipeek, MultiPeek};
use lazy_static::lazy_static;
@@ -1847,7 +1847,6 @@ fn remove_comment_header(comment: &str) -> &str {
#[cfg(test)]
mod test {
use super::*;
- use crate::shape::{Indent, Shape};
#[test]
fn char_classes() {
diff --git a/src/tools/rustfmt/src/config/file_lines.rs b/src/tools/rustfmt/src/config/file_lines.rs
index e33fe9bb2..224864393 100644
--- a/src/tools/rustfmt/src/config/file_lines.rs
+++ b/src/tools/rustfmt/src/config/file_lines.rs
@@ -6,7 +6,7 @@ use std::path::PathBuf;
use std::{cmp, fmt, iter, str};
use rustc_data_structures::sync::Lrc;
-use rustc_span::{self, SourceFile};
+use rustc_span::SourceFile;
use serde::{ser, Deserialize, Deserializer, Serialize, Serializer};
use serde_json as json;
use thiserror::Error;
diff --git a/src/tools/rustfmt/src/config/mod.rs b/src/tools/rustfmt/src/config/mod.rs
index 7538b2652..9d454137b 100644
--- a/src/tools/rustfmt/src/config/mod.rs
+++ b/src/tools/rustfmt/src/config/mod.rs
@@ -1,5 +1,4 @@
use std::cell::Cell;
-use std::default::Default;
use std::fs::File;
use std::io::{Error, ErrorKind, Read};
use std::path::{Path, PathBuf};
@@ -1017,7 +1016,6 @@ make_backup = false
#[cfg(test)]
mod partially_unstable_option {
use super::mock::{Config, PartiallyUnstableOption};
- use super::*;
/// From the command line, we can override with a stable variant.
#[test]
diff --git a/src/tools/rustfmt/src/emitter/checkstyle.rs b/src/tools/rustfmt/src/emitter/checkstyle.rs
index 56d6a0ed6..2a4a9dfce 100644
--- a/src/tools/rustfmt/src/emitter/checkstyle.rs
+++ b/src/tools/rustfmt/src/emitter/checkstyle.rs
@@ -1,7 +1,6 @@
use self::xml::XmlEscaped;
use super::*;
use crate::rustfmt_diff::{make_diff, DiffLine, Mismatch};
-use std::io::{self, Write};
mod xml;
diff --git a/src/tools/rustfmt/src/emitter/diff.rs b/src/tools/rustfmt/src/emitter/diff.rs
index 764cd136e..0af19a7d0 100644
--- a/src/tools/rustfmt/src/emitter/diff.rs
+++ b/src/tools/rustfmt/src/emitter/diff.rs
@@ -51,8 +51,6 @@ impl Emitter for DiffEmitter {
#[cfg(test)]
mod tests {
use super::*;
- use crate::config::Config;
- use crate::FileName;
use std::path::PathBuf;
#[test]
diff --git a/src/tools/rustfmt/src/emitter/json.rs b/src/tools/rustfmt/src/emitter/json.rs
index 5594196be..f47c3260a 100644
--- a/src/tools/rustfmt/src/emitter/json.rs
+++ b/src/tools/rustfmt/src/emitter/json.rs
@@ -2,7 +2,6 @@ use super::*;
use crate::rustfmt_diff::{make_diff, DiffLine, Mismatch};
use serde::Serialize;
use serde_json::to_string as to_json_string;
-use std::io::{self, Write};
#[derive(Debug, Default)]
pub(crate) struct JsonEmitter {
@@ -106,7 +105,6 @@ impl JsonEmitter {
#[cfg(test)]
mod tests {
use super::*;
- use crate::FileName;
use std::path::PathBuf;
#[test]
diff --git a/src/tools/rustfmt/src/emitter/modified_lines.rs b/src/tools/rustfmt/src/emitter/modified_lines.rs
index 94ff570a8..81f0a31b9 100644
--- a/src/tools/rustfmt/src/emitter/modified_lines.rs
+++ b/src/tools/rustfmt/src/emitter/modified_lines.rs
@@ -1,6 +1,5 @@
use super::*;
use crate::rustfmt_diff::{make_diff, ModifiedLines};
-use std::io::Write;
#[derive(Debug, Default)]
pub(crate) struct ModifiedLinesEmitter;
diff --git a/src/tools/rustfmt/src/emitter/stdout.rs b/src/tools/rustfmt/src/emitter/stdout.rs
index 0bbc7332d..0b635a28b 100644
--- a/src/tools/rustfmt/src/emitter/stdout.rs
+++ b/src/tools/rustfmt/src/emitter/stdout.rs
@@ -1,6 +1,5 @@
use super::*;
use crate::config::Verbosity;
-use std::io::Write;
#[derive(Debug)]
pub(crate) struct StdoutEmitter {
diff --git a/src/tools/rustfmt/src/expr.rs b/src/tools/rustfmt/src/expr.rs
index fa941e614..a68bd6694 100644
--- a/src/tools/rustfmt/src/expr.rs
+++ b/src/tools/rustfmt/src/expr.rs
@@ -212,7 +212,7 @@ pub(crate) fn format_expr(
&cl.binder,
cl.constness,
cl.capture_clause,
- &cl.asyncness,
+ &cl.coroutine_kind,
cl.movability,
&cl.fn_decl,
&cl.body,
@@ -1933,7 +1933,7 @@ fn rewrite_unary_op(
shape: Shape,
) -> Option<String> {
// For some reason, an UnOp is not spanned like BinOp!
- rewrite_unary_prefix(context, ast::UnOp::to_string(op), expr, shape)
+ rewrite_unary_prefix(context, op.as_str(), expr, shape)
}
pub(crate) enum RhsAssignKind<'ast> {
diff --git a/src/tools/rustfmt/src/ignore_path.rs b/src/tools/rustfmt/src/ignore_path.rs
index d95594949..7b5697bec 100644
--- a/src/tools/rustfmt/src/ignore_path.rs
+++ b/src/tools/rustfmt/src/ignore_path.rs
@@ -1,4 +1,4 @@
-use ignore::{self, gitignore};
+use ignore::gitignore;
use crate::config::{FileName, IgnoreList};
diff --git a/src/tools/rustfmt/src/imports.rs b/src/tools/rustfmt/src/imports.rs
index f8e7fa628..09f6e7523 100644
--- a/src/tools/rustfmt/src/imports.rs
+++ b/src/tools/rustfmt/src/imports.rs
@@ -1102,7 +1102,6 @@ enum SharedPrefix {
#[cfg(test)]
mod test {
use super::*;
- use rustc_span::DUMMY_SP;
// Parse the path part of an import. This parser is not robust and is only
// suitable for use in a test harness.
diff --git a/src/tools/rustfmt/src/items.rs b/src/tools/rustfmt/src/items.rs
index edb5a5b62..6fb69d6b8 100644
--- a/src/tools/rustfmt/src/items.rs
+++ b/src/tools/rustfmt/src/items.rs
@@ -287,7 +287,7 @@ pub(crate) struct FnSig<'a> {
decl: &'a ast::FnDecl,
generics: &'a ast::Generics,
ext: ast::Extern,
- is_async: Cow<'a, ast::Async>,
+ coroutine_kind: Cow<'a, Option<ast::CoroutineKind>>,
constness: ast::Const,
defaultness: ast::Defaultness,
unsafety: ast::Unsafe,
@@ -302,7 +302,7 @@ impl<'a> FnSig<'a> {
) -> FnSig<'a> {
FnSig {
unsafety: method_sig.header.unsafety,
- is_async: Cow::Borrowed(&method_sig.header.asyncness),
+ coroutine_kind: Cow::Borrowed(&method_sig.header.coroutine_kind),
constness: method_sig.header.constness,
defaultness: ast::Defaultness::Final,
ext: method_sig.header.ext,
@@ -328,7 +328,7 @@ impl<'a> FnSig<'a> {
generics,
ext: fn_sig.header.ext,
constness: fn_sig.header.constness,
- is_async: Cow::Borrowed(&fn_sig.header.asyncness),
+ coroutine_kind: Cow::Borrowed(&fn_sig.header.coroutine_kind),
defaultness,
unsafety: fn_sig.header.unsafety,
visibility: vis,
@@ -343,7 +343,8 @@ impl<'a> FnSig<'a> {
result.push_str(&*format_visibility(context, self.visibility));
result.push_str(format_defaultness(self.defaultness));
result.push_str(format_constness(self.constness));
- result.push_str(format_async(&self.is_async));
+ self.coroutine_kind
+ .map(|coroutine_kind| result.push_str(format_coro(&coroutine_kind)));
result.push_str(format_unsafety(self.unsafety));
result.push_str(&format_extern(
self.ext,
@@ -665,7 +666,7 @@ impl<'a> FmtVisitor<'a> {
let span = mk_sp(lo, field.span.lo());
let variant_body = match field.data {
- ast::VariantData::Tuple(..) | ast::VariantData::Struct(..) => format_struct(
+ ast::VariantData::Tuple(..) | ast::VariantData::Struct { .. } => format_struct(
&context,
&StructParts::from_variant(field, &context),
self.block_indent,
@@ -1091,7 +1092,7 @@ fn enum_variant_span(variant: &ast::Variant, context: &RewriteContext<'_>) -> Sp
if let Some(ref anon_const) = variant.disr_expr {
let span_before_consts = variant.span.until(anon_const.value.span);
let hi = match &variant.data {
- Struct(..) => context
+ Struct { .. } => context
.snippet_provider
.span_after_last(span_before_consts, "}"),
Tuple(..) => context
@@ -1111,12 +1112,12 @@ fn format_struct(
offset: Indent,
one_line_width: Option<usize>,
) -> Option<String> {
- match *struct_parts.def {
+ match struct_parts.def {
ast::VariantData::Unit(..) => format_unit_struct(context, struct_parts, offset),
- ast::VariantData::Tuple(ref fields, _) => {
+ ast::VariantData::Tuple(fields, _) => {
format_tuple_struct(context, struct_parts, fields, offset)
}
- ast::VariantData::Struct(ref fields, _) => {
+ ast::VariantData::Struct { fields, .. } => {
format_struct_struct(context, struct_parts, fields, offset, one_line_width)
}
}
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index 76553466e..b4c58d2fe 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -708,7 +708,7 @@ struct MacroArgParser {
fn last_tok(tt: &TokenTree) -> Token {
match *tt {
TokenTree::Token(ref t, _) => t.clone(),
- TokenTree::Delimited(delim_span, delim, _) => Token {
+ TokenTree::Delimited(delim_span, _, delim, _) => Token {
kind: TokenKind::CloseDelim(delim),
span: delim_span.close,
},
@@ -925,7 +925,7 @@ impl MacroArgParser {
self.add_meta_variable(&mut iter)?;
}
TokenTree::Token(ref t, _) => self.update_buffer(t),
- &TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
+ &TokenTree::Delimited(_dspan, _spacing, delimited, ref tts) => {
if !self.buf.is_empty() {
if next_space(&self.last_tok.kind) == SpaceState::Always {
self.add_separator();
@@ -1167,7 +1167,7 @@ impl<'a> MacroParser<'a> {
let tok = self.toks.next()?;
let (lo, args_paren_kind) = match tok {
TokenTree::Token(..) => return None,
- &TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d),
+ &TokenTree::Delimited(delimited_span, _, d, _) => (delimited_span.open.lo(), d),
};
let args = TokenStream::new(vec![tok.clone()]);
match self.toks.next()? {
diff --git a/src/tools/rustfmt/src/matches.rs b/src/tools/rustfmt/src/matches.rs
index 95b0ed16d..ef509b568 100644
--- a/src/tools/rustfmt/src/matches.rs
+++ b/src/tools/rustfmt/src/matches.rs
@@ -223,7 +223,7 @@ fn rewrite_match_arm(
) -> Option<String> {
let (missing_span, attrs_str) = if !arm.attrs.is_empty() {
if contains_skip(&arm.attrs) {
- let (_, body) = flatten_arm_body(context, &arm.body, None);
+ let (_, body) = flatten_arm_body(context, arm.body.as_deref()?, None);
// `arm.span()` does not include trailing comma, add it manually.
return Some(format!(
"{}{}",
@@ -246,7 +246,7 @@ fn rewrite_match_arm(
};
// Patterns
- let pat_shape = match &arm.body.kind {
+ let pat_shape = match &arm.body.as_ref()?.kind {
ast::ExprKind::Block(_, Some(label)) => {
// Some block with a label ` => 'label: {`
// 7 = ` => : {`
@@ -280,10 +280,10 @@ fn rewrite_match_arm(
false,
)?;
- let arrow_span = mk_sp(arm.pat.span.hi(), arm.body.span().lo());
+ let arrow_span = mk_sp(arm.pat.span.hi(), arm.body.as_ref()?.span().lo());
rewrite_match_body(
context,
- &arm.body,
+ arm.body.as_ref()?,
&lhs_str,
shape,
guard_str.contains('\n'),
diff --git a/src/tools/rustfmt/src/pairs.rs b/src/tools/rustfmt/src/pairs.rs
index 07c051937..bfc2ffed3 100644
--- a/src/tools/rustfmt/src/pairs.rs
+++ b/src/tools/rustfmt/src/pairs.rs
@@ -339,7 +339,7 @@ impl FlattenPair for ast::Expr {
if let Some(pop) = stack.pop() {
match pop.kind {
ast::ExprKind::Binary(op, _, ref rhs) => {
- separators.push(op.node.to_string());
+ separators.push(op.node.as_str());
node = rhs;
}
_ => unreachable!(),
diff --git a/src/tools/rustfmt/src/parse/macros/cfg_if.rs b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
index cbc4c90b8..bafef7b0f 100644
--- a/src/tools/rustfmt/src/parse/macros/cfg_if.rs
+++ b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
@@ -67,7 +67,7 @@ fn parse_cfg_if_inner<'a>(
Ok(None) => continue,
Err(err) => {
err.cancel();
- parser.sess.span_diagnostic.reset_err_count();
+ parser.sess.dcx.reset_err_count();
return Err(
"Expected item inside cfg_if block, but failed to parse it as an item",
);
diff --git a/src/tools/rustfmt/src/parse/macros/lazy_static.rs b/src/tools/rustfmt/src/parse/macros/lazy_static.rs
index a8c2feec4..8b1dc6694 100644
--- a/src/tools/rustfmt/src/parse/macros/lazy_static.rs
+++ b/src/tools/rustfmt/src/parse/macros/lazy_static.rs
@@ -16,8 +16,8 @@ pub(crate) fn parse_lazy_static(
($method:ident $(,)* $($arg:expr),* $(,)*) => {
match parser.$method($($arg,)*) {
Ok(val) => {
- if parser.sess.span_diagnostic.has_errors().is_some() {
- parser.sess.span_diagnostic.reset_err_count();
+ if parser.sess.dcx.has_errors().is_some() {
+ parser.sess.dcx.reset_err_count();
return None;
} else {
val
@@ -25,7 +25,7 @@ pub(crate) fn parse_lazy_static(
}
Err(err) => {
err.cancel();
- parser.sess.span_diagnostic.reset_err_count();
+ parser.sess.dcx.reset_err_count();
return None;
}
}
diff --git a/src/tools/rustfmt/src/parse/macros/mod.rs b/src/tools/rustfmt/src/parse/macros/mod.rs
index 7a802f7a8..2dd262217 100644
--- a/src/tools/rustfmt/src/parse/macros/mod.rs
+++ b/src/tools/rustfmt/src/parse/macros/mod.rs
@@ -28,8 +28,8 @@ fn parse_macro_arg<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
let mut cloned_parser = (*parser).clone();
match $parser(&mut cloned_parser) {
Ok(x) => {
- if parser.sess.span_diagnostic.has_errors().is_some() {
- parser.sess.span_diagnostic.reset_err_count();
+ if parser.sess.dcx.has_errors().is_some() {
+ parser.sess.dcx.reset_err_count();
} else {
// Parsing succeeded.
*parser = cloned_parser;
@@ -38,7 +38,7 @@ fn parse_macro_arg<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
}
Err(e) => {
e.cancel();
- parser.sess.span_diagnostic.reset_err_count();
+ parser.sess.dcx.reset_err_count();
}
}
};
diff --git a/src/tools/rustfmt/src/parse/session.rs b/src/tools/rustfmt/src/parse/session.rs
index 0573df9de..e8f7b422a 100644
--- a/src/tools/rustfmt/src/parse/session.rs
+++ b/src/tools/rustfmt/src/parse/session.rs
@@ -4,7 +4,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
use rustc_errors::emitter::{DynEmitter, Emitter, EmitterWriter};
use rustc_errors::translation::Translate;
-use rustc_errors::{ColorConfig, Diagnostic, Handler, Level as DiagnosticLevel};
+use rustc_errors::{ColorConfig, DiagCtxt, Diagnostic, Level as DiagnosticLevel};
use rustc_session::parse::ParseSess as RawParseSess;
use rustc_span::{
source_map::{FilePathMapping, SourceMap},
@@ -118,13 +118,13 @@ impl From<Color> for ColorConfig {
}
}
-fn default_handler(
+fn default_dcx(
source_map: Lrc<SourceMap>,
ignore_path_set: Lrc<IgnorePathSet>,
can_reset: Lrc<AtomicBool>,
hide_parse_errors: bool,
color: Color,
-) -> Handler {
+) -> DiagCtxt {
let supports_color = term::stderr().map_or(false, |term| term.supports_color());
let emit_color = if supports_color {
ColorConfig::from(color)
@@ -141,7 +141,7 @@ fn default_handler(
);
Box::new(EmitterWriter::stderr(emit_color, fallback_bundle).sm(Some(source_map.clone())))
};
- Handler::with_emitter(Box::new(SilentOnIgnoredFilesEmitter {
+ DiagCtxt::with_emitter(Box::new(SilentOnIgnoredFilesEmitter {
has_non_ignorable_parser_errors: false,
source_map,
emitter,
@@ -159,14 +159,14 @@ impl ParseSess {
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let can_reset_errors = Lrc::new(AtomicBool::new(false));
- let handler = default_handler(
+ let dcx = default_dcx(
Lrc::clone(&source_map),
Lrc::clone(&ignore_path_set),
Lrc::clone(&can_reset_errors),
config.hide_parse_errors(),
config.color(),
);
- let parse_sess = RawParseSess::with_span_handler(handler, source_map);
+ let parse_sess = RawParseSess::with_dcx(dcx, source_map);
Ok(ParseSess {
parse_sess,
@@ -218,7 +218,7 @@ impl ParseSess {
}
pub(crate) fn set_silent_emitter(&mut self) {
- self.parse_sess.span_diagnostic = Handler::with_emitter(silent_emitter());
+ self.parse_sess.dcx = DiagCtxt::with_emitter(silent_emitter());
}
pub(crate) fn span_to_filename(&self, span: Span) -> FileName {
@@ -284,10 +284,8 @@ impl ParseSess {
// Methods that should be restricted within the parse module.
impl ParseSess {
pub(super) fn emit_diagnostics(&self, diagnostics: Vec<Diagnostic>) {
- for mut diagnostic in diagnostics {
- self.parse_sess
- .span_diagnostic
- .emit_diagnostic(&mut diagnostic);
+ for diagnostic in diagnostics {
+ self.parse_sess.dcx.emit_diagnostic(diagnostic);
}
}
@@ -296,11 +294,11 @@ impl ParseSess {
}
pub(super) fn has_errors(&self) -> bool {
- self.parse_sess.span_diagnostic.has_errors().is_some()
+ self.parse_sess.dcx.has_errors().is_some()
}
pub(super) fn reset_errors(&self) {
- self.parse_sess.span_diagnostic.reset_err_count();
+ self.parse_sess.dcx.reset_err_count();
}
}
diff --git a/src/tools/rustfmt/src/patterns.rs b/src/tools/rustfmt/src/patterns.rs
index 33f3b4b8a..8504999b8 100644
--- a/src/tools/rustfmt/src/patterns.rs
+++ b/src/tools/rustfmt/src/patterns.rs
@@ -40,7 +40,9 @@ pub(crate) fn is_short_pattern(pat: &ast::Pat, pat_str: &str) -> bool {
fn is_short_pattern_inner(pat: &ast::Pat) -> bool {
match pat.kind {
- ast::PatKind::Rest | ast::PatKind::Wild | ast::PatKind::Lit(_) => true,
+ ast::PatKind::Rest | ast::PatKind::Never | ast::PatKind::Wild | ast::PatKind::Lit(_) => {
+ true
+ }
ast::PatKind::Ident(_, _, ref pat) => pat.is_none(),
ast::PatKind::Struct(..)
| ast::PatKind::MacCall(..)
@@ -193,6 +195,7 @@ impl Rewrite for Pat {
None
}
}
+ PatKind::Never => None,
PatKind::Range(ref lhs, ref rhs, ref end_kind) => {
let infix = match end_kind.node {
RangeEnd::Included(RangeSyntax::DotDotDot) => "...",
diff --git a/src/tools/rustfmt/src/reorder.rs b/src/tools/rustfmt/src/reorder.rs
index 3bddf4c1b..3e14f9f12 100644
--- a/src/tools/rustfmt/src/reorder.rs
+++ b/src/tools/rustfmt/src/reorder.rs
@@ -6,7 +6,7 @@
// FIXME(#2455): Reorder trait items.
-use std::cmp::{Ord, Ordering};
+use std::cmp::Ordering;
use rustc_ast::{ast, attr};
use rustc_span::{symbol::sym, Span};
diff --git a/src/tools/rustfmt/src/spanned.rs b/src/tools/rustfmt/src/spanned.rs
index 2136cfeae..5960b1444 100644
--- a/src/tools/rustfmt/src/spanned.rs
+++ b/src/tools/rustfmt/src/spanned.rs
@@ -97,7 +97,12 @@ impl Spanned for ast::Arm {
} else {
self.attrs[0].span.lo()
};
- span_with_attrs_lo_hi!(self, lo, self.body.span.hi())
+ let hi = if let Some(body) = &self.body {
+ body.span.hi()
+ } else {
+ self.pat.span.hi()
+ };
+ span_with_attrs_lo_hi!(self, lo, hi)
}
}
diff --git a/src/tools/rustfmt/src/types.rs b/src/tools/rustfmt/src/types.rs
index 127aff913..a5a424490 100644
--- a/src/tools/rustfmt/src/types.rs
+++ b/src/tools/rustfmt/src/types.rs
@@ -1,4 +1,3 @@
-use std::iter::ExactSizeIterator;
use std::ops::Deref;
use rustc_ast::ast::{self, FnRetTy, Mutability, Term};
@@ -546,7 +545,7 @@ impl Rewrite for ast::GenericBound {
ast::TraitBoundModifier::Maybe => poly_trait_ref
.rewrite(context, shape.offset_left(1)?)
.map(|s| format!("?{}", s)),
- ast::TraitBoundModifier::MaybeConst => poly_trait_ref
+ ast::TraitBoundModifier::MaybeConst(_) => poly_trait_ref
.rewrite(context, shape.offset_left(7)?)
.map(|s| format!("~const {}", s)),
ast::TraitBoundModifier::MaybeConstMaybe => poly_trait_ref
diff --git a/src/tools/rustfmt/src/utils.rs b/src/tools/rustfmt/src/utils.rs
index fd49030bf..7d7bbf115 100644
--- a/src/tools/rustfmt/src/utils.rs
+++ b/src/tools/rustfmt/src/utils.rs
@@ -75,10 +75,11 @@ pub(crate) fn format_visibility(
}
#[inline]
-pub(crate) fn format_async(is_async: &ast::Async) -> &'static str {
- match is_async {
- ast::Async::Yes { .. } => "async ",
- ast::Async::No => "",
+pub(crate) fn format_coro(coroutine_kind: &ast::CoroutineKind) -> &'static str {
+ match coroutine_kind {
+ ast::CoroutineKind::Async { .. } => "async ",
+ ast::CoroutineKind::Gen { .. } => "gen ",
+ ast::CoroutineKind::AsyncGen { .. } => "async gen ",
}
}
diff --git a/src/tools/rustfmt/tests/source/issue-2927-2.rs b/src/tools/rustfmt/tests/source/issue-2927-2.rs
index d87761fdc..07afef38c 100644
--- a/src/tools/rustfmt/tests/source/issue-2927-2.rs
+++ b/src/tools/rustfmt/tests/source/issue-2927-2.rs
@@ -1,5 +1,5 @@
// rustfmt-edition: 2015
-#![feature(rust_2018_preview, uniform_paths)]
+#![feature(uniform_paths)]
use futures::prelude::*;
use http_03::cli::Cli;
use hyper::{service::service_fn_ok, Body, Response, Server};
diff --git a/src/tools/rustfmt/tests/source/issue-2927.rs b/src/tools/rustfmt/tests/source/issue-2927.rs
index a7df32084..c7ec7bb08 100644
--- a/src/tools/rustfmt/tests/source/issue-2927.rs
+++ b/src/tools/rustfmt/tests/source/issue-2927.rs
@@ -1,5 +1,5 @@
// rustfmt-edition: 2018
-#![feature(rust_2018_preview, uniform_paths)]
+#![feature(uniform_paths)]
use futures::prelude::*;
use http_03::cli::Cli;
use hyper::{service::service_fn_ok, Body, Response, Server};
diff --git a/src/tools/rustfmt/tests/target/issue-2927-2.rs b/src/tools/rustfmt/tests/target/issue-2927-2.rs
index e895783ba..46e0bf0e9 100644
--- a/src/tools/rustfmt/tests/target/issue-2927-2.rs
+++ b/src/tools/rustfmt/tests/target/issue-2927-2.rs
@@ -1,5 +1,5 @@
// rustfmt-edition: 2015
-#![feature(rust_2018_preview, uniform_paths)]
+#![feature(uniform_paths)]
use futures::prelude::*;
use http_03::cli::Cli;
use hyper::{service::service_fn_ok, Body, Response, Server};
diff --git a/src/tools/rustfmt/tests/target/issue-2927.rs b/src/tools/rustfmt/tests/target/issue-2927.rs
index 3267be28d..56afc2d3e 100644
--- a/src/tools/rustfmt/tests/target/issue-2927.rs
+++ b/src/tools/rustfmt/tests/target/issue-2927.rs
@@ -1,5 +1,5 @@
// rustfmt-edition: 2018
-#![feature(rust_2018_preview, uniform_paths)]
+#![feature(uniform_paths)]
use ::log::{error, info, log};
use futures::prelude::*;
use http_03::cli::Cli;
diff --git a/src/tools/suggest-tests/src/lib.rs b/src/tools/suggest-tests/src/lib.rs
index 44cd3c7f6..1c1d9d033 100644
--- a/src/tools/suggest-tests/src/lib.rs
+++ b/src/tools/suggest-tests/src/lib.rs
@@ -33,13 +33,15 @@ pub fn get_suggestions<T: AsRef<str>>(modified_files: &[T]) -> Vec<Suggestion> {
let mut suggestions = Vec::new();
// static suggestions
- for sug in STATIC_SUGGESTIONS.iter() {
- let glob = Pattern::new(&sug.0).expect("Found invalid glob pattern!");
-
- for file in modified_files {
- if glob.matches(file.as_ref()) {
- suggestions.extend_from_slice(&sug.1);
- }
+ for (globs, sugs) in STATIC_SUGGESTIONS.iter() {
+ let globs = globs
+ .iter()
+ .map(|glob| Pattern::new(glob).expect("Found invalid glob pattern!"))
+ .collect::<Vec<_>>();
+ let matches_some_glob = |file: &str| globs.iter().any(|glob| glob.matches(file));
+
+ if modified_files.iter().map(AsRef::as_ref).any(matches_some_glob) {
+ suggestions.extend_from_slice(sugs);
}
}
diff --git a/src/tools/suggest-tests/src/static_suggestions.rs b/src/tools/suggest-tests/src/static_suggestions.rs
index a84e78254..fbd265ea4 100644
--- a/src/tools/suggest-tests/src/static_suggestions.rs
+++ b/src/tools/suggest-tests/src/static_suggestions.rs
@@ -2,23 +2,34 @@ use crate::{sug, Suggestion};
// FIXME: perhaps this could use `std::lazy` when it is stablizied
macro_rules! static_suggestions {
- ($( $glob:expr => [ $( $suggestion:expr ),* ] ),*) => {
- pub(crate) const STATIC_SUGGESTIONS: ::once_cell::unsync::Lazy<Vec<(&'static str, Vec<Suggestion>)>>
- = ::once_cell::unsync::Lazy::new(|| vec![ $( ($glob, vec![ $($suggestion),* ]) ),*]);
+ ($( [ $( $glob:expr ),* $(,)? ] => [ $( $suggestion:expr ),* $(,)? ] ),* $(,)? ) => {
+ pub(crate) const STATIC_SUGGESTIONS: ::once_cell::unsync::Lazy<Vec<(Vec<&'static str>, Vec<Suggestion>)>>
+ = ::once_cell::unsync::Lazy::new(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*]);
}
}
static_suggestions! {
- "*.md" => [
- sug!("test", 0, ["linkchecker"])
+ ["*.md"] => [
+ sug!("test", 0, ["linkchecker"]),
],
- "compiler/*" => [
+ ["compiler/*"] => [
sug!("check"),
- sug!("test", 1, ["tests/ui", "tests/run-make"])
+ sug!("test", 1, ["tests/ui", "tests/run-make"]),
],
- "src/librustdoc/*" => [
- sug!("test", 1, ["rustdoc"])
- ]
+ ["compiler/rustc_mir_transform/*"] => [
+ sug!("test", 1, ["mir-opt"]),
+ ],
+
+ [
+ "compiler/rustc_mir_transform/src/coverage/*",
+ "compiler/rustc_codegen_llvm/src/coverageinfo/*",
+ ] => [
+ sug!("test", 1, ["coverage"]),
+ ],
+
+ ["src/librustdoc/*"] => [
+ sug!("test", 1, ["rustdoc"]),
+ ],
}
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index f88f91655..3c00027b9 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -16,6 +16,7 @@ const LICENSES: &[&str] = &[
"Apache-2.0 OR MIT",
"Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT", // wasi license
"Apache-2.0/MIT",
+ "BSD-2-Clause OR Apache-2.0 OR MIT", // zerocopy
"ISC",
"MIT / Apache-2.0",
"MIT OR Apache-2.0 OR LGPL-2.1-or-later", // r-efi, r-efi-alloc
@@ -128,9 +129,7 @@ const EXCEPTIONS_CARGO: ExceptionList = &[
const EXCEPTIONS_RUST_ANALYZER: ExceptionList = &[
// tidy-alphabetical-start
- ("anymap", "BlueOak-1.0.0 OR MIT OR Apache-2.0"), // BlueOak is not acceptable, but we use it under MIT OR Apache-2 .0
("dissimilar", "Apache-2.0"),
- ("instant", "BSD-3-Clause"),
("notify", "CC0-1.0"),
("pulldown-cmark-to-cmark", "Apache-2.0"),
("ryu", "Apache-2.0 OR BSL-1.0"), // BSL is not acceptble, but we use it under Apache-2.0
@@ -212,7 +211,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"crossbeam-epoch",
"crossbeam-utils",
"crypto-common",
- "cstr",
"darling",
"darling_core",
"darling_macro",
@@ -256,7 +254,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"icu_provider_macros",
"ident_case",
"indexmap",
- "instant",
"intl-memoizer",
"intl_pluralrules",
"is-terminal",
@@ -360,6 +357,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"tracing-tree",
"twox-hash",
"type-map",
+ "typed-arena",
"typenum",
"unic-langid",
"unic-langid-impl",
@@ -373,6 +371,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"unicode-security",
"unicode-width",
"unicode-xid",
+ "unwinding",
"valuable",
"version_check",
"wasi",
@@ -394,6 +393,8 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"yansi-term", // this is a false-positive: it's only used by rustfmt, but because it's enabled through a feature, tidy thinks it's used by rustc as well.
"yoke",
"yoke-derive",
+ "zerocopy",
+ "zerocopy-derive",
"zerofrom",
"zerofrom-derive",
"zerovec",
@@ -489,7 +490,6 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
/// to the cargo executable.
pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
let mut checked_runtime_licenses = false;
- let mut rust_metadata = None;
for &(workspace, exceptions, permitted_deps) in WORKSPACES {
if !root.join(workspace).join("Cargo.lock").exists() {
@@ -513,15 +513,6 @@ pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
let runtime_ids = compute_runtime_crates(&metadata);
check_runtime_license_exceptions(&metadata, runtime_ids, bad);
checked_runtime_licenses = true;
- rust_metadata = Some(metadata);
- } else if workspace == "src/tools/cargo" {
- check_rustfix(
- rust_metadata
- .as_ref()
- .expect("The root workspace should be the first to be checked"),
- &metadata,
- bad,
- );
}
}
@@ -750,33 +741,3 @@ fn deps_of_filtered<'a>(
deps_of_filtered(metadata, &dep.pkg, result, filter);
}
}
-
-fn direct_deps_of<'a>(
- metadata: &'a Metadata,
- pkg_id: &'a PackageId,
-) -> impl Iterator<Item = &'a Package> {
- let resolve = metadata.resolve.as_ref().unwrap();
- let node = resolve.nodes.iter().find(|n| &n.id == pkg_id).unwrap();
- node.deps.iter().map(|dep| pkg_from_id(metadata, &dep.pkg))
-}
-
-fn check_rustfix(rust_metadata: &Metadata, cargo_metadata: &Metadata, bad: &mut bool) {
- let cargo = pkg_from_name(cargo_metadata, "cargo");
- let cargo_rustfix =
- direct_deps_of(cargo_metadata, &cargo.id).find(|p| p.name == "rustfix").unwrap();
-
- let compiletest = pkg_from_name(rust_metadata, "compiletest");
- let compiletest_rustfix =
- direct_deps_of(rust_metadata, &compiletest.id).find(|p| p.name == "rustfix").unwrap();
-
- if cargo_rustfix.version != compiletest_rustfix.version {
- tidy_error!(
- bad,
- "cargo's rustfix version {} does not match compiletest's rustfix version {}\n\
- rustfix should be kept in sync, update the cargo side first, and then update \
- compiletest along with cargo.",
- cargo_rustfix.version,
- compiletest_rustfix.version
- );
- }
-}
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
index 80e58ba00..9f92b8995 100644
--- a/src/tools/tidy/src/main.rs
+++ b/src/tools/tidy/src/main.rs
@@ -132,6 +132,7 @@ fn main() {
check!(edition, &library_path);
check!(alphabetical, &src_path);
+ check!(alphabetical, &tests_path);
check!(alphabetical, &compiler_path);
check!(alphabetical, &library_path);
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index 7e24793ad..dfa386b49 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -10,7 +10,7 @@ use std::path::{Path, PathBuf};
const ENTRY_LIMIT: usize = 900;
// FIXME: The following limits should be reduced eventually.
-const ISSUES_ENTRY_LIMIT: usize = 1854;
+const ISSUES_ENTRY_LIMIT: usize = 1852;
const ROOT_ENTRY_LIMIT: usize = 867;
const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[