summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/bootstrap/CHANGELOG.md4
-rw-r--r--src/bootstrap/Cargo.lock121
-rw-r--r--src/bootstrap/Cargo.toml34
-rw-r--r--src/bootstrap/README.md2
-rw-r--r--src/bootstrap/bin/main.rs25
-rw-r--r--src/bootstrap/bin/rustc.rs52
-rw-r--r--src/bootstrap/bolt.rs29
-rw-r--r--src/bootstrap/bootstrap.py4
-rw-r--r--src/bootstrap/bootstrap_test.py39
-rw-r--r--src/bootstrap/builder.rs117
-rw-r--r--src/bootstrap/cache.rs3
-rw-r--r--src/bootstrap/channel.rs8
-rw-r--r--src/bootstrap/check.rs18
-rw-r--r--src/bootstrap/compile.rs234
-rw-r--r--src/bootstrap/config.rs100
-rw-r--r--src/bootstrap/config/tests.rs60
-rwxr-xr-xsrc/bootstrap/configure.py440
-rw-r--r--src/bootstrap/defaults/config.codegen.toml6
-rw-r--r--src/bootstrap/defaults/config.user.toml8
-rw-r--r--src/bootstrap/dist.rs227
-rw-r--r--src/bootstrap/doc.rs12
-rw-r--r--src/bootstrap/download-ci-llvm-stamp2
-rw-r--r--src/bootstrap/download.rs66
-rw-r--r--src/bootstrap/dylib_util.rs2
-rw-r--r--src/bootstrap/flags.rs27
-rw-r--r--src/bootstrap/format.rs20
-rw-r--r--src/bootstrap/install.rs11
-rw-r--r--src/bootstrap/job.rs85
-rw-r--r--src/bootstrap/lib.rs122
-rw-r--r--src/bootstrap/llvm.rs (renamed from src/bootstrap/native.rs)131
-rw-r--r--src/bootstrap/metrics.rs57
-rw-r--r--src/bootstrap/render_tests.rs371
-rw-r--r--src/bootstrap/setup.rs10
-rw-r--r--src/bootstrap/setup/tests.rs6
-rw-r--r--src/bootstrap/suggest.rs80
-rw-r--r--src/bootstrap/tarball.rs1
-rw-r--r--src/bootstrap/test.rs235
-rw-r--r--src/bootstrap/tool.rs3
-rw-r--r--src/bootstrap/util.rs93
-rw-r--r--src/ci/docker/host-x86_64/dist-x86_64-illumos/Dockerfile9
-rw-r--r--src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile4
-rwxr-xr-xsrc/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh2
-rwxr-xr-xsrc/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh2
-rw-r--r--src/ci/docker/host-x86_64/i686-gnu/Dockerfile1
-rw-r--r--src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile5
-rw-r--r--src/ci/docker/host-x86_64/mingw-check/Dockerfile4
-rw-r--r--src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile2
-rw-r--r--src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile1
-rw-r--r--src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile9
-rw-r--r--src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile1
-rw-r--r--src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version2
-rw-r--r--src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile1
-rw-r--r--src/ci/docker/scripts/emscripten.sh4
-rw-r--r--src/ci/github-actions/ci.yml144
-rw-r--r--src/ci/github-actions/problem_matchers.json15
-rwxr-xr-xsrc/ci/run.sh6
-rwxr-xr-xsrc/ci/scripts/collect-cpu-stats.sh3
-rwxr-xr-xsrc/ci/scripts/install-awscli.sh7
-rwxr-xr-xsrc/ci/scripts/run-build-from-ci.sh2
-rwxr-xr-xsrc/ci/scripts/upload-artifacts.sh2
-rw-r--r--src/ci/stage-build.py10
-rw-r--r--src/doc/book/COPYRIGHT293
-rw-r--r--src/doc/book/src/ch15-06-reference-cycles.md2
-rw-r--r--src/doc/footer.inc2
-rw-r--r--src/doc/nomicon/src/ffi.md2
-rw-r--r--src/doc/nomicon/src/subtyping.md2
-rw-r--r--src/doc/reference/src/attributes/codegen.md4
-rw-r--r--src/doc/reference/src/attributes/diagnostics.md2
-rw-r--r--src/doc/reference/src/expressions/loop-expr.md23
-rw-r--r--src/doc/reference/src/expressions/struct-expr.md2
-rw-r--r--src/doc/reference/src/inline-assembly.md48
-rw-r--r--src/doc/reference/src/items/constant-items.md16
-rw-r--r--src/doc/reference/src/items/unions.md4
-rw-r--r--src/doc/reference/src/names/namespaces.md2
-rw-r--r--src/doc/rust-by-example/src/crates/lib.md2
-rw-r--r--src/doc/rust-by-example/src/flow_control/match/guard.md1
-rw-r--r--src/doc/rust-by-example/src/hello/print.md2
-rw-r--r--src/doc/rust-by-example/src/hello/print/fmt.md8
-rw-r--r--src/doc/rust-by-example/src/macros.md4
-rw-r--r--src/doc/rust-by-example/src/primitives/array.md2
-rw-r--r--src/doc/rust-by-example/src/scope/lifetime.md2
-rw-r--r--src/doc/rust-by-example/src/scope/raii.md3
-rw-r--r--src/doc/rust-by-example/src/std/panic.md3
-rw-r--r--src/doc/rust-by-example/src/std_misc/file/read_lines.md79
-rw-r--r--src/doc/rust-by-example/src/unsafe/asm.md38
-rw-r--r--src/doc/rust-by-example/src/variable_bindings/mut.md3
-rw-r--r--src/doc/rustc-dev-guide/.github/workflows/ci.yml2
-rw-r--r--src/doc/rustc-dev-guide/README.md10
-rw-r--r--src/doc/rustc-dev-guide/book.toml6
-rwxr-xr-xsrc/doc/rustc-dev-guide/ci/lengthcheck.sh (renamed from src/doc/rustc-dev-guide/ci/check_line_lengths.sh)8
-rwxr-xr-xsrc/doc/rustc-dev-guide/ci/linkcheck.sh16
-rw-r--r--src/doc/rustc-dev-guide/examples/rustc-driver-example.rs7
-rw-r--r--src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs1
-rw-r--r--src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs1
-rw-r--r--src/doc/rustc-dev-guide/src/SUMMARY.md11
-rw-r--r--src/doc/rustc-dev-guide/src/about-this-guide.md41
-rw-r--r--src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md5
-rw-r--r--src/doc/rustc-dev-guide/src/backend/monomorph.md2
-rw-r--r--src/doc/rustc-dev-guide/src/bug-fix-procedure.md9
-rw-r--r--src/doc/rustc-dev-guide/src/building/bootstrapping.md21
-rw-r--r--src/doc/rustc-dev-guide/src/building/compiler-documenting.md2
-rw-r--r--src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md60
-rw-r--r--src/doc/rustc-dev-guide/src/building/new-target.md2
-rw-r--r--src/doc/rustc-dev-guide/src/building/suggested.md16
-rw-r--r--src/doc/rustc-dev-guide/src/compiler-debugging.md4
-rw-r--r--src/doc/rustc-dev-guide/src/const-eval/interpret.md2
-rw-r--r--src/doc/rustc-dev-guide/src/contributing.md513
-rw-r--r--src/doc/rustc-dev-guide/src/conventions.md3
-rw-r--r--src/doc/rustc-dev-guide/src/diagnostics.md22
-rw-r--r--src/doc/rustc-dev-guide/src/diagnostics/diagnostic-codes.md79
-rw-r--r--src/doc/rustc-dev-guide/src/diagnostics/error-codes.md95
-rw-r--r--src/doc/rustc-dev-guide/src/external-repos.md113
-rw-r--r--src/doc/rustc-dev-guide/src/fuzzing.md149
-rw-r--r--src/doc/rustc-dev-guide/src/generics.md5
-rw-r--r--src/doc/rustc-dev-guide/src/getting-started.md306
-rw-r--r--src/doc/rustc-dev-guide/src/git.md12
-rw-r--r--src/doc/rustc-dev-guide/src/identifiers.md5
-rw-r--r--src/doc/rustc-dev-guide/src/implementing_new_features.md2
-rw-r--r--src/doc/rustc-dev-guide/src/mir/index.md13
-rw-r--r--src/doc/rustc-dev-guide/src/notification-groups/cleanup-crew.md37
-rw-r--r--src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md2
-rw-r--r--src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md2
-rw-r--r--src/doc/rustc-dev-guide/src/rustdoc-internals.md229
-rw-r--r--src/doc/rustc-dev-guide/src/rustdoc.md8
-rw-r--r--src/doc/rustc-dev-guide/src/tests/compiletest.md10
-rw-r--r--src/doc/rustc-dev-guide/src/the-parser.md3
-rw-r--r--src/doc/rustc-dev-guide/src/tracing.md3
-rw-r--r--src/doc/rustc-dev-guide/src/type-inference.md2
-rw-r--r--src/doc/rustc/src/SUMMARY.md2
-rw-r--r--src/doc/rustc/src/codegen-options/index.md8
-rw-r--r--src/doc/rustc/src/instrument-coverage.md2
-rw-r--r--src/doc/rustc/src/platform-support.md5
-rw-r--r--src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md1
-rw-r--r--src/doc/rustc/src/platform-support/armv4t-none-eabi.md7
-rw-r--r--src/doc/rustc/src/platform-support/loongarch-linux.md92
-rw-r--r--src/doc/rustc/src/platform-support/nto-qnx.md15
-rw-r--r--src/doc/rustc/src/platform-support/openharmony.md128
-rw-r--r--src/doc/rustdoc/src/command-line-arguments.md20
-rw-r--r--src/doc/rustdoc/src/how-to-read-rustdoc.md28
-rw-r--r--src/doc/rustdoc/src/unstable-features.md15
-rw-r--r--src/doc/rustdoc/src/write-documentation/documentation-tests.md12
-rw-r--r--src/doc/rustdoc/src/write-documentation/linking-to-items-by-name.md7
-rw-r--r--src/doc/rustdoc/src/write-documentation/what-to-include.md4
-rw-r--r--src/doc/style-guide/src/expressions.md2
-rw-r--r--src/doc/unstable-book/src/compiler-flags/cf-protection.md4
-rw-r--r--src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md2
-rw-r--r--src/doc/unstable-book/src/compiler-flags/sanitizer.md4
-rw-r--r--src/doc/unstable-book/src/language-features/asm-experimental-arch.md14
-rw-r--r--src/doc/unstable-book/src/language-features/box-patterns.md2
-rw-r--r--src/doc/unstable-book/src/language-features/box-syntax.md22
-rw-r--r--src/doc/unstable-book/src/language-features/lang-items.md16
-rw-r--r--src/doc/unstable-book/src/language-features/plugin.md4
-rw-r--r--src/doc/unstable-book/src/the-unstable-book.md23
-rw-r--r--src/etc/installer/msi/rust.wxs8
-rw-r--r--src/etc/installer/pkg/Distribution.xml6
-rwxr-xr-xsrc/etc/rust-gdb4
-rwxr-xr-xsrc/etc/rust-gdbgui6
-rw-r--r--src/etc/rust_analyzer_settings.json (renamed from src/etc/vscode_settings.json)0
-rw-r--r--src/librustdoc/Cargo.toml6
-rw-r--r--src/librustdoc/askama.toml1
-rw-r--r--src/librustdoc/clean/blanket_impl.rs5
-rw-r--r--src/librustdoc/clean/cfg.rs1
-rw-r--r--src/librustdoc/clean/inline.rs106
-rw-r--r--src/librustdoc/clean/mod.rs288
-rw-r--r--src/librustdoc/clean/simplify.rs6
-rw-r--r--src/librustdoc/clean/types.rs218
-rw-r--r--src/librustdoc/clean/types/tests.rs13
-rw-r--r--src/librustdoc/clean/utils.rs6
-rw-r--r--src/librustdoc/config.rs22
-rw-r--r--src/librustdoc/core.rs12
-rw-r--r--src/librustdoc/docfs.rs28
-rw-r--r--src/librustdoc/doctest.rs17
-rw-r--r--src/librustdoc/formats/cache.rs5
-rw-r--r--src/librustdoc/html/format.rs202
-rw-r--r--src/librustdoc/html/highlight.rs63
-rw-r--r--src/librustdoc/html/markdown.rs15
-rw-r--r--src/librustdoc/html/render/context.rs57
-rw-r--r--src/librustdoc/html/render/mod.rs1092
-rw-r--r--src/librustdoc/html/render/print_item.rs806
-rw-r--r--src/librustdoc/html/render/search_index.rs68
-rw-r--r--src/librustdoc/html/render/sidebar.rs558
-rw-r--r--src/librustdoc/html/render/span_map.rs6
-rw-r--r--src/librustdoc/html/sources.rs64
-rw-r--r--src/librustdoc/html/static/COPYRIGHT.txt4
-rw-r--r--src/librustdoc/html/static/css/rustdoc.css81
-rw-r--r--src/librustdoc/html/static/css/settings.css3
-rw-r--r--src/librustdoc/html/static/css/themes/ayu.css1
-rw-r--r--src/librustdoc/html/static/css/themes/dark.css1
-rw-r--r--src/librustdoc/html/static/css/themes/light.css1
-rw-r--r--src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt4
-rw-r--r--src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt4
-rw-r--r--src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt4
-rw-r--r--src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md5
-rw-r--r--src/librustdoc/html/static/js/externs.js7
-rw-r--r--src/librustdoc/html/static/js/main.js115
-rw-r--r--src/librustdoc/html/static/js/search.js716
-rw-r--r--src/librustdoc/html/static/js/settings.js8
-rw-r--r--src/librustdoc/html/static/js/source-script.js9
-rw-r--r--src/librustdoc/html/static/js/storage.js127
-rw-r--r--src/librustdoc/html/templates/STYLE.md39
-rw-r--r--src/librustdoc/html/templates/item_info.html7
-rw-r--r--src/librustdoc/html/templates/item_union.html23
-rw-r--r--src/librustdoc/html/templates/page.html297
-rw-r--r--src/librustdoc/html/templates/print_item.html52
-rw-r--r--src/librustdoc/html/templates/short_item_info.html23
-rw-r--r--src/librustdoc/html/templates/sidebar.html37
-rw-r--r--src/librustdoc/html/templates/source.html21
-rw-r--r--src/librustdoc/json/conversions.rs39
-rw-r--r--src/librustdoc/json/mod.rs2
-rw-r--r--src/librustdoc/lib.rs9
-rw-r--r--src/librustdoc/passes/calculate_doc_coverage.rs1
-rw-r--r--src/librustdoc/passes/check_doc_test_visibility.rs1
-rw-r--r--src/librustdoc/passes/collect_intra_doc_links.rs570
-rw-r--r--src/librustdoc/passes/collect_trait_impls.rs8
-rw-r--r--src/librustdoc/passes/propagate_doc_cfg.rs4
-rw-r--r--src/librustdoc/passes/strip_hidden.rs9
-rw-r--r--src/librustdoc/visit_ast.rs33
-rw-r--r--src/rustdoc-json-types/Cargo.toml1
-rw-r--r--src/rustdoc-json-types/lib.rs19
-rw-r--r--src/stage0.json580
-rw-r--r--src/tools/bump-stage0/src/main.rs1
-rw-r--r--src/tools/clippy/.cargo/config.toml3
-rw-r--r--src/tools/clippy/.editorconfig1
-rw-r--r--src/tools/clippy/.github/workflows/clippy_bors.yml2
-rw-r--r--src/tools/clippy/.github/workflows/remark.yml2
-rw-r--r--src/tools/clippy/CHANGELOG.md169
-rw-r--r--src/tools/clippy/CONTRIBUTING.md2
-rw-r--r--src/tools/clippy/COPYRIGHT4
-rw-r--r--src/tools/clippy/Cargo.toml2
-rw-r--r--src/tools/clippy/README.md10
-rw-r--r--src/tools/clippy/book/src/README.md2
-rw-r--r--src/tools/clippy/book/src/SUMMARY.md1
-rw-r--r--src/tools/clippy/book/src/configuration.md29
-rw-r--r--src/tools/clippy/book/src/development/README.md2
-rw-r--r--src/tools/clippy/book/src/development/adding_lints.md32
-rw-r--r--src/tools/clippy/book/src/development/basics.md6
-rw-r--r--src/tools/clippy/book/src/development/common_tools_writing_lints.md14
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/book.md4
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/changelog_update.md2
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/release.md2
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/sync.md13
-rw-r--r--src/tools/clippy/book/src/development/proposals/README.md2
-rw-r--r--src/tools/clippy/book/src/development/proposals/roadmap-2021.md6
-rw-r--r--src/tools/clippy/book/src/development/proposals/syntax-tree-patterns.md42
-rw-r--r--src/tools/clippy/book/src/development/type_checking.md144
-rw-r--r--src/tools/clippy/book/src/installation.md4
-rw-r--r--src/tools/clippy/book/src/lint_configuration.md19
-rw-r--r--src/tools/clippy/book/src/lints.md10
-rw-r--r--src/tools/clippy/book/src/usage.md4
-rw-r--r--src/tools/clippy/clippy_dev/src/lib.rs2
-rw-r--r--src/tools/clippy/clippy_dev/src/new_lint.rs8
-rw-r--r--src/tools/clippy/clippy_dev/src/update_lints.rs14
-rw-r--r--src/tools/clippy/clippy_lints/Cargo.toml3
-rw-r--r--src/tools/clippy/clippy_lints/src/allow_attributes.rs71
-rw-r--r--src/tools/clippy/clippy_lints/src/almost_complete_range.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/booleans.rs64
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/cognitive_complexity.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/collection_is_never_read.rs141
-rw-r--r--src/tools/clippy/clippy_lints/src/copies.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/declared_lints.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/default.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/derivable_impls.rs23
-rw-r--r--src/tools/clippy/clippy_lints/src/derive.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/exit.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/explicit_write.rs29
-rw-r--r--src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs177
-rw-r--r--src/tools/clippy/clippy_lints/src/fn_null_check.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/format.rs91
-rw-r--r--src/tools/clippy/clippy_lints/src/format_args.rs224
-rw-r--r--src/tools/clippy/clippy_lints/src/format_impl.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/must_use.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/future_not_send.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs17
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/infinite_iter.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/instant_subtraction.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/items_after_statements.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/large_futures.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/len_zero.rs125
-rw-r--r--src/tools/clippy/clippy_lints/src/let_underscore.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs100
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/never_loop.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/same_item_push.rs32
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_async_fn.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_bits.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_clamp.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs93
-rw-r--r--src/tools/clippy/clippy_lints/src/match_result_ok.rs25
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_bool.rs20
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/single_match.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/mem_replace.rs71
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clear_with_drain.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs27
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/mod.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/misc.rs17
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_assert_message.rs82
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_doc.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_key.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_bool.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_question_mark.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/neg_multiply.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/no_effect.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs23
-rw-r--r--src/tools/clippy/clippy_lints/src/option_if_let_else.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_async_block.rs108
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/ref_option_ref.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/returns.rs32
-rw-r--r--src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/shadow.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/single_component_path_imports.rs59
-rw-r--r--src/tools/clippy/clippy_lints/src/size_of_ref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/std_instead_of_core.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs94
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/swap.rs116
-rw-r--r--src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs71
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/utils.rs57
-rw-r--r--src/tools/clippy/clippy_lints/src/types/borrowed_box.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/types/utils.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs120
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs88
-rw-r--r--src/tools/clippy/clippy_lints/src/use_self.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/useless_conversion.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/author.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/conf.rs38
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs98
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/mod.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/wildcard_imports.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/write.rs160
-rw-r--r--src/tools/clippy/clippy_utils/Cargo.toml2
-rw-r--r--src/tools/clippy/clippy_utils/src/ast_utils.rs43
-rw-r--r--src/tools/clippy/clippy_utils/src/attrs.rs4
-rw-r--r--src/tools/clippy/clippy_utils/src/check_proc_macro.rs1
-rw-r--r--src/tools/clippy/clippy_utils/src/eager_or_lazy.rs8
-rw-r--r--src/tools/clippy/clippy_utils/src/hir_utils.rs10
-rw-r--r--src/tools/clippy/clippy_utils/src/lib.rs89
-rw-r--r--src/tools/clippy/clippy_utils/src/macros.rs832
-rw-r--r--src/tools/clippy/clippy_utils/src/msrvs.rs1
-rw-r--r--src/tools/clippy/clippy_utils/src/paths.rs3
-rw-r--r--src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs17
-rw-r--r--src/tools/clippy/clippy_utils/src/source.rs42
-rw-r--r--src/tools/clippy/clippy_utils/src/sugg.rs12
-rw-r--r--src/tools/clippy/clippy_utils/src/ty.rs69
-rw-r--r--src/tools/clippy/clippy_utils/src/visitors.rs5
-rw-r--r--src/tools/clippy/declare_clippy_lint/Cargo.toml2
-rw-r--r--src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md4
-rw-r--r--src/tools/clippy/lintcheck/Cargo.toml14
-rw-r--r--src/tools/clippy/lintcheck/README.md11
-rw-r--r--src/tools/clippy/lintcheck/src/config.rs160
-rw-r--r--src/tools/clippy/lintcheck/src/popular-crates.rs65
-rw-r--r--src/tools/clippy/rust-toolchain2
-rw-r--r--src/tools/clippy/rustc_tools_util/README.md4
-rw-r--r--src/tools/clippy/src/driver.rs6
-rw-r--r--src/tools/clippy/src/main.rs2
-rw-r--r--src/tools/clippy/tests/compile-test.rs2
-rw-r--r--src/tools/clippy/tests/dogfood.rs21
-rw-r--r--src/tools/clippy/tests/lint_message_convention.rs2
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr6
-rw-r--r--src/tools/clippy/tests/ui-internal/custom_ice_message.stderr1
-rw-r--r--src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr22
-rw-r--r--src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.rs10
-rw-r--r--src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.stderr29
-rw-r--r--src/tools/clippy/tests/ui-toml/array_size_threshold/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/extra_unused_type_parameters.rs9
-rw-r--r--src/tools/clippy/tests/ui-toml/ifs_same_cond/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.rs18
-rw-r--r--src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr15
-rw-r--r--src/tools/clippy/tests/ui-toml/large_futures/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/large_futures/large_futures.rs27
-rw-r--r--src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr10
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr1
-rw-r--r--src/tools/clippy/tests/ui/allow_attributes.fixed25
-rw-r--r--src/tools/clippy/tests/ui/allow_attributes.rs25
-rw-r--r--src/tools/clippy/tests/ui/allow_attributes.stderr16
-rw-r--r--src/tools/clippy/tests/ui/almost_complete_range.fixed49
-rw-r--r--src/tools/clippy/tests/ui/almost_complete_range.rs49
-rw-r--r--src/tools/clippy/tests/ui/almost_complete_range.stderr93
-rw-r--r--src/tools/clippy/tests/ui/arithmetic_side_effects.rs32
-rw-r--r--src/tools/clippy/tests/ui/arithmetic_side_effects.stderr294
-rw-r--r--src/tools/clippy/tests/ui/as_conversions.rs13
-rw-r--r--src/tools/clippy/tests/ui/as_conversions.stderr6
-rw-r--r--src/tools/clippy/tests/ui/async_yields_async.fixed1
-rw-r--r--src/tools/clippy/tests/ui/async_yields_async.rs1
-rw-r--r--src/tools/clippy/tests/ui/async_yields_async.stderr12
-rw-r--r--src/tools/clippy/tests/ui/author/blocks.stdout6
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs16
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs6
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/macro_rules.rs141
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs2
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs32
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macros.rs474
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs2
-rw-r--r--src/tools/clippy/tests/ui/boxed_local.rs35
-rw-r--r--src/tools/clippy/tests/ui/boxed_local.stderr8
-rw-r--r--src/tools/clippy/tests/ui/cast.rs6
-rw-r--r--src/tools/clippy/tests/ui/cast.stderr112
-rw-r--r--src/tools/clippy/tests/ui/clear_with_drain.fixed358
-rw-r--r--src/tools/clippy/tests/ui/clear_with_drain.rs358
-rw-r--r--src/tools/clippy/tests/ui/clear_with_drain.stderr130
-rw-r--r--src/tools/clippy/tests/ui/collection_is_never_read.rs190
-rw-r--r--src/tools/clippy/tests/ui/collection_is_never_read.stderr76
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-10148.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-10148.stderr12
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6179.rs2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6252.stderr6
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6254.stderr2
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed20
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs20
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr57
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed20
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs20
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr61
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.fixed6
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.rs6
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.fixed24
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.rs24
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.stderr38
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof_macro.rs15
-rw-r--r--src/tools/clippy/tests/ui/derivable_impls.fixed37
-rw-r--r--src/tools/clippy/tests/ui/derivable_impls.rs37
-rw-r--r--src/tools/clippy/tests/ui/derivable_impls.stderr24
-rw-r--r--src/tools/clippy/tests/ui/doc_unsafe.rs12
-rw-r--r--src/tools/clippy/tests/ui/double_must_use.rs11
-rw-r--r--src/tools/clippy/tests/ui/double_must_use.stderr10
-rw-r--r--src/tools/clippy/tests/ui/empty_loop.rs15
-rw-r--r--src/tools/clippy/tests/ui/equatable_if_let.fixed16
-rw-r--r--src/tools/clippy/tests/ui/equatable_if_let.rs16
-rw-r--r--src/tools/clippy/tests/ui/equatable_if_let.stderr32
-rw-r--r--src/tools/clippy/tests/ui/erasing_op.rs8
-rw-r--r--src/tools/clippy/tests/ui/erasing_op.stderr10
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_type_parameters.fixed105
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_type_parameters.rs11
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_type_parameters.stderr79
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.rs24
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.stderr27
-rw-r--r--src/tools/clippy/tests/ui/field_reassign_with_default.rs36
-rw-r--r--src/tools/clippy/tests/ui/field_reassign_with_default.stderr44
-rw-r--r--src/tools/clippy/tests/ui/format.fixed6
-rw-r--r--src/tools/clippy/tests/ui/format.rs6
-rw-r--r--src/tools/clippy/tests/ui/format.stderr30
-rw-r--r--src/tools/clippy/tests/ui/format_args_unfixable.rs44
-rw-r--r--src/tools/clippy/tests/ui/format_args_unfixable.stderr36
-rw-r--r--src/tools/clippy/tests/ui/ifs_same_cond.rs26
-rw-r--r--src/tools/clippy/tests/ui/ifs_same_cond.stderr14
-rw-r--r--src/tools/clippy/tests/ui/impl_trait_in_params.stderr4
-rw-r--r--src/tools/clippy/tests/ui/implicit_clone.fixed2
-rw-r--r--src/tools/clippy/tests/ui/implicit_clone.rs2
-rw-r--r--src/tools/clippy/tests/ui/implicit_hasher.rs25
-rw-r--r--src/tools/clippy/tests/ui/implicit_hasher.stderr55
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed21
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs21
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr4
-rw-r--r--src/tools/clippy/tests/ui/integer_arithmetic.rs2
-rw-r--r--src/tools/clippy/tests/ui/items_after_statement.rs (renamed from src/tools/clippy/tests/ui/item_after_statement.rs)17
-rw-r--r--src/tools/clippy/tests/ui/items_after_statement.stderr (renamed from src/tools/clippy/tests/ui/item_after_statement.stderr)6
-rw-r--r--src/tools/clippy/tests/ui/large_enum_variant.rs13
-rw-r--r--src/tools/clippy/tests/ui/large_futures.rs61
-rw-r--r--src/tools/clippy/tests/ui/large_futures.stderr82
-rw-r--r--src/tools/clippy/tests/ui/len_without_is_empty.rs92
-rw-r--r--src/tools/clippy/tests/ui/len_without_is_empty.stderr20
-rw-r--r--src/tools/clippy/tests/ui/let_unit.fixed4
-rw-r--r--src/tools/clippy/tests/ui/let_unit.rs4
-rw-r--r--src/tools/clippy/tests/ui/let_with_type_underscore.rs19
-rw-r--r--src/tools/clippy/tests/ui/let_with_type_underscore.stderr39
-rw-r--r--src/tools/clippy/tests/ui/lines_filter_map_ok.fixed29
-rw-r--r--src/tools/clippy/tests/ui/lines_filter_map_ok.rs29
-rw-r--r--src/tools/clippy/tests/ui/lines_filter_map_ok.stderr51
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.fixed6
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.rs2
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.stderr4
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports_expect.rs2
-rw-r--r--src/tools/clippy/tests/ui/manual_async_fn.fixed6
-rw-r--r--src/tools/clippy/tests/ui/manual_async_fn.rs12
-rw-r--r--src/tools/clippy/tests/ui/manual_async_fn.stderr47
-rw-r--r--src/tools/clippy/tests/ui/manual_clamp.rs19
-rw-r--r--src/tools/clippy/tests/ui/manual_main_separator_str.fixed39
-rw-r--r--src/tools/clippy/tests/ui/manual_main_separator_str.rs39
-rw-r--r--src/tools/clippy/tests/ui/manual_main_separator_str.stderr28
-rw-r--r--src/tools/clippy/tests/ui/manual_rem_euclid.fixed25
-rw-r--r--src/tools/clippy/tests/ui/manual_rem_euclid.rs25
-rw-r--r--src/tools/clippy/tests/ui/manual_rem_euclid.stderr25
-rw-r--r--src/tools/clippy/tests/ui/manual_slice_size_calculation.rs36
-rw-r--r--src/tools/clippy/tests/ui/manual_slice_size_calculation.stderr51
-rw-r--r--src/tools/clippy/tests/ui/match_result_ok.fixed2
-rw-r--r--src/tools/clippy/tests/ui/match_result_ok.stderr2
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding.fixed41
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding.rs55
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding.stderr111
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding2.fixed4
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding2.stderr4
-rw-r--r--src/tools/clippy/tests/ui/mem_replace.fixed34
-rw-r--r--src/tools/clippy/tests/ui/mem_replace.rs34
-rw-r--r--src/tools/clippy/tests/ui/mem_replace.stderr26
-rw-r--r--src/tools/clippy/tests/ui/mem_replace_macro.rs23
-rw-r--r--src/tools/clippy/tests/ui/mem_replace_macro.stderr11
-rw-r--r--src/tools/clippy/tests/ui/missing_assert_message.rs84
-rw-r--r--src/tools/clippy/tests/ui/missing_assert_message.stderr131
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs6
-rw-r--r--src/tools/clippy/tests/ui/missing_doc.rs6
-rw-r--r--src/tools/clippy/tests/ui/missing_doc.stderr70
-rw-r--r--src/tools/clippy/tests/ui/missing_doc_impl.rs6
-rw-r--r--src/tools/clippy/tests/ui/missing_doc_impl.stderr56
-rw-r--r--src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed6
-rw-r--r--src/tools/clippy/tests/ui/mistyped_literal_suffix.rs6
-rw-r--r--src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.rs39
-rw-r--r--src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.stderr62
-rw-r--r--src/tools/clippy/tests/ui/must_use_unit.fixed11
-rw-r--r--src/tools/clippy/tests/ui/must_use_unit.rs11
-rw-r--r--src/tools/clippy/tests/ui/mut_mut.rs11
-rw-r--r--src/tools/clippy/tests/ui/mut_mut.stderr25
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.fixed36
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.rs36
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.stderr32
-rw-r--r--src/tools/clippy/tests/ui/needless_lifetimes.fixed37
-rw-r--r--src/tools/clippy/tests/ui/needless_lifetimes.rs37
-rw-r--r--src/tools/clippy/tests/ui/needless_lifetimes.stderr15
-rw-r--r--src/tools/clippy/tests/ui/needless_return.fixed9
-rw-r--r--src/tools/clippy/tests/ui/needless_return.rs9
-rw-r--r--src/tools/clippy/tests/ui/needless_update.rs2
-rw-r--r--src/tools/clippy/tests/ui/new_ret_no_self.rs2
-rw-r--r--src/tools/clippy/tests/ui/no_effect.rs10
-rw-r--r--src/tools/clippy/tests/ui/no_effect.stderr66
-rw-r--r--src/tools/clippy/tests/ui/no_mangle_with_rust_abi.fixed48
-rw-r--r--src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs2
-rw-r--r--src/tools/clippy/tests/ui/no_mangle_with_rust_abi.stderr74
-rw-r--r--src/tools/clippy/tests/ui/nonminimal_bool.rs47
-rw-r--r--src/tools/clippy/tests/ui/option_env_unwrap.rs24
-rw-r--r--src/tools/clippy/tests/ui/option_env_unwrap.stderr42
-rw-r--r--src/tools/clippy/tests/ui/overflow_check_conditional.rs9
-rw-r--r--src/tools/clippy/tests/ui/overflow_check_conditional.stderr16
-rw-r--r--src/tools/clippy/tests/ui/print_literal.rs4
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.fixed16
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.rs16
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.stderr25
-rw-r--r--src/tools/clippy/tests/ui/redundant_async_block.fixed193
-rw-r--r--src/tools/clippy/tests/ui/redundant_async_block.rs193
-rw-r--r--src/tools/clippy/tests/ui/redundant_async_block.stderr74
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed1
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs1
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr12
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed4
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr4
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.fixed6
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.rs6
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.stderr4
-rw-r--r--src/tools/clippy/tests/ui/single_match_else.rs6
-rw-r--r--src/tools/clippy/tests/ui/string_add.rs11
-rw-r--r--src/tools/clippy/tests/ui/suspicious_doc_comments.fixed81
-rw-r--r--src/tools/clippy/tests/ui/suspicious_doc_comments.rs81
-rw-r--r--src/tools/clippy/tests/ui/suspicious_doc_comments.stderr114
-rw-r--r--src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.rs16
-rw-r--r--src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.stderr37
-rw-r--r--src/tools/clippy/tests/ui/swap.fixed21
-rw-r--r--src/tools/clippy/tests/ui/swap.rs15
-rw-r--r--src/tools/clippy/tests/ui/swap.stderr58
-rw-r--r--src/tools/clippy/tests/ui/tests_outside_test_module.rs18
-rw-r--r--src/tools/clippy/tests/ui/tests_outside_test_module.stderr11
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg.fixed24
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg.rs24
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg.stderr21
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs22
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr7
-rw-r--r--src/tools/clippy/tests/ui/trailing_empty_array.rs2
-rw-r--r--src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed8
-rw-r--r--src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs8
-rw-r--r--src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr8
-rw-r--r--src/tools/clippy/tests/ui/try_err.fixed65
-rw-r--r--src/tools/clippy/tests/ui/try_err.rs65
-rw-r--r--src/tools/clippy/tests/ui/try_err.stderr32
-rw-r--r--src/tools/clippy/tests/ui/uninit.rs39
-rw-r--r--src/tools/clippy/tests/ui/uninit.stderr14
-rw-r--r--src/tools/clippy/tests/ui/uninit_vec.rs35
-rw-r--r--src/tools/clippy/tests/ui/uninit_vec.stderr33
-rw-r--r--src/tools/clippy/tests/ui/uninlined_format_args.fixed94
-rw-r--r--src/tools/clippy/tests/ui/uninlined_format_args.rs92
-rw-r--r--src/tools/clippy/tests/ui/uninlined_format_args.stderr14
-rw-r--r--src/tools/clippy/tests/ui/unit_arg.rs6
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_box_returns.rs60
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_box_returns.stderr35
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed6
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs6
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.fixed10
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.rs10
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.stderr46
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_struct_initialization.fixed73
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_struct_initialization.rs77
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_struct_initialization.stderr46
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_unsafety_doc.rs12
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_unsafety_doc.stderr2
-rw-r--r--src/tools/clippy/tests/ui/unused_format_specs.fixed18
-rw-r--r--src/tools/clippy/tests/ui/unused_format_specs.rs18
-rw-r--r--src/tools/clippy/tests/ui/unused_format_specs.stderr54
-rw-r--r--src/tools/clippy/tests/ui/unused_format_specs_unfixable.stderr12
-rw-r--r--src/tools/clippy/tests/ui/use_self.fixed10
-rw-r--r--src/tools/clippy/tests/ui/use_self.rs10
-rw-r--r--src/tools/clippy/tests/workspace.rs2
-rw-r--r--src/tools/clippy/triagebot.toml1
-rw-r--r--src/tools/collect-license-metadata/src/licenses.rs9
-rw-r--r--src/tools/collect-license-metadata/src/path_tree.rs168
-rw-r--r--src/tools/compiletest/Cargo.toml8
-rw-r--r--src/tools/compiletest/src/common.rs455
-rw-r--r--src/tools/compiletest/src/header.rs178
-rw-r--r--src/tools/compiletest/src/header/cfg.rs324
-rw-r--r--src/tools/compiletest/src/header/tests.rs6
-rw-r--r--src/tools/compiletest/src/main.rs145
-rw-r--r--src/tools/compiletest/src/read2.rs4
-rw-r--r--src/tools/compiletest/src/runtest.rs119
-rw-r--r--src/tools/compiletest/src/util.rs2
-rw-r--r--src/tools/generate-copyright/src/main.rs20
-rw-r--r--src/tools/jsondocck/src/main.rs2
-rw-r--r--src/tools/jsondoclint/Cargo.toml1
-rw-r--r--src/tools/jsondoclint/src/item_kind.rs4
-rw-r--r--src/tools/jsondoclint/src/main.rs6
-rw-r--r--src/tools/jsondoclint/src/validator/tests.rs29
-rw-r--r--src/tools/lint-docs/src/lib.rs95
-rwxr-xr-xsrc/tools/publish_toolstate.py6
-rw-r--r--src/tools/replace-version-placeholder/src/main.rs2
-rw-r--r--src/tools/rust-analyzer/Cargo.lock18
-rw-r--r--src/tools/rust-analyzer/bench_data/numerous_macro_rules4
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/src/lib.rs109
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/adt.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs48
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs71
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs202
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs103
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs50
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs70
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/db.rs15
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr.rs70
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs44
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs17
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs137
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs29
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/keys.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs17
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres.rs32
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs41
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs258
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/test_db.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs62
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/visibility.rs31
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs28
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs58
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs88
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/builder.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs503
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs955
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs31
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs187
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs215
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs60
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs849
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs312
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs147
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/interner.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs49
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs213
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir.rs863
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs223
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs1253
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs1581
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs237
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs348
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs40
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs15
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs52
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs84
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs71
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/utils.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs50
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs49
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/from_id.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/has_source.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs541
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs44
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs38
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs49
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs19
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs85
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs150
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs136
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs27
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs310
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs364
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs84
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions.rs19
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs62
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/item.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs59
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs26
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs60
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs66
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs30
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs105
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs50
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/source_change.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs (renamed from src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs)0
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs39
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs77
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs381
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs649
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs86
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs148
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs131
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/file_structure.rs79
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs57
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs350
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs51
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs117
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/markup.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/moniker.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/move_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/navigation_target.rs26
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs32
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs290
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/signature_help.rs269
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_mir.rs29
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs28
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rast58
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rast46
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rast85
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rast69
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast4
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs85
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs8
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/sysroot.rs21
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/tests.rs15
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs206
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs193
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs4
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs5
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs5
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs71
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs36
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs22
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs8
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs40
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs82
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs134
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram10
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast.rs12
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs43
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs43
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs81
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs1
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/fixture.rs4
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs51
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/src/lib.rs21
-rw-r--r--src/tools/rust-analyzer/docs/dev/lsp-extensions.md13
-rw-r--r--src/tools/rust-analyzer/docs/user/generated_config.adoc15
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/src/map.rs6
-rw-r--r--src/tools/rust-installer/.github/workflows/ci.yml23
-rw-r--r--src/tools/rust-installer/Cargo.toml6
-rw-r--r--src/tools/rust-installer/LICENSE-APACHE201
-rw-r--r--src/tools/rust-installer/LICENSE-MIT25
-rwxr-xr-xsrc/tools/rust-installer/combine-installers.sh11
-rwxr-xr-xsrc/tools/rust-installer/gen-install-script.sh9
-rwxr-xr-xsrc/tools/rust-installer/gen-installer.sh11
-rw-r--r--src/tools/rust-installer/install-template.sh9
-rwxr-xr-xsrc/tools/rust-installer/make-tarballs.sh11
-rw-r--r--src/tools/rust-installer/src/combiner.rs7
-rw-r--r--src/tools/rust-installer/src/compression.rs93
-rw-r--r--src/tools/rust-installer/src/generator.rs7
-rw-r--r--src/tools/rust-installer/src/remove_dir_all.rs860
-rw-r--r--src/tools/rust-installer/src/tarballer.rs13
-rwxr-xr-xsrc/tools/rust-installer/test.sh1
-rw-r--r--src/tools/rust-installer/triagebot.toml4
-rw-r--r--src/tools/rustbook/Cargo.toml4
-rw-r--r--src/tools/rustc-workspace-hack/Cargo.toml8
-rw-r--r--src/tools/rustdoc-gui/.eslintrc.js96
-rw-r--r--src/tools/rustdoc-gui/tester.js64
-rw-r--r--src/tools/rustdoc-js/.eslintrc.js96
-rw-r--r--src/tools/rustdoc-js/tester.js202
-rw-r--r--src/tools/rustfmt/src/attr.rs16
-rw-r--r--src/tools/rustfmt/src/closures.rs2
-rw-r--r--src/tools/rustfmt/src/expr.rs7
-rw-r--r--src/tools/rustfmt/src/items.rs26
-rw-r--r--src/tools/rustfmt/src/matches.rs1
-rw-r--r--src/tools/rustfmt/src/parse/parser.rs5
-rw-r--r--src/tools/rustfmt/src/reorder.rs4
-rw-r--r--src/tools/rustfmt/src/utils.rs1
-rw-r--r--src/tools/rustfmt/tests/source/expr.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/expr.rs14
-rw-r--r--src/tools/suggest-tests/Cargo.toml9
-rw-r--r--src/tools/suggest-tests/src/dynamic_suggestions.rs23
-rw-r--r--src/tools/suggest-tests/src/lib.rs96
-rw-r--r--src/tools/suggest-tests/src/main.rs27
-rw-r--r--src/tools/suggest-tests/src/static_suggestions.rs24
-rw-r--r--src/tools/suggest-tests/src/tests.rs21
-rw-r--r--src/tools/tidy/Cargo.toml2
-rw-r--r--src/tools/tidy/src/alphabetical.rs2
-rw-r--r--src/tools/tidy/src/bins.rs24
-rw-r--r--src/tools/tidy/src/debug_artifacts.rs30
-rw-r--r--src/tools/tidy/src/deps.rs6
-rw-r--r--src/tools/tidy/src/edition.rs37
-rw-r--r--src/tools/tidy/src/error_codes.rs10
-rw-r--r--src/tools/tidy/src/features.rs23
-rw-r--r--src/tools/tidy/src/main.rs23
-rw-r--r--src/tools/tidy/src/mir_opt_tests.rs41
-rw-r--r--src/tools/tidy/src/pal.rs5
-rw-r--r--src/tools/tidy/src/rustdoc_gui_tests.rs5
-rw-r--r--src/tools/tidy/src/style.rs111
-rw-r--r--src/tools/tidy/src/target_specific_tests.rs100
-rw-r--r--src/tools/tidy/src/ui_tests.rs146
-rw-r--r--src/tools/tidy/src/unit_tests.rs21
-rw-r--r--src/tools/tidy/src/walk.rs60
-rw-r--r--src/tools/unicode-table-generator/src/case_mapping.rs104
-rw-r--r--src/tools/unicode-table-generator/src/raw_emitter.rs1
-rw-r--r--src/tools/unicode-table-generator/src/skiplist.rs1
-rw-r--r--src/version2
970 files changed, 33893 insertions, 15027 deletions
diff --git a/src/bootstrap/CHANGELOG.md b/src/bootstrap/CHANGELOG.md
index 4105fa5ec..74dd22df9 100644
--- a/src/bootstrap/CHANGELOG.md
+++ b/src/bootstrap/CHANGELOG.md
@@ -16,6 +16,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- `remote-test-server`'s `verbose` argument has been removed in favor of the `--verbose` flag
- `remote-test-server`'s `remote` argument has been removed in favor of the `--bind` flag. Use `--bind 0.0.0.0:12345` to replicate the behavior of the `remote` argument.
- `x.py fmt` now formats only files modified between the merge-base of HEAD and the last commit in the master branch of the rust-lang repository and the current working directory. To restore old behaviour, use `x.py fmt .`. The check mode is not affected by this change. [#105702](https://github.com/rust-lang/rust/pull/105702)
+- The `llvm.version-check` config option has been removed. Older versions were never supported. If you still need to support older versions (e.g. you are applying custom patches), patch `check_llvm_version` in bootstrap to change the minimum version. [#108619](https://github.com/rust-lang/rust/pull/108619)
+- The `rust.ignore-git` option has been renamed to `rust.omit-git-hash`. [#110059](https://github.com/rust-lang/rust/pull/110059)
### Non-breaking changes
@@ -24,6 +26,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- If you have Rust already installed, `x.py` will now infer the host target
from the default rust toolchain. [#78513](https://github.com/rust-lang/rust/pull/78513)
- Add options for enabling overflow checks, one for std (`overflow-checks-std`) and one for everything else (`overflow-checks`). Both default to false.
+- Add the llvm option `enable-warnings` to control whether LLVM compilation warnings are emitted. Defaults to false.
## [Version 2] - 2020-09-25
@@ -46,6 +49,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Add `--keep-stage-std`, which behaves like `keep-stage` but allows the stage
0 compiler artifacts (i.e., stage1/bin/rustc) to be rebuilt if changed
[#77120](https://github.com/rust-lang/rust/pull/77120).
+- File locking is now used to avoid collisions between multiple running instances of `x.py` (e.g. when using `rust-analyzer` and `x.py` at the same time). Note that Solaris and possibly other non-Unix, non-Windows systems don't support it [#108607](https://github.com/rust-lang/rust/pull/108607), which may lead to build data corruption on those systems. A minimal sketch of the locking flow follows this hunk.
## [Version 1] - 2020-09-11
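
The file-locking behaviour added above (and wired up in `bin/main.rs` further down) boils down to taking an advisory write lock on a file in the build directory. A minimal sketch, assuming the `fd-lock` 3.x API used by bootstrap; the path and messages are illustrative:

```rust
use std::fs::File;

fn main() -> std::io::Result<()> {
    // Illustrative path; bootstrap uses `<out>/lock`.
    let lock_path = "build/lock";
    let mut build_lock = fd_lock::RwLock::new(File::create(lock_path)?);

    let _guard = match build_lock.try_write() {
        Ok(guard) => guard,
        err => {
            // Another instance holds the lock: drop the failed attempt and
            // block until the lock is released, then proceed.
            drop(err);
            println!("warning: build directory locked, waiting for lock");
            build_lock.write()?
        }
    };

    // ...the build runs while `_guard` is alive; the lock is released on drop.
    Ok(())
}
```

On platforms without `flock()` (e.g. Solaris) the dependency is compiled out and bootstrap only prints a warning, as the `bin/main.rs` hunk below shows.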
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index e861d520c..a158d1f71 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -44,6 +44,8 @@ dependencies = [
"getopts",
"hex",
"ignore",
+ "is-terminal",
+ "junction",
"libc",
"object",
"once_cell",
@@ -55,9 +57,10 @@ dependencies = [
"sha2",
"sysinfo",
"tar",
+ "termcolor",
"toml",
"walkdir",
- "winapi",
+ "windows",
"xz2",
]
@@ -199,13 +202,13 @@ checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
[[package]]
name = "errno"
-version = "0.2.8"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
+checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0"
dependencies = [
"errno-dragonfly",
"libc",
- "winapi",
+ "windows-sys",
]
[[package]]
@@ -220,9 +223,9 @@ dependencies = [
[[package]]
name = "fd-lock"
-version = "3.0.8"
+version = "3.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb21c69b9fea5e15dbc1049e4b77145dd0ba1c84019c488102de0dc4ea4b0a27"
+checksum = "9799aefb4a2e4a01cc47610b1dd47c18ab13d991f27bbcaed9296f5a53d5cbad"
dependencies = [
"cfg-if",
"rustix",
@@ -289,6 +292,12 @@ dependencies = [
]
[[package]]
+name = "hermit-abi"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
+
+[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -314,21 +323,44 @@ dependencies = [
[[package]]
name = "io-lifetimes"
-version = "1.0.1"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7d367024b3f3414d8e01f437f704f41a9f64ab36f9067fa73e526ad4c763c87"
+checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb"
dependencies = [
+ "hermit-abi 0.3.1",
"libc",
"windows-sys",
]
[[package]]
+name = "is-terminal"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "256017f749ab3117e93acb91063009e1f1bb56d03965b14c2c8df4eb02c524d8"
+dependencies = [
+ "hermit-abi 0.3.1",
+ "io-lifetimes",
+ "rustix",
+ "windows-sys",
+]
+
+[[package]]
name = "itoa"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
[[package]]
+name = "junction"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca39ef0d69b18e6a2fd14c2f0a1d593200f4a4ed949b240b5917ab51fac754cb"
+dependencies = [
+ "scopeguard",
+ "winapi",
+]
+
+[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -336,15 +368,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.137"
+version = "0.2.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
+checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c"
[[package]]
name = "linux-raw-sys"
-version = "0.1.3"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f"
+checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f"
[[package]]
name = "log"
@@ -396,7 +428,7 @@ version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
]
@@ -527,9 +559,9 @@ checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
[[package]]
name = "rustix"
-version = "0.36.3"
+version = "0.37.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b1fbb4dfc4eb1d390c02df47760bb19a84bb80b301ecc947ab5406394d8223e"
+checksum = "d097081ed288dfe45699b72f5b5d648e5f15d64d900c7080273baa20c16a6849"
dependencies = [
"bitflags",
"errno",
@@ -637,6 +669,15 @@ dependencies = [
]
[[package]]
+name = "termcolor"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
name = "thread_local"
version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -721,10 +762,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
+name = "windows"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
name = "windows-sys"
-version = "0.42.0"
+version = "0.45.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
+checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
@@ -737,45 +796,45 @@ dependencies = [
[[package]]
name = "windows_aarch64_gnullvm"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
+checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
name = "windows_aarch64_msvc"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
+checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
name = "windows_i686_gnu"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
+checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
name = "windows_i686_msvc"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
+checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
name = "windows_x86_64_gnu"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
+checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
name = "windows_x86_64_gnullvm"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
+checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
name = "windows_x86_64_msvc"
-version = "0.42.0"
+version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
+checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
name = "xattr"
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index 663987f11..eeda6d7c1 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -30,9 +30,9 @@ path = "bin/sccache-plus-cl.rs"
test = false
[dependencies]
+is-terminal = "0.4"
build_helper = { path = "../tools/build_helper" }
cmake = "0.1.38"
-fd-lock = "3.0.8"
filetime = "0.2"
getopts = "0.2.19"
cc = "1.0.69"
@@ -46,6 +46,7 @@ serde_derive = "1.0.137"
serde_json = "1.0.2"
sha2 = "0.10"
tar = "0.4"
+termcolor = "1.2.0"
toml = "0.5"
ignore = "0.4.10"
opener = "0.5"
@@ -56,18 +57,27 @@ walkdir = "2"
# Dependencies needed by the build-metrics feature
sysinfo = { version = "0.26.0", optional = true }
-[target.'cfg(windows)'.dependencies.winapi]
-version = "0.3"
+# Solaris doesn't support flock(), so fd-lock is not an option there for now
+[target.'cfg(not(target_os = "solaris"))'.dependencies]
+fd-lock = "3.0.8"
+
+[target.'cfg(windows)'.dependencies.junction]
+version = "1.0.0"
+
+[target.'cfg(windows)'.dependencies.windows]
+version = "0.46.0"
features = [
- "fileapi",
- "ioapiset",
- "jobapi2",
- "handleapi",
- "winioctl",
- "psapi",
- "impl-default",
- "timezoneapi",
- "winbase",
+ "Win32_Foundation",
+ "Win32_Security",
+ "Win32_Storage_FileSystem",
+ "Win32_System_Diagnostics_Debug",
+ "Win32_System_IO",
+ "Win32_System_Ioctl",
+ "Win32_System_JobObjects",
+ "Win32_System_ProcessStatus",
+ "Win32_System_SystemServices",
+ "Win32_System_Threading",
+ "Win32_System_Time",
]
[dev-dependencies]
diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md
index 71eee8968..253d504d7 100644
--- a/src/bootstrap/README.md
+++ b/src/bootstrap/README.md
@@ -185,7 +185,7 @@ Some general areas that you may be interested in modifying are:
If you make a major change, please remember to:
+ Update `VERSION` in `src/bootstrap/main.rs`.
-* Update `changelog-seen = N` in `config.toml.example`.
+* Update `changelog-seen = N` in `config.example.toml`.
* Add an entry in `src/bootstrap/CHANGELOG.md`.
A 'major change' includes
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
index 3856bb64f..912d875e4 100644
--- a/src/bootstrap/bin/main.rs
+++ b/src/bootstrap/bin/main.rs
@@ -7,15 +7,18 @@
use std::env;
-use bootstrap::{t, Build, Config, Subcommand, VERSION};
+#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+use bootstrap::t;
+use bootstrap::{Build, Config, Subcommand, VERSION};
fn main() {
let args = env::args().skip(1).collect::<Vec<_>>();
let config = Config::parse(&args);
- let mut build_lock;
- let _build_lock_guard;
- if cfg!(any(unix, windows)) {
+ #[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+ {
+ let mut build_lock;
+ let _build_lock_guard;
let path = config.out.join("lock");
build_lock = fd_lock::RwLock::new(t!(std::fs::File::create(&path)));
_build_lock_guard = match build_lock.try_write() {
@@ -30,9 +33,9 @@ fn main() {
t!(build_lock.write())
}
};
- } else {
- println!("warning: file locking not supported for target, not locking build directory");
}
+ #[cfg(any(not(any(unix, windows)), target_os = "solaris"))]
+ println!("warning: file locking not supported for target, not locking build directory");
// check_version warnings are not printed during setup
let changelog_suggestion =
@@ -44,8 +47,8 @@ fn main() {
if suggest_setup {
println!("warning: you have not made a `config.toml`");
println!(
- "help: consider running `./x.py setup` or copying `config.toml.example` by running \
- `cp config.toml.example config.toml`"
+ "help: consider running `./x.py setup` or copying `config.example.toml` by running \
+ `cp config.example.toml config.toml`"
);
} else if let Some(suggestion) = &changelog_suggestion {
println!("{}", suggestion);
@@ -57,8 +60,8 @@ fn main() {
if suggest_setup {
println!("warning: you have not made a `config.toml`");
println!(
- "help: consider running `./x.py setup` or copying `config.toml.example` by running \
- `cp config.toml.example config.toml`"
+ "help: consider running `./x.py setup` or copying `config.example.toml` by running \
+ `cp config.example.toml config.toml`"
);
} else if let Some(suggestion) = &changelog_suggestion {
println!("{}", suggestion);
@@ -125,7 +128,7 @@ fn get_lock_owner(f: &std::path::Path) -> Option<u64> {
})
}
-#[cfg(not(target_os = "linux"))]
+#[cfg(not(any(target_os = "linux", target_os = "solaris")))]
fn get_lock_owner(_: &std::path::Path) -> Option<u64> {
// FIXME: Implement on other OS's
None
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index 9611c866d..040fec361 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -281,41 +281,49 @@ fn format_rusage_data(_child: Child) -> Option<String> {
#[cfg(windows)]
fn format_rusage_data(child: Child) -> Option<String> {
use std::os::windows::io::AsRawHandle;
- use winapi::um::{processthreadsapi, psapi, timezoneapi};
- let handle = child.as_raw_handle();
- macro_rules! try_bool {
- ($e:expr) => {
- if $e != 1 {
- return None;
- }
- };
- }
+
+ use windows::{
+ Win32::Foundation::HANDLE,
+ Win32::System::ProcessStatus::{
+ K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS, PROCESS_MEMORY_COUNTERS_EX,
+ },
+ Win32::System::Threading::GetProcessTimes,
+ Win32::System::Time::FileTimeToSystemTime,
+ };
+
+ let handle = HANDLE(child.as_raw_handle() as isize);
let mut user_filetime = Default::default();
let mut user_time = Default::default();
let mut kernel_filetime = Default::default();
let mut kernel_time = Default::default();
- let mut memory_counters = psapi::PROCESS_MEMORY_COUNTERS::default();
+ let mut memory_counters = PROCESS_MEMORY_COUNTERS::default();
unsafe {
- try_bool!(processthreadsapi::GetProcessTimes(
+ GetProcessTimes(
handle,
&mut Default::default(),
&mut Default::default(),
&mut kernel_filetime,
&mut user_filetime,
- ));
- try_bool!(timezoneapi::FileTimeToSystemTime(&user_filetime, &mut user_time));
- try_bool!(timezoneapi::FileTimeToSystemTime(&kernel_filetime, &mut kernel_time));
-
- // Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
- // with the given handle and none of that process's children.
- try_bool!(psapi::GetProcessMemoryInfo(
- handle as _,
- &mut memory_counters as *mut _ as _,
- std::mem::size_of::<psapi::PROCESS_MEMORY_COUNTERS_EX>() as u32,
- ));
+ )
+ }
+ .ok()
+ .ok()?;
+ unsafe { FileTimeToSystemTime(&user_filetime, &mut user_time) }.ok().ok()?;
+ unsafe { FileTimeToSystemTime(&kernel_filetime, &mut kernel_time) }.ok().ok()?;
+
+ // Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
+ // with the given handle and none of that process's children.
+ unsafe {
+ K32GetProcessMemoryInfo(
+ handle,
+ &mut memory_counters,
+ std::mem::size_of::<PROCESS_MEMORY_COUNTERS_EX>() as u32,
+ )
}
+ .ok()
+ .ok()?;
// Guide on interpreting these numbers:
// https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
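
The rewritten `format_rusage_data` above relies on a conversion chain that is easy to miss: with the `windows` crate (0.46 assumed here), raw Win32 calls return a `BOOL`, whose `.ok()` turns it into a `windows::core::Result<()>`, and the second `.ok()` turns that into an `Option` so `?` can bail out of a function returning `Option<_>`. A sketch of just that pattern, with the actual API call replaced by a stand-in value:

```rust
// Windows-only sketch; assumes the `windows` 0.46 crate with the
// `Win32_Foundation` feature enabled.
#[cfg(windows)]
fn checked_call() -> Option<()> {
    use windows::Win32::Foundation::BOOL;

    // Stand-in for a Win32 call that reports failure by returning FALSE (0).
    let status: BOOL = BOOL(1);

    status
        .ok() // BOOL -> windows::core::Result<()>
        .ok()?; // Result<()> -> Option<()>, early-return on failure
    Some(())
}
```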
diff --git a/src/bootstrap/bolt.rs b/src/bootstrap/bolt.rs
index ea37cd470..10e6d2e7d 100644
--- a/src/bootstrap/bolt.rs
+++ b/src/bootstrap/bolt.rs
@@ -1,46 +1,40 @@
use std::path::Path;
use std::process::Command;
-/// Uses the `llvm-bolt` binary to instrument the binary/library at the given `path` with BOLT.
+/// Uses the `llvm-bolt` binary to instrument the artifact at the given `path` with BOLT.
/// When the instrumented artifact is executed, it will generate BOLT profiles into
/// `/tmp/prof.fdata.<pid>.fdata`.
-pub fn instrument_with_bolt_inplace(path: &Path) {
- let dir = std::env::temp_dir();
- let instrumented_path = dir.join("instrumented.so");
-
+/// Creates the instrumented artifact at `output_path`.
+pub fn instrument_with_bolt(path: &Path, output_path: &Path) {
let status = Command::new("llvm-bolt")
.arg("-instrument")
.arg(&path)
// Make sure that each process will write its profiles into a separate file
.arg("--instrumentation-file-append-pid")
.arg("-o")
- .arg(&instrumented_path)
+ .arg(output_path)
.status()
.expect("Could not instrument artifact using BOLT");
if !status.success() {
panic!("Could not instrument {} with BOLT, exit code {:?}", path.display(), status.code());
}
-
- std::fs::copy(&instrumented_path, path).expect("Cannot copy instrumented artifact");
- std::fs::remove_file(instrumented_path).expect("Cannot delete instrumented artifact");
}
-/// Uses the `llvm-bolt` binary to optimize the binary/library at the given `path` with BOLT,
+/// Uses the `llvm-bolt` binary to optimize the artifact at the given `path` with BOLT,
/// using merged profiles from `profile_path`.
///
/// The recorded profiles have to be merged using the `merge-fdata` tool from LLVM and the merged
/// profile path should be then passed to this function.
-pub fn optimize_library_with_bolt_inplace(path: &Path, profile_path: &Path) {
- let dir = std::env::temp_dir();
- let optimized_path = dir.join("optimized.so");
-
+///
+/// Creates the optimized artifact at `output_path`.
+pub fn optimize_with_bolt(path: &Path, profile_path: &Path, output_path: &Path) {
let status = Command::new("llvm-bolt")
.arg(&path)
.arg("-data")
.arg(&profile_path)
.arg("-o")
- .arg(&optimized_path)
+ .arg(output_path)
// Reorder basic blocks within functions
.arg("-reorder-blocks=ext-tsp")
// Reorder functions within the binary
@@ -51,8 +45,6 @@ pub fn optimize_library_with_bolt_inplace(path: &Path, profile_path: &Path) {
.arg("-split-all-cold")
// Move jump tables to a separate section
.arg("-jump-tables=move")
- // Use GNU_STACK program header for new segment (workaround for issues with strip/objcopy)
- .arg("-use-gnu-stack")
// Fold functions with identical code
.arg("-icf=1")
// Update DWARF debug info in the final binary
@@ -65,7 +57,4 @@ pub fn optimize_library_with_bolt_inplace(path: &Path, profile_path: &Path) {
if !status.success() {
panic!("Could not optimize {} with BOLT, exit code {:?}", path.display(), status.code());
}
-
- std::fs::copy(&optimized_path, path).expect("Cannot copy optimized artifact");
- std::fs::remove_file(optimized_path).expect("Cannot delete optimized artifact");
}
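
With the in-place variants gone, callers are expected to pick the output locations themselves. A hedged sketch of how the two helpers above might be driven end to end; the paths are made up, and the profile is assumed to have already been merged with LLVM's `merge-fdata`:

```rust
use std::path::Path;

// Illustrative driver for the helpers defined in this file.
fn bolt_pipeline(original: &Path, merged_profile: &Path) {
    let instrumented = original.with_extension("bolt-instrumented");
    let optimized = original.with_extension("bolt-optimized");

    // 1. Produce an instrumented copy; running it writes profiles to
    //    /tmp/prof.fdata.<pid>.fdata.
    instrument_with_bolt(original, &instrumented);

    // 2. ...run the instrumented artifact under a representative workload,
    //    then merge the generated profiles with `merge-fdata`...

    // 3. Apply the merged profile to produce the optimized artifact.
    optimize_with_bolt(original, merged_profile, &optimized);
}
```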
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 013d1ab52..025145244 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -304,6 +304,7 @@ def default_build_triple(verbose):
'i486': 'i686',
'i686': 'i686',
'i786': 'i686',
+ 'loongarch64': 'loongarch64',
'm68k': 'm68k',
'powerpc': 'powerpc',
'powerpc64': 'powerpc64',
@@ -741,6 +742,9 @@ class RustBuild(object):
env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
(os.pathsep + env["LIBRARY_PATH"]) \
if "LIBRARY_PATH" in env else ""
+ env["LIBPATH"] = os.path.join(self.bin_root(), "lib") + \
+ (os.pathsep + env["LIBPATH"]) \
+ if "LIBPATH" in env else ""
# Export Stage0 snapshot compiler related env variables
build_section = "target.{}".format(self.build)
diff --git a/src/bootstrap/bootstrap_test.py b/src/bootstrap/bootstrap_test.py
index 06ca3ce21..20bd71f06 100644
--- a/src/bootstrap/bootstrap_test.py
+++ b/src/bootstrap/bootstrap_test.py
@@ -11,6 +11,7 @@ import sys
from shutil import rmtree
import bootstrap
+import configure
class VerifyTestCase(unittest.TestCase):
@@ -74,12 +75,50 @@ class ProgramOutOfDate(unittest.TestCase):
self.assertFalse(self.build.program_out_of_date(self.rustc_stamp_path, self.key))
+class GenerateAndParseConfig(unittest.TestCase):
+ """Test that we can serialize and deserialize a config.toml file"""
+ def serialize_and_parse(self, args):
+ from io import StringIO
+
+ section_order, sections, targets = configure.parse_args(args)
+ buffer = StringIO()
+ configure.write_config_toml(buffer, section_order, targets, sections)
+ build = bootstrap.RustBuild()
+ build.config_toml = buffer.getvalue()
+
+ try:
+ import tomllib
+ # Verify this is actually valid TOML.
+ tomllib.loads(build.config_toml)
+ except ImportError:
+ print("warning: skipping TOML validation, need at least python 3.11", file=sys.stderr)
+ return build
+
+ def test_no_args(self):
+ build = self.serialize_and_parse([])
+ self.assertEqual(build.get_toml("changelog-seen"), '2')
+ self.assertIsNone(build.get_toml("llvm.download-ci-llvm"))
+
+ def test_set_section(self):
+ build = self.serialize_and_parse(["--set", "llvm.download-ci-llvm"])
+ self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), 'true')
+
+ def test_set_target(self):
+ build = self.serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
+ self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc')
+
+ # Uncomment when #108928 is fixed.
+ # def test_set_top_level(self):
+ # build = self.serialize_and_parse(["--set", "profile=compiler"])
+ # self.assertEqual(build.get_toml("profile"), 'compiler')
+
if __name__ == '__main__':
SUITE = unittest.TestSuite()
TEST_LOADER = unittest.TestLoader()
SUITE.addTest(doctest.DocTestSuite(bootstrap))
SUITE.addTests([
TEST_LOADER.loadTestsFromTestCase(VerifyTestCase),
+ TEST_LOADER.loadTestsFromTestCase(GenerateAndParseConfig),
TEST_LOADER.loadTestsFromTestCase(ProgramOutOfDate)])
RUNNER = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index b33fc02f4..e959ea06f 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -4,8 +4,9 @@ use std::collections::BTreeSet;
use std::env;
use std::ffi::OsStr;
use std::fmt::{Debug, Write};
-use std::fs::{self};
+use std::fs::{self, File};
use std::hash::Hash;
+use std::io::{BufRead, BufReader};
use std::ops::Deref;
use std::path::{Component, Path, PathBuf};
use std::process::Command;
@@ -16,7 +17,7 @@ use crate::config::{SplitDebuginfo, TargetSelection};
use crate::doc;
use crate::flags::{Color, Subcommand};
use crate::install;
-use crate::native;
+use crate::llvm;
use crate::run;
use crate::setup;
use crate::test;
@@ -28,8 +29,11 @@ use crate::{clean, dist};
use crate::{Build, CLang, DocTests, GitRepo, Mode};
pub use crate::Compiler;
-// FIXME: replace with std::lazy after it gets stabilized and reaches beta
-use once_cell::sync::Lazy;
+// FIXME:
+// - use std::lazy for `Lazy`
+// - use std::cell for `OnceCell`
+// Once they get stabilized and reach beta.
+use once_cell::sync::{Lazy, OnceCell};
pub struct Builder<'a> {
pub build: &'a Build,
@@ -484,17 +488,43 @@ impl<'a> ShouldRun<'a> {
// multiple aliases for the same job
pub fn paths(mut self, paths: &[&str]) -> Self {
+ static SUBMODULES_PATHS: OnceCell<Vec<String>> = OnceCell::new();
+
+ let init_submodules_paths = |src: &PathBuf| {
+ let file = File::open(src.join(".gitmodules")).unwrap();
+
+ let mut submodules_paths = vec![];
+ for line in BufReader::new(file).lines() {
+ if let Ok(line) = line {
+ let line = line.trim();
+
+ if line.starts_with("path") {
+ let actual_path =
+ line.split(' ').last().expect("Couldn't get value of path");
+ submodules_paths.push(actual_path.to_owned());
+ }
+ }
+ }
+
+ submodules_paths
+ };
+
+ let submodules_paths =
+ SUBMODULES_PATHS.get_or_init(|| init_submodules_paths(&self.builder.src));
+
self.paths.insert(PathSet::Set(
paths
.iter()
.map(|p| {
- // FIXME(#96188): make sure this is actually a path.
- // This currently breaks for paths within submodules.
- //assert!(
- // self.builder.src.join(p).exists(),
- // "`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}",
- // p
- //);
+                    // assert only if `p` isn't a submodule path
+ if !submodules_paths.iter().find(|sm_p| p.contains(*sm_p)).is_some() {
+ assert!(
+ self.builder.src.join(p).exists(),
+ "`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}",
+ p
+ );
+ }
+
TaskPath { path: p.into(), kind: Some(self.kind) }
})
.collect(),
@@ -561,6 +591,7 @@ pub enum Kind {
Install,
Run,
Setup,
+ Suggest,
}
impl Kind {
@@ -580,6 +611,7 @@ impl Kind {
"install" => Kind::Install,
"run" | "r" => Kind::Run,
"setup" => Kind::Setup,
+ "suggest" => Kind::Suggest,
_ => return None,
})
}
@@ -599,6 +631,7 @@ impl Kind {
Kind::Install => "install",
Kind::Run => "run",
Kind::Setup => "setup",
+ Kind::Suggest => "suggest",
}
}
}
@@ -636,13 +669,13 @@ impl<'a> Builder<'a> {
tool::Rustdoc,
tool::Clippy,
tool::CargoClippy,
- native::Llvm,
- native::Sanitizers,
+ llvm::Llvm,
+ llvm::Sanitizers,
tool::Rustfmt,
tool::Miri,
tool::CargoMiri,
- native::Lld,
- native::CrtBeginEnd
+ llvm::Lld,
+ llvm::CrtBeginEnd
),
Kind::Check | Kind::Clippy | Kind::Fix => describe!(
check::Std,
@@ -679,6 +712,7 @@ impl<'a> Builder<'a> {
test::CrateRustdoc,
test::CrateRustdocJsonTypes,
test::CrateJsonDocLint,
+ test::SuggestTestsCrate,
test::Linkcheck,
test::TierCheck,
test::ReplacePlaceholderTest,
@@ -711,6 +745,7 @@ impl<'a> Builder<'a> {
test::RustdocUi,
test::RustdocJson,
test::HtmlCheck,
+ test::RustInstaller,
// Run bootstrap close to the end as it's unlikely to fail
test::Bootstrap,
// Run run-make last, since these won't pass without make on Windows
@@ -740,6 +775,7 @@ impl<'a> Builder<'a> {
doc::EmbeddedBook,
doc::EditionGuide,
doc::StyleGuide,
+ doc::Tidy,
),
Kind::Dist => describe!(
dist::Docs,
@@ -795,7 +831,7 @@ impl<'a> Builder<'a> {
Kind::Setup => describe!(setup::Profile, setup::Hook, setup::Link, setup::Vscode),
Kind::Clean => describe!(clean::CleanAll, clean::Rustc, clean::Std),
// special-cased in Build::build()
- Kind::Format => vec![],
+ Kind::Format | Kind::Suggest => vec![],
}
}
@@ -859,6 +895,7 @@ impl<'a> Builder<'a> {
Subcommand::Run { ref paths, .. } => (Kind::Run, &paths[..]),
Subcommand::Clean { ref paths, .. } => (Kind::Clean, &paths[..]),
Subcommand::Format { .. } => (Kind::Format, &[][..]),
+ Subcommand::Suggest { .. } => (Kind::Suggest, &[][..]),
Subcommand::Setup { profile: ref path } => (
Kind::Setup,
path.as_ref().map_or([].as_slice(), |path| std::slice::from_ref(path)),
@@ -868,6 +905,21 @@ impl<'a> Builder<'a> {
Self::new_internal(build, kind, paths.to_owned())
}
+ /// Creates a new standalone builder for use outside of the normal process
+ pub fn new_standalone(
+ build: &mut Build,
+ kind: Kind,
+ paths: Vec<PathBuf>,
+ stage: Option<u32>,
+ ) -> Builder<'_> {
+ // FIXME: don't mutate `build`
+ if let Some(stage) = stage {
+ build.config.stage = stage;
+ }
+
+ Self::new_internal(build, kind, paths.to_owned())
+ }
+
pub fn execute_cli(&self) {
self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.paths);
}
@@ -910,14 +962,16 @@ impl<'a> Builder<'a> {
/// new artifacts, it can't be used to rely on the presence of a particular
/// sysroot.
///
- /// See `force_use_stage1` for documentation on what each argument is.
+ /// See `force_use_stage1` and `force_use_stage2` for documentation on what each argument is.
pub fn compiler_for(
&self,
stage: u32,
host: TargetSelection,
target: TargetSelection,
) -> Compiler {
- if self.build.force_use_stage1(Compiler { stage, host }, target) {
+ if self.build.force_use_stage2(stage) {
+ self.compiler(2, self.config.build)
+ } else if self.build.force_use_stage1(stage, target) {
self.compiler(1, self.config.build)
} else {
self.compiler(stage, host)
@@ -1097,7 +1151,7 @@ impl<'a> Builder<'a> {
/// check build or dry-run, where there's no need to build all of LLVM.
fn llvm_config(&self, target: TargetSelection) -> Option<PathBuf> {
if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run() {
- let native::LlvmResult { llvm_config, .. } = self.ensure(native::Llvm { target });
+ let llvm::LlvmResult { llvm_config, .. } = self.ensure(llvm::Llvm { target });
if llvm_config.is_file() {
return Some(llvm_config);
}
@@ -1223,7 +1277,7 @@ impl<'a> Builder<'a> {
// rustc_llvm. But if LLVM is stale, that'll be a tiny amount
// of work comparatively, and we'd likely need to rebuild it anyway,
// so that's okay.
- if crate::native::prebuilt_llvm_config(self, target).is_err() {
+ if crate::llvm::prebuilt_llvm_config(self, target).is_err() {
cargo.env("RUST_CHECK", "1");
}
}
@@ -1299,6 +1353,14 @@ impl<'a> Builder<'a> {
}
};
+ // By default, windows-rs depends on a native library that doesn't get copied into the
+ // sysroot. Passing this cfg enables raw-dylib support instead, which makes the native
+ // library unnecessary. This can be removed when windows-rs enables raw-dylib
+ // unconditionally.
+ if let Mode::Rustc | Mode::ToolRustc = mode {
+ rustflags.arg("--cfg=windows_raw_dylib");
+ }
+
if use_new_symbol_mangling {
rustflags.arg("-Csymbol-mangling-version=v0");
} else {
@@ -1718,6 +1780,15 @@ impl<'a> Builder<'a> {
cargo.env("RUSTC_VERBOSE", self.verbosity.to_string());
+ // Downstream forks of the Rust compiler might want to use a custom libc to add support for
+ // targets that are not yet available upstream. Adding a patch to replace libc with a
+ // custom one would cause compilation errors though, because Cargo would interpret the
+ // custom libc as part of the workspace, and apply the check-cfg lints on it.
+ //
+ // The libc build script emits check-cfg flags only when this environment variable is set,
+ // so this line allows the use of custom libcs.
+ cargo.env("LIBC_CHECK_CFG", "1");
+
if source_type == SourceType::InTree {
let mut lint_flags = Vec::new();
// When extending this list, add the new lints to the RUSTFLAGS of the
@@ -1920,6 +1991,12 @@ impl<'a> Builder<'a> {
rustflags.arg("-Zvalidate-mir");
rustflags.arg(&format!("-Zmir-opt-level={}", mir_opt_level));
}
+ // Always enable inlining MIR when building the standard library.
+ // Without this flag, MIR inlining is disabled when incremental compilation is enabled.
+ // That causes some mir-opt tests which inline functions from the standard library to
+ // break when incremental compilation is enabled. So this overrides the "no inlining
+ // during incremental builds" heuristic for the standard library.
+ rustflags.arg("-Zinline-mir");
}
Cargo { command: cargo, rustflags, rustdocflags, allow_features }
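
The new `should_run.paths` assertion above only makes sense once submodule paths are known, which is why `.gitmodules` is parsed once and memoized in a `OnceCell`. A minimal sketch of just the parsing step, run on an in-memory sample instead of the real file (the sample content is illustrative):

```rust
// Extracts the `path = ...` values from .gitmodules-style input,
// mirroring the parsing done in `init_submodules_paths` above.
fn submodule_paths(gitmodules: &str) -> Vec<String> {
    gitmodules
        .lines()
        .map(str::trim)
        .filter(|line| line.starts_with("path"))
        .filter_map(|line| line.split(' ').last())
        .map(str::to_owned)
        .collect()
}

fn main() {
    let sample = r#"
[submodule "src/llvm-project"]
    path = src/llvm-project
    url = https://github.com/rust-lang/llvm-project.git
"#;
    // Prints ["src/llvm-project"]
    println!("{:?}", submodule_paths(sample));
}
```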
diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs
index 05f25af68..5376c4ec9 100644
--- a/src/bootstrap/cache.rs
+++ b/src/bootstrap/cache.rs
@@ -1,9 +1,8 @@
use std::any::{Any, TypeId};
use std::borrow::Borrow;
use std::cell::RefCell;
-use std::cmp::{Ord, Ordering, PartialOrd};
+use std::cmp::Ordering;
use std::collections::HashMap;
-use std::convert::AsRef;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index eae81b9fc..c3e3fa009 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -19,7 +19,7 @@ pub enum GitInfo {
#[default]
Absent,
/// This is a git repository.
- /// If the info should be used (`ignore_git` is false), this will be
+ /// If the info should be used (`omit_git_hash` is false), this will be
/// `Some`, otherwise it will be `None`.
Present(Option<Info>),
/// This is not a git repository, but the info can be fetched from the
@@ -35,7 +35,7 @@ pub struct Info {
}
impl GitInfo {
- pub fn new(ignore_git: bool, dir: &Path) -> GitInfo {
+ pub fn new(omit_git_hash: bool, dir: &Path) -> GitInfo {
// See if this even begins to look like a git dir
if !dir.join(".git").exists() {
match read_commit_info_file(dir) {
@@ -52,7 +52,7 @@ impl GitInfo {
// If we're ignoring the git info, we don't actually need to collect it, just make sure this
// was a git repo in the first place.
- if ignore_git {
+ if omit_git_hash {
return GitInfo::Present(None);
}
@@ -139,7 +139,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
sha: sha.to_owned(),
short_sha: short_sha.to_owned(),
},
- _ => panic!("the `git-comit-info` file is malformed"),
+ _ => panic!("the `git-commit-info` file is malformed"),
};
Some(info)
} else {
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index cd1966713..fcaa69831 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -237,7 +237,7 @@ impl Step for Rustc {
target,
cargo_subcommand(builder.kind),
);
- rustc_cargo(builder, &mut cargo, target);
+ rustc_cargo(builder, &mut cargo, target, compiler.stage);
// For ./x.py clippy, don't run with --all-targets because
// linting tests and benchmarks can produce very noisy results
@@ -271,9 +271,17 @@ impl Step for Rustc {
false,
);
- let libdir = builder.sysroot_libdir(compiler, target);
- let hostdir = builder.sysroot_libdir(compiler, compiler.host);
- add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
+ // HACK: This avoids putting the newly built artifacts in the sysroot if we're using
+ // `download-rustc`, to avoid "multiple candidates for `rmeta`" errors. Technically, that's
+ // not quite right: people can set `download-rustc = true` to download even if there are
+ // changes to the compiler, and in that case ideally we would put the *new* artifacts in the
+ // sysroot, in case there are API changes that should be used by tools. In practice,
+ // though, that should be very uncommon, and people can still disable download-rustc.
+ if !builder.download_rustc() {
+ let libdir = builder.sysroot_libdir(compiler, target);
+ let hostdir = builder.sysroot_libdir(compiler, compiler.host);
+ add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
+ }
}
}
@@ -315,7 +323,7 @@ impl Step for CodegenBackend {
cargo
.arg("--manifest-path")
.arg(builder.src.join(format!("compiler/rustc_codegen_{}/Cargo.toml", backend)));
- rustc_cargo_env(builder, &mut cargo, target);
+ rustc_cargo_env(builder, &mut cargo, target, compiler.stage);
let msg = if compiler.host == target {
format!("Checking stage{} {} artifacts ({target})", builder.top_stage, backend)
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index 8b80dfc0f..4a4e7adcb 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -20,11 +20,11 @@ use serde_derive::Deserialize;
use crate::builder::crate_description;
use crate::builder::Cargo;
-use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
+use crate::builder::{Builder, Kind, PathSet, RunConfig, ShouldRun, Step, TaskPath};
use crate::cache::{Interned, INTERNER};
use crate::config::{LlvmLibunwind, RustcLto, TargetSelection};
use crate::dist;
-use crate::native;
+use crate::llvm;
use crate::tool::SourceType;
use crate::util::get_clang_cl_resource_dir;
use crate::util::{exe, is_debug_info, is_dylib, output, symlink_dir, t, up_to_date};
@@ -83,11 +83,11 @@ impl Step for Std {
let target = self.target;
let compiler = self.compiler;
- // These artifacts were already copied (in `impl Step for Sysroot`).
- // Don't recompile them.
+ // When using `download-rustc`, we already have artifacts for the host available
+ // (they were copied in `impl Step for Sysroot`). Don't recompile them.
// NOTE: the ABI of the beta compiler is different from the ABI of the downloaded compiler,
// so its artifacts can't be reused.
- if builder.download_rustc() && compiler.stage != 0 {
+ if builder.download_rustc() && compiler.stage != 0 && target == builder.build.build {
return;
}
@@ -191,7 +191,7 @@ fn copy_and_stamp(
}
fn copy_llvm_libunwind(builder: &Builder<'_>, target: TargetSelection, libdir: &Path) -> PathBuf {
- let libunwind_path = builder.ensure(native::Libunwind { target });
+ let libunwind_path = builder.ensure(llvm::Libunwind { target });
let libunwind_source = libunwind_path.join("libunwind.a");
let libunwind_target = libdir.join("libunwind.a");
builder.copy(&libunwind_source, &libunwind_target);
@@ -266,7 +266,7 @@ fn copy_self_contained_objects(
DependencyType::TargetSelfContained,
);
}
- let crt_path = builder.ensure(native::CrtBeginEnd { target });
+ let crt_path = builder.ensure(llvm::CrtBeginEnd { target });
for &obj in &["crtbegin.o", "crtbeginS.o", "crtend.o", "crtendS.o"] {
let src = crt_path.join(obj);
let target = libdir_self_contained.join(obj);
@@ -339,6 +339,12 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
""
};
+ // `libtest` uses this to know whether or not to support
+ // `-Zunstable-options`.
+ if !builder.unstable_features() {
+ cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
+ }
+
let mut features = String::new();
// Cranelift doesn't support `asm`.
@@ -468,7 +474,7 @@ fn copy_sanitizers(
compiler: &Compiler,
target: TargetSelection,
) -> Vec<PathBuf> {
- let runtimes: Vec<native::SanitizerRuntime> = builder.ensure(native::Sanitizers { target });
+ let runtimes: Vec<llvm::SanitizerRuntime> = builder.ensure(llvm::Sanitizers { target });
if builder.config.dry_run() {
return Vec::new();
@@ -690,7 +696,7 @@ impl Step for Rustc {
));
let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "build");
- rustc_cargo(builder, &mut cargo, target);
+ rustc_cargo(builder, &mut cargo, target, compiler.stage);
if builder.config.rust_profile_use.is_some()
&& builder.config.rust_profile_generate.is_some()
@@ -807,16 +813,21 @@ impl Step for Rustc {
}
}
-pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) {
+pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection, stage: u32) {
cargo
.arg("--features")
.arg(builder.rustc_features(builder.kind))
.arg("--manifest-path")
.arg(builder.src.join("compiler/rustc/Cargo.toml"));
- rustc_cargo_env(builder, cargo, target);
+ rustc_cargo_env(builder, cargo, target, stage);
}
-pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) {
+pub fn rustc_cargo_env(
+ builder: &Builder<'_>,
+ cargo: &mut Cargo,
+ target: TargetSelection,
+ stage: u32,
+) {
// Set some configuration variables picked up by build scripts and
// the compiler alike
cargo
@@ -861,83 +872,86 @@ pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetS
cargo.env("RUSTC_VERIFY_LLVM_IR", "1");
}
- // Pass down configuration from the LLVM build into the build of
- // rustc_llvm and rustc_codegen_llvm.
- //
// Note that this is disabled if LLVM itself is disabled or we're in a check
// build. If we are in a check build we still go ahead here presuming we've
// detected that LLVM is already built and good to go which helps prevent
// busting caches (e.g. like #71152).
- if builder.config.llvm_enabled()
- && (builder.kind != Kind::Check
- || crate::native::prebuilt_llvm_config(builder, target).is_ok())
- {
- if builder.is_rust_llvm(target) {
- cargo.env("LLVM_RUSTLLVM", "1");
- }
- let native::LlvmResult { llvm_config, .. } = builder.ensure(native::Llvm { target });
- cargo.env("LLVM_CONFIG", &llvm_config);
- if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
- cargo.env("CFG_LLVM_ROOT", s);
+ if builder.config.llvm_enabled() {
+ let building_is_expensive = crate::llvm::prebuilt_llvm_config(builder, target).is_err();
+ // `top_stage == stage` might be false for `check --stage 1`, if we are building the stage 1 compiler
+ let can_skip_build = builder.kind == Kind::Check && builder.top_stage == stage;
+ let should_skip_build = building_is_expensive && can_skip_build;
+ if !should_skip_build {
+ rustc_llvm_env(builder, cargo, target)
}
+ }
+}
- // Some LLVM linker flags (-L and -l) may be needed to link `rustc_llvm`. Its build script
- // expects these to be passed via the `LLVM_LINKER_FLAGS` env variable, separated by
- // whitespace.
- //
- // For example:
- // - on windows, when `clang-cl` is used with instrumentation, we need to manually add
- // clang's runtime library resource directory so that the profiler runtime library can be
- // found. This is to avoid the linker errors about undefined references to
- // `__llvm_profile_instrument_memop` when linking `rustc_driver`.
- let mut llvm_linker_flags = String::new();
- if builder.config.llvm_profile_generate && target.contains("msvc") {
- if let Some(ref clang_cl_path) = builder.config.llvm_clang_cl {
- // Add clang's runtime library directory to the search path
- let clang_rt_dir = get_clang_cl_resource_dir(clang_cl_path);
- llvm_linker_flags.push_str(&format!("-L{}", clang_rt_dir.display()));
- }
- }
+/// Pass down configuration from the LLVM build into the build of
+/// rustc_llvm and rustc_codegen_llvm.
+fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) {
+ let target_config = builder.config.target_config.get(&target);
- // The config can also specify its own llvm linker flags.
- if let Some(ref s) = builder.config.llvm_ldflags {
- if !llvm_linker_flags.is_empty() {
- llvm_linker_flags.push_str(" ");
- }
- llvm_linker_flags.push_str(s);
+ if builder.is_rust_llvm(target) {
+ cargo.env("LLVM_RUSTLLVM", "1");
+ }
+ let llvm::LlvmResult { llvm_config, .. } = builder.ensure(llvm::Llvm { target });
+ cargo.env("LLVM_CONFIG", &llvm_config);
+ if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+ cargo.env("CFG_LLVM_ROOT", s);
+ }
+
+ // Some LLVM linker flags (-L and -l) may be needed to link `rustc_llvm`. Its build script
+ // expects these to be passed via the `LLVM_LINKER_FLAGS` env variable, separated by
+ // whitespace.
+ //
+ // For example:
+ // - on windows, when `clang-cl` is used with instrumentation, we need to manually add
+ // clang's runtime library resource directory so that the profiler runtime library can be
+ // found. This is to avoid the linker errors about undefined references to
+ // `__llvm_profile_instrument_memop` when linking `rustc_driver`.
+ let mut llvm_linker_flags = String::new();
+ if builder.config.llvm_profile_generate && target.contains("msvc") {
+ if let Some(ref clang_cl_path) = builder.config.llvm_clang_cl {
+ // Add clang's runtime library directory to the search path
+ let clang_rt_dir = get_clang_cl_resource_dir(clang_cl_path);
+ llvm_linker_flags.push_str(&format!("-L{}", clang_rt_dir.display()));
}
+ }
- // Set the linker flags via the env var that `rustc_llvm`'s build script will read.
+ // The config can also specify its own llvm linker flags.
+ if let Some(ref s) = builder.config.llvm_ldflags {
if !llvm_linker_flags.is_empty() {
- cargo.env("LLVM_LINKER_FLAGS", llvm_linker_flags);
+ llvm_linker_flags.push_str(" ");
}
+ llvm_linker_flags.push_str(s);
+ }
- // Building with a static libstdc++ is only supported on linux right now,
- // not for MSVC or macOS
- if builder.config.llvm_static_stdcpp
- && !target.contains("freebsd")
- && !target.contains("msvc")
- && !target.contains("apple")
- && !target.contains("solaris")
- {
- let file = compiler_file(
- builder,
- builder.cxx(target).unwrap(),
- target,
- CLang::Cxx,
- "libstdc++.a",
- );
- cargo.env("LLVM_STATIC_STDCPP", file);
- }
- if builder.llvm_link_shared() {
- cargo.env("LLVM_LINK_SHARED", "1");
- }
- if builder.config.llvm_use_libcxx {
- cargo.env("LLVM_USE_LIBCXX", "1");
- }
- if builder.config.llvm_optimize && !builder.config.llvm_release_debuginfo {
- cargo.env("LLVM_NDEBUG", "1");
- }
+ // Set the linker flags via the env var that `rustc_llvm`'s build script will read.
+ if !llvm_linker_flags.is_empty() {
+ cargo.env("LLVM_LINKER_FLAGS", llvm_linker_flags);
+ }
+
+ // Building with a static libstdc++ is only supported on linux right now,
+ // not for MSVC or macOS
+ if builder.config.llvm_static_stdcpp
+ && !target.contains("freebsd")
+ && !target.contains("msvc")
+ && !target.contains("apple")
+ && !target.contains("solaris")
+ {
+ let file =
+ compiler_file(builder, builder.cxx(target).unwrap(), target, CLang::Cxx, "libstdc++.a");
+ cargo.env("LLVM_STATIC_STDCPP", file);
+ }
+ if builder.llvm_link_shared() {
+ cargo.env("LLVM_LINK_SHARED", "1");
+ }
+ if builder.config.llvm_use_libcxx {
+ cargo.env("LLVM_USE_LIBCXX", "1");
+ }
+ if builder.config.llvm_optimize && !builder.config.llvm_release_debuginfo {
+ cargo.env("LLVM_NDEBUG", "1");
}
}
@@ -989,6 +1003,44 @@ pub struct CodegenBackend {
pub backend: Interned<String>,
}
+fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
+ let mut needs_codegen_cfg = false;
+ for path_set in &run.paths {
+ needs_codegen_cfg = match path_set {
+ PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)),
+ PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run),
+ }
+ }
+ needs_codegen_cfg
+}
+
+const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_";
+
+fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
+ if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) {
+ let mut needs_codegen_backend_config = true;
+ for &backend in &run.builder.config.rust_codegen_backends {
+ if path
+ .path
+ .to_str()
+ .unwrap()
+ .ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + &backend))
+ {
+ needs_codegen_backend_config = false;
+ }
+ }
+ if needs_codegen_backend_config {
+ run.builder.info(
+ "Warning: no codegen-backends config matched the requested path to build a codegen backend. \
+ Help: add backend to codegen-backends in config.toml.",
+ );
+ return true;
+ }
+ }
+
+ return false;
+}
+
impl Step for CodegenBackend {
type Output = ();
const ONLY_HOSTS: bool = true;
@@ -1000,6 +1052,10 @@ impl Step for CodegenBackend {
}
fn make_run(run: RunConfig<'_>) {
+ if needs_codegen_config(&run) {
+ return;
+ }
+
for &backend in &run.builder.config.rust_codegen_backends {
if backend == "llvm" {
continue; // Already built as part of rustc
@@ -1042,7 +1098,7 @@ impl Step for CodegenBackend {
cargo
.arg("--manifest-path")
.arg(builder.src.join(format!("compiler/rustc_codegen_{}/Cargo.toml", backend)));
- rustc_cargo_env(builder, &mut cargo, target);
+ rustc_cargo_env(builder, &mut cargo, target, compiler.stage);
let tmp_stamp = out_dir.join(".tmp.stamp");
@@ -1225,9 +1281,7 @@ impl Step for Sysroot {
}
// Copy the compiler into the correct sysroot.
- let ci_rustc_dir =
- builder.config.out.join(&*builder.config.build.triple).join("ci-rustc");
- builder.cp_r(&ci_rustc_dir, &sysroot);
+ builder.cp_r(&builder.ci_rustc_dir(builder.build.build), &sysroot);
return INTERNER.intern_path(sysroot);
}
@@ -1329,7 +1383,10 @@ impl Step for Assemble {
// If we're downloading a compiler from CI, we can use the same compiler for all stages other than 0.
if builder.download_rustc() {
- builder.ensure(Sysroot { compiler: target_compiler });
+ let sysroot = builder.ensure(Sysroot { compiler: target_compiler });
+ // Ensure that `libLLVM.so` ends up in the newly created target directory,
+ // so that tools using `rustc_private` can use it.
+ dist::maybe_install_llvm_target(builder, target_compiler.host, &sysroot);
return target_compiler;
}
@@ -1340,6 +1397,13 @@ impl Step for Assemble {
// when not performing a full bootstrap).
builder.ensure(Rustc::new(build_compiler, target_compiler.host));
+    // FIXME: For now, patch over the problems noted in #90244 by returning early here, even though
+    // we've not properly assembled the target sysroot. A full fix is pending further investigation;
+    // for now, full bootstrap usage is rare enough that this is OK.
+ if target_compiler.stage >= 3 && !builder.config.full_bootstrap {
+ return target_compiler;
+ }
+
for &backend in builder.config.rust_codegen_backends.iter() {
if backend == "llvm" {
continue; // Already built as part of rustc
@@ -1353,7 +1417,7 @@ impl Step for Assemble {
}
let lld_install = if builder.config.lld_enabled {
- Some(builder.ensure(native::Lld { target: target_compiler.host }))
+ Some(builder.ensure(llvm::Lld { target: target_compiler.host }))
} else {
None
};
@@ -1417,8 +1481,8 @@ impl Step for Assemble {
}
if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) {
- let native::LlvmResult { llvm_config, .. } =
- builder.ensure(native::Llvm { target: target_compiler.host });
+ let llvm::LlvmResult { llvm_config, .. } =
+ builder.ensure(llvm::Llvm { target: target_compiler.host });
if !builder.config.dry_run() {
let llvm_bin_dir = output(Command::new(llvm_config).arg("--bindir"));
let llvm_bin_dir = Path::new(llvm_bin_dir.trim());
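
The refactored `rustc_llvm_env` above only sets environment variables; the `rustc_llvm` build script is what actually reads them. The following is an illustrative stand-in (not the real build script) showing how a Cargo build script could consume `LLVM_CONFIG`, `LLVM_LINK_SHARED`, and `LLVM_NDEBUG`:

```rust
// build.rs sketch; the variable names match those set by rustc_llvm_env,
// everything else is hypothetical.
use std::env;
use std::path::PathBuf;

fn main() {
    // Rebuild when the LLVM configuration handed down by bootstrap changes.
    println!("cargo:rerun-if-env-changed=LLVM_CONFIG");
    println!("cargo:rerun-if-env-changed=LLVM_LINK_SHARED");

    if let Some(llvm_config) = env::var_os("LLVM_CONFIG").map(PathBuf::from) {
        println!("cargo:warning=using llvm-config at {}", llvm_config.display());
    }

    // "1" means link LLVM dynamically rather than statically.
    let link_shared = env::var("LLVM_LINK_SHARED").map_or(false, |v| v == "1");
    // Set when LLVM is an optimized build without release debuginfo (see above).
    let ndebug = env::var("LLVM_NDEBUG").map_or(false, |v| v == "1");
    let _ = (link_shared, ndebug); // a real script would adjust link flags here
}
```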
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 05e742549..cc3b3bc25 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -55,9 +55,8 @@ pub enum DryRun {
/// Note that this structure is not decoded directly into, but rather it is
/// filled out from the decoded forms of the structs below. For documentation on
/// each field, see the corresponding fields in
-/// `config.toml.example`.
-#[derive(Default)]
-#[cfg_attr(test, derive(Clone))]
+/// `config.example.toml`.
+#[derive(Default, Clone)]
pub struct Config {
pub changelog_seen: Option<usize>,
pub ccache: Option<String>,
@@ -77,7 +76,7 @@ pub struct Config {
pub tools: Option<HashSet<String>>,
pub sanitizers: bool,
pub profiler: bool,
- pub ignore_git: bool,
+ pub omit_git_hash: bool,
pub exclude: Vec<TaskPath>,
pub include_default_paths: bool,
pub rustc_error_format: Option<String>,
@@ -87,6 +86,9 @@ pub struct Config {
pub patch_binaries_for_nix: bool,
pub stage0_metadata: Stage0Metadata,
+ pub stdout_is_tty: bool,
+ pub stderr_is_tty: bool,
+
pub on_fail: Option<String>,
pub stage: u32,
pub keep_stage: Vec<u32>,
@@ -112,14 +114,12 @@ pub struct Config {
pub backtrace_on_ice: bool,
// llvm codegen options
- pub llvm_skip_rebuild: bool,
pub llvm_assertions: bool,
pub llvm_tests: bool,
pub llvm_plugins: bool,
pub llvm_optimize: bool,
pub llvm_thin_lto: bool,
pub llvm_release_debuginfo: bool,
- pub llvm_version_check: bool,
pub llvm_static_stdcpp: bool,
/// `None` if `llvm_from_ci` is true and we haven't yet downloaded llvm.
#[cfg(not(test))]
@@ -135,6 +135,7 @@ pub struct Config {
pub llvm_allow_old_toolchain: bool,
pub llvm_polly: bool,
pub llvm_clang: bool,
+ pub llvm_enable_warnings: bool,
pub llvm_from_ci: bool,
pub llvm_build_config: HashMap<String, String>,
@@ -192,6 +193,8 @@ pub struct Config {
pub dist_sign_folder: Option<PathBuf>,
pub dist_upload_addr: Option<String>,
pub dist_compression_formats: Option<Vec<String>>,
+ pub dist_compression_profile: String,
+ pub dist_include_mingw_linker: bool,
// libstd features
pub backtrace: bool, // support for RUST_BACKTRACE
@@ -223,27 +226,33 @@ pub struct Config {
pub reuse: Option<PathBuf>,
pub cargo_native_static: bool,
pub configure_args: Vec<String>,
+ pub out: PathBuf,
+ pub rust_info: channel::GitInfo,
// These are either the stage0 downloaded binaries or the locally installed ones.
pub initial_cargo: PathBuf,
pub initial_rustc: PathBuf,
+
#[cfg(not(test))]
initial_rustfmt: RefCell<RustfmtState>,
#[cfg(test)]
pub initial_rustfmt: RefCell<RustfmtState>,
- pub out: PathBuf,
- pub rust_info: channel::GitInfo,
}
-#[derive(Default, Deserialize)]
-#[cfg_attr(test, derive(Clone))]
+#[derive(Default, Deserialize, Clone)]
pub struct Stage0Metadata {
+ pub compiler: CompilerMetadata,
pub config: Stage0Config,
pub checksums_sha256: HashMap<String, String>,
pub rustfmt: Option<RustfmtMetadata>,
}
-#[derive(Default, Deserialize)]
-#[cfg_attr(test, derive(Clone))]
+#[derive(Default, Deserialize, Clone)]
+pub struct CompilerMetadata {
+ pub date: String,
+ pub version: String,
+}
+
+#[derive(Default, Deserialize, Clone)]
pub struct Stage0Config {
pub dist_server: String,
pub artifacts_server: String,
@@ -251,8 +260,7 @@ pub struct Stage0Config {
pub git_merge_commit_email: String,
pub nightly_branch: String,
}
-#[derive(Default, Deserialize)]
-#[cfg_attr(test, derive(Clone))]
+#[derive(Default, Deserialize, Clone)]
pub struct RustfmtMetadata {
pub date: String,
pub version: String,
@@ -326,7 +334,7 @@ impl std::str::FromStr for SplitDebuginfo {
impl SplitDebuginfo {
/// Returns the default `-Csplit-debuginfo` value for the current target. See the comment for
- /// `rust.split-debuginfo` in `config.toml.example`.
+ /// `rust.split-debuginfo` in `config.example.toml`.
fn default_for_platform(target: &str) -> Self {
if target.contains("apple") {
SplitDebuginfo::Unpacked
@@ -430,8 +438,7 @@ impl PartialEq<&str> for TargetSelection {
}
/// Per-target configuration stored in the global configuration structure.
-#[derive(Default)]
-#[cfg_attr(test, derive(Clone))]
+#[derive(Default, Clone)]
pub struct Target {
/// Some(path to llvm-config) if using an external LLVM.
pub llvm_config: Option<PathBuf>,
@@ -666,7 +673,6 @@ define_config! {
define_config! {
/// TOML representation of how the LLVM build is configured.
struct Llvm {
- skip_rebuild: Option<bool> = "skip-rebuild",
optimize: Option<bool> = "optimize",
thin_lto: Option<bool> = "thin-lto",
release_debuginfo: Option<bool> = "release-debuginfo",
@@ -674,7 +680,6 @@ define_config! {
tests: Option<bool> = "tests",
plugins: Option<bool> = "plugins",
ccache: Option<StringOrBool> = "ccache",
- version_check: Option<bool> = "version-check",
static_libstdcpp: Option<bool> = "static-libstdcpp",
ninja: Option<bool> = "ninja",
targets: Option<String> = "targets",
@@ -691,6 +696,7 @@ define_config! {
allow_old_toolchain: Option<bool> = "allow-old-toolchain",
polly: Option<bool> = "polly",
clang: Option<bool> = "clang",
+ enable_warnings: Option<bool> = "enable-warnings",
download_ci_llvm: Option<StringOrBool> = "download-ci-llvm",
build_config: Option<HashMap<String, String>> = "build-config",
}
@@ -704,6 +710,8 @@ define_config! {
src_tarball: Option<bool> = "src-tarball",
missing_tools: Option<bool> = "missing-tools",
compression_formats: Option<Vec<String>> = "compression-formats",
+ compression_profile: Option<String> = "compression-profile",
+ include_mingw_linker: Option<bool> = "include-mingw-linker",
}
}
@@ -750,7 +758,7 @@ define_config! {
verbose_tests: Option<bool> = "verbose-tests",
optimize_tests: Option<bool> = "optimize-tests",
codegen_tests: Option<bool> = "codegen-tests",
- ignore_git: Option<bool> = "ignore-git",
+ omit_git_hash: Option<bool> = "omit-git-hash",
dist_src: Option<bool> = "dist-src",
save_toolstates: Option<String> = "save-toolstates",
codegen_backends: Option<Vec<String>> = "codegen-backends",
@@ -803,10 +811,11 @@ define_config! {
impl Config {
pub fn default_opts() -> Config {
+ use is_terminal::IsTerminal;
+
let mut config = Config::default();
config.llvm_optimize = true;
config.ninja_in_file = true;
- config.llvm_version_check = true;
config.llvm_static_stdcpp = false;
config.backtrace = true;
config.rust_optimize = true;
@@ -821,6 +830,11 @@ impl Config {
config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")];
config.deny_warnings = true;
config.bindir = "bin".into();
+ config.dist_include_mingw_linker = true;
+ config.dist_compression_profile = "fast".into();
+
+ config.stdout_is_tty = std::io::stdout().is_terminal();
+ config.stderr_is_tty = std::io::stderr().is_terminal();
// set by build.rs
config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE"));
@@ -989,10 +1003,10 @@ impl Config {
config.out = crate::util::absolute(&config.out);
}
- config.initial_rustc = build
- .rustc
- .map(PathBuf::from)
- .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/rustc"));
+ config.initial_rustc = build.rustc.map(PathBuf::from).unwrap_or_else(|| {
+ config.download_beta_toolchain();
+ config.out.join(config.build.triple).join("stage0/bin/rustc")
+ });
config.initial_cargo = build
.cargo
.map(PathBuf::from)
@@ -1060,11 +1074,6 @@ impl Config {
config.mandir = install.mandir.map(PathBuf::from);
}
- // We want the llvm-skip-rebuild flag to take precedence over the
- // skip-rebuild config.toml option so we store it separately
- // so that we can infer the right value
- let mut llvm_skip_rebuild = flags.llvm_skip_rebuild;
-
// Store off these values as options because if they're not provided
// we'll infer default values for them later
let mut llvm_assertions = None;
@@ -1082,7 +1091,7 @@ impl Config {
let mut debuginfo_level_tools = None;
let mut debuginfo_level_tests = None;
let mut optimize = None;
- let mut ignore_git = None;
+ let mut omit_git_hash = None;
if let Some(rust) = toml.rust {
debug = rust.debug;
@@ -1103,7 +1112,7 @@ impl Config {
.map(|v| v.expect("invalid value for rust.split_debuginfo"))
.unwrap_or(SplitDebuginfo::default_for_platform(&config.build.triple));
optimize = rust.optimize;
- ignore_git = rust.ignore_git;
+ omit_git_hash = rust.omit_git_hash;
config.rust_new_symbol_mangling = rust.new_symbol_mangling;
set(&mut config.rust_optimize_tests, rust.optimize_tests);
set(&mut config.codegen_tests, rust.codegen_tests);
@@ -1158,6 +1167,11 @@ impl Config {
config.rust_profile_generate = flags.rust_profile_generate;
}
+ // rust_info must be set before is_ci_llvm_available() is called.
+ let default = config.channel == "dev";
+ config.omit_git_hash = omit_git_hash.unwrap_or(default);
+ config.rust_info = GitInfo::new(config.omit_git_hash, &config.src);
+
if let Some(llvm) = toml.llvm {
match llvm.ccache {
Some(StringOrBool::String(ref s)) => config.ccache = Some(s.to_string()),
@@ -1170,11 +1184,9 @@ impl Config {
llvm_assertions = llvm.assertions;
llvm_tests = llvm.tests;
llvm_plugins = llvm.plugins;
- llvm_skip_rebuild = llvm_skip_rebuild.or(llvm.skip_rebuild);
set(&mut config.llvm_optimize, llvm.optimize);
set(&mut config.llvm_thin_lto, llvm.thin_lto);
set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo);
- set(&mut config.llvm_version_check, llvm.version_check);
set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
if let Some(v) = llvm.link_shared {
config.llvm_link_shared.set(Some(v));
@@ -1193,17 +1205,18 @@ impl Config {
config.llvm_allow_old_toolchain = llvm.allow_old_toolchain.unwrap_or(false);
config.llvm_polly = llvm.polly.unwrap_or(false);
config.llvm_clang = llvm.clang.unwrap_or(false);
+ config.llvm_enable_warnings = llvm.enable_warnings.unwrap_or(false);
config.llvm_build_config = llvm.build_config.clone().unwrap_or(Default::default());
let asserts = llvm_assertions.unwrap_or(false);
config.llvm_from_ci = match llvm.download_ci_llvm {
Some(StringOrBool::String(s)) => {
assert!(s == "if-available", "unknown option `{}` for download-ci-llvm", s);
- crate::native::is_ci_llvm_available(&config, asserts)
+ crate::llvm::is_ci_llvm_available(&config, asserts)
}
Some(StringOrBool::Bool(b)) => b,
None => {
- config.channel == "dev" && crate::native::is_ci_llvm_available(&config, asserts)
+ config.channel == "dev" && crate::llvm::is_ci_llvm_available(&config, asserts)
}
};
@@ -1246,7 +1259,7 @@ impl Config {
}
} else {
config.llvm_from_ci =
- config.channel == "dev" && crate::native::is_ci_llvm_available(&config, false);
+ config.channel == "dev" && crate::llvm::is_ci_llvm_available(&config, false);
}
if let Some(t) = toml.target {
@@ -1309,8 +1322,10 @@ impl Config {
config.dist_sign_folder = t.sign_folder.map(PathBuf::from);
config.dist_upload_addr = t.upload_addr;
config.dist_compression_formats = t.compression_formats;
+ set(&mut config.dist_compression_profile, t.compression_profile);
set(&mut config.rust_dist_src, t.src_tarball);
set(&mut config.missing_tools, t.missing_tools);
+ set(&mut config.dist_include_mingw_linker, t.include_mingw_linker)
}
if let Some(r) = build.rustfmt {
@@ -1324,7 +1339,6 @@ impl Config {
// Now that we've reached the end of our configuration, infer the
// default values for all options that we haven't otherwise stored yet.
- config.llvm_skip_rebuild = llvm_skip_rebuild.unwrap_or(false);
config.llvm_assertions = llvm_assertions.unwrap_or(false);
config.llvm_tests = llvm_tests.unwrap_or(false);
config.llvm_plugins = llvm_plugins.unwrap_or(false);
@@ -1352,10 +1366,6 @@ impl Config {
config.rust_debuginfo_level_tools = with_defaults(debuginfo_level_tools);
config.rust_debuginfo_level_tests = debuginfo_level_tests.unwrap_or(0);
- let default = config.channel == "dev";
- config.ignore_git = ignore_git.unwrap_or(default);
- config.rust_info = GitInfo::new(config.ignore_git, &config.src);
-
let download_rustc = config.download_rustc_commit.is_some();
// See https://github.com/rust-lang/compiler-team/issues/326
config.stage = match config.cmd {
@@ -1380,7 +1390,8 @@ impl Config {
| Subcommand::Fix { .. }
| Subcommand::Run { .. }
| Subcommand::Setup { .. }
- | Subcommand::Format { .. } => flags.stage.unwrap_or(0),
+ | Subcommand::Format { .. }
+ | Subcommand::Suggest { .. } => flags.stage.unwrap_or(0),
};
// CI should always run stage 2 builds, unless it specifically states otherwise
@@ -1405,7 +1416,8 @@ impl Config {
| Subcommand::Fix { .. }
| Subcommand::Run { .. }
| Subcommand::Setup { .. }
- | Subcommand::Format { .. } => {}
+ | Subcommand::Format { .. }
+ | Subcommand::Suggest { .. } => {}
}
}
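
The `set(&mut config.dist_compression_profile, t.compression_profile)` and `set(&mut config.dist_include_mingw_linker, t.include_mingw_linker)` calls above lean on bootstrap's small `set` helper, whose definition is not part of this hunk. A minimal sketch of the assumed shape, showing why defaults such as `dist_compression_profile = "fast"` survive when the corresponding TOML key is absent:

    // Assumed shape of the `set` helper (its real definition lives outside this hunk):
    // only overwrite the field when the TOML actually provided a value.
    fn set<T>(field: &mut T, val: Option<T>) {
        if let Some(v) = val {
            *field = v;
        }
    }

    fn main() {
        let mut profile = String::from("fast");          // default from default_opts()
        set(&mut profile, None);                         // key missing in config.toml: default kept
        set(&mut profile, Some("balanced".to_string())); // key present: overridden
        assert_eq!(profile, "balanced");
    }
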
diff --git a/src/bootstrap/config/tests.rs b/src/bootstrap/config/tests.rs
index 5a105007f..50569eb4f 100644
--- a/src/bootstrap/config/tests.rs
+++ b/src/bootstrap/config/tests.rs
@@ -1,5 +1,5 @@
use super::{Config, TomlConfig};
-use std::path::Path;
+use std::{env, path::Path};
fn toml(config: &str) -> impl '_ + Fn(&Path) -> TomlConfig {
|&_| toml::from_str(config).unwrap()
@@ -11,7 +11,7 @@ fn parse(config: &str) -> Config {
#[test]
fn download_ci_llvm() {
- if crate::native::is_ci_llvm_modified(&parse("")) {
+ if crate::llvm::is_ci_llvm_modified(&parse("")) {
eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change");
return;
}
@@ -33,4 +33,58 @@ fn download_ci_llvm() {
));
}
-// FIXME: add test for detecting `src` and `out`
+// FIXME(ozkanonur): extend scope of the test
+// refs:
+// - https://github.com/rust-lang/rust/issues/109120
+// - https://github.com/rust-lang/rust/pull/109162#issuecomment-1496782487
+#[test]
+fn detect_src_and_out() {
+ fn test(cfg: Config, build_dir: Option<&str>) {
+ // This gives the absolute path of `src/bootstrap`
+ let current_dir = std::env::current_dir().unwrap();
+
+ // get `src` by walking up to the project root
+ let expected_src = current_dir.ancestors().nth(2).unwrap();
+ assert_eq!(&cfg.src, expected_src);
+
+ // Sanity check for `src`
+ let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
+ let expected_src = manifest_dir.ancestors().nth(2).unwrap();
+ assert_eq!(&cfg.src, expected_src);
+
+ // test if build-dir was manually given in config.toml
+ if let Some(custom_build_dir) = build_dir {
+ assert_eq!(&cfg.out, Path::new(custom_build_dir));
+ }
+ // test the default bootstrap output layout
+ else {
+ // This should give bootstrap's output path in absolute form
+ let cargo_target_dir = env::var_os("CARGO_TARGET_DIR").expect(
+ "CARGO_TARGET_DIR must been provided for the test environment from bootstrap",
+ );
+
+ // Move to `build` from `build/bootstrap`
+ let expected_out = Path::new(&cargo_target_dir).parent().unwrap();
+ assert_eq!(&cfg.out, expected_out);
+
+ let args: Vec<String> = env::args().collect();
+
+ // Another test for `out` as a sanity check
+ //
+ // This will give something like:
+ // `{build-dir}/bootstrap/debug/deps/bootstrap-c7ee91d5661e2804`
+ // `{build-dir}` can be anywhere, not just in the rust project directory.
+ let dep = Path::new(args.first().unwrap());
+ let expected_out = dep.ancestors().nth(4).unwrap();
+
+ assert_eq!(&cfg.out, expected_out);
+ }
+ }
+
+ test(parse(""), None);
+
+ {
+ let build_dir = if cfg!(windows) { Some("C:\\tmp") } else { Some("/tmp") };
+ test(parse("build.build-dir = \"/tmp\""), build_dir);
+ }
+}
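
One detail worth spelling out about the new `detect_src_and_out` test: `Path::ancestors()` yields the path itself before any parent, which is why `nth(2)` reaches the project root from `src/bootstrap` and `nth(4)` reaches the build directory from the test binary. A standalone illustration with a made-up path:

    use std::path::Path;

    fn main() {
        // Hypothetical test-binary path, mirroring the comment in the test above.
        let dep = Path::new("/tmp/build/bootstrap/debug/deps/bootstrap-c7ee91d5661e2804");
        // ancestors(): nth(0) is the path itself, nth(1) is `deps`, nth(2) is `debug`,
        // nth(3) is `bootstrap`, nth(4) is the build directory.
        assert_eq!(dep.ancestors().nth(4).unwrap(), Path::new("/tmp/build"));
    }
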
diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py
index ab3d08292..abd28b400 100755
--- a/src/bootstrap/configure.py
+++ b/src/bootstrap/configure.py
@@ -44,7 +44,6 @@ o("local-rebuild", "build.local-rebuild", "assume local-rust matches the current
o("llvm-static-stdcpp", "llvm.static-libstdcpp", "statically link to libstdc++ for LLVM")
o("llvm-link-shared", "llvm.link-shared", "prefer shared linking to LLVM (llvm-config --link-shared)")
o("rpath", "rust.rpath", "build rpaths into rustc itself")
-o("llvm-version-check", "llvm.version-check", "check if the LLVM version is supported, build anyway")
o("codegen-tests", "rust.codegen-tests", "run the tests/codegen tests")
o("option-checking", None, "complain about unrecognized options in this configure script")
o("ninja", "llvm.ninja", "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)")
@@ -195,7 +194,7 @@ if '--help' in sys.argv or '-h' in sys.argv:
print('')
print('This configure script is a thin configuration shim over the true')
print('configuration system, `config.toml`. You can explore the comments')
- print('in `config.toml.example` next to this configure script to see')
+ print('in `config.example.toml` next to this configure script to see')
print('more information about what each option is. Additionally you can')
print('pass `--set` as an argument to set arbitrary key/value pairs')
print('in the TOML configuration if desired')
@@ -206,77 +205,78 @@ if '--help' in sys.argv or '-h' in sys.argv:
# Parse all command line arguments into one of these three lists, handling
# boolean and value-based options separately
-unknown_args = []
-need_value_args = []
-known_args = {}
-
-p("processing command line")
-i = 1
-while i < len(sys.argv):
- arg = sys.argv[i]
- i += 1
- if not arg.startswith('--'):
- unknown_args.append(arg)
- continue
-
- found = False
- for option in options:
- value = None
- if option.value:
- keyval = arg[2:].split('=', 1)
- key = keyval[0]
- if option.name != key:
- continue
+def parse_args(args):
+ unknown_args = []
+ need_value_args = []
+ known_args = {}
+
+ i = 0
+ while i < len(args):
+ arg = args[i]
+ i += 1
+ if not arg.startswith('--'):
+ unknown_args.append(arg)
+ continue
- if len(keyval) > 1:
- value = keyval[1]
- elif i < len(sys.argv):
- value = sys.argv[i]
- i += 1
- else:
- need_value_args.append(arg)
- continue
- else:
- if arg[2:] == 'enable-' + option.name:
- value = True
- elif arg[2:] == 'disable-' + option.name:
- value = False
+ found = False
+ for option in options:
+ value = None
+ if option.value:
+ keyval = arg[2:].split('=', 1)
+ key = keyval[0]
+ if option.name != key:
+ continue
+
+ if len(keyval) > 1:
+ value = keyval[1]
+ elif i < len(args):
+ value = args[i]
+ i += 1
+ else:
+ need_value_args.append(arg)
+ continue
else:
- continue
+ if arg[2:] == 'enable-' + option.name:
+ value = True
+ elif arg[2:] == 'disable-' + option.name:
+ value = False
+ else:
+ continue
+
+ found = True
+ if option.name not in known_args:
+ known_args[option.name] = []
+ known_args[option.name].append((option, value))
+ break
+
+ if not found:
+ unknown_args.append(arg)
+
+ # Note: here and a few other places, we use [-1] to apply the *last* value
+ # passed. But if option-checking is enabled, then the known_args loop will
+ # also assert that options are only passed once.
+ option_checking = ('option-checking' not in known_args
+ or known_args['option-checking'][-1][1])
+ if option_checking:
+ if len(unknown_args) > 0:
+ err("Option '" + unknown_args[0] + "' is not recognized")
+ if len(need_value_args) > 0:
+ err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0]))
+
+ config = {}
+
+ set('build.configure-args', sys.argv[1:], config)
+ apply_args(known_args, option_checking, config)
+ return parse_example_config(known_args, config)
- found = True
- if option.name not in known_args:
- known_args[option.name] = []
- known_args[option.name].append((option, value))
- break
-
- if not found:
- unknown_args.append(arg)
-p("")
-
-# Note: here and a few other places, we use [-1] to apply the *last* value
-# passed. But if option-checking is enabled, then the known_args loop will
-# also assert that options are only passed once.
-option_checking = ('option-checking' not in known_args
- or known_args['option-checking'][-1][1])
-if option_checking:
- if len(unknown_args) > 0:
- err("Option '" + unknown_args[0] + "' is not recognized")
- if len(need_value_args) > 0:
- err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0]))
-
-# Parse all known arguments into a configuration structure that reflects the
-# TOML we're going to write out
-config = {}
-
-
-def build():
+
+def build(known_args):
if 'build' in known_args:
return known_args['build'][-1][1]
return bootstrap.default_build_triple(verbose=False)
-def set(key, value):
+def set(key, value, config):
if isinstance(value, list):
# Remove empty values, which value.split(',') tends to generate.
value = [v for v in value if v]
@@ -298,122 +298,127 @@ def set(key, value):
arr = arr[part]
-for key in known_args:
- # The `set` option is special and can be passed a bunch of times
- if key == 'set':
- for option, value in known_args[key]:
- keyval = value.split('=', 1)
- if len(keyval) == 1 or keyval[1] == "true":
- value = True
- elif keyval[1] == "false":
- value = False
- else:
- value = keyval[1]
- set(keyval[0], value)
- continue
-
- # Ensure each option is only passed once
- arr = known_args[key]
- if option_checking and len(arr) > 1:
- err("Option '{}' provided more than once".format(key))
- option, value = arr[-1]
-
- # If we have a clear avenue to set our value in rustbuild, do so
- if option.rustbuild is not None:
- set(option.rustbuild, value)
- continue
-
- # Otherwise we're a "special" option and need some extra handling, so do
- # that here.
- if option.name == 'sccache':
- set('llvm.ccache', 'sccache')
- elif option.name == 'local-rust':
- for path in os.environ['PATH'].split(os.pathsep):
- if os.path.exists(path + '/rustc'):
- set('build.rustc', path + '/rustc')
- break
- for path in os.environ['PATH'].split(os.pathsep):
- if os.path.exists(path + '/cargo'):
- set('build.cargo', path + '/cargo')
- break
- elif option.name == 'local-rust-root':
- set('build.rustc', value + '/bin/rustc')
- set('build.cargo', value + '/bin/cargo')
- elif option.name == 'llvm-root':
- set('target.{}.llvm-config'.format(build()), value + '/bin/llvm-config')
- elif option.name == 'llvm-config':
- set('target.{}.llvm-config'.format(build()), value)
- elif option.name == 'llvm-filecheck':
- set('target.{}.llvm-filecheck'.format(build()), value)
- elif option.name == 'tools':
- set('build.tools', value.split(','))
- elif option.name == 'codegen-backends':
- set('rust.codegen-backends', value.split(','))
- elif option.name == 'host':
- set('build.host', value.split(','))
- elif option.name == 'target':
- set('build.target', value.split(','))
- elif option.name == 'full-tools':
- set('rust.codegen-backends', ['llvm'])
- set('rust.lld', True)
- set('rust.llvm-tools', True)
- set('build.extended', True)
- elif option.name == 'option-checking':
- # this was handled above
- pass
- elif option.name == 'dist-compression-formats':
- set('dist.compression-formats', value.split(','))
- else:
- raise RuntimeError("unhandled option {}".format(option.name))
+def apply_args(known_args, option_checking, config):
+ for key in known_args:
+ # The `set` option is special and can be passed a bunch of times
+ if key == 'set':
+ for option, value in known_args[key]:
+ keyval = value.split('=', 1)
+ if len(keyval) == 1 or keyval[1] == "true":
+ value = True
+ elif keyval[1] == "false":
+ value = False
+ else:
+ value = keyval[1]
+ set(keyval[0], value, config)
+ continue
-set('build.configure-args', sys.argv[1:])
+ # Ensure each option is only passed once
+ arr = known_args[key]
+ if option_checking and len(arr) > 1:
+ err("Option '{}' provided more than once".format(key))
+ option, value = arr[-1]
-# "Parse" the `config.toml.example` file into the various sections, and we'll
+ # If we have a clear avenue to set our value in rustbuild, do so
+ if option.rustbuild is not None:
+ set(option.rustbuild, value, config)
+ continue
+
+ # Otherwise we're a "special" option and need some extra handling, so do
+ # that here.
+ build_triple = build(known_args)
+
+ if option.name == 'sccache':
+ set('llvm.ccache', 'sccache', config)
+ elif option.name == 'local-rust':
+ for path in os.environ['PATH'].split(os.pathsep):
+ if os.path.exists(path + '/rustc'):
+ set('build.rustc', path + '/rustc', config)
+ break
+ for path in os.environ['PATH'].split(os.pathsep):
+ if os.path.exists(path + '/cargo'):
+ set('build.cargo', path + '/cargo', config)
+ break
+ elif option.name == 'local-rust-root':
+ set('build.rustc', value + '/bin/rustc', config)
+ set('build.cargo', value + '/bin/cargo', config)
+ elif option.name == 'llvm-root':
+ set('target.{}.llvm-config'.format(build_triple), value + '/bin/llvm-config', config)
+ elif option.name == 'llvm-config':
+ set('target.{}.llvm-config'.format(build_triple), value, config)
+ elif option.name == 'llvm-filecheck':
+ set('target.{}.llvm-filecheck'.format(build_triple), value, config)
+ elif option.name == 'tools':
+ set('build.tools', value.split(','), config)
+ elif option.name == 'codegen-backends':
+ set('rust.codegen-backends', value.split(','), config)
+ elif option.name == 'host':
+ set('build.host', value.split(','), config)
+ elif option.name == 'target':
+ set('build.target', value.split(','), config)
+ elif option.name == 'full-tools':
+ set('rust.codegen-backends', ['llvm'], config)
+ set('rust.lld', True, config)
+ set('rust.llvm-tools', True, config)
+ set('build.extended', True, config)
+ elif option.name == 'option-checking':
+ # this was handled above
+ pass
+ elif option.name == 'dist-compression-formats':
+ set('dist.compression-formats', value.split(','), config)
+ else:
+ raise RuntimeError("unhandled option {}".format(option.name))
+
+# "Parse" the `config.example.toml` file into the various sections, and we'll
# use this as a template of a `config.toml` to write out which preserves
# all the various comments and whatnot.
#
# Note that the `target` section is handled separately as we'll duplicate it
# per configured target, so there's a bit of special handling for that here.
-sections = {}
-cur_section = None
-sections[None] = []
-section_order = [None]
-targets = {}
-top_level_keys = []
-
-for line in open(rust_dir + '/config.toml.example').read().split("\n"):
- if cur_section == None:
- if line.count('=') == 1:
- top_level_key = line.split('=')[0]
- top_level_key = top_level_key.strip(' #')
- top_level_keys.append(top_level_key)
- if line.startswith('['):
- cur_section = line[1:-1]
- if cur_section.startswith('target'):
- cur_section = 'target'
- elif '.' in cur_section:
- raise RuntimeError("don't know how to deal with section: {}".format(cur_section))
- sections[cur_section] = [line]
- section_order.append(cur_section)
- else:
- sections[cur_section].append(line)
-
-# Fill out the `targets` array by giving all configured targets a copy of the
-# `target` section we just loaded from the example config
-configured_targets = [build()]
-if 'build' in config:
- if 'host' in config['build']:
- configured_targets += config['build']['host']
- if 'target' in config['build']:
- configured_targets += config['build']['target']
-if 'target' in config:
- for target in config['target']:
- configured_targets.append(target)
-for target in configured_targets:
- targets[target] = sections['target'][:]
- # For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target.
- # Avoid using quotes unless it's necessary.
- targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
+def parse_example_config(known_args, config):
+ sections = {}
+ cur_section = None
+ sections[None] = []
+ section_order = [None]
+ targets = {}
+ top_level_keys = []
+
+ for line in open(rust_dir + '/config.example.toml').read().split("\n"):
+ if cur_section == None:
+ if line.count('=') == 1:
+ top_level_key = line.split('=')[0]
+ top_level_key = top_level_key.strip(' #')
+ top_level_keys.append(top_level_key)
+ if line.startswith('['):
+ cur_section = line[1:-1]
+ if cur_section.startswith('target'):
+ cur_section = 'target'
+ elif '.' in cur_section:
+ raise RuntimeError("don't know how to deal with section: {}".format(cur_section))
+ sections[cur_section] = [line]
+ section_order.append(cur_section)
+ else:
+ sections[cur_section].append(line)
+
+ # Fill out the `targets` array by giving all configured targets a copy of the
+ # `target` section we just loaded from the example config
+ configured_targets = [build(known_args)]
+ if 'build' in config:
+ if 'host' in config['build']:
+ configured_targets += config['build']['host']
+ if 'target' in config['build']:
+ configured_targets += config['build']['target']
+ if 'target' in config:
+ for target in config['target']:
+ configured_targets.append(target)
+ for target in configured_targets:
+ targets[target] = sections['target'][:]
+ # For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target.
+ # Avoid using quotes unless it's necessary.
+ targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
+
+ configure_file(sections, top_level_keys, targets, config)
+ return section_order, sections, targets
def is_number(value):
@@ -476,38 +481,67 @@ def configure_top_level_key(lines, top_level_key, value):
raise RuntimeError("failed to find config line for {}".format(top_level_key))
-for section_key, section_config in config.items():
- if section_key not in sections and section_key not in top_level_keys:
- raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
- if section_key in top_level_keys:
- configure_top_level_key(sections[None], section_key, section_config)
+# Modify `sections` to reflect the parsed arguments and example configs.
+def configure_file(sections, top_level_keys, targets, config):
+ for section_key, section_config in config.items():
+ if section_key not in sections and section_key not in top_level_keys:
+ raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
+ if section_key in top_level_keys:
+ configure_top_level_key(sections[None], section_key, section_config)
+
+ elif section_key == 'target':
+ for target in section_config:
+ configure_section(targets[target], section_config[target])
+ else:
+ configure_section(sections[section_key], section_config)
+
+
+def write_uncommented(target, f):
+ block = []
+ is_comment = True
+
+ for line in target:
+ block.append(line)
+ if len(line) == 0:
+ if not is_comment:
+ for l in block:
+ f.write(l + "\n")
+ block = []
+ is_comment = True
+ continue
+ is_comment = is_comment and line.startswith('#')
+ return f
- elif section_key == 'target':
- for target in section_config:
- configure_section(targets[target], section_config[target])
- else:
- configure_section(sections[section_key], section_config)
-# Now that we've built up our `config.toml`, write it all out in the same
-# order that we read it in.
-p("")
-p("writing `config.toml` in current directory")
-with bootstrap.output('config.toml') as f:
+def write_config_toml(writer, section_order, targets, sections):
for section in section_order:
if section == 'target':
for target in targets:
- for line in targets[target]:
- f.write(line + "\n")
+ writer = write_uncommented(targets[target], writer)
else:
- for line in sections[section]:
- f.write(line + "\n")
-
-with bootstrap.output('Makefile') as f:
- contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
- contents = open(contents).read()
- contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
- contents = contents.replace("$(CFG_PYTHON)", sys.executable)
- f.write(contents)
-
-p("")
-p("run `python {}/x.py --help`".format(rust_dir))
+ writer = write_uncommented(sections[section], writer)
+
+
+if __name__ == "__main__":
+ p("processing command line")
+ # Parse all known arguments into a configuration structure that reflects the
+ # TOML we're going to write out
+ p("")
+ section_order, sections, targets = parse_args(sys.argv[1:])
+
+ # Now that we've built up our `config.toml`, write it all out in the same
+ # order that we read it in.
+ p("")
+ p("writing `config.toml` in current directory")
+ with bootstrap.output('config.toml') as f:
+ write_config_toml(f, section_order, targets, sections)
+
+ with bootstrap.output('Makefile') as f:
+ contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
+ contents = open(contents).read()
+ contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
+ contents = contents.replace("$(CFG_PYTHON)", sys.executable)
+ f.write(contents)
+
+ p("")
+ p("run `python {}/x.py --help`".format(rust_dir))
diff --git a/src/bootstrap/defaults/config.codegen.toml b/src/bootstrap/defaults/config.codegen.toml
index 088cbd105..113df88d7 100644
--- a/src/bootstrap/defaults/config.codegen.toml
+++ b/src/bootstrap/defaults/config.codegen.toml
@@ -7,6 +7,10 @@ compiler-docs = true
# This enables debug-assertions in LLVM,
# catching logic errors in codegen much earlier in the process.
assertions = true
+# Enable warnings during the LLVM compilation
+enable-warnings = true
+# Build LLVM from source
+download-ci-llvm = false
[rust]
# This enables `RUSTC_LOG=debug`, avoiding confusing situations
@@ -17,3 +21,5 @@ debug-logging = true
incremental = true
# Print backtrace on internal compiler errors during bootstrap
backtrace-on-ice = true
+# Make the compiler and standard library faster to build, at the expense of a ~20% runtime slowdown.
+lto = "off"
diff --git a/src/bootstrap/defaults/config.user.toml b/src/bootstrap/defaults/config.user.toml
index 48ae2fe44..25d9e649f 100644
--- a/src/bootstrap/defaults/config.user.toml
+++ b/src/bootstrap/defaults/config.user.toml
@@ -8,6 +8,12 @@ doc-stage = 2
# When compiling from source, you usually want all tools.
extended = true
+# Most users installing from source want to build all parts of the project from source.
[llvm]
-# Most users installing from source want to build all parts of the project from source, not just rustc itself.
download-ci-llvm = false
+[rust]
+download-rustc = false
+
+[dist]
+# Use better compression when preparing tarballs.
+compression-profile = "balanced"
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 9b2b54961..76aad16c1 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -18,14 +18,16 @@ use std::process::Command;
use object::read::archive::ArchiveFile;
use object::BinaryFormat;
+use sha2::Digest;
+use crate::bolt::{instrument_with_bolt, optimize_with_bolt};
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
use crate::channel;
use crate::compile;
use crate::config::TargetSelection;
use crate::doc::DocumentationFormat;
-use crate::native;
+use crate::llvm;
use crate::tarball::{GeneratedTarball, OverlayKind, Tarball};
use crate::tool::{self, Tool};
use crate::util::{exe, is_dylib, output, t, timeit};
@@ -208,6 +210,8 @@ fn make_win_dist(
rustc_dlls.push("libgcc_s_seh-1.dll");
}
+ // Libraries necessary to link the windows-gnu toolchains.
+ // System libraries will be preferred if they are available (see #67429).
let target_libs = [
//MinGW libs
"libgcc.a",
@@ -221,6 +225,7 @@ fn make_win_dist(
"libmoldname.a",
"libpthread.a",
//Windows import libs
+ //This should contain only the set of libraries necessary to link the standard library.
"libadvapi32.a",
"libbcrypt.a",
"libcomctl32.a",
@@ -234,6 +239,7 @@ fn make_win_dist(
"libkernel32.a",
"libmsimg32.a",
"libmsvcrt.a",
+ "libntdll.a",
"libodbc32.a",
"libole32.a",
"liboleaut32.a",
@@ -322,7 +328,7 @@ impl Step for Mingw {
/// without any extra installed software (e.g., we bundle gcc, libraries, etc).
fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let host = self.host;
- if !host.ends_with("pc-windows-gnu") {
+ if !host.ends_with("pc-windows-gnu") || !builder.config.dist_include_mingw_linker {
return None;
}
@@ -378,7 +384,7 @@ impl Step for Rustc {
// anything requiring us to distribute a license, but it's likely the
// install will *also* include the rust-mingw package, which also needs
// licenses, so to be safe we just include it here in all MinGW packages.
- if host.ends_with("pc-windows-gnu") {
+ if host.ends_with("pc-windows-gnu") && builder.config.dist_include_mingw_linker {
make_win_dist(tarball.image_dir(), &tmpdir(builder), host, builder);
tarball.add_dir(builder.src.join("src/etc/third-party"), "share/doc");
}
@@ -889,6 +895,8 @@ impl Step for Src {
/// Creates the `rust-src` installer component
fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
+ builder.update_submodule(&Path::new("src/llvm-project"));
+
let tarball = Tarball::new_targetless(builder, "rust-src");
// A lot of tools expect the rust-src component to be entirely in this directory, so if you
@@ -965,9 +973,10 @@ impl Step for PlainSourceTarball {
"RELEASES.md",
"configure",
"x.py",
- "config.toml.example",
+ "config.example.toml",
"Cargo.toml",
"Cargo.lock",
+ ".gitmodules",
];
let src_dirs = ["src", "compiler", "library", "tests"];
@@ -1482,7 +1491,7 @@ impl Step for Extended {
let xform = |p: &Path| {
let mut contents = t!(fs::read_to_string(p));
- for tool in &["rust-demangler", "miri"] {
+ for tool in &["rust-demangler", "miri", "rust-docs"] {
if !built_tools.contains(tool) {
contents = filter(&contents, tool);
}
@@ -1579,11 +1588,10 @@ impl Step for Extended {
prepare("rustc");
prepare("cargo");
prepare("rust-analysis");
- prepare("rust-docs");
prepare("rust-std");
prepare("clippy");
prepare("rust-analyzer");
- for tool in &["rust-demangler", "miri"] {
+ for tool in &["rust-docs", "rust-demangler", "miri"] {
if built_tools.contains(tool) {
prepare(tool);
}
@@ -1618,23 +1626,25 @@ impl Step for Extended {
.arg("-out")
.arg(exe.join("RustcGroup.wxs")),
);
- builder.run(
- Command::new(&heat)
- .current_dir(&exe)
- .arg("dir")
- .arg("rust-docs")
- .args(&heat_flags)
- .arg("-cg")
- .arg("DocsGroup")
- .arg("-dr")
- .arg("Docs")
- .arg("-var")
- .arg("var.DocsDir")
- .arg("-out")
- .arg(exe.join("DocsGroup.wxs"))
- .arg("-t")
- .arg(etc.join("msi/squash-components.xsl")),
- );
+ if built_tools.contains("rust-docs") {
+ builder.run(
+ Command::new(&heat)
+ .current_dir(&exe)
+ .arg("dir")
+ .arg("rust-docs")
+ .args(&heat_flags)
+ .arg("-cg")
+ .arg("DocsGroup")
+ .arg("-dr")
+ .arg("Docs")
+ .arg("-var")
+ .arg("var.DocsDir")
+ .arg("-out")
+ .arg(exe.join("DocsGroup.wxs"))
+ .arg("-t")
+ .arg(etc.join("msi/squash-components.xsl")),
+ );
+ }
builder.run(
Command::new(&heat)
.current_dir(&exe)
@@ -1781,7 +1791,6 @@ impl Step for Extended {
cmd.current_dir(&exe)
.arg("-nologo")
.arg("-dRustcDir=rustc")
- .arg("-dDocsDir=rust-docs")
.arg("-dCargoDir=cargo")
.arg("-dStdDir=rust-std")
.arg("-dAnalysisDir=rust-analysis")
@@ -1793,6 +1802,9 @@ impl Step for Extended {
.arg(&input);
add_env(builder, &mut cmd, target);
+ if built_tools.contains("rust-docs") {
+ cmd.arg("-dDocsDir=rust-docs");
+ }
if built_tools.contains("rust-demangler") {
cmd.arg("-dRustDemanglerDir=rust-demangler");
}
@@ -1811,7 +1823,9 @@ impl Step for Extended {
candle(&etc.join("msi/ui.wxs"));
candle(&etc.join("msi/rustwelcomedlg.wxs"));
candle("RustcGroup.wxs".as_ref());
- candle("DocsGroup.wxs".as_ref());
+ if built_tools.contains("rust-docs") {
+ candle("DocsGroup.wxs".as_ref());
+ }
candle("CargoGroup.wxs".as_ref());
candle("StdGroup.wxs".as_ref());
candle("ClippyGroup.wxs".as_ref());
@@ -1848,7 +1862,6 @@ impl Step for Extended {
.arg("ui.wixobj")
.arg("rustwelcomedlg.wixobj")
.arg("RustcGroup.wixobj")
- .arg("DocsGroup.wixobj")
.arg("CargoGroup.wixobj")
.arg("StdGroup.wixobj")
.arg("AnalysisGroup.wixobj")
@@ -1864,6 +1877,9 @@ impl Step for Extended {
if built_tools.contains("rust-demangler") {
cmd.arg("RustDemanglerGroup.wixobj");
}
+ if built_tools.contains("rust-docs") {
+ cmd.arg("DocsGroup.wixobj");
+ }
if target.ends_with("windows-gnu") {
cmd.arg("GccGroup.wixobj");
@@ -1904,6 +1920,26 @@ fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) {
}
}
+fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) {
+ if builder.config.dry_run() {
+ return;
+ }
+
+ // After LLVM is built, we modify (instrument or optimize) the libLLVM.so library file.
+ // This is not done in-place so that the built LLVM files are not "tainted" with BOLT.
+ // We perform the instrumentation/optimization here, on the fly, just before the files are
+ // packaged into their destination directory.
+ let postprocessed = if builder.config.llvm_bolt_profile_generate {
+ builder.ensure(BoltInstrument::new(source.to_path_buf()))
+ } else if let Some(path) = &builder.config.llvm_bolt_profile_use {
+ builder.ensure(BoltOptimize::new(source.to_path_buf(), path.into()))
+ } else {
+ source.to_path_buf()
+ };
+
+ builder.install(&postprocessed, destination, 0o644);
+}
+
/// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking.
///
/// Returns whether the files were actually copied.
@@ -1927,6 +1963,20 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
}
}
+ // FIXME: for reasons I don't understand, the LLVM `.so` in the `rustc` component is different from the one in `rust-dev`.
+ // Only the one in `rustc` works with the downloaded compiler.
+ if builder.download_rustc() && target == builder.build.build {
+ let src_libdir = builder.ci_rustc_dir(target).join("lib");
+ for entry in t!(std::fs::read_dir(&src_libdir)) {
+ let entry = t!(entry);
+ if entry.file_name().to_str().unwrap().starts_with("libLLVM-") {
+ install_llvm_file(builder, &entry.path(), dst_libdir);
+ return !builder.config.dry_run();
+ }
+ }
+ panic!("libLLVM.so not found in src_libdir {}!", src_libdir.display());
+ }
+
// On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib
// instead of libLLVM-11-rust-....dylib, as on linux. It's not entirely
// clear why this is the case, though. llvm-config will emit the versioned
@@ -1939,8 +1989,8 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
builder.install(&llvm_dylib_path, dst_libdir, 0o644);
}
!builder.config.dry_run()
- } else if let Ok(native::LlvmResult { llvm_config, .. }) =
- native::prebuilt_llvm_config(builder, target)
+ } else if let Ok(llvm::LlvmResult { llvm_config, .. }) =
+ llvm::prebuilt_llvm_config(builder, target)
{
let mut cmd = Command::new(llvm_config);
cmd.arg("--libfiles");
@@ -1955,7 +2005,7 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
} else {
PathBuf::from(file)
};
- builder.install(&file, dst_libdir, 0o644);
+ install_llvm_file(builder, &file, dst_libdir);
}
!builder.config.dry_run()
} else {
@@ -1986,6 +2036,117 @@ pub fn maybe_install_llvm_runtime(builder: &Builder<'_>, target: TargetSelection
}
}
+/// Creates an output path to a BOLT-manipulated artifact for the given `file`.
+/// The hash of the file is used to make sure that we don't mix BOLT artifacts amongst different
+/// files with the same name.
+///
+/// We need to keep the file-name the same though, to make sure that copying the manipulated file
+/// to a directory will not change the final file path.
+fn create_bolt_output_path(builder: &Builder<'_>, file: &Path, hash: &str) -> PathBuf {
+ let directory = builder.out.join("bolt").join(hash);
+ t!(fs::create_dir_all(&directory));
+ directory.join(file.file_name().unwrap())
+}
+
+/// Instrument the provided file with BOLT.
+/// Returns a path to the instrumented artifact.
+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub struct BoltInstrument {
+ file: PathBuf,
+ hash: String,
+}
+
+impl BoltInstrument {
+ fn new(file: PathBuf) -> Self {
+ let mut hasher = sha2::Sha256::new();
+ hasher.update(t!(fs::read(&file)));
+ let hash = hex::encode(hasher.finalize().as_slice());
+
+ Self { file, hash }
+ }
+}
+
+impl Step for BoltInstrument {
+ type Output = PathBuf;
+
+ const ONLY_HOSTS: bool = false;
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.never()
+ }
+
+ fn run(self, builder: &Builder<'_>) -> PathBuf {
+ if builder.build.config.dry_run() {
+ return self.file.clone();
+ }
+
+ if builder.build.config.llvm_from_ci {
+ println!("warning: trying to use BOLT with LLVM from CI, this will probably not work");
+ }
+
+ println!("Instrumenting {} with BOLT", self.file.display());
+
+ let output_path = create_bolt_output_path(builder, &self.file, &self.hash);
+ if !output_path.is_file() {
+ instrument_with_bolt(&self.file, &output_path);
+ }
+ output_path
+ }
+}
+
+/// Optimize the provided file with BOLT.
+/// Returns a path to the optimized artifact.
+///
+/// The hash is stored in the step to make sure that we don't optimize the same file
+/// twice (even under different file paths).
+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub struct BoltOptimize {
+ file: PathBuf,
+ profile: PathBuf,
+ hash: String,
+}
+
+impl BoltOptimize {
+ fn new(file: PathBuf, profile: PathBuf) -> Self {
+ let mut hasher = sha2::Sha256::new();
+ hasher.update(t!(fs::read(&file)));
+ hasher.update(t!(fs::read(&profile)));
+ let hash = hex::encode(hasher.finalize().as_slice());
+
+ Self { file, profile, hash }
+ }
+}
+
+impl Step for BoltOptimize {
+ type Output = PathBuf;
+
+ const ONLY_HOSTS: bool = false;
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.never()
+ }
+
+ fn run(self, builder: &Builder<'_>) -> PathBuf {
+ if builder.build.config.dry_run() {
+ return self.file.clone();
+ }
+
+ if builder.build.config.llvm_from_ci {
+ println!("warning: trying to use BOLT with LLVM from CI, this will probably not work");
+ }
+
+ println!("Optimizing {} with BOLT", self.file.display());
+
+ let output_path = create_bolt_output_path(builder, &self.file, &self.hash);
+ if !output_path.is_file() {
+ optimize_with_bolt(&self.file, &self.profile, &output_path);
+ }
+ output_path
+ }
+}
+
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct LlvmTools {
pub target: TargetSelection,
@@ -2017,7 +2178,7 @@ impl Step for LlvmTools {
}
}
- builder.ensure(crate::native::Llvm { target });
+ builder.ensure(crate::llvm::Llvm { target });
let mut tarball = Tarball::new(builder, "llvm-tools", &target.triple);
tarball.set_overlay(OverlayKind::LLVM);
@@ -2076,10 +2237,10 @@ impl Step for RustDev {
let mut tarball = Tarball::new(builder, "rust-dev", &target.triple);
tarball.set_overlay(OverlayKind::LLVM);
- builder.ensure(crate::native::Llvm { target });
+ builder.ensure(crate::llvm::Llvm { target });
// We want to package `lld` to use it with `download-ci-llvm`.
- builder.ensure(crate::native::Lld { target });
+ builder.ensure(crate::llvm::Lld { target });
let src_bindir = builder.llvm_out(target).join("bin");
// If updating this list, you likely want to change
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index cc80763ef..9ad98eb57 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -696,7 +696,7 @@ impl Step for Rustc {
cargo.rustdocflag("-Znormalize-docs");
cargo.rustdocflag("--show-type-layout");
cargo.rustdocflag("--generate-link-to-definition");
- compile::rustc_cargo(builder, &mut cargo, target);
+ compile::rustc_cargo(builder, &mut cargo, target, compiler.stage);
cargo.arg("-Zunstable-options");
cargo.arg("-Zskip-rustdoc-fingerprint");
@@ -882,6 +882,7 @@ tool_doc!(
// "cargo-credential-wincred",
]
);
+tool_doc!(Tidy, "tidy", "src/tools/tidy", ["tidy"]);
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
@@ -1026,10 +1027,11 @@ impl Step for RustcBook {
if self.validate {
cmd.arg("--validate");
}
- if !builder.unstable_features() {
- // We need to validate nightly features, even on the stable channel.
- cmd.env("RUSTC_BOOTSTRAP", "1");
- }
+ // We need to validate nightly features, even on the stable channel.
+ // Set this unconditionally, as the stage0 compiler may be the one used to
+ // build the documentation.
+ cmd.env("RUSTC_BOOTSTRAP", "1");
+
// If the lib directories are in an unusual location (changed in
// config.toml), then this needs to explicitly update the dylib search
// path.
diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp
index 94630e40f..36f9aaa59 100644
--- a/src/bootstrap/download-ci-llvm-stamp
+++ b/src/bootstrap/download-ci-llvm-stamp
@@ -1,4 +1,4 @@
Change this file to make users of the `download-ci-llvm` configuration download
a new version of LLVM from CI, even if the LLVM submodule hasn’t changed.
-Last change is for: https://github.com/rust-lang/rust/pull/104748
+Last change is for: https://github.com/rust-lang/rust/pull/109373
diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs
index d1e2149d3..242515565 100644
--- a/src/bootstrap/download.rs
+++ b/src/bootstrap/download.rs
@@ -12,7 +12,7 @@ use xz2::bufread::XzDecoder;
use crate::{
config::RustfmtMetadata,
- native::detect_llvm_sha,
+ llvm::detect_llvm_sha,
t,
util::{check_run, exe, program_out_of_date, try_run},
Config,
@@ -367,26 +367,70 @@ impl Config {
pub(crate) fn download_ci_rustc(&self, commit: &str) {
self.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})"));
+
let version = self.artifact_version_part(commit);
+ // download-rustc doesn't need its own cargo; it can just use beta's. But it does need the
+ // `rustc_private` crates for tools.
+ let extra_components = ["rustc-dev"];
+
+ self.download_toolchain(
+ &version,
+ "ci-rustc",
+ commit,
+ &extra_components,
+ Self::download_ci_component,
+ );
+ }
+
+ pub(crate) fn download_beta_toolchain(&self) {
+ self.verbose(&format!("downloading stage0 beta artifacts"));
+
+ let date = &self.stage0_metadata.compiler.date;
+ let version = &self.stage0_metadata.compiler.version;
+ let extra_components = ["cargo"];
+
+ let download_beta_component = |config: &Config, filename, prefix: &_, date: &_| {
+ config.download_component(DownloadSource::Dist, filename, prefix, date, "stage0")
+ };
+
+ self.download_toolchain(
+ version,
+ "stage0",
+ date,
+ &extra_components,
+ download_beta_component,
+ );
+ }
+
+ fn download_toolchain(
+ &self,
+ // FIXME(ozkanonur) use CompilerMetadata instead of `version: &str`
+ version: &str,
+ sysroot: &str,
+ stamp_key: &str,
+ extra_components: &[&str],
+ download_component: fn(&Config, String, &str, &str),
+ ) {
let host = self.build.triple;
- let bin_root = self.out.join(host).join("ci-rustc");
+ let bin_root = self.out.join(host).join(sysroot);
let rustc_stamp = bin_root.join(".rustc-stamp");
- if !bin_root.join("bin").join("rustc").exists() || program_out_of_date(&rustc_stamp, commit)
+ if !bin_root.join("bin").join(exe("rustc", self.build)).exists()
+ || program_out_of_date(&rustc_stamp, stamp_key)
{
if bin_root.exists() {
t!(fs::remove_dir_all(&bin_root));
}
let filename = format!("rust-std-{version}-{host}.tar.xz");
let pattern = format!("rust-std-{host}");
- self.download_ci_component(filename, &pattern, commit);
+ download_component(self, filename, &pattern, stamp_key);
let filename = format!("rustc-{version}-{host}.tar.xz");
- self.download_ci_component(filename, "rustc", commit);
- // download-rustc doesn't need its own cargo, it can just use beta's.
- let filename = format!("rustc-dev-{version}-{host}.tar.xz");
- self.download_ci_component(filename, "rustc-dev", commit);
- let filename = format!("rust-src-{version}.tar.xz");
- self.download_ci_component(filename, "rust-src", commit);
+ download_component(self, filename, "rustc", stamp_key);
+
+ for component in extra_components {
+ let filename = format!("{component}-{version}-{host}.tar.xz");
+ download_component(self, filename, component, stamp_key);
+ }
if self.should_fix_bins_and_dylibs() {
self.fix_bin_or_dylib(&bin_root.join("bin").join("rustc"));
@@ -403,7 +447,7 @@ impl Config {
}
}
- t!(fs::write(rustc_stamp, commit));
+ t!(fs::write(rustc_stamp, stamp_key));
}
}
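
The refactor above threads a plain `fn(&Config, String, &str, &str)` pointer through `download_toolchain`, so the CI path can pass `Self::download_ci_component` while the beta path passes a non-capturing closure. A simplified, self-contained analogue (the types and names below are invented for illustration, not bootstrap's real ones):

    struct Config;

    impl Config {
        fn download_component(&self, filename: String, prefix: &str, stamp_key: &str) {
            println!("fetch {filename} ({prefix}, stamped with {stamp_key})");
        }
    }

    fn download_toolchain(
        config: &Config,
        version: &str,
        stamp_key: &str,
        components: &[&str],
        download_component: fn(&Config, String, &str, &str),
    ) {
        for component in components {
            let filename = format!("{component}-{version}.tar.xz");
            download_component(config, filename, component, stamp_key);
        }
    }

    fn main() {
        let config = Config;
        // Non-capturing closures coerce to `fn` pointers, which is what lets
        // `download_beta_toolchain` hand its own downloader to the shared routine.
        let beta = |config: &Config, filename, prefix: &_, stamp_key: &_| {
            config.download_component(filename, prefix, stamp_key)
        };
        download_toolchain(&config, "1.70.0-beta", "stage0", &["cargo", "rustc-dev"], beta);
    }
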
diff --git a/src/bootstrap/dylib_util.rs b/src/bootstrap/dylib_util.rs
index 6d75272c5..b14c0bed6 100644
--- a/src/bootstrap/dylib_util.rs
+++ b/src/bootstrap/dylib_util.rs
@@ -12,6 +12,8 @@ pub fn dylib_path_var() -> &'static str {
"DYLD_LIBRARY_PATH"
} else if cfg!(target_os = "haiku") {
"LIBRARY_PATH"
+ } else if cfg!(target_os = "aix") {
+ "LIBPATH"
} else {
"LD_LIBRARY_PATH"
}
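
For context on why the new `aix` arm matters: bootstrap prepends its own library directories to whatever environment variable `dylib_path_var()` names before spawning tools. A minimal sketch of that pattern (the helper below is invented for illustration, not bootstrap's actual code):

    use std::env;
    use std::ffi::OsString;
    use std::path::PathBuf;

    // Prepend `dir` to the dynamic-library search path named by `var`
    // ("LIBPATH" on AIX, "LD_LIBRARY_PATH" on most other Unixes).
    fn prepend_dylib_path(var: &str, dir: PathBuf) -> OsString {
        let mut parts = vec![dir];
        if let Some(existing) = env::var_os(var) {
            parts.extend(env::split_paths(&existing));
        }
        env::join_paths(parts).expect("path entries must not contain the separator")
    }

    fn main() {
        let value = prepend_dylib_path("LD_LIBRARY_PATH", PathBuf::from("/tmp/lib"));
        println!("{}", value.to_string_lossy());
    }
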
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 9d1504c34..b6f5f3103 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -67,8 +67,6 @@ pub struct Flags {
// true => deny, false => warn
pub deny_warnings: Option<bool>,
- pub llvm_skip_rebuild: Option<bool>,
-
pub rust_profile_use: Option<String>,
pub rust_profile_generate: Option<String>,
@@ -86,8 +84,7 @@ pub struct Flags {
pub free_args: Option<Vec<String>>,
}
-#[derive(Debug)]
-#[cfg_attr(test, derive(Clone))]
+#[derive(Debug, Clone)]
pub enum Subcommand {
Build {
paths: Vec<PathBuf>,
@@ -151,6 +148,9 @@ pub enum Subcommand {
Setup {
profile: Option<PathBuf>,
},
+ Suggest {
+ run: bool,
+ },
}
impl Default for Subcommand {
@@ -185,6 +185,7 @@ Subcommands:
install Install distribution artifacts
run, r Run tools contained in this repository
setup Create a config.toml (making it easier to use `x.py` itself)
+ suggest Suggest a subset of tests to run, based on modified files
To learn more about a subcommand, run `./x.py <subcommand> -h`",
);
@@ -251,14 +252,6 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
opts.optopt("", "color", "whether to use color in cargo and rustc output", "STYLE");
opts.optopt(
"",
- "llvm-skip-rebuild",
- "whether rebuilding llvm should be skipped \
- a VALUE of TRUE indicates that llvm will not be rebuilt \
- VALUE overrides the skip-rebuild option in config.toml.",
- "VALUE",
- );
- opts.optopt(
- "",
"rust-profile-generate",
"generate PGO profile with rustc build",
"PROFILE",
@@ -359,6 +352,9 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
Kind::Run => {
opts.optmulti("", "args", "arguments for the tool", "ARGS");
}
+ Kind::Suggest => {
+ opts.optflag("", "run", "run suggested tests");
+ }
_ => {}
};
@@ -575,7 +571,7 @@ Arguments:
Profile::all_for_help(" ").trim_end()
));
}
- Kind::Bench | Kind::Clean | Kind::Dist | Kind::Install => {}
+ Kind::Bench | Kind::Clean | Kind::Dist | Kind::Install | Kind::Suggest => {}
};
// Get any optional paths which occur after the subcommand
let mut paths = matches.free[1..].iter().map(|p| p.into()).collect::<Vec<PathBuf>>();
@@ -636,6 +632,7 @@ Arguments:
Kind::Format => Subcommand::Format { check: matches.opt_present("check"), paths },
Kind::Dist => Subcommand::Dist { paths },
Kind::Install => Subcommand::Install { paths },
+ Kind::Suggest => Subcommand::Suggest { run: matches.opt_present("run") },
Kind::Run => {
if paths.is_empty() {
println!("\nrun requires at least a path!\n");
@@ -714,9 +711,6 @@ Arguments:
.collect::<Vec<_>>(),
include_default_paths: matches.opt_present("include-default-paths"),
deny_warnings: parse_deny_warnings(&matches),
- llvm_skip_rebuild: matches.opt_str("llvm-skip-rebuild").map(|s| s.to_lowercase()).map(
- |s| s.parse::<bool>().expect("`llvm-skip-rebuild` should be either true or false"),
- ),
color: matches
.opt_get_default("color", Color::Auto)
.expect("`color` should be `always`, `never`, or `auto`"),
@@ -747,6 +741,7 @@ impl Subcommand {
Subcommand::Install { .. } => Kind::Install,
Subcommand::Run { .. } => Kind::Run,
Subcommand::Setup { .. } => Kind::Setup,
+ Subcommand::Suggest { .. } => Kind::Suggest,
}
}
diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs
index 6d5753e8a..b79969663 100644
--- a/src/bootstrap/format.rs
+++ b/src/bootstrap/format.rs
@@ -2,6 +2,7 @@
use crate::builder::Builder;
use crate::util::{output, program_out_of_date, t};
+use build_helper::ci::CiEnv;
use build_helper::git::get_git_modified_files;
use ignore::WalkBuilder;
use std::collections::VecDeque;
@@ -144,8 +145,10 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
let untracked_paths = untracked_paths_output
.lines()
.filter(|entry| entry.starts_with("??"))
- .map(|entry| {
- entry.split(' ').nth(1).expect("every git status entry should list a path")
+ .filter_map(|entry| {
+ let path =
+ entry.split(' ').nth(1).expect("every git status entry should list a path");
+ path.ends_with(".rs").then_some(path)
});
for untracked_path in untracked_paths {
println!("skip untracked path {} during rustfmt invocations", untracked_path);
@@ -156,11 +159,20 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
// preventing the latter from being formatted.
ignore_fmt.add(&format!("!/{}", untracked_path)).expect(&untracked_path);
}
- if !check && paths.is_empty() {
+ // Only check modified files locally to speed up runtime.
+ // We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through;
+ // we also care about CI time less since this is still very fast compared to building the compiler.
+ if !CiEnv::is_ci() && paths.is_empty() {
match get_modified_rs_files(build) {
Ok(Some(files)) => {
+ if files.len() <= 10 {
+ for file in &files {
+ println!("formatting modified file {file}");
+ }
+ } else {
+ println!("formatting {} modified files", files.len());
+ }
for file in files {
- println!("formatting modified file {file}");
ignore_fmt.add(&format!("/{file}")).expect(&file);
}
}
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index ac3843c33..42d895a34 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -210,10 +210,13 @@ install!((self, builder, _config),
}
};
LlvmTools, alias = "llvm-tools", Self::should_build(_config), only_hosts: true, {
- let tarball = builder
- .ensure(dist::LlvmTools { target: self.target })
- .expect("missing llvm-tools");
- install_sh(builder, "llvm-tools", self.compiler.stage, Some(self.target), &tarball);
+ if let Some(tarball) = builder.ensure(dist::LlvmTools { target: self.target }) {
+ install_sh(builder, "llvm-tools", self.compiler.stage, Some(self.target), &tarball);
+ } else {
+ builder.info(
+ &format!("skipping llvm-tools stage{} ({}): external LLVM", self.compiler.stage, self.target),
+ );
+ }
};
Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, {
if let Some(tarball) = builder.ensure(dist::Rustfmt {
diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs
index 5c0322e18..4fb00f65d 100644
--- a/src/bootstrap/job.rs
+++ b/src/bootstrap/job.rs
@@ -27,52 +27,54 @@
//! Note that this module has a #[cfg(windows)] above it as none of this logic
//! is required on Unix.
-#![allow(nonstandard_style, dead_code)]
-
use crate::Build;
use std::env;
+use std::ffi::c_void;
use std::io;
use std::mem;
-use std::ptr;
-use winapi::shared::minwindef::{DWORD, FALSE, LPVOID};
-use winapi::um::errhandlingapi::SetErrorMode;
-use winapi::um::handleapi::{CloseHandle, DuplicateHandle};
-use winapi::um::jobapi2::{AssignProcessToJobObject, CreateJobObjectW, SetInformationJobObject};
-use winapi::um::processthreadsapi::{GetCurrentProcess, OpenProcess};
-use winapi::um::winbase::{BELOW_NORMAL_PRIORITY_CLASS, SEM_NOGPFAULTERRORBOX};
-use winapi::um::winnt::{
- JobObjectExtendedLimitInformation, DUPLICATE_SAME_ACCESS, JOBOBJECT_EXTENDED_LIMIT_INFORMATION,
- JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, JOB_OBJECT_LIMIT_PRIORITY_CLASS, PROCESS_DUP_HANDLE,
+use windows::{
+ core::PCWSTR,
+ Win32::Foundation::{CloseHandle, DuplicateHandle, DUPLICATE_SAME_ACCESS, HANDLE},
+ Win32::System::Diagnostics::Debug::{SetErrorMode, SEM_NOGPFAULTERRORBOX, THREAD_ERROR_MODE},
+ Win32::System::JobObjects::{
+ AssignProcessToJobObject, CreateJobObjectW, JobObjectExtendedLimitInformation,
+ SetInformationJobObject, JOBOBJECT_EXTENDED_LIMIT_INFORMATION,
+ JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, JOB_OBJECT_LIMIT_PRIORITY_CLASS,
+ },
+ Win32::System::Threading::{
+ GetCurrentProcess, OpenProcess, BELOW_NORMAL_PRIORITY_CLASS, PROCESS_DUP_HANDLE,
+ },
};
pub unsafe fn setup(build: &mut Build) {
// Enable the Windows Error Reporting dialog which msys disables,
// so we can JIT debug rustc
- let mode = SetErrorMode(0);
+ let mode = SetErrorMode(THREAD_ERROR_MODE::default());
+ let mode = THREAD_ERROR_MODE(mode);
SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX);
// Create a new job object for us to use
- let job = CreateJobObjectW(ptr::null_mut(), ptr::null());
- assert!(!job.is_null(), "{}", io::Error::last_os_error());
+ let job = CreateJobObjectW(None, PCWSTR::null()).unwrap();
// Indicate that when all handles to the job object are gone, all
// processes in the object should be killed. Note that this includes our
// entire process tree by default because we've added ourselves, and our
// children will reside in the job by default.
- let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
+ let mut info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION::default();
info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
if build.config.low_priority {
info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS;
- info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS;
+ info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS.0;
}
let r = SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
- &mut info as *mut _ as LPVOID,
- mem::size_of_val(&info) as DWORD,
- );
- assert!(r != 0, "{}", io::Error::last_os_error());
+ &info as *const _ as *const c_void,
+ mem::size_of_val(&info) as u32,
+ )
+ .ok();
+ assert!(r.is_ok(), "{}", io::Error::last_os_error());
// Assign our process to this job object. Note that if this fails, one very
// likely reason is that we are ourselves already in a job object! This can
@@ -83,8 +85,8 @@ pub unsafe fn setup(build: &mut Build) {
// Also note that nested jobs (the reason this might fail) are supported in recent
// versions of Windows, but the version of Windows our bots are running, at least,
// doesn't support nested job objects.
- let r = AssignProcessToJobObject(job, GetCurrentProcess());
- if r == 0 {
+ let r = AssignProcessToJobObject(job, GetCurrentProcess()).ok();
+ if r.is_err() {
CloseHandle(job);
return;
}
@@ -102,31 +104,32 @@ pub unsafe fn setup(build: &mut Build) {
Err(..) => return,
};
- let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
-
- // If we get a null parent pointer here, it is possible that either
- // we have got an invalid pid or the parent process has been closed.
- // Since the first case rarely happens
- // (only when wrongly setting the environmental variable),
- // so it might be better to improve the experience of the second case
- // when users have interrupted the parent process and we don't finish
- // duplicating the handle yet.
- // We just need close the job object if that occurs.
- if parent.is_null() {
- CloseHandle(job);
- return;
- }
+ let parent = match OpenProcess(PROCESS_DUP_HANDLE, false, pid.parse().unwrap()).ok() {
+ Some(parent) => parent,
+ _ => {
+ // If we fail to open the parent process here, it is possible that either
+ // we have an invalid pid or the parent process has been closed.
+ // Since the first case rarely happens
+ // (only when the environment variable is set incorrectly),
+ // it might be better to improve the experience of the second case,
+ // when users have interrupted the parent process and we haven't finished
+ // duplicating the handle yet. We just need to close the job object if that occurs.
+ CloseHandle(job);
+ return;
+ }
+ };
- let mut parent_handle = ptr::null_mut();
+ let mut parent_handle = HANDLE::default();
let r = DuplicateHandle(
GetCurrentProcess(),
job,
parent,
&mut parent_handle,
0,
- FALSE,
+ false,
DUPLICATE_SAME_ACCESS,
- );
+ )
+ .ok();
// If this failed, well at least we tried! An example of DuplicateHandle
// failing in the past has been when the wrong python2 package spawned this
@@ -134,7 +137,7 @@ pub unsafe fn setup(build: &mut Build) {
// `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
// mode" here is that we only clean everything up when the build system
// dies, not when the python parent does, so not too bad.
- if r != 0 {
+ if r.is_err() {
CloseHandle(job);
}
}
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 950f3b151..419bcbc63 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -21,7 +21,6 @@ use std::collections::{HashMap, HashSet};
use std::env;
use std::fs::{self, File};
use std::io;
-use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::str;
@@ -53,11 +52,13 @@ mod download;
mod flags;
mod format;
mod install;
+mod llvm;
mod metadata;
-mod native;
+mod render_tests;
mod run;
mod sanity;
mod setup;
+mod suggest;
mod tarball;
mod test;
mod tool;
@@ -88,6 +89,7 @@ pub use crate::builder::PathSet;
use crate::cache::{Interned, INTERNER};
pub use crate::config::Config;
pub use crate::flags::Subcommand;
+use termcolor::{ColorChoice, StandardStream, WriteColor};
const LLVM_TOOLS: &[&str] = &[
"llvm-cov", // used to generate coverage report
@@ -123,11 +125,13 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
(Some(Mode::Std), "no_rc", None),
(Some(Mode::Std), "no_sync", None),
(Some(Mode::Std), "freebsd12", None),
+ (Some(Mode::Std), "freebsd13", None),
(Some(Mode::Std), "backtrace_in_libstd", None),
/* Extra values not defined in the built-in targets yet, but used in std */
(Some(Mode::Std), "target_env", Some(&["libnx"])),
// (Some(Mode::Std), "target_os", Some(&[])),
- (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa"])),
+ // #[cfg(bootstrap)] loongarch64
+ (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa", "loongarch64"])),
/* Extra names used by dependencies */
// FIXME: Used by serde_json, but we should not be triggering on external dependencies.
(Some(Mode::Rustc), "no_btreemap_remove_entry", None),
@@ -144,6 +148,11 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
// FIXME: Used by filetime, but we should not be triggering on external dependencies.
(Some(Mode::Rustc), "emulate_second_only_system", None),
(Some(Mode::ToolRustc), "emulate_second_only_system", None),
+ // Avoids the need to copy windows.lib into the sysroot.
+ (Some(Mode::Rustc), "windows_raw_dylib", None),
+ (Some(Mode::ToolRustc), "windows_raw_dylib", None),
+ // #[cfg(bootstrap)] ohos
+ (Some(Mode::Std), "target_env", Some(&["ohos"])),
];
/// A structure representing a Rust compiler.
@@ -182,6 +191,7 @@ pub enum GitRepo {
/// although most functions are implemented as free functions rather than
/// methods specifically on this structure itself (to make it easier to
/// organize).
+#[cfg_attr(not(feature = "build-metrics"), derive(Clone))]
pub struct Build {
/// User-specified configuration from `config.toml`.
config: Config,
@@ -235,7 +245,7 @@ pub struct Build {
metrics: metrics::BuildMetrics,
}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
struct Crate {
name: Interned<String>,
deps: HashSet<Interned<String>>,
@@ -351,14 +361,14 @@ impl Build {
#[cfg(not(unix))]
let is_sudo = false;
- let ignore_git = config.ignore_git;
- let rust_info = channel::GitInfo::new(ignore_git, &src);
- let cargo_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/cargo"));
+ let omit_git_hash = config.omit_git_hash;
+ let rust_info = channel::GitInfo::new(omit_git_hash, &src);
+ let cargo_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/cargo"));
let rust_analyzer_info =
- channel::GitInfo::new(ignore_git, &src.join("src/tools/rust-analyzer"));
- let clippy_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/clippy"));
- let miri_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/miri"));
- let rustfmt_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/rustfmt"));
+ channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rust-analyzer"));
+ let clippy_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/clippy"));
+ let miri_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/miri"));
+ let rustfmt_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rustfmt"));
// we always try to use git for LLVM builds
let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
@@ -482,12 +492,7 @@ impl Build {
// Make sure we update these before gathering metadata so we don't get an error about missing
// Cargo.toml files.
- let rust_submodules = [
- "src/tools/rust-installer",
- "src/tools/cargo",
- "library/backtrace",
- "library/stdarch",
- ];
+ let rust_submodules = ["src/tools/cargo", "library/backtrace", "library/stdarch"];
for s in rust_submodules {
build.update_submodule(Path::new(s));
}
@@ -502,16 +507,18 @@ impl Build {
let build_triple = build.out.join(&build.build.triple);
t!(fs::create_dir_all(&build_triple));
let host = build.out.join("host");
- if let Err(e) = symlink_dir(&build.config, &build_triple, &host) {
- if e.kind() != ErrorKind::AlreadyExists {
- panic!(
- "symlink_dir({} => {}) failed with {}",
- host.display(),
- build_triple.display(),
- e
- );
- }
- }
+ if host.is_symlink() {
+ // Left over from a previous build; overwrite it.
+ // This matters if `build.build` has changed between invocations.
+ #[cfg(windows)]
+ t!(fs::remove_dir(&host));
+ #[cfg(not(windows))]
+ t!(fs::remove_file(&host));
+ }
+ t!(
+ symlink_dir(&build.config, &build_triple, &host),
+ format!("symlink_dir({} => {}) failed", host.display(), build_triple.display())
+ );
build
}
@@ -652,13 +659,20 @@ impl Build {
job::setup(self);
}
- if let Subcommand::Format { check, paths } = &self.config.cmd {
- return format::format(&builder::Builder::new(&self), *check, &paths);
- }
-
// Download rustfmt early so that it can be used in rust-analyzer configs.
let _ = &builder::Builder::new(&self).initial_rustfmt();
+ // hardcoded subcommands
+ match &self.config.cmd {
+ Subcommand::Format { check, paths } => {
+ return format::format(&builder::Builder::new(&self), *check, &paths);
+ }
+ Subcommand::Suggest { run } => {
+ return suggest::suggest(&builder::Builder::new(&self), *run);
+ }
+ _ => (),
+ }
+
{
let builder = builder::Builder::new(&self);
if let Some(path) = builder.paths.get(0) {
@@ -800,6 +814,11 @@ impl Build {
self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir())
}
+ /// Directory where the extracted `rustc-dev` component is stored.
+ fn ci_rustc_dir(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("ci-rustc")
+ }
+
/// Root output directory for LLVM compiled for `target`
///
/// Note that if LLVM is configured externally then the directory returned
@@ -1204,12 +1223,22 @@ impl Build {
///
/// When all of these conditions are met the build will lift artifacts from
/// the previous stage forward.
- fn force_use_stage1(&self, compiler: Compiler, target: TargetSelection) -> bool {
+ fn force_use_stage1(&self, stage: u32, target: TargetSelection) -> bool {
!self.config.full_bootstrap
- && compiler.stage >= 2
+ && !self.config.download_rustc()
+ && stage >= 2
&& (self.hosts.iter().any(|h| *h == target) || target == self.build)
}
+ /// Checks whether the compiler at the given `stage` should be forced to
+ /// use a stage2 compiler instead.
+ ///
+ /// When we download the pre-compiled rustc and the compiler stage is >= 2,
+ /// it should be forced to use a stage2 compiler.
+ fn force_use_stage2(&self, stage: u32) -> bool {
+ self.config.download_rustc() && stage >= 2
+ }
+
/// Given `num` in the form "a.b.c" return a "release string" which
/// describes the release version number.
///
@@ -1219,7 +1248,7 @@ impl Build {
match &self.config.channel[..] {
"stable" => num.to_string(),
"beta" => {
- if self.rust_info().is_managed_git_subrepository() && !self.config.ignore_git {
+ if self.rust_info().is_managed_git_subrepository() && !self.config.omit_git_hash {
format!("{}-beta.{}", num, self.beta_prerelease_version())
} else {
format!("{}-beta", num)
@@ -1575,6 +1604,31 @@ to download LLVM rather than building it.
self.config.ninja_in_file
}
+
+ pub fn colored_stdout<R, F: FnOnce(&mut dyn WriteColor) -> R>(&self, f: F) -> R {
+ self.colored_stream_inner(StandardStream::stdout, self.config.stdout_is_tty, f)
+ }
+
+ pub fn colored_stderr<R, F: FnOnce(&mut dyn WriteColor) -> R>(&self, f: F) -> R {
+ self.colored_stream_inner(StandardStream::stderr, self.config.stderr_is_tty, f)
+ }
+
+ fn colored_stream_inner<R, F, C>(&self, constructor: C, is_tty: bool, f: F) -> R
+ where
+ C: Fn(ColorChoice) -> StandardStream,
+ F: FnOnce(&mut dyn WriteColor) -> R,
+ {
+ let choice = match self.config.color {
+ flags::Color::Always => ColorChoice::Always,
+ flags::Color::Never => ColorChoice::Never,
+ flags::Color::Auto if !is_tty => ColorChoice::Never,
+ flags::Color::Auto => ColorChoice::Auto,
+ };
+ let mut stream = constructor(choice);
+ let result = f(&mut stream);
+ stream.reset().unwrap();
+ result
+ }
}
#[cfg(unix)]
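The color helpers added to `Build` above hand the closure a `&mut dyn WriteColor` and reset the stream afterwards, so callers never manage color state themselves. A self-contained sketch of the same pattern using `termcolor` directly (the bootstrap helpers additionally respect `--color` and TTY detection):

    use std::io::Write;
    use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

    fn main() -> std::io::Result<()> {
        // Pick a stream and color choice, write with a color set, then reset --
        // the same sequence `colored_stream_inner` wraps up for its callers.
        let mut out = StandardStream::stdout(ColorChoice::Auto);
        out.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
        write!(out, "ok")?;
        out.reset()?;
        writeln!(out)
    }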
diff --git a/src/bootstrap/native.rs b/src/bootstrap/llvm.rs
index 5987b641b..a893c3a47 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/llvm.rs
@@ -16,7 +16,6 @@ use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
-use crate::bolt::{instrument_with_bolt_inplace, optimize_library_with_bolt_inplace};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::channel;
use crate::config::{Config, TargetSelection};
@@ -109,15 +108,6 @@ pub fn prebuilt_llvm_config(
let stamp = out_dir.join("llvm-finished-building");
let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha());
- if builder.config.llvm_skip_rebuild && stamp.path.exists() {
- builder.info(
- "Warning: \
- Using a potentially stale build of LLVM; \
- This may not behave well.",
- );
- return Ok(res);
- }
-
if stamp.is_done() {
if stamp.hash.is_none() {
builder.info(
@@ -226,7 +216,7 @@ pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool {
/// Returns true if we're running in CI with modified LLVM (and thus can't download it)
pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool {
- CiEnv::is_ci() && {
+ CiEnv::is_ci() && config.rust_info.is_managed_git_subrepository() && {
// We assume we have access to git, so it's okay to unconditionally pass
// `true` here.
let llvm_sha = detect_llvm_sha(config, true);
@@ -296,12 +286,12 @@ impl Step for Llvm {
(true, true) => "RelWithDebInfo",
};
- // NOTE: remember to also update `config.toml.example` when changing the
+ // NOTE: remember to also update `config.example.toml` when changing the
// defaults!
let llvm_targets = match &builder.config.llvm_targets {
Some(s) => s,
None => {
- "AArch64;ARM;BPF;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\
+ "AArch64;ARM;BPF;Hexagon;LoongArch;MSP430;Mips;NVPTX;PowerPC;RISCV;\
Sparc;SystemZ;WebAssembly;X86"
}
};
@@ -314,10 +304,12 @@ impl Step for Llvm {
let assertions = if builder.config.llvm_assertions { "ON" } else { "OFF" };
let plugins = if builder.config.llvm_plugins { "ON" } else { "OFF" };
let enable_tests = if builder.config.llvm_tests { "ON" } else { "OFF" };
+ let enable_warnings = if builder.config.llvm_enable_warnings { "ON" } else { "OFF" };
cfg.out_dir(&out_dir)
.profile(profile)
.define("LLVM_ENABLE_ASSERTIONS", assertions)
+ .define("LLVM_UNREACHABLE_OPTIMIZE", "OFF")
.define("LLVM_ENABLE_PLUGINS", plugins)
.define("LLVM_TARGETS_TO_BUILD", llvm_targets)
.define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
@@ -331,7 +323,8 @@ impl Step for Llvm {
.define("LLVM_ENABLE_Z3_SOLVER", "OFF")
.define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string())
.define("LLVM_TARGET_ARCH", target_native.split('-').next().unwrap())
- .define("LLVM_DEFAULT_TARGET_TRIPLE", target_native);
+ .define("LLVM_DEFAULT_TARGET_TRIPLE", target_native)
+ .define("LLVM_ENABLE_WARNINGS", enable_warnings);
// Parts of our test suite rely on the `FileCheck` tool, which is built by default in
// `build/$TARGET/llvm/build/bin` but *not* then installed to `build/$TARGET/llvm/bin`.
@@ -441,11 +434,6 @@ impl Step for Llvm {
}
}
- // Workaround for ppc32 lld limitation
- if target == "powerpc-unknown-freebsd" {
- ldflags.exe.push(" -fuse-ld=bfd");
- }
-
// https://llvm.org/docs/HowToCrossCompileLLVM.html
if target != builder.config.build {
let LlvmResult { llvm_config, .. } =
@@ -493,11 +481,6 @@ impl Step for Llvm {
cfg.define(key, val);
}
- // FIXME: we don't actually need to build all LLVM tools and all LLVM
- // libraries here, e.g., we just want a few components and a few
- // tools. Figure out how to filter them down and only build the right
- // tools and libs on all platforms.
-
if builder.config.dry_run() {
return res;
}
@@ -523,39 +506,13 @@ impl Step for Llvm {
}
}
- // After LLVM is built, we modify (instrument or optimize) the libLLVM.so library file
- // in place. This is fine, because currently we do not support incrementally rebuilding
- // LLVM after a configuration change, so to rebuild it the build files have to be removed,
- // which will also remove these modified files.
- if builder.config.llvm_bolt_profile_generate {
- instrument_with_bolt_inplace(&get_built_llvm_lib_path(&res.llvm_config));
- }
- if let Some(path) = &builder.config.llvm_bolt_profile_use {
- optimize_library_with_bolt_inplace(
- &get_built_llvm_lib_path(&res.llvm_config),
- &Path::new(path),
- );
- }
-
t!(stamp.write());
res
}
}
-/// Returns path to a built LLVM library (libLLVM.so).
-/// Assumes that we have built LLVM into a single library file.
-fn get_built_llvm_lib_path(llvm_config_path: &Path) -> PathBuf {
- let mut cmd = Command::new(llvm_config_path);
- cmd.arg("--libfiles");
- PathBuf::from(output(&mut cmd).trim())
-}
-
fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
- if !builder.config.llvm_version_check {
- return;
- }
-
if builder.config.dry_run() {
return;
}
@@ -606,6 +563,8 @@ fn configure_cmake(
cfg.define("CMAKE_SYSTEM_NAME", "Haiku");
} else if target.contains("solaris") || target.contains("illumos") {
cfg.define("CMAKE_SYSTEM_NAME", "SunOS");
+ } else if target.contains("linux") {
+ cfg.define("CMAKE_SYSTEM_NAME", "Linux");
}
// When cross-compiling we should also set CMAKE_SYSTEM_VERSION, but in
// that case, like CMake, we cannot easily determine the system version either.
@@ -906,71 +865,6 @@ impl Step for Lld {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct TestHelpers {
- pub target: TargetSelection,
-}
-
-impl Step for TestHelpers {
- type Output = ();
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.path("tests/auxiliary/rust_test_helpers.c")
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(TestHelpers { target: run.target })
- }
-
- /// Compiles the `rust_test_helpers.c` library which we used in various
- /// `run-pass` tests for ABI testing.
- fn run(self, builder: &Builder<'_>) {
- if builder.config.dry_run() {
- return;
- }
- // The x86_64-fortanix-unknown-sgx target doesn't have a working C
- // toolchain. However, some x86_64 ELF objects can be linked
- // without issues. Use this hack to compile the test helpers.
- let target = if self.target == "x86_64-fortanix-unknown-sgx" {
- TargetSelection::from_user("x86_64-unknown-linux-gnu")
- } else {
- self.target
- };
- let dst = builder.test_helpers_out(target);
- let src = builder.src.join("tests/auxiliary/rust_test_helpers.c");
- if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
- return;
- }
-
- builder.info("Building test helpers");
- t!(fs::create_dir_all(&dst));
- let mut cfg = cc::Build::new();
- // FIXME: Workaround for https://github.com/emscripten-core/emscripten/issues/9013
- if target.contains("emscripten") {
- cfg.pic(false);
- }
-
- // We may have found various cross-compilers a little differently due to our
- // extra configuration, so inform cc of these compilers. Note, though, that
- // on MSVC we still need cc's detection of env vars (ugh).
- if !target.contains("msvc") {
- if let Some(ar) = builder.ar(target) {
- cfg.archiver(ar);
- }
- cfg.compiler(builder.cc(target));
- }
- cfg.cargo_metadata(false)
- .out_dir(&dst)
- .target(&target.triple)
- .host(&builder.config.build.triple)
- .opt_level(0)
- .warnings(false)
- .debug(false)
- .file(builder.src.join("tests/auxiliary/rust_test_helpers.c"))
- .compile("rust_test_helpers");
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Sanitizers {
pub target: TargetSelection,
}
@@ -1109,6 +1003,9 @@ fn supported_sanitizers(
"aarch64-unknown-linux-gnu" => {
common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"])
}
+ "aarch64-unknown-linux-ohos" => {
+ common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"])
+ }
"x86_64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]),
"x86_64-unknown-fuchsia" => common_libs("fuchsia", "x86_64", &["asan"]),
"x86_64-apple-ios" => darwin_libs("iossim", &["asan", "tsan"]),
@@ -1190,6 +1087,8 @@ impl Step for CrtBeginEnd {
/// Build crtbegin.o/crtend.o for musl target.
fn run(self, builder: &Builder<'_>) -> Self::Output {
+ builder.update_submodule(&Path::new("src/llvm-project"));
+
let out_dir = builder.native_dir(self.target).join("crt");
if builder.config.dry_run() {
@@ -1256,6 +1155,8 @@ impl Step for Libunwind {
/// Build libunwind.a
fn run(self, builder: &Builder<'_>) -> Self::Output {
+ builder.update_submodule(&Path::new("src/llvm-project"));
+
if builder.config.dry_run() {
return PathBuf::new();
}
diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs
index 2e62c9507..82b123ec8 100644
--- a/src/bootstrap/metrics.rs
+++ b/src/bootstrap/metrics.rs
@@ -11,7 +11,7 @@ use serde_derive::{Deserialize, Serialize};
use std::cell::RefCell;
use std::fs::File;
use std::io::BufWriter;
-use std::time::{Duration, Instant};
+use std::time::{Duration, Instant, SystemTime};
use sysinfo::{CpuExt, System, SystemExt};
pub(crate) struct BuildMetrics {
@@ -27,6 +27,7 @@ impl BuildMetrics {
system_info: System::new(),
timer_start: None,
invocation_timer_start: Instant::now(),
+ invocation_start: SystemTime::now(),
});
BuildMetrics { state }
@@ -51,6 +52,7 @@ impl BuildMetrics {
duration_excluding_children_sec: Duration::ZERO,
children: Vec::new(),
+ tests: Vec::new(),
});
}
@@ -72,6 +74,16 @@ impl BuildMetrics {
}
}
+ pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome) {
+ let mut state = self.state.borrow_mut();
+ state
+ .running_steps
+ .last_mut()
+ .unwrap()
+ .tests
+ .push(Test { name: name.to_string(), outcome });
+ }
+
fn collect_stats(&self, state: &mut MetricsState) {
let step = state.running_steps.last_mut().unwrap();
@@ -113,6 +125,11 @@ impl BuildMetrics {
}
};
invocations.push(JsonInvocation {
+ start_time: state
+ .invocation_start
+ .duration_since(SystemTime::UNIX_EPOCH)
+ .unwrap()
+ .as_secs(),
duration_including_children_sec: state.invocation_timer_start.elapsed().as_secs_f64(),
children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(),
});
@@ -125,6 +142,14 @@ impl BuildMetrics {
}
fn prepare_json_step(&self, step: StepMetrics) -> JsonNode {
+ let mut children = Vec::new();
+ children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child)));
+ children.extend(
+ step.tests
+ .into_iter()
+ .map(|test| JsonNode::Test { name: test.name, outcome: test.outcome }),
+ );
+
JsonNode::RustbuildStep {
type_: step.type_,
debug_repr: step.debug_repr,
@@ -135,11 +160,7 @@ impl BuildMetrics {
/ step.duration_excluding_children_sec.as_secs_f64(),
},
- children: step
- .children
- .into_iter()
- .map(|child| self.prepare_json_step(child))
- .collect(),
+ children,
}
}
}
@@ -151,6 +172,7 @@ struct MetricsState {
system_info: System,
timer_start: Option<Instant>,
invocation_timer_start: Instant,
+ invocation_start: SystemTime,
}
struct StepMetrics {
@@ -161,6 +183,12 @@ struct StepMetrics {
duration_excluding_children_sec: Duration,
children: Vec<StepMetrics>,
+ tests: Vec<Test>,
+}
+
+struct Test {
+ name: String,
+ outcome: TestOutcome,
}
#[derive(Serialize, Deserialize)]
@@ -173,6 +201,10 @@ struct JsonRoot {
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
struct JsonInvocation {
+ // Unix timestamp in seconds
+ //
+ // This is necessary to easily correlate this invocation with logs or other data.
+ start_time: u64,
duration_including_children_sec: f64,
children: Vec<JsonNode>,
}
@@ -190,6 +222,19 @@ enum JsonNode {
children: Vec<JsonNode>,
},
+ Test {
+ name: String,
+ #[serde(flatten)]
+ outcome: TestOutcome,
+ },
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(tag = "outcome", rename_all = "snake_case")]
+pub(crate) enum TestOutcome {
+ Passed,
+ Failed,
+ Ignored { ignore_reason: Option<String> },
}
#[derive(Serialize, Deserialize)]
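Because `TestOutcome` is internally tagged on `outcome` and flattened into the test node, every test ends up as a single flat JSON object in the metrics file. A self-contained sketch of that tagged representation, assuming plain `serde` (with the `derive` feature) and `serde_json` dependencies rather than bootstrap's exact crate setup:

    use serde::Serialize;

    #[derive(Serialize)]
    #[serde(tag = "outcome", rename_all = "snake_case")]
    #[allow(dead_code)] // `Failed` is kept only to mirror the enum above
    enum TestOutcome {
        Passed,
        Failed,
        Ignored { ignore_reason: Option<String> },
    }

    fn main() {
        let ignored = TestOutcome::Ignored { ignore_reason: Some("requires linux".into()) };
        // Prints: {"outcome":"ignored","ignore_reason":"requires linux"}
        println!("{}", serde_json::to_string(&ignored).unwrap());
        // Prints: {"outcome":"passed"}
        println!("{}", serde_json::to_string(&TestOutcome::Passed).unwrap());
    }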
diff --git a/src/bootstrap/render_tests.rs b/src/bootstrap/render_tests.rs
new file mode 100644
index 000000000..19019ad2c
--- /dev/null
+++ b/src/bootstrap/render_tests.rs
@@ -0,0 +1,371 @@
+//! This module renders the JSON output of libtest into a human-readable form, trying to be as
+//! similar to libtest's native output as possible.
+//!
+//! This is needed because we need to use libtest in JSON mode to extract granular information
+//! about the executed tests. Doing so suppresses the human-readable output, and (compared to Cargo
+//! and rustc) libtest doesn't include the rendered human-readable output as a JSON field. We had
+//! to reimplement all the rendering logic in this module because of that.
+
+use crate::builder::Builder;
+use std::io::{BufRead, BufReader, Write};
+use std::process::{ChildStdout, Command, Stdio};
+use std::time::Duration;
+use termcolor::{Color, ColorSpec, WriteColor};
+
+const TERSE_TESTS_PER_LINE: usize = 88;
+
+pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+ if cmd.get_args().position(|arg| arg == "--").is_none() {
+ cmd.arg("--");
+ }
+ cmd.args(&["-Z", "unstable-options", "--format", "json"]);
+
+ try_run_tests(builder, cmd)
+}
+
+pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+ if builder.config.dry_run() {
+ return true;
+ }
+
+ if !run_tests(builder, cmd) {
+ if builder.fail_fast {
+ crate::detail_exit(1);
+ } else {
+ let mut failures = builder.delayed_failures.borrow_mut();
+ failures.push(format!("{cmd:?}"));
+ false
+ }
+ } else {
+ true
+ }
+}
+
+fn run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+ cmd.stdout(Stdio::piped());
+
+ builder.verbose(&format!("running: {cmd:?}"));
+
+ let mut process = cmd.spawn().unwrap();
+
+ // This runs until the stdout of the child is closed, which means the child exited. We don't
+ // run this on another thread since the builder is not Sync.
+ Renderer::new(process.stdout.take().unwrap(), builder).render_all();
+
+ let result = process.wait_with_output().unwrap();
+ if !result.status.success() && builder.is_verbose() {
+ println!(
+ "\n\ncommand did not execute successfully: {cmd:?}\n\
+ expected success, got: {}",
+ result.status
+ );
+ }
+
+ result.status.success()
+}
+
+struct Renderer<'a> {
+ stdout: BufReader<ChildStdout>,
+ failures: Vec<TestOutcome>,
+ benches: Vec<BenchOutcome>,
+ builder: &'a Builder<'a>,
+ tests_count: Option<usize>,
+ executed_tests: usize,
+ terse_tests_in_line: usize,
+}
+
+impl<'a> Renderer<'a> {
+ fn new(stdout: ChildStdout, builder: &'a Builder<'a>) -> Self {
+ Self {
+ stdout: BufReader::new(stdout),
+ benches: Vec::new(),
+ failures: Vec::new(),
+ builder,
+ tests_count: None,
+ executed_tests: 0,
+ terse_tests_in_line: 0,
+ }
+ }
+
+ fn render_all(mut self) {
+ let mut line = String::new();
+ loop {
+ line.clear();
+ match self.stdout.read_line(&mut line) {
+ Ok(_) => {}
+ Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break,
+ Err(err) => panic!("failed to read output of test runner: {err}"),
+ }
+ if line.is_empty() {
+ break;
+ }
+
+ match serde_json::from_str(&line) {
+ Ok(parsed) => self.render_message(parsed),
+ Err(_err) => {
+ // Handle non-JSON output, for example when --nocapture is passed.
+ print!("{line}");
+ let _ = std::io::stdout().flush();
+ }
+ }
+ }
+ }
+
+ fn render_test_outcome(&mut self, outcome: Outcome<'_>, test: &TestOutcome) {
+ self.executed_tests += 1;
+
+ #[cfg(feature = "build-metrics")]
+ self.builder.metrics.record_test(
+ &test.name,
+ match outcome {
+ Outcome::Ok | Outcome::BenchOk => crate::metrics::TestOutcome::Passed,
+ Outcome::Failed => crate::metrics::TestOutcome::Failed,
+ Outcome::Ignored { reason } => crate::metrics::TestOutcome::Ignored {
+ ignore_reason: reason.map(|s| s.to_string()),
+ },
+ },
+ );
+
+ if self.builder.config.verbose_tests {
+ self.render_test_outcome_verbose(outcome, test);
+ } else {
+ self.render_test_outcome_terse(outcome, test);
+ }
+ }
+
+ fn render_test_outcome_verbose(&self, outcome: Outcome<'_>, test: &TestOutcome) {
+ print!("test {} ... ", test.name);
+ self.builder.colored_stdout(|stdout| outcome.write_long(stdout)).unwrap();
+ if let Some(exec_time) = test.exec_time {
+ print!(" ({exec_time:.2?})");
+ }
+ println!();
+ }
+
+ fn render_test_outcome_terse(&mut self, outcome: Outcome<'_>, _: &TestOutcome) {
+ if self.terse_tests_in_line != 0 && self.terse_tests_in_line % TERSE_TESTS_PER_LINE == 0 {
+ if let Some(total) = self.tests_count {
+ let total = total.to_string();
+ let executed = format!("{:>width$}", self.executed_tests - 1, width = total.len());
+ print!(" {executed}/{total}");
+ }
+ println!();
+ self.terse_tests_in_line = 0;
+ }
+
+ self.terse_tests_in_line += 1;
+ self.builder.colored_stdout(|stdout| outcome.write_short(stdout)).unwrap();
+ let _ = std::io::stdout().flush();
+ }
+
+ fn render_suite_outcome(&self, outcome: Outcome<'_>, suite: &SuiteOutcome) {
+ // The terse output doesn't end with a newline, so we need to add it ourselves.
+ if !self.builder.config.verbose_tests {
+ println!();
+ }
+
+ if !self.failures.is_empty() {
+ println!("\nfailures:\n");
+ for failure in &self.failures {
+ if let Some(stdout) = &failure.stdout {
+ println!("---- {} stdout ----", failure.name);
+ println!("{stdout}");
+ }
+ }
+
+ println!("\nfailures:");
+ for failure in &self.failures {
+ println!(" {}", failure.name);
+ }
+ }
+
+ if !self.benches.is_empty() {
+ println!("\nbenchmarks:");
+
+ let mut rows = Vec::new();
+ for bench in &self.benches {
+ rows.push((
+ &bench.name,
+ format!("{:.2?}/iter", Duration::from_nanos(bench.median)),
+ format!("+/- {:.2?}", Duration::from_nanos(bench.deviation)),
+ ));
+ }
+
+ let max_0 = rows.iter().map(|r| r.0.len()).max().unwrap_or(0);
+ let max_1 = rows.iter().map(|r| r.1.len()).max().unwrap_or(0);
+ let max_2 = rows.iter().map(|r| r.2.len()).max().unwrap_or(0);
+ for row in &rows {
+ println!(" {:<max_0$} {:>max_1$} {:>max_2$}", row.0, row.1, row.2);
+ }
+ }
+
+ print!("\ntest result: ");
+ self.builder.colored_stdout(|stdout| outcome.write_long(stdout)).unwrap();
+ println!(
+ ". {} passed; {} failed; {} ignored; {} measured; {} filtered out; \
+ finished in {:.2?}\n",
+ suite.passed,
+ suite.failed,
+ suite.ignored,
+ suite.measured,
+ suite.filtered_out,
+ Duration::from_secs_f64(suite.exec_time)
+ );
+ }
+
+ fn render_message(&mut self, message: Message) {
+ match message {
+ Message::Suite(SuiteMessage::Started { test_count }) => {
+ println!("\nrunning {test_count} tests");
+ self.executed_tests = 0;
+ self.terse_tests_in_line = 0;
+ self.tests_count = Some(test_count);
+ }
+ Message::Suite(SuiteMessage::Ok(outcome)) => {
+ self.render_suite_outcome(Outcome::Ok, &outcome);
+ }
+ Message::Suite(SuiteMessage::Failed(outcome)) => {
+ self.render_suite_outcome(Outcome::Failed, &outcome);
+ }
+ Message::Bench(outcome) => {
+ // The formatting for benchmarks doesn't replicate libtest's output 1:1,
+ // mostly because libtest's formatting is broken in terse mode, which is
+ // the default used by our monorepo. We use a different formatting instead:
+ // successful benchmarks are just shown as "benchmarked"/"b", and the details
+ // are printed at the bottom like failures.
+ let fake_test_outcome = TestOutcome {
+ name: outcome.name.clone(),
+ exec_time: None,
+ stdout: None,
+ message: None,
+ };
+ self.render_test_outcome(Outcome::BenchOk, &fake_test_outcome);
+ self.benches.push(outcome);
+ }
+ Message::Test(TestMessage::Ok(outcome)) => {
+ self.render_test_outcome(Outcome::Ok, &outcome);
+ }
+ Message::Test(TestMessage::Ignored(outcome)) => {
+ self.render_test_outcome(
+ Outcome::Ignored { reason: outcome.message.as_deref() },
+ &outcome,
+ );
+ }
+ Message::Test(TestMessage::Failed(outcome)) => {
+ self.render_test_outcome(Outcome::Failed, &outcome);
+ self.failures.push(outcome);
+ }
+ Message::Test(TestMessage::Timeout { name }) => {
+ println!("test {name} has been running for a long time");
+ }
+ Message::Test(TestMessage::Started) => {} // Not useful
+ }
+ }
+}
+
+enum Outcome<'a> {
+ Ok,
+ BenchOk,
+ Failed,
+ Ignored { reason: Option<&'a str> },
+}
+
+impl Outcome<'_> {
+ fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
+ match self {
+ Outcome::Ok => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?;
+ write!(writer, ".")?;
+ }
+ Outcome::BenchOk => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?;
+ write!(writer, "b")?;
+ }
+ Outcome::Failed => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?;
+ write!(writer, "F")?;
+ }
+ Outcome::Ignored { .. } => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?;
+ write!(writer, "i")?;
+ }
+ }
+ writer.reset()
+ }
+
+ fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
+ match self {
+ Outcome::Ok => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?;
+ write!(writer, "ok")?;
+ }
+ Outcome::BenchOk => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?;
+ write!(writer, "benchmarked")?;
+ }
+ Outcome::Failed => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?;
+ write!(writer, "FAILED")?;
+ }
+ Outcome::Ignored { reason } => {
+ writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?;
+ write!(writer, "ignored")?;
+ if let Some(reason) = reason {
+ write!(writer, ", {reason}")?;
+ }
+ }
+ }
+ writer.reset()
+ }
+}
+
+#[derive(serde_derive::Deserialize)]
+#[serde(tag = "type", rename_all = "snake_case")]
+enum Message {
+ Suite(SuiteMessage),
+ Test(TestMessage),
+ Bench(BenchOutcome),
+}
+
+#[derive(serde_derive::Deserialize)]
+#[serde(tag = "event", rename_all = "snake_case")]
+enum SuiteMessage {
+ Ok(SuiteOutcome),
+ Failed(SuiteOutcome),
+ Started { test_count: usize },
+}
+
+#[derive(serde_derive::Deserialize)]
+struct SuiteOutcome {
+ passed: usize,
+ failed: usize,
+ ignored: usize,
+ measured: usize,
+ filtered_out: usize,
+ exec_time: f64,
+}
+
+#[derive(serde_derive::Deserialize)]
+#[serde(tag = "event", rename_all = "snake_case")]
+enum TestMessage {
+ Ok(TestOutcome),
+ Failed(TestOutcome),
+ Ignored(TestOutcome),
+ Timeout { name: String },
+ Started,
+}
+
+#[derive(serde_derive::Deserialize)]
+struct BenchOutcome {
+ name: String,
+ median: u64,
+ deviation: u64,
+}
+
+#[derive(serde_derive::Deserialize)]
+struct TestOutcome {
+ name: String,
+ exec_time: Option<f64>,
+ stdout: Option<String>,
+ message: Option<String>,
+}
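For reference, the JSON lines `Renderer` consumes are the ones libtest emits under `--format json` (as set up in `add_flags_and_try_run_tests` above); the field names follow the serde attributes declared in this file. A trimmed-down, self-contained sketch (assuming plain `serde` with the `derive` feature and `serde_json`) that parses two such lines:

    use serde::Deserialize;

    #[derive(Deserialize, Debug)]
    #[serde(tag = "type", rename_all = "snake_case")]
    enum Message {
        Suite(SuiteMessage),
        Test(TestMessage),
    }

    #[derive(Deserialize, Debug)]
    #[serde(tag = "event", rename_all = "snake_case")]
    enum SuiteMessage {
        Started { test_count: usize },
    }

    #[derive(Deserialize, Debug)]
    #[serde(tag = "event", rename_all = "snake_case")]
    enum TestMessage {
        Ok { name: String },
    }

    fn main() {
        let lines = [
            r#"{ "type": "suite", "event": "started", "test_count": 1 }"#,
            r#"{ "type": "test", "event": "ok", "name": "util::tests::smoke" }"#,
        ];
        for line in lines {
            let msg: Message = serde_json::from_str(line).unwrap();
            println!("{msg:?}");
        }
    }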
diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs
index 4480bce99..09f26862b 100644
--- a/src/bootstrap/setup.rs
+++ b/src/bootstrap/setup.rs
@@ -24,7 +24,7 @@ pub enum Profile {
None,
}
-/// A list of historical hashes of `src/etc/vscode_settings.json`.
+/// A list of historical hashes of `src/etc/rust_analyzer_settings.json`.
/// New entries should be appended whenever this is updated so we can detect
/// outdated vs. user-modified settings files.
static SETTINGS_HASHES: &[&str] = &[
@@ -32,7 +32,7 @@ static SETTINGS_HASHES: &[&str] = &[
"56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922",
"af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0",
];
-static VSCODE_SETTINGS: &str = include_str!("../etc/vscode_settings.json");
+static RUST_ANALYZER_SETTINGS: &str = include_str!("../etc/rust_analyzer_settings.json");
impl Profile {
fn include_path(&self, src_path: &Path) -> PathBuf {
@@ -489,7 +489,7 @@ undesirable, simply delete the `pre-push` file from .git/hooks."
Ok(())
}
-/// Sets up or displays `src/etc/vscode_settings.json`
+/// Sets up or displays `src/etc/rust_analyzer_settings.json`
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Vscode;
@@ -580,10 +580,10 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> {
}
_ => "Created",
};
- fs::write(&vscode_settings, &VSCODE_SETTINGS)?;
+ fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?;
println!("{verb} `.vscode/settings.json`");
} else {
- println!("\n{VSCODE_SETTINGS}");
+ println!("\n{RUST_ANALYZER_SETTINGS}");
}
Ok(())
}
diff --git a/src/bootstrap/setup/tests.rs b/src/bootstrap/setup/tests.rs
index dcf9d18e6..0fe6e4a46 100644
--- a/src/bootstrap/setup/tests.rs
+++ b/src/bootstrap/setup/tests.rs
@@ -1,14 +1,14 @@
-use super::{SETTINGS_HASHES, VSCODE_SETTINGS};
+use super::{RUST_ANALYZER_SETTINGS, SETTINGS_HASHES};
use sha2::Digest;
#[test]
fn check_matching_settings_hash() {
let mut hasher = sha2::Sha256::new();
- hasher.update(&VSCODE_SETTINGS);
+ hasher.update(&RUST_ANALYZER_SETTINGS);
let hash = hex::encode(hasher.finalize().as_slice());
assert_eq!(
&hash,
SETTINGS_HASHES.last().unwrap(),
- "Update `SETTINGS_HASHES` with the new hash of `src/etc/vscode_settings.json`"
+ "Update `SETTINGS_HASHES` with the new hash of `src/etc/rust_analyzer_settings.json`"
);
}
diff --git a/src/bootstrap/suggest.rs b/src/bootstrap/suggest.rs
new file mode 100644
index 000000000..ff20ebec2
--- /dev/null
+++ b/src/bootstrap/suggest.rs
@@ -0,0 +1,80 @@
+#![cfg_attr(feature = "build-metrics", allow(unused))]
+
+use std::str::FromStr;
+
+use std::path::PathBuf;
+
+use crate::{
+ builder::{Builder, Kind},
+ tool::Tool,
+};
+
+#[cfg(feature = "build-metrics")]
+pub fn suggest(builder: &Builder<'_>, run: bool) {
+ panic!("`x suggest` is not supported with `build-metrics`")
+}
+
+/// Suggests a list of possible `x.py` commands to run based on the files modified in the current branch.
+#[cfg(not(feature = "build-metrics"))]
+pub fn suggest(builder: &Builder<'_>, run: bool) {
+ let suggestions =
+ builder.tool_cmd(Tool::SuggestTests).output().expect("failed to run `suggest-tests` tool");
+
+ if !suggestions.status.success() {
+ println!("failed to run `suggest-tests` tool ({})", suggestions.status);
+ println!(
+ "`suggest_tests` stdout:\n{}`suggest_tests` stderr:\n{}",
+ String::from_utf8(suggestions.stdout).unwrap(),
+ String::from_utf8(suggestions.stderr).unwrap()
+ );
+ panic!("failed to run `suggest-tests`");
+ }
+
+ let suggestions = String::from_utf8(suggestions.stdout).unwrap();
+ let suggestions = suggestions
+ .lines()
+ .map(|line| {
+ let mut sections = line.split_ascii_whitespace();
+
+ // this code expects one suggestion per line in the following format:
+ // <x_subcommand> {some number of flags} [optional stage number]
+ let cmd = sections.next().unwrap();
+ let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten();
+ let paths: Vec<PathBuf> = sections.map(|p| PathBuf::from_str(p).unwrap()).collect();
+
+ (cmd, stage, paths)
+ })
+ .collect::<Vec<_>>();
+
+ if !suggestions.is_empty() {
+ println!("==== SUGGESTIONS ====");
+ for sug in &suggestions {
+ print!("x {} ", sug.0);
+ if let Some(stage) = sug.1 {
+ print!("--stage {stage} ");
+ }
+
+ for path in &sug.2 {
+ print!("{} ", path.display());
+ }
+ println!();
+ }
+ println!("=====================");
+ } else {
+ println!("No suggestions found!");
+ return;
+ }
+
+ if run {
+ for sug in suggestions {
+ let mut build = builder.build.clone();
+
+ let builder =
+ Builder::new_standalone(&mut build, Kind::parse(&sug.0).unwrap(), sug.2, sug.1);
+
+ builder.execute_cli()
+ }
+ } else {
+ println!("help: consider using the `--run` flag to automatically run suggested tests");
+ }
+}
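The parsing above expects one suggestion per line from the `suggest-tests` tool, in the shape described by the comment: subcommand first, then paths, then an optional trailing stage number. A self-contained sketch of that parsing, applied to a hypothetical suggestion line:

    use std::path::PathBuf;
    use std::str::FromStr;

    fn parse(line: &str) -> (&str, Option<u32>, Vec<PathBuf>) {
        let mut sections = line.split_ascii_whitespace();
        let cmd = sections.next().unwrap();
        // The trailing token is popped and kept as the stage only if it parses as a number.
        let stage = sections.next_back().and_then(|s| str::parse(s).ok());
        let paths: Vec<PathBuf> = sections.map(|p| PathBuf::from_str(p).unwrap()).collect();
        (cmd, stage, paths)
    }

    fn main() {
        let (cmd, stage, paths) = parse("test tests/ui tests/codegen 1");
        assert_eq!(cmd, "test");
        assert_eq!(stage, Some(1));
        assert_eq!(paths, [PathBuf::from("tests/ui"), PathBuf::from("tests/codegen")]);
    }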
diff --git a/src/bootstrap/tarball.rs b/src/bootstrap/tarball.rs
index fc850a22b..7fa8a4d9d 100644
--- a/src/bootstrap/tarball.rs
+++ b/src/bootstrap/tarball.rs
@@ -318,6 +318,7 @@ impl<'a> Tarball<'a> {
assert!(!formats.is_empty(), "dist.compression-formats can't be empty");
cmd.arg("--compression-formats").arg(formats.join(","));
}
+ cmd.args(&["--compression-profile", &self.builder.config.dist_compression_profile]);
self.builder.run(&mut cmd);
// Ensure there are no symbolic links in the tarball. In particular,
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index b4f1506dc..aedf1ecab 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -19,10 +19,11 @@ use crate::config::TargetSelection;
use crate::dist;
use crate::doc::DocumentationFormat;
use crate::flags::Subcommand;
-use crate::native;
+use crate::llvm;
+use crate::render_tests::add_flags_and_try_run_tests;
use crate::tool::{self, SourceType, Tool};
use crate::toolstate::ToolState;
-use crate::util::{self, add_link_lib_path, dylib_path, dylib_path_var, output, t};
+use crate::util::{self, add_link_lib_path, dylib_path, dylib_path_var, output, t, up_to_date};
use crate::{envify, CLang, DocTests, GitRepo, Mode};
const ADB_TEST_DIR: &str = "/data/local/tmp/work";
@@ -123,7 +124,43 @@ impl Step for CrateJsonDocLint {
SourceType::InTree,
&[],
);
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct SuggestTestsCrate {
+ host: TargetSelection,
+}
+
+impl Step for SuggestTestsCrate {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/suggest-tests")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(SuggestTestsCrate { host: run.target });
+ }
+
+ fn run(self, builder: &Builder<'_>) {
+ let bootstrap_host = builder.config.build;
+ let compiler = builder.compiler(0, bootstrap_host);
+
+ let suggest_tests = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolBootstrap,
+ bootstrap_host,
+ "test",
+ "src/tools/suggest-tests",
+ SourceType::InTree,
+ &[],
+ );
+ add_flags_and_try_run_tests(builder, &mut suggest_tests.into());
}
}
@@ -172,7 +209,7 @@ You can skip linkcheck with --exclude src/tools/linkchecker"
SourceType::InTree,
&[],
);
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
// Build all the default documentation.
builder.default_doc(&[]);
@@ -333,7 +370,7 @@ impl Step for Cargo {
cargo.env("PATH", &path_for_cargo(builder, compiler));
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -392,7 +429,7 @@ impl Step for RustAnalyzer {
cargo.add_rustc_lib_path(builder, compiler);
cargo.arg("--").args(builder.config.cmd.test_args());
- builder.run(&mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -445,7 +482,7 @@ impl Step for Rustfmt {
cargo.add_rustc_lib_path(builder, compiler);
- builder.run(&mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -496,7 +533,7 @@ impl Step for RustDemangler {
cargo.add_rustc_lib_path(builder, compiler);
- builder.run(&mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -637,6 +674,8 @@ impl Step for Miri {
// Forward test filters.
cargo.arg("--").args(builder.config.cmd.test_args());
+ // This can NOT be `add_flags_and_try_run_tests` since the Miri test runner
+ // does not understand those flags!
let mut cargo = Command::from(cargo);
builder.run(&mut cargo);
@@ -694,7 +733,7 @@ impl Step for CompiletestTest {
/// Runs `cargo test` for compiletest.
fn run(self, builder: &Builder<'_>) {
let host = self.host;
- let compiler = builder.compiler(0, host);
+ let compiler = builder.compiler(1, host);
// We need `ToolStd` for the locally-built sysroot because
// compiletest uses unstable features of the `test` crate.
@@ -711,7 +750,7 @@ impl Step for CompiletestTest {
);
cargo.allow_features("test");
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -1064,6 +1103,8 @@ impl Step for RustdocGUI {
cargo.env("RUSTDOCFLAGS", "-Zunstable-options --generate-link-to-definition");
} else if entry.file_name() == "scrape_examples" {
cargo.arg("-Zrustdoc-scrape-examples");
+ } else if entry.file_name() == "extend_css" {
+ cargo.env("RUSTDOCFLAGS", &format!("--extend-css extra.css"));
}
builder.run(&mut cargo);
}
@@ -1118,7 +1159,11 @@ impl Step for Tidy {
cmd.arg(&builder.src);
cmd.arg(&builder.initial_cargo);
cmd.arg(&builder.out);
- cmd.arg(builder.jobs().to_string());
+ // Tidy is heavily IO constrained. Still respect `-j`, but use a higher limit if `jobs` hasn't been configured.
+ let jobs = builder.config.jobs.unwrap_or_else(|| {
+ 8 * std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
+ });
+ cmd.arg(jobs.to_string());
if builder.is_verbose() {
cmd.arg("--verbose");
}
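With `jobs` left unconfigured, the expression above gives tidy eight threads per available core. A tiny sketch of the arithmetic (the core count is hypothetical):

    use std::num::NonZeroUsize;
    use std::thread;

    fn main() {
        // e.g. on an 8-core machine: 8 * 8 = 64 tidy jobs.
        let jobs = 8 * thread::available_parallelism().map_or(1, NonZeroUsize::get) as u32;
        println!("tidy would run with {jobs} jobs");
    }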
@@ -1129,7 +1174,7 @@ impl Step for Tidy {
if builder.config.channel == "dev" || builder.config.channel == "nightly" {
builder.info("fmt check");
if builder.initial_rustfmt().is_none() {
- let inferred_rustfmt_dir = builder.config.initial_rustc.parent().unwrap();
+ let inferred_rustfmt_dir = builder.initial_rustc.parent().unwrap();
eprintln!(
"\
error: no `rustfmt` binary found in {PATH}
@@ -1189,7 +1234,7 @@ impl Step for TidySelfTest {
SourceType::InTree,
&[],
);
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -1430,11 +1475,11 @@ note: if you're sure you want to do this, please open an issue as to why. In the
builder.ensure(compile::Std::new(compiler, compiler.host));
// Also provide `rust_test_helpers` for the host.
- builder.ensure(native::TestHelpers { target: compiler.host });
+ builder.ensure(TestHelpers { target: compiler.host });
// As well as the target, except for plain wasm32, which can't build it
if !target.contains("wasm") || target.contains("emscripten") {
- builder.ensure(native::TestHelpers { target });
+ builder.ensure(TestHelpers { target });
}
builder.ensure(RemoteCopyLibs { compiler, target });
@@ -1531,7 +1576,10 @@ note: if you're sure you want to do this, please open an issue as to why. In the
flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string()));
if let Some(linker) = builder.linker(target) {
- cmd.arg("--linker").arg(linker);
+ cmd.arg("--target-linker").arg(linker);
+ }
+ if let Some(linker) = builder.linker(compiler.host) {
+ cmd.arg("--host-linker").arg(linker);
}
let mut hostflags = flags.clone();
@@ -1616,15 +1664,13 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--verbose");
}
- if !builder.config.verbose_tests {
- cmd.arg("--quiet");
- }
+ cmd.arg("--json");
let mut llvm_components_passed = false;
let mut copts_passed = false;
if builder.config.llvm_enabled() {
- let native::LlvmResult { llvm_config, .. } =
- builder.ensure(native::Llvm { target: builder.config.build });
+ let llvm::LlvmResult { llvm_config, .. } =
+ builder.ensure(llvm::Llvm { target: builder.config.build });
if !builder.config.dry_run() {
let llvm_version = output(Command::new(&llvm_config).arg("--version"));
let llvm_components = output(Command::new(&llvm_config).arg("--components"));
@@ -1662,7 +1708,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
// If LLD is available, add it to the PATH
if builder.config.lld_enabled {
let lld_install_root =
- builder.ensure(native::Lld { target: builder.config.build });
+ builder.ensure(llvm::Lld { target: builder.config.build });
let lld_bin_path = lld_install_root.join("bin");
@@ -1769,7 +1815,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
suite, mode, &compiler.host, target
));
let _time = util::timeit(&builder);
- try_run(builder, &mut cmd);
+ crate::render_tests::try_run_tests(builder, &mut cmd);
if let Some(compare_mode) = compare_mode {
cmd.arg("--compare-mode").arg(compare_mode);
@@ -1778,7 +1824,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
suite, mode, compare_mode, &compiler.host, target
));
let _time = util::timeit(&builder);
- try_run(builder, &mut cmd);
+ crate::render_tests::try_run_tests(builder, &mut cmd);
}
}
}
@@ -2141,7 +2187,7 @@ impl Step for Crate {
compile::std_cargo(builder, target, compiler.stage, &mut cargo);
}
Mode::Rustc => {
- compile::rustc_cargo(builder, &mut cargo, target);
+ compile::rustc_cargo(builder, &mut cargo, target, compiler.stage);
}
_ => panic!("can only test libraries"),
};
@@ -2180,9 +2226,8 @@ impl Step for Crate {
cargo.arg("--");
cargo.args(&builder.config.cmd.test_args());
- if !builder.config.verbose_tests {
- cargo.arg("--quiet");
- }
+ cargo.arg("-Z").arg("unstable-options");
+ cargo.arg("--format").arg("json");
if target.contains("emscripten") {
cargo.env(
@@ -2210,7 +2255,7 @@ impl Step for Crate {
target
));
let _time = util::timeit(&builder);
- try_run(builder, &mut cargo.into());
+ crate::render_tests::try_run_tests(builder, &mut cargo.into());
}
}
@@ -2330,7 +2375,7 @@ impl Step for CrateRustdoc {
));
let _time = util::timeit(&builder);
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -2391,17 +2436,13 @@ impl Step for CrateRustdocJsonTypes {
cargo.arg("'-Ctarget-feature=-crt-static'");
}
- if !builder.config.verbose_tests {
- cargo.arg("--quiet");
- }
-
builder.info(&format!(
"{} rustdoc-json-types stage{} ({} -> {})",
test_kind, compiler.stage, &compiler.host, target
));
let _time = util::timeit(&builder);
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
}
@@ -2570,7 +2611,7 @@ impl Step for Bootstrap {
// rustbuild tests are racy on directory creation so just run them one at a time.
// Since there aren't many, this shouldn't be a problem.
cmd.arg("--test-threads=1");
- try_run(builder, &mut cmd);
+ add_flags_and_try_run_tests(builder, &mut cmd);
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -2651,7 +2692,7 @@ impl Step for ReplacePlaceholderTest {
SourceType::InTree,
&[],
);
- try_run(builder, &mut cargo.into());
+ add_flags_and_try_run_tests(builder, &mut cargo.into());
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -2695,3 +2736,123 @@ impl Step for LintDocs {
});
}
}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct RustInstaller;
+
+impl Step for RustInstaller {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ /// Runs the `rust-installer` tool's test suite.
+ fn run(self, builder: &Builder<'_>) {
+ builder.info("test rust-installer");
+
+ let bootstrap_host = builder.config.build;
+ let compiler = builder.compiler(0, bootstrap_host);
+ let cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolBootstrap,
+ bootstrap_host,
+ "test",
+ "src/tools/rust-installer",
+ SourceType::InTree,
+ &[],
+ );
+ try_run(builder, &mut cargo.into());
+
+ // We currently don't support running the test.sh script outside Linux(?) environments.
+ // Eventually this should likely migrate to #[test]s in rust-installer proper rather than a
+ // set of scripts, which will likely allow dropping this `if`.
+ if bootstrap_host != "x86_64-unknown-linux-gnu" {
+ return;
+ }
+
+ let mut cmd =
+ std::process::Command::new(builder.src.join("src/tools/rust-installer/test.sh"));
+ let tmpdir = testdir(builder, compiler.host).join("rust-installer");
+ let _ = std::fs::remove_dir_all(&tmpdir);
+ let _ = std::fs::create_dir_all(&tmpdir);
+ cmd.current_dir(&tmpdir);
+ cmd.env("CARGO_TARGET_DIR", tmpdir.join("cargo-target"));
+ cmd.env("CARGO", &builder.initial_cargo);
+ cmd.env("RUSTC", &builder.initial_rustc);
+ cmd.env("TMP_DIR", &tmpdir);
+ try_run(builder, &mut cmd);
+ }
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/rust-installer")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Self);
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct TestHelpers {
+ pub target: TargetSelection,
+}
+
+impl Step for TestHelpers {
+ type Output = ();
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("tests/auxiliary/rust_test_helpers.c")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(TestHelpers { target: run.target })
+ }
+
+ /// Compiles the `rust_test_helpers.c` library which we use in various
+ /// `run-pass` tests for ABI testing.
+ fn run(self, builder: &Builder<'_>) {
+ if builder.config.dry_run() {
+ return;
+ }
+ // The x86_64-fortanix-unknown-sgx target doesn't have a working C
+ // toolchain. However, some x86_64 ELF objects can be linked
+ // without issues. Use this hack to compile the test helpers.
+ let target = if self.target == "x86_64-fortanix-unknown-sgx" {
+ TargetSelection::from_user("x86_64-unknown-linux-gnu")
+ } else {
+ self.target
+ };
+ let dst = builder.test_helpers_out(target);
+ let src = builder.src.join("tests/auxiliary/rust_test_helpers.c");
+ if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+ return;
+ }
+
+ builder.info("Building test helpers");
+ t!(fs::create_dir_all(&dst));
+ let mut cfg = cc::Build::new();
+ // FIXME: Workaround for https://github.com/emscripten-core/emscripten/issues/9013
+ if target.contains("emscripten") {
+ cfg.pic(false);
+ }
+
+ // We may have found various cross-compilers a little differently due to our
+ // extra configuration, so inform cc of these compilers. Note, though, that
+ // on MSVC we still need cc's detection of env vars (ugh).
+ if !target.contains("msvc") {
+ if let Some(ar) = builder.ar(target) {
+ cfg.archiver(ar);
+ }
+ cfg.compiler(builder.cc(target));
+ }
+ cfg.cargo_metadata(false)
+ .out_dir(&dst)
+ .target(&target.triple)
+ .host(&builder.config.build.triple)
+ .opt_level(0)
+ .warnings(false)
+ .debug(false)
+ .file(builder.src.join("tests/auxiliary/rust_test_helpers.c"))
+ .compile("rust_test_helpers");
+ }
+}
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index 3c9a154da..d1fd2e8c4 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -320,7 +320,7 @@ pub fn prepare_tool_cargo(
cargo.env("CFG_RELEASE_NUM", &builder.version);
cargo.env("DOC_RUST_LANG_ORG_CHANNEL", builder.doc_rust_lang_org_channel());
- let info = GitInfo::new(builder.config.ignore_git, &dir);
+ let info = GitInfo::new(builder.config.omit_git_hash, &dir);
if let Some(sha) = info.sha() {
cargo.env("CFG_COMMIT_HASH", sha);
}
@@ -433,6 +433,7 @@ bootstrap_tool!(
ReplaceVersionPlaceholder, "src/tools/replace-version-placeholder", "replace-version-placeholder";
CollectLicenseMetadata, "src/tools/collect-license-metadata", "collect-license-metadata";
GenerateCopyright, "src/tools/generate-copyright", "generate-copyright";
+ SuggestTests, "src/tools/suggest-tests", "suggest-tests";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 93e53d383..2e1adbf63 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -146,100 +146,9 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
fs::symlink(src, dest)
}
- // Creating a directory junction on windows involves dealing with reparse
- // points and the DeviceIoControl function, and this code is a skeleton of
- // what can be found here:
- //
- // http://www.flexhex.com/docs/articles/hard-links.phtml
#[cfg(windows)]
fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> {
- use std::ffi::OsStr;
- use std::os::windows::ffi::OsStrExt;
- use std::ptr;
-
- use winapi::shared::minwindef::{DWORD, WORD};
- use winapi::um::fileapi::{CreateFileW, OPEN_EXISTING};
- use winapi::um::handleapi::CloseHandle;
- use winapi::um::ioapiset::DeviceIoControl;
- use winapi::um::winbase::{FILE_FLAG_BACKUP_SEMANTICS, FILE_FLAG_OPEN_REPARSE_POINT};
- use winapi::um::winioctl::FSCTL_SET_REPARSE_POINT;
- use winapi::um::winnt::{
- FILE_SHARE_DELETE, FILE_SHARE_READ, FILE_SHARE_WRITE, GENERIC_WRITE,
- IO_REPARSE_TAG_MOUNT_POINT, MAXIMUM_REPARSE_DATA_BUFFER_SIZE, WCHAR,
- };
-
- #[allow(non_snake_case)]
- #[repr(C)]
- struct REPARSE_MOUNTPOINT_DATA_BUFFER {
- ReparseTag: DWORD,
- ReparseDataLength: DWORD,
- Reserved: WORD,
- ReparseTargetLength: WORD,
- ReparseTargetMaximumLength: WORD,
- Reserved1: WORD,
- ReparseTarget: WCHAR,
- }
-
- fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
- Ok(s.as_ref().encode_wide().chain(Some(0)).collect())
- }
-
- // We're using low-level APIs to create the junction, and these are more
- // picky about paths. For example, forward slashes cannot be used as a
- // path separator, so we should try to canonicalize the path first.
- let target = fs::canonicalize(target)?;
-
- fs::create_dir(junction)?;
-
- let path = to_u16s(junction)?;
-
- unsafe {
- let h = CreateFileW(
- path.as_ptr(),
- GENERIC_WRITE,
- FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
- ptr::null_mut(),
- OPEN_EXISTING,
- FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
- ptr::null_mut(),
- );
-
- #[repr(C, align(8))]
- struct Align8<T>(T);
- let mut data = Align8([0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE as usize]);
- let db = data.0.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
- let buf = core::ptr::addr_of_mut!((*db).ReparseTarget) as *mut u16;
- let mut i = 0;
- // FIXME: this conversion is very hacky
- let v = br"\??\";
- let v = v.iter().map(|x| *x as u16);
- for c in v.chain(target.as_os_str().encode_wide().skip(4)) {
- *buf.offset(i) = c;
- i += 1;
- }
- *buf.offset(i) = 0;
- i += 1;
- (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
- (*db).ReparseTargetMaximumLength = (i * 2) as WORD;
- (*db).ReparseTargetLength = ((i - 1) * 2) as WORD;
- (*db).ReparseDataLength = (*db).ReparseTargetLength as DWORD + 12;
-
- let mut ret = 0;
- let res = DeviceIoControl(
- h as *mut _,
- FSCTL_SET_REPARSE_POINT,
- db.cast(),
- (*db).ReparseDataLength + 8,
- ptr::null_mut(),
- 0,
- &mut ret,
- ptr::null_mut(),
- );
-
- let out = if res == 0 { Err(io::Error::last_os_error()) } else { Ok(()) };
- CloseHandle(h);
- out
- }
+ junction::create(&target, &junction)
}
}
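
With the hand-rolled reparse-point code removed, the whole Windows branch reduces to one call into the `junction` crate. A small sketch of that call in isolation, with made-up paths, assuming the crate's `create(target, junction) -> io::Result<()>` API as used in the hunk above:

```rust
// Windows-only sketch: create a directory junction with the `junction` crate,
// mirroring the new symlink_dir_inner body. Paths here are arbitrary examples.
#[cfg(windows)]
fn make_junction() -> std::io::Result<()> {
    let target = std::path::Path::new(r"C:\work\rust\build\x86_64-pc-windows-msvc");
    let link = std::path::Path::new(r"C:\work\rust\build\host");
    // junction::create(target, junction) replaces the old CreateFileW +
    // DeviceIoControl(FSCTL_SET_REPARSE_POINT) sequence removed above.
    junction::create(target, link)
}
```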
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-illumos/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-illumos/Dockerfile
index 4e46bdee5..2089bf387 100644
--- a/src/ci/docker/host-x86_64/dist-x86_64-illumos/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-x86_64-illumos/Dockerfile
@@ -9,10 +9,10 @@ RUN bash /tmp/cross-apt-packages.sh
# Required for cross-build gcc
RUN apt-get update && \
apt-get install -y --no-install-recommends \
- libgmp-dev \
- libmpfr-dev \
- libmpc-dev \
- && rm -rf /var/lib/apt/lists/*
+ libgmp-dev \
+ libmpfr-dev \
+ libmpc-dev \
+ && rm -rf /var/lib/apt/lists/*
COPY scripts/illumos-toolchain.sh /tmp/
@@ -28,6 +28,7 @@ RUN /scripts/cmake.sh
ENV \
AR_x86_64_unknown_illumos=x86_64-illumos-ar \
+ RANLIB_x86_64_unknown_illumos=x86_64-illumos-ranlib \
CC_x86_64_unknown_illumos=x86_64-illumos-gcc \
CXX_x86_64_unknown_illumos=x86_64-illumos-g++
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
index 5feba4e06..04fdb15f5 100644
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
@@ -54,8 +54,8 @@ COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
RUN ./build-clang.sh
ENV CC=clang CXX=clang++
-# rustc-perf version from 2022-07-22
-ENV PERF_COMMIT 3c253134664fdcba862c539d37f0de18557a9a4c
+# rustc-perf version from 2023-03-15
+ENV PERF_COMMIT 9dfaa35193154b690922347ee1141a06ec87a199
RUN curl -LS -o perf.zip https://github.com/rust-lang/rustc-perf/archive/$PERF_COMMIT.zip && \
unzip perf.zip && \
mv rustc-perf-$PERF_COMMIT rustc-perf && \
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh b/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh
index 9abfd4e97..9b274cc27 100755
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh
+++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh
@@ -4,7 +4,7 @@ set -ex
source shared.sh
-LLVM=llvmorg-15.0.0
+LLVM=llvmorg-16.0.0
mkdir llvm-project
cd llvm-project
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh b/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh
index 9932b2505..41ca1385c 100755
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh
+++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh
@@ -3,7 +3,7 @@ set -ex
source shared.sh
-GCC=7.5.0
+GCC=8.5.0
curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.xz | xzcat | tar xf -
cd gcc-$GCC
diff --git a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile
index d9e583862..b5abf6564 100644
--- a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile
+++ b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile
@@ -16,6 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
zlib1g-dev \
lib32z1-dev \
xz-utils \
+ mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
diff --git a/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile b/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile
index 889a586b3..34b93be41 100644
--- a/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile
+++ b/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile
@@ -8,6 +8,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
file \
curl \
ca-certificates \
+ python2.7 \
python3 \
python3-pip \
python3-pkg-resources \
@@ -30,4 +31,6 @@ RUN pip3 install --no-deps --no-cache-dir --require-hashes -r /tmp/reuse-require
COPY host-x86_64/mingw-check/validate-toolstate.sh /scripts/
COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/
-ENV SCRIPT python3 ../x.py test --stage 0 src/tools/tidy tidyselftest
+# NOTE: intentionally uses python2 for x.py so we can test it still works.
+# validate-toolstate only runs in our CI, so it's ok for it to only support python3.
+ENV SCRIPT python2.7 ../x.py test --stage 0 src/tools/tidy tidyselftest
diff --git a/src/ci/docker/host-x86_64/mingw-check/Dockerfile b/src/ci/docker/host-x86_64/mingw-check/Dockerfile
index 98bd90210..515890aef 100644
--- a/src/ci/docker/host-x86_64/mingw-check/Dockerfile
+++ b/src/ci/docker/host-x86_64/mingw-check/Dockerfile
@@ -52,4 +52,6 @@ ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
reuse lint && \
# Runs checks to ensure that there are no ES5 issues in our JS code.
es-check es6 ../src/librustdoc/html/static/js/*.js && \
- eslint -c ../src/librustdoc/html/static/.eslintrc.js ../src/librustdoc/html/static/js/*.js
+ eslint -c ../src/librustdoc/html/static/.eslintrc.js ../src/librustdoc/html/static/js/*.js && \
+ eslint -c ../src/tools/rustdoc-js/.eslintrc.js ../src/tools/rustdoc-js/tester.js && \
+ eslint -c ../src/tools/rustdoc-gui/.eslintrc.js ../src/tools/rustdoc-gui/tester.js
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile
index 7e640c49f..2217e6ee7 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile
@@ -24,6 +24,6 @@ RUN sh /scripts/sccache.sh
# We are disabling CI LLVM since distcheck is an offline build.
ENV NO_DOWNLOAD_CI_LLVM 1
-ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --set rust.ignore-git=false
+ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --set rust.omit-git-hash=false
ENV SCRIPT python3 ../x.py --stage 2 test distcheck
ENV DIST_SRC 1
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile
index b99a0886b..21dcf29b4 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
zlib1g-dev \
xz-utils \
nodejs \
+ mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/sccache.sh /scripts/
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
index db6032f87..cfb638e8b 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
@@ -2,7 +2,6 @@ FROM ubuntu:22.04
ARG DEBIAN_FRONTEND=noninteractive
-# NOTE: intentionally installs both python2 and python3 so we can test support for both.
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
gcc-multilib \
@@ -11,8 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
file \
curl \
ca-certificates \
- python2.7 \
- python3 \
+ python3.11 \
git \
cmake \
sudo \
@@ -25,6 +23,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
zlib1g-dev \
xz-utils \
nodejs \
+ mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
# Install powershell (universal package) so we can test x.ps1 on Linux
@@ -62,6 +61,4 @@ ENV SCRIPT ../x.py --stage 2 test --exclude src/tools/tidy && \
# work.
#
../x.ps1 --stage 2 test tests/ui --pass=check \
- --host='' --target=i686-unknown-linux-gnu && \
- # Run tidy at the very end, after all the other tests.
- python2.7 ../x.py --stage 2 test src/tools/tidy
+ --host='' --target=i686-unknown-linux-gnu
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
index 5219247cc..fb5037e3b 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
@@ -25,6 +25,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
zlib1g-dev \
xz-utils \
nodejs \
+ mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
# Install powershell (universal package) so we can test x.ps1 on Linux
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version
index 9c550b2d7..7092c7c46 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version
@@ -1 +1 @@
-0.14.4 \ No newline at end of file
+0.15.0 \ No newline at end of file
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile
index 5b9581f72..fbec368c9 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile
@@ -16,6 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libssl-dev \
pkg-config \
xz-utils \
+ mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/sccache.sh /scripts/
diff --git a/src/ci/docker/scripts/emscripten.sh b/src/ci/docker/scripts/emscripten.sh
index 56dc96283..3f5e2c6ff 100644
--- a/src/ci/docker/scripts/emscripten.sh
+++ b/src/ci/docker/scripts/emscripten.sh
@@ -20,5 +20,5 @@ exit 1
git clone https://github.com/emscripten-core/emsdk.git /emsdk-portable
cd /emsdk-portable
-hide_output ./emsdk install 1.39.20
-./emsdk activate 1.39.20
+hide_output ./emsdk install 2.0.5
+./emsdk activate 2.0.5
diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml
index c594288dc..bfca7b3ab 100644
--- a/src/ci/github-actions/ci.yml
+++ b/src/ci/github-actions/ci.yml
@@ -73,18 +73,26 @@ x--expand-yaml-anchors--remove:
- &base-job
env: {}
- - &job-linux-xl
+ - &job-linux-8c
+ os: ubuntu-20.04-8core-32gb
+ <<: *base-job
+
+ - &job-linux-16c
os: ubuntu-20.04-16core-64gb
<<: *base-job
- &job-macos-xl
- os: macos-12-xl
+ os: macos-latest # We use the standard runner for now
<<: *base-job
- - &job-windows-xl
+ - &job-windows-8c
os: windows-2019-8core-32gb
<<: *base-job
+ - &job-windows-16c
+ os: windows-2019-16core-64gb
+ <<: *base-job
+
- &job-aarch64-linux
os: [self-hosted, ARM64, linux]
@@ -284,7 +292,7 @@ jobs:
permissions:
actions: write # for rust-lang/simpleinfra/github-actions/cancel-outdated-builds
<<: *base-ci-job
- name: PR
+ name: PR - ${{ matrix.name }}
env:
<<: [*shared-ci-variables, *public-variables]
if: github.event_name == 'pull_request'
@@ -293,26 +301,26 @@ jobs:
matrix:
include:
- name: mingw-check
- <<: *job-linux-xl
+ <<: *job-linux-16c
tidy: false
- name: mingw-check-tidy
- <<: *job-linux-xl
+ <<: *job-linux-16c
tidy: true
- name: x86_64-gnu-llvm-14
- <<: *job-linux-xl
+ <<: *job-linux-16c
tidy: false
- name: x86_64-gnu-tools
- <<: *job-linux-xl
+ <<: *job-linux-16c
tidy: false
auto:
permissions:
actions: write # for rust-lang/simpleinfra/github-actions/cancel-outdated-builds
<<: *base-ci-job
- name: auto
+ name: auto - ${{ matrix.name }}
env:
<<: [*shared-ci-variables, *prod-variables]
if: github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'
@@ -327,103 +335,103 @@ jobs:
<<: *job-aarch64-linux
- name: arm-android
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: armhf-gnu
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-aarch64-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-android
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-arm-linux
- <<: *job-linux-xl
+ <<: *job-linux-16c
- name: dist-armhf-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-armv7-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-i586-gnu-i586-i686-musl
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-i686-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-mips-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-mips64-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-mips64el-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-mipsel-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-powerpc-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-powerpc64-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-powerpc64le-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-riscv64-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-s390x-linux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-various-1
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-various-2
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-x86_64-freebsd
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-x86_64-illumos
- <<: *job-linux-xl
+ <<: *job-linux-8c
- &dist-x86_64-linux
name: dist-x86_64-linux
- <<: *job-linux-xl
+ <<: *job-linux-16c
- name: dist-x86_64-linux-alt
env:
IMAGE: dist-x86_64-linux
- <<: *job-linux-xl
+ <<: *job-linux-16c
- name: dist-x86_64-musl
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: dist-x86_64-netbsd
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: i686-gnu
- <<: *job-linux-xl
+ <<: *job-linux-16c
- name: i686-gnu-nopt
- <<: *job-linux-xl
+ <<: *job-linux-16c
- name: mingw-check
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: test-various
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: wasm32
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu
- <<: *job-linux-xl
+ <<: *job-linux-8c
# This job ensures commits landing on nightly still pass the full
# test suite on the stable channel. There are some UI tests that
@@ -438,39 +446,39 @@ jobs:
# could cause failures when `dev: 1` in `stage0.txt`, and running
# this on stable is useless.
CI_ONLY_WHEN_CHANNEL: nightly
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-aux
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-debug
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-distcheck
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-llvm-15
env:
RUST_BACKTRACE: 1
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-llvm-14
env:
RUST_BACKTRACE: 1
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-llvm-14-stage1
env:
RUST_BACKTRACE: 1
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-nopt
- <<: *job-linux-xl
+ <<: *job-linux-8c
- name: x86_64-gnu-tools
env:
DEPLOY_TOOLSTATES_JSON: toolstates-linux.json
- <<: *job-linux-xl
+ <<: *job-linux-8c
####################
# macOS Builders #
@@ -572,38 +580,38 @@ jobs:
env:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-profiler
SCRIPT: make ci-subset-1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: x86_64-msvc-2
env:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-profiler
SCRIPT: make ci-subset-2
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: i686-msvc-1
env:
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
SCRIPT: make ci-subset-1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: i686-msvc-2
env:
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
SCRIPT: make ci-subset-2
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: x86_64-msvc-cargo
env:
SCRIPT: python x.py --stage 2 test src/tools/cargotest src/tools/cargo
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: x86_64-msvc-tools
env:
SCRIPT: src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstate/toolstates.json
DEPLOY_TOOLSTATES_JSON: toolstates-windows.json
- <<: *job-windows-xl
+ <<: *job-windows-8c
# 32/64-bit MinGW builds.
#
@@ -629,7 +637,7 @@ jobs:
# incompatible with LLVM downloads today).
NO_DOWNLOAD_CI_LLVM: 1
CUSTOM_MINGW: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: i686-mingw-2
env:
@@ -639,7 +647,7 @@ jobs:
# incompatible with LLVM downloads today).
NO_DOWNLOAD_CI_LLVM: 1
CUSTOM_MINGW: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: x86_64-mingw-1
env:
@@ -651,7 +659,7 @@ jobs:
# incompatible with LLVM downloads today).
NO_DOWNLOAD_CI_LLVM: 1
CUSTOM_MINGW: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: x86_64-mingw-2
env:
@@ -663,7 +671,7 @@ jobs:
# incompatible with LLVM downloads today).
NO_DOWNLOAD_CI_LLVM: 1
CUSTOM_MINGW: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: dist-x86_64-msvc
env:
@@ -675,7 +683,7 @@ jobs:
--enable-profiler
SCRIPT: PGO_HOST=x86_64-pc-windows-msvc python src/ci/stage-build.py python x.py dist bootstrap --include-default-paths
DIST_REQUIRE_ALL_TOOLS: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: dist-i686-msvc
env:
@@ -687,7 +695,7 @@ jobs:
--enable-profiler
SCRIPT: python x.py dist bootstrap --include-default-paths
DIST_REQUIRE_ALL_TOOLS: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: dist-aarch64-msvc
env:
@@ -701,7 +709,7 @@ jobs:
# Hack around this SDK version, because it doesn't work with clang.
# See https://github.com/rust-lang/rust/issues/88796
WINDOWS_SDK_20348_HACK: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: dist-i686-mingw
env:
@@ -715,7 +723,7 @@ jobs:
SCRIPT: python x.py dist bootstrap --include-default-paths
CUSTOM_MINGW: 1
DIST_REQUIRE_ALL_TOOLS: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: dist-x86_64-mingw
env:
@@ -729,19 +737,19 @@ jobs:
NO_DOWNLOAD_CI_LLVM: 1
CUSTOM_MINGW: 1
DIST_REQUIRE_ALL_TOOLS: 1
- <<: *job-windows-xl
+ <<: *job-windows-8c
- name: dist-x86_64-msvc-alt
env:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-extended --enable-profiler
SCRIPT: python x.py dist bootstrap --include-default-paths
- <<: *job-windows-xl
+ <<: *job-windows-8c
try:
permissions:
actions: write # for rust-lang/simpleinfra/github-actions/cancel-outdated-builds
<<: *base-ci-job
- name: try
+ name: try - ${{ matrix.name }}
env:
<<: [*shared-ci-variables, *prod-variables]
if: github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'
@@ -750,7 +758,7 @@ jobs:
include:
- &dist-x86_64-linux
name: dist-x86_64-linux
- <<: *job-linux-xl
+ <<: *job-linux-16c
master:
name: master
diff --git a/src/ci/github-actions/problem_matchers.json b/src/ci/github-actions/problem_matchers.json
new file mode 100644
index 000000000..37561924b
--- /dev/null
+++ b/src/ci/github-actions/problem_matchers.json
@@ -0,0 +1,15 @@
+{
+ "problemMatcher": [
+ {
+ "owner": "tidy-error-file-line",
+ "pattern": [
+ {
+ "regexp": "^tidy error: /checkout/(.+):(\\d+): (.+)$",
+ "file": 1,
+ "line": 2,
+ "message": 3
+ }
+ ]
+ }
+ ]
+}
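
This matcher is registered via `::add-matcher::src/ci/github-actions/problem_matchers.json` in run-build-from-ci.sh later in this diff, so tidy output becomes inline GitHub annotations. A small sketch of what the regular expression captures, written with the `regex` crate and a made-up sample line:

```rust
// Sketch of the problem matcher's capture groups; the sample path and message
// are invented for illustration.
use regex::Regex;

fn main() {
    let matcher = Regex::new(r"^tidy error: /checkout/(.+):(\d+): (.+)$").unwrap();
    let line = "tidy error: /checkout/src/tools/tidy/src/lib.rs:42: line longer than 100 chars";
    let caps = matcher.captures(line).expect("sample line should match");
    // GitHub maps group 1 to the file, group 2 to the line, group 3 to the message.
    assert_eq!(&caps[1], "src/tools/tidy/src/lib.rs");
    assert_eq!(&caps[2], "42");
    assert_eq!(&caps[3], "line longer than 100 chars");
}
```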
diff --git a/src/ci/run.sh b/src/ci/run.sh
index efeb850cd..3056d9fc0 100755
--- a/src/ci/run.sh
+++ b/src/ci/run.sh
@@ -58,6 +58,12 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-locked-deps"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-cargo-native-static"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.codegen-units-std=1"
+# rust-lang/promote-release will recompress CI artifacts, and while we care
+# about the per-commit artifact sizes, it's not as important that they're
+# highly compressed as it is that the process is fast. Best compression
+# generally implies single-threaded compression which results in wasting most
+# of our CPU resources.
+RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set dist.compression-profile=balanced"
# When building for mingw, limit the number of parallel linker jobs during
# the LLVM build, as not to run out of memory.
diff --git a/src/ci/scripts/collect-cpu-stats.sh b/src/ci/scripts/collect-cpu-stats.sh
index 853b4628f..44875b54d 100755
--- a/src/ci/scripts/collect-cpu-stats.sh
+++ b/src/ci/scripts/collect-cpu-stats.sh
@@ -6,4 +6,5 @@
set -euo pipefail
IFS=$'\n\t'
-python3 src/ci/cpu-usage-over-time.py &> cpu-usage.csv &
+mkdir -p build
+python3 src/ci/cpu-usage-over-time.py &> build/cpu-usage.csv &
diff --git a/src/ci/scripts/install-awscli.sh b/src/ci/scripts/install-awscli.sh
index 3d8f0de7a..aa62407ea 100755
--- a/src/ci/scripts/install-awscli.sh
+++ b/src/ci/scripts/install-awscli.sh
@@ -10,15 +10,14 @@
#
# Before compressing please make sure all the wheels end with `-none-any.whl`.
# If that's not the case you'll need to remove the non-cross-platform ones and
-# replace them with the .tar.gz downloaded from https://pypi.org. Also make
-# sure it's possible to call this script with both Python 2 and Python 3.
+# replace them with the .tar.gz downloaded from https://pypi.org.
set -euo pipefail
IFS=$'\n\t'
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
-MIRROR="${MIRRORS_BASE}/2019-07-27-awscli.tar"
+MIRROR="${MIRRORS_BASE}/2023-04-28-awscli.tar"
DEPS_DIR="/tmp/awscli-deps"
pip="pip"
@@ -29,6 +28,8 @@ if isLinux; then
sudo apt-get install -y python3-setuptools python3-wheel
ciCommandAddPath "${HOME}/.local/bin"
+elif isMacOS; then
+ pip="pip3"
fi
mkdir -p "${DEPS_DIR}"
diff --git a/src/ci/scripts/run-build-from-ci.sh b/src/ci/scripts/run-build-from-ci.sh
index c02117f45..55e75800d 100755
--- a/src/ci/scripts/run-build-from-ci.sh
+++ b/src/ci/scripts/run-build-from-ci.sh
@@ -10,6 +10,8 @@ source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
export CI="true"
export SRC=.
+echo "::add-matcher::src/ci/github-actions/problem_matchers.json"
+
# Remove any preexisting rustup installation since it can interfere
# with the cargotest step and its auto-detection of things like Clippy in
# the environment
diff --git a/src/ci/scripts/upload-artifacts.sh b/src/ci/scripts/upload-artifacts.sh
index ffa1859fc..9755edb6d 100755
--- a/src/ci/scripts/upload-artifacts.sh
+++ b/src/ci/scripts/upload-artifacts.sh
@@ -23,7 +23,7 @@ if [[ "${DEPLOY-0}" -eq "1" ]] || [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then
fi
# CPU usage statistics.
-cp cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv"
+cp build/cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv"
# Build metrics generated by x.py.
cp "${build_dir}/metrics.json" "${upload_dir}/metrics-${CI_JOB_NAME}.json"
diff --git a/src/ci/stage-build.py b/src/ci/stage-build.py
index bd8fd524a..7cd5e88f6 100644
--- a/src/ci/stage-build.py
+++ b/src/ci/stage-build.py
@@ -175,8 +175,8 @@ class WindowsPipeline(Pipeline):
return super().rustc_stage_2().with_suffix(".exe")
def build_rustc_perf(self):
- # rustc-perf version from 2022-07-22
- perf_commit = "3c253134664fdcba862c539d37f0de18557a9a4c"
+ # rustc-perf version from 2023-03-15
+ perf_commit = "9dfaa35193154b690922347ee1141a06ec87a199"
rustc_perf_zip_path = self.opt_artifacts() / "perf.zip"
def download_rustc_perf():
@@ -727,7 +727,7 @@ def record_metrics(pipeline: Pipeline, timer: Timer):
metrics = load_last_metrics(pipeline.metrics_path())
if metrics is None:
return
- llvm_steps = tuple(metrics.find_all_by_type("bootstrap::native::Llvm"))
+ llvm_steps = tuple(metrics.find_all_by_type("bootstrap::llvm::Llvm"))
assert len(llvm_steps) > 0
llvm_duration = sum(step.duration for step in llvm_steps)
@@ -798,14 +798,16 @@ def execute_build_pipeline(timer: Timer, pipeline: Pipeline, final_build_args: L
"--llvm-profile-use",
pipeline.llvm_profile_merged_file(),
"--llvm-bolt-profile-generate",
+ "--rust-profile-use",
+ pipeline.rustc_profile_merged_file()
])
record_metrics(pipeline, rustc_build)
with stage3.section("Gather profiles"):
gather_llvm_bolt_profiles(pipeline)
+ # LLVM is not being cleared here, we want to reuse the previous build
print_free_disk_space(pipeline)
- clear_llvm_files(pipeline)
final_build_args += [
"--llvm-bolt-profile-use",
pipeline.llvm_bolt_profile_merged_file()
diff --git a/src/doc/book/COPYRIGHT b/src/doc/book/COPYRIGHT
index dfe614df9..0fc3ea43f 100644
--- a/src/doc/book/COPYRIGHT
+++ b/src/doc/book/COPYRIGHT
@@ -1,290 +1,3 @@
-Short version for non-lawyers:
-
-The Rust Project is dual-licensed under Apache 2.0 and MIT
-terms.
-
-
-Longer version:
-
-The Rust Project is copyright 2010, The Rust Project
-Developers.
-
-Licensed under the Apache License, Version 2.0
-<LICENSE-APACHE or
-https://www.apache.org/licenses/LICENSE-2.0> or the MIT
-license <LICENSE-MIT or https://opensource.org/licenses/MIT>,
-at your option. All files in the project carrying such
-notice may not be copied, modified, or distributed except
-according to those terms.
-
-
-The Rust Project includes packages written by third parties.
-The following third party packages are included, and carry
-their own copyright notices and license terms:
-
-* The src/rt/miniz.c file, carrying an implementation of
- RFC1950/RFC1951 DEFLATE, by Rich Geldreich
- <richgel99@gmail.com>. All uses of this file are
- permitted by the embedded "unlicense" notice
- (effectively: public domain with warranty disclaimer).
-
-* LLVM. Code for this package is found in src/llvm.
-
- Copyright (c) 2003-2013 University of Illinois at
- Urbana-Champaign. All rights reserved.
-
- Developed by:
-
- LLVM Team
-
- University of Illinois at Urbana-Champaign
-
- https://llvm.org
-
- Permission is hereby granted, free of charge, to any
- person obtaining a copy of this software and associated
- documentation files (the "Software"), to deal with the
- Software without restriction, including without
- limitation the rights to use, copy, modify, merge,
- publish, distribute, sublicense, and/or sell copies of
- the Software, and to permit persons to whom the Software
- is furnished to do so, subject to the following
- conditions:
-
- * Redistributions of source code must retain the
- above copyright notice, this list of conditions
- and the following disclaimers.
-
- * Redistributions in binary form must reproduce the
- above copyright notice, this list of conditions
- and the following disclaimers in the documentation
- and/or other materials provided with the
- distribution.
-
- * Neither the names of the LLVM Team, University of
- Illinois at Urbana-Champaign, nor the names of its
- contributors may be used to endorse or promote
- products derived from this Software without
- specific prior written permission.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
- ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
- TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
- SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE
- FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
- OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- OTHER DEALINGS WITH THE SOFTWARE.
-
-* Additional libraries included in LLVM carry separate
- BSD-compatible licenses. See src/llvm/LICENSE.txt for
- details.
-
-* compiler-rt, in src/compiler-rt is dual licensed under
- LLVM's license and MIT:
-
- Copyright (c) 2009-2014 by the contributors listed in
- CREDITS.TXT
-
- All rights reserved.
-
- Developed by:
-
- LLVM Team
-
- University of Illinois at Urbana-Champaign
-
- https://llvm.org
-
- Permission is hereby granted, free of charge, to any
- person obtaining a copy of this software and associated
- documentation files (the "Software"), to deal with the
- Software without restriction, including without
- limitation the rights to use, copy, modify, merge,
- publish, distribute, sublicense, and/or sell copies of
- the Software, and to permit persons to whom the Software
- is furnished to do so, subject to the following
- conditions:
-
- * Redistributions of source code must retain the
- above copyright notice, this list of conditions
- and the following disclaimers.
-
- * Redistributions in binary form must reproduce the
- above copyright notice, this list of conditions
- and the following disclaimers in the documentation
- and/or other materials provided with the
- distribution.
-
- * Neither the names of the LLVM Team, University of
- Illinois at Urbana-Champaign, nor the names of its
- contributors may be used to endorse or promote
- products derived from this Software without
- specific prior written permission.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
- ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
- TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
- SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE
- FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
- OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- OTHER DEALINGS WITH THE SOFTWARE.
-
- ========================================================
-
- Copyright (c) 2009-2014 by the contributors listed in
- CREDITS.TXT
-
- Permission is hereby granted, free of charge, to any
- person obtaining a copy of this software and associated
- documentation files (the "Software"), to deal in the
- Software without restriction, including without
- limitation the rights to use, copy, modify, merge,
- publish, distribute, sublicense, and/or sell copies of
- the Software, and to permit persons to whom the Software
- is furnished to do so, subject to the following
- conditions:
-
- The above copyright notice and this permission notice
- shall be included in all copies or substantial portions
- of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
- ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
- TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
- SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
- CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
- IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- DEALINGS IN THE SOFTWARE.
-
-* Portions of the FFI code for interacting with the native ABI
- is derived from the Clay programming language, which carries
- the following license.
-
- Copyright (C) 2008-2010 Tachyon Technologies.
- All rights reserved.
-
- Redistribution and use in source and binary forms, with
- or without modification, are permitted provided that the
- following conditions are met:
-
- 1. Redistributions of source code must retain the above
- copyright notice, this list of conditions and the
- following disclaimer.
-
- 2. Redistributions in binary form must reproduce the
- above copyright notice, this list of conditions and
- the following disclaimer in the documentation and/or
- other materials provided with the distribution.
-
- THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR
- IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
- DEVELOPERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
- INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
- OF SUCH DAMAGE.
-
-* libbacktrace, under src/libbacktrace:
-
- Copyright (C) 2012-2014 Free Software Foundation, Inc.
- Written by Ian Lance Taylor, Google.
-
- Redistribution and use in source and binary forms, with
- or without modification, are permitted provided that the
- following conditions are met:
-
- (1) Redistributions of source code must retain the
- above copyright notice, this list of conditions and
- the following disclaimer.
-
- (2) Redistributions in binary form must reproduce
- the above copyright notice, this list of conditions
- and the following disclaimer in the documentation
- and/or other materials provided with the
- distribution.
-
- (3) The name of the author may not be used to
- endorse or promote products derived from this
- software without specific prior written permission.
-
- THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND
- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
- AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
- NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
- INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
- OF SUCH DAMAGE. */
-
-* jemalloc, under src/jemalloc:
-
- Copyright (C) 2002-2014 Jason Evans
- <jasone@canonware.com>. All rights reserved.
- Copyright (C) 2007-2012 Mozilla Foundation.
- All rights reserved.
- Copyright (C) 2009-2014 Facebook, Inc.
- All rights reserved.
-
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice(s),
- this list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice(s),
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S)
- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
- INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S)
- BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
- IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
- OF SUCH DAMAGE.
-
-* Additional copyright may be retained by contributors other
- than Mozilla, the Rust Project Developers, or the parties
- enumerated in this file. Such copyright can be determined
- on a case-by-case basis by examining the author of each
- portion of a file in the revision-control commit records
- of the project, or by consulting representative comments
- claiming copyright ownership for a file.
-
- For example, the text:
-
- "Copyright (c) 2011 Google Inc."
-
- appears in some files, and these files thereby denote
- that their author and copyright-holder is Google Inc.
-
- In all such cases, the absence of explicit licensing text
- indicates that the contributor chose to license their work
- for distribution under identical terms to those Mozilla
- has chosen for the collective work, enumerated at the top
- of this file. The only difference is the retention of
- copyright itself, held by the contributor.
+This repository is licensed under the Apache License, Version 2.0
+<LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0> or the MIT
+license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
diff --git a/src/doc/book/src/ch15-06-reference-cycles.md b/src/doc/book/src/ch15-06-reference-cycles.md
index bef289202..beb2bc216 100644
--- a/src/doc/book/src/ch15-06-reference-cycles.md
+++ b/src/doc/book/src/ch15-06-reference-cycles.md
@@ -84,7 +84,7 @@ If you uncomment the last `println!` and run the program, Rust will try to
print this cycle with `a` pointing to `b` pointing to `a` and so forth until it
overflows the stack.
-Compared to a real-world program, the consequences creating a reference cycle
+Compared to a real-world program, the consequences of creating a reference cycle
in this example aren’t very dire: right after we create the reference cycle,
the program ends. However, if a more complex program allocated lots of memory
in a cycle and held onto it for a long time, the program would use more memory
diff --git a/src/doc/footer.inc b/src/doc/footer.inc
index 77e151235..504fe5115 100644
--- a/src/doc/footer.inc
+++ b/src/doc/footer.inc
@@ -1,3 +1,4 @@
+<!-- REUSE-IgnoreStart -->
<footer><p>
Copyright &copy; 2011 The Rust Project Developers. Licensed under the
<a href="http://www.apache.org/licenses/LICENSE-2.0">Apache License, Version 2.0</a>
@@ -5,3 +6,4 @@ or the <a href="https://opensource.org/licenses/MIT">MIT license</a>, at your op
</p><p>
This file may not be copied, modified, or distributed except according to those terms.
</p></footer>
+<!-- REUSE-IgnoreEnd -->
diff --git a/src/doc/nomicon/src/ffi.md b/src/doc/nomicon/src/ffi.md
index 684e7125b..55be225de 100644
--- a/src/doc/nomicon/src/ffi.md
+++ b/src/doc/nomicon/src/ffi.md
@@ -258,7 +258,7 @@ pub extern "C" fn hello_from_rust() {
# fn main() {}
```
-The `extern "C"` makes this function adhere to the C calling convention, as discussed above in "[Foreign Calling Conventions]".
+The `extern "C"` makes this function adhere to the C calling convention, as discussed below in "[Foreign Calling Conventions]".
The `no_mangle` attribute turns off Rust's name mangling, so that it has a well defined symbol to link to.
Then, to compile Rust code as a shared library that can be called from C, add the following to your `Cargo.toml`:
diff --git a/src/doc/nomicon/src/subtyping.md b/src/doc/nomicon/src/subtyping.md
index cc48a5970..79b29beb0 100644
--- a/src/doc/nomicon/src/subtyping.md
+++ b/src/doc/nomicon/src/subtyping.md
@@ -335,7 +335,7 @@ we inherited invariance as soon as we put our reference inside an `&mut T`.
As it turns out, the argument for why it's ok for Box (and Vec, Hashmap, etc.) to
be covariant is pretty similar to the argument for why it's ok for
-lifetimes to be covariant: as soon as you try to stuff them in something like a
+references to be covariant: as soon as you try to stuff them in something like a
mutable reference, they inherit invariance and you're prevented from doing anything
bad.
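
A short sketch of the corrected claim, using only the standard library: a `Box` of a longer-lived reference can stand in for a `Box` of a shorter-lived one, while the same substitution behind `&mut` is rejected.

```rust
// Box<T> is covariant in T, so Box<&'static str> coerces to Box<&'a str>.
fn use_box<'a>(b: Box<&'a str>, _bound: &'a str) {
    println!("{}", *b);
}

fn main() {
    let local = String::from("local");
    let boxed: Box<&'static str> = Box::new("static");
    // Fine: covariance lets the 'static reference shrink to the local lifetime.
    use_box(boxed, &local);

    // A hypothetical `fn use_mut<'a>(r: &mut &'a str, _bound: &'a str)` could not
    // be called with `&mut (&'static str)` plus `&local`: &mut T is invariant,
    // so 'a would be forced to 'static and `&local` would not live long enough.
}
```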
diff --git a/src/doc/reference/src/attributes/codegen.md b/src/doc/reference/src/attributes/codegen.md
index 69ad341d1..ab59cd8e7 100644
--- a/src/doc/reference/src/attributes/codegen.md
+++ b/src/doc/reference/src/attributes/codegen.md
@@ -88,9 +88,11 @@ Feature | Implicitly Enables | Description
`avx2` | `avx` | [AVX2] — Advanced Vector Extensions 2
`bmi1` | | [BMI1] — Bit Manipulation Instruction Sets
`bmi2` | | [BMI2] — Bit Manipulation Instruction Sets 2
+`cmpxchg16b`| | [`cmpxchg16b`] — Compare and exchange 16 bytes (128 bits) of data atomically
`fma` | `avx` | [FMA3] — Three-operand fused multiply-add
`fxsr` | | [`fxsave`] and [`fxrstor`] — Save and restore x87 FPU, MMX Technology, and SSE State
`lzcnt` | | [`lzcnt`] — Leading zeros count
+`movbe` | | [`movbe`] — Move data after swapping bytes
`pclmulqdq` | `sse2` | [`pclmulqdq`] — Packed carry-less multiplication quadword
`popcnt` | | [`popcnt`] — Count of bits set to 1
`rdrand` | | [`rdrand`] — Read random number
@@ -115,10 +117,12 @@ Feature | Implicitly Enables | Description
[AVX2]: https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#AVX2
[BMI1]: https://en.wikipedia.org/wiki/Bit_Manipulation_Instruction_Sets
[BMI2]: https://en.wikipedia.org/wiki/Bit_Manipulation_Instruction_Sets#BMI2
+[`cmpxchg16b`]: https://www.felixcloutier.com/x86/cmpxchg8b:cmpxchg16b
[FMA3]: https://en.wikipedia.org/wiki/FMA_instruction_set
[`fxsave`]: https://www.felixcloutier.com/x86/fxsave
[`fxrstor`]: https://www.felixcloutier.com/x86/fxrstor
[`lzcnt`]: https://www.felixcloutier.com/x86/lzcnt
+[`movbe`]: https://www.felixcloutier.com/x86/movbe
[`pclmulqdq`]: https://www.felixcloutier.com/x86/pclmulqdq
[`popcnt`]: https://www.felixcloutier.com/x86/popcnt
[`rdrand`]: https://en.wikipedia.org/wiki/RdRand
diff --git a/src/doc/reference/src/attributes/diagnostics.md b/src/doc/reference/src/attributes/diagnostics.md
index 45f9cc440..506e2848b 100644
--- a/src/doc/reference/src/attributes/diagnostics.md
+++ b/src/doc/reference/src/attributes/diagnostics.md
@@ -49,7 +49,7 @@ check on and off:
```rust
#[warn(missing_docs)]
-pub mod m2{
+pub mod m2 {
#[allow(missing_docs)]
pub mod nested {
// Missing documentation is ignored here
diff --git a/src/doc/reference/src/expressions/loop-expr.md b/src/doc/reference/src/expressions/loop-expr.md
index 204207ee0..c8b93ea39 100644
--- a/src/doc/reference/src/expressions/loop-expr.md
+++ b/src/doc/reference/src/expressions/loop-expr.md
@@ -249,8 +249,27 @@ A `break` expression is only permitted in the body of a loop, and has one of the
> &nbsp;&nbsp; [_BlockExpression_]
Labelled block expressions are exactly like block expressions, except that they allow using `break` expressions within the block.
-Unlike other loops, `break` expressions within a label expression *must* have a label (i.e. the label is not optional).
-Unlike other loops, labelled block expressions *must* begin with a label.
+Unlike loops, `break` expressions within a labelled block expression *must* have a label (i.e. the label is not optional).
+Similarly, labelled block expressions *must* begin with a label.
+
+```rust
+# fn do_thing() {}
+# fn condition_not_met() -> bool { true }
+# fn do_next_thing() {}
+# fn do_last_thing() {}
+let result = 'block: {
+ do_thing();
+ if condition_not_met() {
+ break 'block 1;
+ }
+ do_next_thing();
+ if condition_not_met() {
+ break 'block 2;
+ }
+ do_last_thing();
+ 3
+};
+```
## `continue` expressions
diff --git a/src/doc/reference/src/expressions/struct-expr.md b/src/doc/reference/src/expressions/struct-expr.md
index 8caeff200..8d9154789 100644
--- a/src/doc/reference/src/expressions/struct-expr.md
+++ b/src/doc/reference/src/expressions/struct-expr.md
@@ -73,7 +73,7 @@ drop(y_ref);
```
Struct expressions with curly braces can't be used directly in a [loop] or [if] expression's head, or in the [scrutinee] of an [if let] or [match] expression.
-However, struct expressions can be in used in these situations if they are within another expression, for example inside [parentheses].
+However, struct expressions can be used in these situations if they are within another expression, for example inside [parentheses].
The field names can be decimal integer values to specify indices for constructing tuple structs.
This can be used with base structs to fill out the remaining indices not specified:
diff --git a/src/doc/reference/src/inline-assembly.md b/src/doc/reference/src/inline-assembly.md
index 996b157da..a12f495ff 100644
--- a/src/doc/reference/src/inline-assembly.md
+++ b/src/doc/reference/src/inline-assembly.md
@@ -43,16 +43,15 @@ format_string := STRING_LITERAL / RAW_STRING_LITERAL
dir_spec := "in" / "out" / "lateout" / "inout" / "inlateout"
reg_spec := <register class> / "\"" <explicit register> "\""
operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_"
-reg_operand := dir_spec "(" reg_spec ")" operand_expr
-operand := reg_operand
+reg_operand := [ident "="] dir_spec "(" reg_spec ")" operand_expr
clobber_abi := "clobber_abi(" <abi> *("," <abi>) [","] ")"
option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax" / "raw"
options := "options(" option *("," option) [","] ")"
-asm := "asm!(" format_string *("," format_string) *("," [ident "="] operand) *("," clobber_abi) *("," options) [","] ")"
-global_asm := "global_asm!(" format_string *("," format_string) *("," [ident "="] operand) *("," options) [","] ")"
+operand := reg_operand / clobber_abi / options
+asm := "asm!(" format_string *("," format_string) *("," operand) [","] ")"
+global_asm := "global_asm!(" format_string *("," format_string) *("," operand) [","] ")"
```
-
## Scope
Inline assembly can be used in one of two ways.
@@ -74,8 +73,7 @@ An `asm!` invocation may have one or more template string arguments; an `asm!` w
The expected usage is for each template string argument to correspond to a line of assembly code.
All template string arguments must appear before any other arguments.
-As with format strings, named arguments must appear after positional arguments.
-Explicit [register operands](#register-operands) must appear at the end of the operand list, after named arguments if any.
+As with format strings, positional arguments must appear before named arguments and explicit [register operands](#register-operands).
Explicit register operands cannot be used by placeholders in the template string.
All other named and positional operands must appear at least once in the template string, otherwise a compiler error is generated.
@@ -486,6 +484,29 @@ To avoid undefined behavior, these rules must be followed when using function-sc
> **Note**: As a general rule, the flags covered by `preserves_flags` are those which are *not* preserved when performing a function call.
+### Correctness and Validity
+
+In addition to all of the previous rules, the string argument to `asm!` must ultimately become—
+after all other arguments are evaluated, formatting is performed, and operands are translated—
+assembly that is both syntactically correct and semantically valid for the target architecture.
+The formatting rules allow the compiler to generate assembly with correct syntax.
+Rules concerning operands permit valid translation of Rust operands into and out of `asm!`.
+Adherence to these rules is necessary, but not sufficient, for the final expanded assembly to be
+both correct and valid. For instance:
+
+- arguments may be placed in positions which are syntactically incorrect after formatting
+- an instruction may be correctly written, but given architecturally invalid operands
+- an architecturally unspecified instruction may be assembled into unspecified code
+- a set of instructions, each correct and valid, may cause undefined behavior if placed in immediate succession
+
+As a result, these rules are _non-exhaustive_. The compiler is not required to check the
+correctness and validity of the initial string nor the final assembly that is generated.
+The assembler may check for correctness and validity but is not required to do so.
+When using `asm!`, a typographical error may be sufficient to make a program unsound,
+and the rules for assembly may include thousands of pages of architectural reference manuals.
+Programmers should exercise appropriate care, as invoking this `unsafe` capability comes with
+assuming the responsibility of not violating the rules of both the compiler and the architecture.
+
### Directives Support
Inline assembly supports a subset of the directives supported by both GNU AS and LLVM's internal assembler, given as follows.
@@ -499,12 +520,9 @@ The following directives are guaranteed to be supported by the assembler:
- `.4byte`
- `.8byte`
- `.align`
+- `.alt_entry`
- `.ascii`
- `.asciz`
-- `.alt_entry`
-- `.balign`
-- `.balignl`
-- `.balignw`
- `.balign`
- `.balignl`
- `.balignw`
@@ -520,17 +538,17 @@ The following directives are guaranteed to be supported by the assembler:
- `.eqv`
- `.fill`
- `.float`
-- `.globl`
- `.global`
-- `.lcomm`
+- `.globl`
- `.inst`
+- `.lcomm`
- `.long`
- `.octa`
- `.option`
-- `.private_extern`
- `.p2align`
-- `.pushsection`
- `.popsection`
+- `.private_extern`
+- `.pushsection`
- `.quad`
- `.scl`
- `.section`
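
The reworked operand grammar and the ordering rule above (positional operands, then named operands, then explicit registers) can be illustrated with a minimal x86-64-only sketch; the instructions are chosen only to exercise each operand kind.

```rust
// Sketch for x86-64: positional, named, and explicit-register operands in the
// order the grammar above requires. Intel syntax is the asm! default.
#[cfg(target_arch = "x86_64")]
fn add_via_asm(x: u64, y: u64) -> u64 {
    let out: u64;
    unsafe {
        std::arch::asm!(
            "mov {0}, {x}",
            "add {0}, rcx",
            out(reg) out,   // positional operand, referenced as {0}
            x = in(reg) x,  // named operand, referenced as {x}
            in("rcx") y,    // explicit register operand comes last
        );
    }
    out
}
```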
diff --git a/src/doc/reference/src/items/constant-items.md b/src/doc/reference/src/items/constant-items.md
index bf315932f..85d3e015d 100644
--- a/src/doc/reference/src/items/constant-items.md
+++ b/src/doc/reference/src/items/constant-items.md
@@ -89,6 +89,22 @@ m!(const _: () = (););
// const _: () = ();
```
+## Evaluation
+
+[Free][free] constants are always [evaluated][const_eval] at compile-time to surface
+panics. This happens even within an unused function:
+
+```rust,compile_fail
+// Compile-time panic
+const PANIC: () = std::unimplemented!();
+
+fn unused_generic_function<T>() {
+ // A failing compile-time assertion
+ const _: () = assert!(usize::BITS == 0);
+}
+```
+
+[const_eval]: ../const_eval.md
[associated constant]: ../items/associated-items.md#associated-constants
[constant value]: ../const_eval.md#constant-expressions
[free]: ../glossary.md#free-item
diff --git a/src/doc/reference/src/items/unions.md b/src/doc/reference/src/items/unions.md
index 325b22717..3c6c83d50 100644
--- a/src/doc/reference/src/items/unions.md
+++ b/src/doc/reference/src/items/unions.md
@@ -55,13 +55,13 @@ let f = unsafe { u.f1 };
## Reading and writing union fields
Unions have no notion of an "active field". Instead, every union access just
-interprets the storage at the type of the field used for the access. Reading a
+interprets the storage as the type of the field used for the access. Reading a
union field reads the bits of the union at the field's type. Fields might have a
non-zero offset (except when [the C representation] is used); in that case the
bits starting at the offset of the fields are read. It is the programmer's
responsibility to make sure that the data is valid at the field's type. Failing
to do so results in [undefined behavior]. For example, reading the value `3`
-through of a field of the [boolean type] is undefined behavior. Effectively,
+from a field of the [boolean type] is undefined behavior. Effectively,
writing to and then reading from a union with [the C representation] is
analogous to a [`transmute`] from the type used for writing to the type used for
reading.
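
The reinterpretation rule reads naturally as a worked example; here is a small sketch using the C representation, so both fields start at offset zero:

```rust
// Reading one union field after writing another reinterprets the stored bits,
// which with #[repr(C)] behaves like a transmute between the two field types.
#[repr(C)]
union Bits {
    f: f32,
    u: u32,
}

fn main() {
    let b = Bits { f: 1.0 };
    // It is the programmer's job to ensure the bits are valid at type u32;
    // any bit pattern is a valid u32, so this read is sound.
    let raw = unsafe { b.u };
    assert_eq!(raw, 1.0f32.to_bits());
}
```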
diff --git a/src/doc/reference/src/names/namespaces.md b/src/doc/reference/src/names/namespaces.md
index 14811697c..bb4409b73 100644
--- a/src/doc/reference/src/names/namespaces.md
+++ b/src/doc/reference/src/names/namespaces.md
@@ -52,6 +52,7 @@ The following is a list of namespaces, with their corresponding entities:
* [Generic lifetime parameters]
* Label Namespace
* [Loop labels]
+ * [Block labels]
An example of how overlapping names in different namespaces can be used unambiguously:
@@ -132,6 +133,7 @@ It is still an error for a [`use` import] to shadow another macro, regardless of
[Attribute macros]: ../procedural-macros.md#attribute-macros
[attributes]: ../attributes.md
[bang-style macros]: ../macros.md
+[Block labels]: ../expressions/loop-expr.md#labelled-block-expressions
[boolean]: ../types/boolean.md
[Built-in attributes]: ../attributes.md#built-in-attributes-index
[closure parameters]: ../expressions/closure-expr.md
diff --git a/src/doc/rust-by-example/src/crates/lib.md b/src/doc/rust-by-example/src/crates/lib.md
index 44593f3bb..729ccb890 100644
--- a/src/doc/rust-by-example/src/crates/lib.md
+++ b/src/doc/rust-by-example/src/crates/lib.md
@@ -2,6 +2,8 @@
Let's create a library, and then see how to link it to another crate.
+In `rary.rs`:
+
```rust,ignore
pub fn public_function() {
println!("called rary's `public_function()`");
diff --git a/src/doc/rust-by-example/src/flow_control/match/guard.md b/src/doc/rust-by-example/src/flow_control/match/guard.md
index 63008a743..af81f64c9 100644
--- a/src/doc/rust-by-example/src/flow_control/match/guard.md
+++ b/src/doc/rust-by-example/src/flow_control/match/guard.md
@@ -3,6 +3,7 @@
A `match` *guard* can be added to filter the arm.
```rust,editable
+#[allow(dead_code)]
enum Temperature {
Celsius(i32),
Fahrenheit(i32),
diff --git a/src/doc/rust-by-example/src/hello/print.md b/src/doc/rust-by-example/src/hello/print.md
index 55f6ed520..d578337ad 100644
--- a/src/doc/rust-by-example/src/hello/print.md
+++ b/src/doc/rust-by-example/src/hello/print.md
@@ -39,7 +39,6 @@ fn main() {
println!("Base 16 (hexadecimal): {:x}", 69420); // 10f2c
println!("Base 16 (hexadecimal): {:X}", 69420); // 10F2C
-
// You can right-justify text with a specified width. This will
// output " 1". (Four white spaces and a "1", for a total width of 5.)
println!("{number:>5}", number=1);
@@ -51,7 +50,6 @@ fn main() {
// You can use named arguments in the format specifier by appending a `$`.
println!("{number:0>width$}", number=1, width=5);
-
// Rust even checks to make sure the correct number of arguments are used.
println!("My name is {0}, {1} {0}", "Bond");
// FIXME ^ Add the missing argument: "James"
diff --git a/src/doc/rust-by-example/src/hello/print/fmt.md b/src/doc/rust-by-example/src/hello/print/fmt.md
index 5332b4903..c3c78f6b1 100644
--- a/src/doc/rust-by-example/src/hello/print/fmt.md
+++ b/src/doc/rust-by-example/src/hello/print/fmt.md
@@ -49,17 +49,17 @@ fn main() {
City { name: "Dublin", lat: 53.347778, lon: -6.259722 },
City { name: "Oslo", lat: 59.95, lon: 10.75 },
City { name: "Vancouver", lat: 49.25, lon: -123.1 },
- ].iter() {
- println!("{}", *city);
+ ] {
+ println!("{}", city);
}
for color in [
Color { red: 128, green: 255, blue: 90 },
Color { red: 0, green: 3, blue: 254 },
Color { red: 0, green: 0, blue: 0 },
- ].iter() {
+ ] {
// Switch this to use {} once you've added an implementation
// for fmt::Display.
- println!("{:?}", *color);
+ println!("{:?}", color);
}
}
```
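
The dropped `.iter()` calls rely on arrays implementing `IntoIterator` by value (Rust 1.53 and later); a small standalone sketch of the two iteration styles:

```rust
// Iterating an array by reference versus by value; both compile on current Rust.
fn main() {
    let xs = [1, 2, 3];
    for x in xs.iter() {
        // by reference: `x` is `&i32`, so it is dereferenced to print
        println!("{}", *x);
    }
    for x in xs {
        // by value: `x` is `i32` (Copy), no dereference needed
        println!("{}", x);
    }
}
```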
diff --git a/src/doc/rust-by-example/src/macros.md b/src/doc/rust-by-example/src/macros.md
index 3f12fcc41..f01cf8dc7 100644
--- a/src/doc/rust-by-example/src/macros.md
+++ b/src/doc/rust-by-example/src/macros.md
@@ -16,12 +16,12 @@ macro_rules! say_hello {
// `()` indicates that the macro takes no argument.
() => {
// The macro will expand into the contents of this block.
- println!("Hello!");
+ println!("Hello!")
};
}
fn main() {
- // This call will expand into `println!("Hello");`
+ // This call will expand into `println!("Hello!")`
say_hello!()
}
```
diff --git a/src/doc/rust-by-example/src/primitives/array.md b/src/doc/rust-by-example/src/primitives/array.md
index 3811bb6d7..9cec24d69 100644
--- a/src/doc/rust-by-example/src/primitives/array.md
+++ b/src/doc/rust-by-example/src/primitives/array.md
@@ -63,7 +63,7 @@ fn main() {
}
}
- // Out of bound indexing causes runtime error.
+ // Out of bounds indexing causes a compile-time error.
//println!("{}", xs[5]);
}
```
diff --git a/src/doc/rust-by-example/src/scope/lifetime.md b/src/doc/rust-by-example/src/scope/lifetime.md
index 01c4bf405..68b42d380 100644
--- a/src/doc/rust-by-example/src/scope/lifetime.md
+++ b/src/doc/rust-by-example/src/scope/lifetime.md
@@ -1,6 +1,6 @@
# Lifetimes
-A *lifetime* is a construct the compiler (or more specifically, its *borrow
+A *lifetime* is a construct of the compiler (or more specifically, its *borrow
checker*) uses to ensure all borrows are valid. Specifically, a variable's
lifetime begins when it is created and ends when it is destroyed. While
lifetimes and scopes are often referred to together, they are not the same.
diff --git a/src/doc/rust-by-example/src/scope/raii.md b/src/doc/rust-by-example/src/scope/raii.md
index 7b6bca618..6d94b0713 100644
--- a/src/doc/rust-by-example/src/scope/raii.md
+++ b/src/doc/rust-by-example/src/scope/raii.md
@@ -41,6 +41,8 @@ fn main() {
Of course, we can double check for memory errors using [`valgrind`][valgrind]:
+<!-- REUSE-IgnoreStart -->
+<!-- Prevent REUSE from parsing the copyright statement in the sample code -->
```shell
$ rustc raii.rs && valgrind ./raii
==26873== Memcheck, a memory error detector
@@ -58,6 +60,7 @@ $ rustc raii.rs && valgrind ./raii
==26873== For counts of detected and suppressed errors, rerun with: -v
==26873== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 2 from 2)
```
+<!-- REUSE-IgnoreEnd -->
No leaks here!
diff --git a/src/doc/rust-by-example/src/std/panic.md b/src/doc/rust-by-example/src/std/panic.md
index b22000494..d08d1f4e2 100644
--- a/src/doc/rust-by-example/src/std/panic.md
+++ b/src/doc/rust-by-example/src/std/panic.md
@@ -34,6 +34,8 @@ fn main() {
Let's check that `panic!` doesn't leak memory.
+<!-- REUSE-IgnoreStart -->
+<!-- Prevent REUSE from parsing the copyright statement in the sample code -->
```shell
$ rustc panic.rs && valgrind ./panic
==4401== Memcheck, a memory error detector
@@ -52,3 +54,4 @@ thread '<main>' panicked at 'division by zero', panic.rs:5
==4401== For counts of detected and suppressed errors, rerun with: -v
==4401== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
```
+<!-- REUSE-IgnoreEnd -->
diff --git a/src/doc/rust-by-example/src/std_misc/file/read_lines.md b/src/doc/rust-by-example/src/std_misc/file/read_lines.md
index 641eb972a..216b0181c 100644
--- a/src/doc/rust-by-example/src/std_misc/file/read_lines.md
+++ b/src/doc/rust-by-example/src/std_misc/file/read_lines.md
@@ -1,44 +1,51 @@
# `read_lines`
-## Beginner friendly method
-This method is NOT efficient. It's here for beginners
-who can't understand the efficient method yet.
+## A naive approach
-```rust,no_run
-use std::fs::File;
-use std::io::{ self, BufRead, BufReader };
+This might be a reasonable first attempt at a beginner's
+implementation for reading lines from a file.
-fn read_lines(filename: String) -> io::Lines<BufReader<File>> {
- // Open the file in read-only mode.
- let file = File::open(filename).unwrap();
- // Read the file line by line, and return an iterator of the lines of the file.
- return io::BufReader::new(file).lines();
-}
+```rust,no_run
+use std::fs::read_to_string;
-fn main() {
- // Stores the iterator of lines of the file in lines variable.
- let lines = read_lines("./hosts".to_string());
- // Iterate over the lines of the file, and in this case print them.
- for line in lines {
- println!("{}", line.unwrap());
+fn read_lines(filename: &str) -> Vec<String> {
+ let mut result = Vec::new();
+
+ for line in read_to_string(filename).unwrap().lines() {
+ result.push(line.to_string())
}
+
+ result
}
```
-Running this program simply prints the lines individually.
-```shell
-$ echo -e "127.0.0.1\n192.168.0.1\n" > hosts
-$ rustc read_lines.rs && ./read_lines
-127.0.0.1
-192.168.0.1
+Since the method `lines()` returns an iterator over the lines in the file,
+we can also perform a map inline and collect the results, yielding a more
+concise and fluent expression.
+
+```rust,no_run
+use std::fs::read_to_string;
+
+fn read_lines(filename: &str) -> Vec<String> {
+ read_to_string(filename)
+ .unwrap() // panic on possible file-reading errors
+ .lines() // split the string into an iterator of string slices
+ .map(String::from) // make each slice into a string
+ .collect() // gather them together into a vector
+}
```
-## Efficient method
-The method `lines()` returns an iterator over the lines
-of a file.
+Note that in both examples above, we must convert the `&str` reference
+returned from `lines()` to the owned type `String`, using `.to_string()`
+and `String::from` respectively.
-`File::open` expects a generic, `AsRef<Path>`. That's what
-`read_lines()` expects as input.
+## A more efficient approach
+
+Here we pass ownership of the open `File` to a `BufReader` struct. `BufReader` uses an internal
+buffer to reduce intermediate allocations.
+
+We also update `read_lines` to return an iterator instead of allocating new
+`String` objects in memory for each line.
```rust,no_run
use std::fs::File;
@@ -46,8 +53,8 @@ use std::io::{self, BufRead};
use std::path::Path;
fn main() {
- // File hosts must exist in current path before this produces output
- if let Ok(lines) = read_lines("./hosts") {
+ // File hosts.txt must exist in the current path
+ if let Ok(lines) = read_lines("./hosts.txt") {
// Consumes the iterator, returns an (Optional) String
for line in lines {
if let Ok(ip) = line {
@@ -68,11 +75,15 @@ where P: AsRef<Path>, {
Running this program simply prints the lines individually.
```shell
-$ echo -e "127.0.0.1\n192.168.0.1\n" > hosts
+$ echo -e "127.0.0.1\n192.168.0.1\n" > hosts.txt
$ rustc read_lines.rs && ./read_lines
127.0.0.1
192.168.0.1
```
-This process is more efficient than creating a `String` in memory
-especially working with larger files.
\ No newline at end of file
+(Note that since `File::open` expects a generic `AsRef<Path>` as argument, we define our
+generic `read_lines()` method with the same generic constraint, using the `where` keyword.)
+
+This process is more efficient than creating a `String` in memory with all of the file's
+contents, which can cause performance issues, especially when working with larger files.
+
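(Editorial aside, not part of the patch: the hunks above elide the body of the buffered `read_lines` helper that the "more efficient approach" text refers to. The following is a sketch of roughly what that helper looks like, reconstructed here as an assumption rather than quoted from the file:)

```rust
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;

// Returns an iterator over the lines of the file, wrapped in io::Result
// because opening the file can fail. No `String` holding the whole file
// is ever allocated; `BufReader` reads it incrementally.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    let file = File::open(filename)?;
    Ok(io::BufReader::new(file).lines())
}

fn main() {
    // File hosts.txt must exist in the current path.
    if let Ok(lines) = read_lines("./hosts.txt") {
        for line in lines {
            if let Ok(ip) = line {
                println!("{}", ip);
            }
        }
    }
}
```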
diff --git a/src/doc/rust-by-example/src/unsafe/asm.md b/src/doc/rust-by-example/src/unsafe/asm.md
index 7ad6e0c5e..1a3fab904 100644
--- a/src/doc/rust-by-example/src/unsafe/asm.md
+++ b/src/doc/rust-by-example/src/unsafe/asm.md
@@ -18,11 +18,13 @@ Inline assembly is currently supported on the following architectures:
Let us start with the simplest possible example:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
unsafe {
asm!("nop");
}
+# }
```
This will insert a NOP (no operation) instruction into the assembly generated by the compiler.
@@ -36,6 +38,7 @@ Now inserting an instruction that does nothing is rather boring. Let us do somet
actually acts on data:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let x: u64;
@@ -43,6 +46,7 @@ unsafe {
asm!("mov {}, 5", out(reg) x);
}
assert_eq!(x, 5);
+# }
```
This will write the value `5` into the `u64` variable `x`.
@@ -61,6 +65,7 @@ the template and will read the variable from there after the inline assembly fin
Let us see another example that also uses an input:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let i: u64 = 3;
@@ -74,6 +79,7 @@ unsafe {
);
}
assert_eq!(o, 8);
+# }
```
This will add `5` to the input in variable `i` and write the result to variable `o`.
@@ -97,6 +103,7 @@ readability, and allows reordering instructions without changing the argument or
We can further refine the above example to avoid the `mov` instruction:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let mut x: u64 = 3;
@@ -104,6 +111,7 @@ unsafe {
asm!("add {0}, 5", inout(reg) x);
}
assert_eq!(x, 8);
+# }
```
We can see that `inout` is used to specify an argument that is both input and output.
@@ -112,6 +120,7 @@ This is different from specifying an input and output separately in that it is g
It is also possible to specify different variables for the input and output parts of an `inout` operand:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let x: u64 = 3;
@@ -120,6 +129,7 @@ unsafe {
asm!("add {0}, 5", inout(reg) x => y);
}
assert_eq!(y, 8);
+# }
```
## Late output operands
@@ -135,6 +145,7 @@ There is also a `inlateout` variant of this specifier.
Here is an example where `inlateout` *cannot* be used in `release` mode or other optimized cases:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let mut a: u64 = 4;
@@ -150,6 +161,7 @@ unsafe {
);
}
assert_eq!(a, 12);
+# }
```
The above could work well in unoptimized cases (`Debug` mode), but if you want optimized performance (`release` mode or other optimized cases), it could not work.
@@ -158,6 +170,7 @@ That is because in optimized cases, the compiler is free to allocate the same re
However the following example can use `inlateout` since the output is only modified after all input registers have been read:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let mut a: u64 = 4;
@@ -166,6 +179,7 @@ unsafe {
asm!("add {0}, {1}", inlateout(reg) a, in(reg) b);
}
assert_eq!(a, 8);
+# }
```
As you can see, this assembly fragment will still work correctly if `a` and `b` are assigned to the same register.
@@ -177,12 +191,14 @@ Therefore, Rust inline assembly provides some more specific constraint specifier
While `reg` is generally available on any architecture, explicit registers are highly architecture specific. E.g. for x86 the general purpose registers `eax`, `ebx`, `ecx`, `edx`, `ebp`, `esi`, and `edi` among others can be addressed by their name.
```rust,no_run
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let cmd = 0xd1;
unsafe {
asm!("out 0x64, eax", in("eax") cmd);
}
+# }
```
In this example we call the `out` instruction to output the content of the `cmd` variable to port `0x64`. Since the `out` instruction only accepts `eax` (and its sub registers) as operand we had to use the `eax` constraint specifier.
@@ -192,6 +208,7 @@ In this example we call the `out` instruction to output the content of the `cmd`
Consider this example which uses the x86 `mul` instruction:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
fn mul(a: u64, b: u64) -> u128 {
@@ -211,6 +228,7 @@ fn mul(a: u64, b: u64) -> u128 {
((hi as u128) << 64) + lo as u128
}
+# }
```
This uses the `mul` instruction to multiply two 64-bit inputs with a 128-bit result.
@@ -229,6 +247,7 @@ We need to tell the compiler about this since it may need to save and restore th
```rust
use std::arch::asm;
+# #[cfg(target_arch = "x86_64")]
fn main() {
// three entries of four bytes each
let mut name_buf = [0_u8; 12];
@@ -262,6 +281,9 @@ fn main() {
let name = core::str::from_utf8(&name_buf).unwrap();
println!("CPU Manufacturer ID: {}", name);
}
+
+# #[cfg(not(target_arch = "x86_64"))]
+# fn main() {}
```
In the example above we use the `cpuid` instruction to read the CPU manufacturer ID.
@@ -269,13 +291,14 @@ This instruction writes to `eax` with the maximum supported `cpuid` argument and
Even though `eax` is never read we still need to tell the compiler that the register has been modified so that the compiler can save any values that were in these registers before the asm. This is done by declaring it as an output but with `_` instead of a variable name, which indicates that the output value is to be discarded.
-This code also works around the limitation that `ebx` is a reserved register by LLVM. That means that LLVM assumes that it has full control over the register and it must be restored to its original state before exiting the asm block, so it cannot be used as an input or output **except** if the compiler uses it to fulfill a general register class (e.g. `in(reg)`). This makes `reg` operands dangerous when using reserved registers as we could unknowingly corrupt out input or output because they share the same register.
+This code also works around the limitation that `ebx` is a reserved register by LLVM. That means that LLVM assumes that it has full control over the register and it must be restored to its original state before exiting the asm block, so it cannot be used as an input or output **except** if the compiler uses it to fulfill a general register class (e.g. `in(reg)`). This makes `reg` operands dangerous when using reserved registers as we could unknowingly corrupt our input or output because they share the same register.
-To work around this we use `rdi` to store the pointer to the output array, save `ebx` via `push`, read from `ebx` inside the asm block into the array and then restoring `ebx` to its original state via `pop`. The `push` and `pop` use the full 64-bit `rbx` version of the register to ensure that the entire register is saved. On 32 bit targets the code would instead use `ebx` in the `push`/`pop`.
+To work around this we use `rdi` to store the pointer to the output array, save `ebx` via `push`, read from `ebx` inside the asm block into the array and then restore `ebx` to its original state via `pop`. The `push` and `pop` use the full 64-bit `rbx` version of the register to ensure that the entire register is saved. On 32 bit targets the code would instead use `ebx` in the `push`/`pop`.
This can also be used with a general register class to obtain a scratch register for use inside the asm code:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
// Multiply x by 6 using shifts and adds
@@ -291,6 +314,7 @@ unsafe {
);
}
assert_eq!(x, 4 * 6);
+# }
```
## Symbol operands and ABI clobbers
@@ -300,6 +324,7 @@ By default, `asm!` assumes that any register not specified as an output will hav
[`clobber_abi`]: ../../reference/inline-assembly.html#abi-clobbers
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
extern "C" fn foo(arg: i32) -> i32 {
@@ -325,6 +350,7 @@ fn call_foo(arg: i32) -> i32 {
result
}
}
+# }
```
## Register template modifiers
@@ -336,6 +362,7 @@ By default the compiler will always choose the name that refers to the full regi
This default can be overridden by using modifiers on the template string operands, just like you would with format strings:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let mut x: u16 = 0xab;
@@ -345,6 +372,7 @@ unsafe {
}
assert_eq!(x, 0xabab);
+# }
```
In this example, we use the `reg_abcd` register class to restrict the register allocator to the 4 legacy x86 registers (`ax`, `bx`, `cx`, `dx`) of which the first two bytes can be addressed independently.
@@ -361,6 +389,7 @@ You have to manually use the memory address syntax specified by the target archi
For example, on x86/x86_64 using Intel assembly syntax, you should wrap inputs/outputs in `[]` to indicate they are memory operands:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
fn load_fpu_control_word(control: u16) {
@@ -368,6 +397,7 @@ fn load_fpu_control_word(control: u16) {
asm!("fldcw [{}]", in(reg) &control, options(nostack));
}
}
+# }
```
## Labels
@@ -383,6 +413,7 @@ As a consequence, you should only use GNU assembler **numeric** [local labels] i
Moreover, on x86 when using the default Intel syntax, due to [an LLVM bug], you shouldn't use labels exclusively made of `0` and `1` digits, e.g. `0`, `11` or `101010`, as they may end up being interpreted as binary values. Using `options(att_syntax)` will avoid any ambiguity, but that affects the syntax of the _entire_ `asm!` block. (See [Options](#options), below, for more on `options`.)
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let mut a = 0;
@@ -400,6 +431,7 @@ unsafe {
);
}
assert_eq!(a, 5);
+# }
```
This will decrement the `{0}` register value from 10 to 3, then add 2 and store it in `a`.
@@ -419,6 +451,7 @@ By default, an inline assembly block is treated the same way as an external FFI
Let's take our previous example of an `add` instruction:
```rust
+# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;
let mut a: u64 = 4;
@@ -431,6 +464,7 @@ unsafe {
);
}
assert_eq!(a, 8);
+# }
```
Options can be provided as an optional final argument to the `asm!` macro. We specified three options here:
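(Editorial aside, not part of the patch: the recurring change in this file wraps every example in hidden `# #[cfg(target_arch = "x86_64")] {` ... `# }` lines so the doctests still build on non-x86_64 targets; rustdoc strips the `#`-prefixed lines from the rendered page. Reassembling the earlier `mov` example shows what one complete block looks like after the change:)

```rust
# #[cfg(target_arch = "x86_64")] {
use std::arch::asm;

let x: u64;
unsafe {
    // Move the immediate 5 into whichever register the compiler picked for `x`.
    asm!("mov {}, 5", out(reg) x);
}
assert_eq!(x, 5);
# }
```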
diff --git a/src/doc/rust-by-example/src/variable_bindings/mut.md b/src/doc/rust-by-example/src/variable_bindings/mut.md
index 0925132f2..c75f034ec 100644
--- a/src/doc/rust-by-example/src/variable_bindings/mut.md
+++ b/src/doc/rust-by-example/src/variable_bindings/mut.md
@@ -15,9 +15,8 @@ fn main() {
println!("After mutation: {}", mutable_binding);
- // Error!
+ // Error! Cannot assign a new value to an immutable variable
_immutable_binding += 1;
- // FIXME ^ Comment out this line
}
```
diff --git a/src/doc/rustc-dev-guide/.github/workflows/ci.yml b/src/doc/rustc-dev-guide/.github/workflows/ci.yml
index 2346698d4..bb0493bf7 100644
--- a/src/doc/rustc-dev-guide/.github/workflows/ci.yml
+++ b/src/doc/rustc-dev-guide/.github/workflows/ci.yml
@@ -46,7 +46,7 @@ jobs:
if: github.event_name != 'push'
run: |
shopt -s globstar
- MAX_LINE_LENGTH=100 bash ci/check_line_lengths.sh src/**/*.md
+ MAX_LINE_LENGTH=100 bash ci/lengthcheck.sh src/**/*.md
- name: Install latest nightly Rust toolchain
if: steps.mdbook-cache.outputs.cache-hit != 'true'
diff --git a/src/doc/rustc-dev-guide/README.md b/src/doc/rustc-dev-guide/README.md
index e501c9161..fdf6c5050 100644
--- a/src/doc/rustc-dev-guide/README.md
+++ b/src/doc/rustc-dev-guide/README.md
@@ -49,10 +49,10 @@ To build a local static HTML site, install [`mdbook`](https://github.com/rust-la
and execute the following command in the root of the repository:
```
-> mdbook build
+> mdbook build --open
```
-The build files are found in the `book` directory.
+The build files are found in the `book/html` directory.
### Link Validations
@@ -67,20 +67,20 @@ including the `<!-- toc -->` marker at the place where you want the TOC.
### Pre-commit script
We also test that line lengths are less than 100 columns. To test this locally,
-you can run `ci/check_line_lengths.sh`.
+you can run `ci/lengthcheck.sh`.
You can also set this to run automatically.
On Linux:
```bash
-ln -s ../../ci/check_line_lengths.sh .git/hooks/pre-commit
+ln -s ../../ci/lengthcheck.sh .git/hooks/pre-commit
```
On Windows:
```powershell
-New-Item -Path .git/hooks/pre-commit -ItemType HardLink -Value <absolute_path/to/check_line_lengths.sh>
+New-Item -Path .git/hooks/pre-commit -ItemType HardLink -Value $(Resolve-Path ci/lengthcheck.sh)
```
## How to fix toolstate failures
diff --git a/src/doc/rustc-dev-guide/book.toml b/src/doc/rustc-dev-guide/book.toml
index a5d794b50..203bfd61e 100644
--- a/src/doc/rustc-dev-guide/book.toml
+++ b/src/doc/rustc-dev-guide/book.toml
@@ -43,10 +43,8 @@ exclude = [
cache-timeout = 86400
warning-policy = "error"
-[output.linkcheck.http-headers]
-'github\.com' = ["Authorization: Bearer $GITHUB_TOKEN"]
-
[output.html.redirect]
"/compiletest.html" = "tests/compiletest.html"
-"/diagnostics/sessiondiagnostic.html" = "diagnostics/diagnostic-structs.html"
+"/diagnostics/sessiondiagnostic.html" = "diagnostic-structs.html"
+"/diagnostics/diagnostic-codes.html" = "error-codes.html"
"/miri.html" = "const-eval/interpret.html"
diff --git a/src/doc/rustc-dev-guide/ci/check_line_lengths.sh b/src/doc/rustc-dev-guide/ci/lengthcheck.sh
index 31cda5c65..76d677be7 100755
--- a/src/doc/rustc-dev-guide/ci/check_line_lengths.sh
+++ b/src/doc/rustc-dev-guide/ci/lengthcheck.sh
@@ -1,5 +1,7 @@
#!/usr/bin/env bash
+# Check files for lines that are too long.
+
if [ "$1" == "--help" ]; then
echo 'Usage:' "[MAX_LINE_LENGTH=n] $0 [file ...]"
exit 1
@@ -10,8 +12,7 @@ if [ "$MAX_LINE_LENGTH" == "" ]; then
fi
if [ "$1" == "" ]; then
- shopt -s globstar
- files=( src/**/*.md )
+ files=( src/*.md src/*/*.md src/*/*/*.md )
else
files=( "$@" )
fi
@@ -22,7 +23,6 @@ echo "Offending files and lines:"
(( bad_lines = 0 ))
(( inside_block = 0 ))
for file in "${files[@]}"; do
- echo "$file"
(( line_no = 0 ))
while IFS="" read -r line || [[ -n "$line" ]] ; do
(( line_no++ ))
@@ -34,7 +34,7 @@ for file in "${files[@]}"; do
&& ! [[ "$line" =~ " | "|"-|-"|"://"|"]:"|\[\^[^\ ]+\]: ]] \
&& (( "${#line}" > $MAX_LINE_LENGTH )) ; then
(( bad_lines++ ))
- echo -e "\t$line_no : $line"
+ echo -e "\t$file:$line_no : $line"
fi
done < "$file"
done
diff --git a/src/doc/rustc-dev-guide/ci/linkcheck.sh b/src/doc/rustc-dev-guide/ci/linkcheck.sh
index 5d49d1337..133e22239 100755
--- a/src/doc/rustc-dev-guide/ci/linkcheck.sh
+++ b/src/doc/rustc-dev-guide/ci/linkcheck.sh
@@ -3,12 +3,16 @@
set -e
set -o pipefail
+set_github_token() {
+ jq '.config.output.linkcheck."http-headers"."github\\.com" = ["Authorization: Bearer $GITHUB_TOKEN"]'
+}
+
# https://docs.github.com/en/actions/reference/environment-variables
if [ "$GITHUB_EVENT_NAME" = "schedule" ] ; then # running in scheduled job
FLAGS=""
+ USE_TOKEN=1
echo "Doing full link check."
- set -x
elif [ "$GITHUB_EVENT_NAME" = "pull_request" ] ; then # running in PR CI build
if [ -z "$BASE_SHA" ]; then
echo "error: unexpected state: BASE_SHA must be non-empty in CI"
@@ -17,9 +21,9 @@ elif [ "$GITHUB_EVENT_NAME" = "pull_request" ] ; then # running in PR CI build
CHANGED_FILES=$(git diff --name-only $BASE_SHA... | tr '\n' ' ')
FLAGS="--no-cache -f $CHANGED_FILES"
+ USE_TOKEN=1
echo "Checking files changed since $BASE_SHA: $CHANGED_FILES"
- set -x
else # running locally
COMMIT_RANGE=master...
CHANGED_FILES=$(git diff --name-only $COMMIT_RANGE | tr '\n' ' ')
@@ -28,4 +32,10 @@ else # running locally
echo "Checking files changed in $COMMIT_RANGE: $CHANGED_FILES"
fi
-exec mdbook-linkcheck $FLAGS
+echo "exec mdbook-linkcheck $FLAGS"
+if [ "$USE_TOKEN" = 1 ]; then
+ config=$(set_github_token)
+ exec mdbook-linkcheck $FLAGS <<<"$config"
+else
+ exec mdbook-linkcheck $FLAGS
+fi
diff --git a/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs b/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs
index 9708ab01d..70e77fd5b 100644
--- a/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs
+++ b/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs
@@ -42,9 +42,10 @@ fn main() {
"#
.into(),
},
- output_dir: None, // Option<PathBuf>
- output_file: None, // Option<PathBuf>
- file_loader: None, // Option<Box<dyn FileLoader + Send + Sync>>
+ output_dir: None, // Option<PathBuf>
+ output_file: None, // Option<PathBuf>
+ file_loader: None, // Option<Box<dyn FileLoader + Send + Sync>>
+ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES,
lint_caps: FxHashMap::default(), // FxHashMap<lint::LintId, lint::Level>
// This is a callback from the driver that is called when [`ParseSess`] is created.
parse_sess_created: None, //Option<Box<dyn FnOnce(&mut ParseSess) + Send>>
diff --git a/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs b/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs
index 5bc2312a2..888674aaf 100644
--- a/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs
+++ b/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs
@@ -66,6 +66,7 @@ fn main() {
output_dir: None,
output_file: None,
file_loader: None,
+ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES,
lint_caps: rustc_hash::FxHashMap::default(),
parse_sess_created: None,
register_lints: None,
diff --git a/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs b/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs
index 53f8df81a..df0e0385d 100644
--- a/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs
+++ b/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs
@@ -44,6 +44,7 @@ fn main() {
output_dir: None,
output_file: None,
file_loader: None,
+ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES,
lint_caps: rustc_hash::FxHashMap::default(),
parse_sess_created: None,
register_lints: None,
diff --git a/src/doc/rustc-dev-guide/src/SUMMARY.md b/src/doc/rustc-dev-guide/src/SUMMARY.md
index adc397fd8..b01cb6797 100644
--- a/src/doc/rustc-dev-guide/src/SUMMARY.md
+++ b/src/doc/rustc-dev-guide/src/SUMMARY.md
@@ -1,8 +1,7 @@
# Summary
-[About this guide](./about-this-guide.md)
-
[Getting Started](./getting-started.md)
+[About this guide](./about-this-guide.md)
---
@@ -35,17 +34,19 @@
# Contributing to Rust
-- [Introduction](./contributing.md)
+- [Contribution Procedures](./contributing.md)
- [About the compiler team](./compiler-team.md)
- [Using Git](./git.md)
- [Mastering @rustbot](./rustbot.md)
- [Walkthrough: a typical contribution](./walkthrough.md)
-- [Bug Fix Procedure](./bug-fix-procedure.md)
+- [Procedures for Breaking Changes](./bug-fix-procedure.md)
- [Implementing new features](./implementing_new_features.md)
- [Stability attributes](./stability.md)
- [Stabilizing Features](./stabilization_guide.md)
- [Feature Gates](./feature-gates.md)
- [Coding conventions](./conventions.md)
+- [Using external repositories](./external-repos.md)
+- [Fuzzing](./fuzzing.md)
- [Notification groups](notification-groups/about.md)
- [ARM](notification-groups/arm.md)
- [Cleanup Crew](notification-groups/cleanup-crew.md)
@@ -146,7 +147,7 @@
- [Diagnostic and subdiagnostic structs](./diagnostics/diagnostic-structs.md)
- [Translation](./diagnostics/translation.md)
- [`LintStore`](./diagnostics/lintstore.md)
- - [Diagnostic codes](./diagnostics/diagnostic-codes.md)
+ - [Error codes](./diagnostics/error-codes.md)
- [Diagnostic items](./diagnostics/diagnostic-items.md)
- [`ErrorGuaranteed`](./diagnostics/error-guaranteed.md)
diff --git a/src/doc/rustc-dev-guide/src/about-this-guide.md b/src/doc/rustc-dev-guide/src/about-this-guide.md
index 71407854e..944ebf5a8 100644
--- a/src/doc/rustc-dev-guide/src/about-this-guide.md
+++ b/src/doc/rustc-dev-guide/src/about-this-guide.md
@@ -58,14 +58,51 @@ please see the corresponding [subsection on writing documentation in this guide]
You might also find the following sites useful:
-- [rustc API docs] -- rustdoc documentation for the compiler
+- This guide contains information about how various parts of the
+ compiler work and how to contribute to the compiler.
+- [rustc API docs] -- rustdoc documentation for the compiler, devtools, and internal tools
- [Forge] -- contains documentation about Rust infrastructure, team procedures, and more
- [compiler-team] -- the home-base for the Rust compiler team, with description
of the team procedures, active working groups, and the team calendar.
- [std-dev-guide] -- a similar guide for developing the standard library.
+- [The t-compiler zulip][z]
+- [The Forge](https://forge.rust-lang.org/) has more documentation about various procedures.
+- `#contribute` and `#wg-rustup` on [Discord](https://discord.gg/rust-lang).
+- The [Rust Internals forum][rif], a place to ask questions and
+ discuss Rust's internals
+- The [Rust reference][rr], even though it doesn't specifically talk about
+ Rust's internals, is a great resource nonetheless
+- Although out of date, [Tom Lee's great blog article][tlgba] is very helpful
+- [rustaceans.org][ro] is helpful, but mostly dedicated to IRC
+- The [Rust Compiler Testing Docs][rctd]
+- For [@bors], [this cheat sheet][cheatsheet] is helpful
+- Google is always helpful when programming.
+ You can [search all Rust documentation][gsearchdocs] (the standard library,
+ the compiler, the books, the references, and the guides) to quickly find
+ information about the language and compiler.
+- You can also use Rustdoc's built-in search feature to find documentation on
+ types and functions within the crates you're looking at. You can also search
+ by type signature! For example, searching for `* -> vec` should find all
+ functions that return a `Vec<T>`.
+ _Hint:_ Find more tips and keyboard shortcuts by typing `?` on any Rustdoc
+ page!
+
+[rustc dev guide]: about-this-guide.md
+[gsearchdocs]: https://www.google.com/search?q=site:doc.rust-lang.org+your+query+here
+[stddocs]: https://doc.rust-lang.org/std
+[rif]: http://internals.rust-lang.org
+[rr]: https://doc.rust-lang.org/reference/
+[rustforge]: https://forge.rust-lang.org/
+[tlgba]: https://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/
+[ro]: https://www.rustaceans.org/
+[rctd]: tests/intro.md
+[cheatsheet]: https://bors.rust-lang.org/
+[Miri]: https://github.com/rust-lang/miri
+[@bors]: https://github.com/bors
[GitHub repository]: https://github.com/rust-lang/rustc-dev-guide/
-[rustc API docs]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/
+[rustc API docs]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle
[Forge]: https://forge.rust-lang.org/
[compiler-team]: https://github.com/rust-lang/compiler-team/
[std-dev-guide]: https://std-dev-guide.rust-lang.org/
+[z]: https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler
diff --git a/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md b/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md
index 5e005c965..b92a40b8b 100644
--- a/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md
+++ b/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md
@@ -103,9 +103,8 @@ The hash includes a variety of elements:
* Hashes of the HIR nodes.
* All of the upstream crate hashes.
* All of the source filenames.
-* Hashes of certain command-line flags (like `-C metadata` via the [Crate
- Disambiguator](#crate-disambiguator), and all CLI options marked with
- `[TRACKED]`).
+* Hashes of certain command-line flags (like `-C metadata` via the [Stable
+ Crate Id](#stable-crate-id), and all CLI options marked with `[TRACKED]`).
See [`compute_hir_hash`] for where the hash is actually computed.
diff --git a/src/doc/rustc-dev-guide/src/backend/monomorph.md b/src/doc/rustc-dev-guide/src/backend/monomorph.md
index cbc56acfc..7726daf4f 100644
--- a/src/doc/rustc-dev-guide/src/backend/monomorph.md
+++ b/src/doc/rustc-dev-guide/src/backend/monomorph.md
@@ -99,7 +99,7 @@ are relatively rare in functions, but closures inherit the generic
parameters of their parent function and it is common for closures to not
use those inherited parameters. Without polymorphization, a copy of these
closures would be created for each copy of the parent function. By
-creating fewer copies, less LLVM IR is generated and needs processed.
+creating fewer copies, less LLVM IR is generated; therefore less needs to be processed.
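(Editorial aside, not part of the patch: as a concrete, illustrative case of the situation described above, a closure can inherit a generic parameter from its parent function without ever using it; polymorphization lets the compiler share one copy of such a closure across instantiations. The function below is a hypothetical example, not code from the guide:)

```rust
// `callback` is generic over `T`, but the closure it returns never touches `T`.
// Without polymorphization, a separate copy of the closure would be generated
// for every `T` the function is instantiated with.
fn callback<T>(base: u32) -> impl Fn(u32) -> u32 {
    move |x| base + x // does not mention `T`
}

fn main() {
    let f = callback::<String>(1);
    let g = callback::<Vec<u8>>(2);
    assert_eq!(f(10) + g(10), 23);
}
```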
`unused_generic_params` returns a `FiniteBitSet<u64>` where a bit is set if
the generic parameter of the corresponding index is unused. Any parameters
diff --git a/src/doc/rustc-dev-guide/src/bug-fix-procedure.md b/src/doc/rustc-dev-guide/src/bug-fix-procedure.md
index 2f5e24716..e69ce48f9 100644
--- a/src/doc/rustc-dev-guide/src/bug-fix-procedure.md
+++ b/src/doc/rustc-dev-guide/src/bug-fix-procedure.md
@@ -1,4 +1,4 @@
-# Rustc Bug Fix Procedure
+# Procedures for Breaking Changes
<!-- toc -->
@@ -169,6 +169,13 @@ there were no errors before.
#### Crater and crates.io
+[Crater] is a bot that will compile all crates.io crates and many
+public github repos with a compiler that includes your changes. A report will then be
+generated listing crates that ceased to compile with, or began to compile with, your
+changes. Crater runs can take a few days to complete.
+
+[Crater]: ./tests/crater.md
+
We should always do a crater run to assess impact. It is polite and considerate
to at least notify the authors of affected crates the breaking change. If we can
submit PRs to fix the problem, so much the better.
diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping.md b/src/doc/rustc-dev-guide/src/building/bootstrapping.md
index fe34cb500..d53896f81 100644
--- a/src/doc/rustc-dev-guide/src/building/bootstrapping.md
+++ b/src/doc/rustc-dev-guide/src/building/bootstrapping.md
@@ -22,6 +22,13 @@ Note that this documentation mostly covers user-facing information. See
## Stages of bootstrapping
+### Overview
+
+- Stage 0: the pre-compiled compiler
+- Stage 1: from current code, by an earlier compiler
+- Stage 2: the truly current compiler
+- Stage 3: the same-result test
+
Compiling `rustc` is done in stages. Here's a diagram, adapted from Joshua Nelson's
[talk on bootstrapping][rustconf22-talk] at RustConf 2022, with detailed explanations below.
@@ -51,7 +58,7 @@ graph TD
classDef with-s1c fill: lightgreen;
```
-### Stage 0
+### Stage 0: the pre-compiled compiler
The stage0 compiler is usually the current _beta_ `rustc` compiler
and its associated dynamic libraries,
@@ -65,11 +72,11 @@ a compiler (with its set of dependencies)
and its 'target' or 'object' libraries (`std` and `rustc`).
Both are staged, but in a staggered manner.
-### Stage 1
+### Stage 1: from current code, by an earlier compiler
The rustc source code is then compiled with the stage0 compiler to produce the stage1 compiler.
-### Stage 2
+### Stage 2: the truly current compiler
We then rebuild our stage1 compiler with itself to produce the stage2 compiler.
@@ -92,7 +99,7 @@ For development, you usually only want the `stage1` compiler,
which you can build with `./x.py build library`.
See [Building the compiler](./how-to-build-and-run.html#building-the-compiler).
-### Stage 3
+### Stage 3: the same-result test
Stage 3 is optional. To sanity check our new compiler, we
can build the libraries with the stage2 compiler. The result ought
@@ -249,6 +256,10 @@ So the stage2 compiler has to recompile `std` for the target.
### Why does only libstd use `cfg(bootstrap)`?
+NOTE: for docs on `cfg(bootstrap)` itself, see [Complications of Bootstrapping][complications].
+
+[complications]: #complications-of-bootstrapping
+
The `rustc` generated by the stage0 compiler is linked to the freshly-built
`std`, which means that for the most part only `std` needs to be cfg-gated,
so that `rustc` can use features added to std immediately after their addition,
@@ -258,7 +269,7 @@ Note this is different from any other Rust program: stage1 `rustc`
is built by the _beta_ compiler, but using the _master_ version of libstd!
The only time `rustc` uses `cfg(bootstrap)` is when it adds internal lints
-that use diagnostic items. This happens very rarely.
+that use diagnostic items, or when it uses unstable library features that were recently changed.
### What is a 'sysroot'?
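(Editorial aside, not part of the patch: regarding the `cfg(bootstrap)` paragraph in the hunk above, a hypothetical sketch of the gating pattern. Suppose an unstable formatting feature only exists on master: the `cfg(bootstrap)` arm matches what the stage0 (beta) compiler supports, the other arm uses the newer form, and the `cfg(bootstrap)` arm is deleted at the next beta bump. The function and feature here are invented for illustration:)

```rust
// Hypothetical helper gated on the bootstrap compiler.
#[cfg(bootstrap)]
fn describe(len: usize) -> String {
    format!("len = {}", len) // form the beta (stage0) compiler accepts
}

#[cfg(not(bootstrap))]
fn describe(len: usize) -> String {
    format!("len = {len}") // relies on the newer feature available on master
}

fn main() {
    println!("{}", describe(3));
}
```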
diff --git a/src/doc/rustc-dev-guide/src/building/compiler-documenting.md b/src/doc/rustc-dev-guide/src/building/compiler-documenting.md
index 965e004c9..4e72bf994 100644
--- a/src/doc/rustc-dev-guide/src/building/compiler-documenting.md
+++ b/src/doc/rustc-dev-guide/src/building/compiler-documenting.md
@@ -31,6 +31,8 @@ like the standard library (std) or the compiler (rustc).
./x.py doc compiler library
```
+ See [the nightly docs index page](https://doc.rust-lang.org/nightly/) for a full list of books.
+
- Document internal rustc items
Compiler documentation is not built by default.
diff --git a/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md b/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md
index 6651b3691..59893bdc1 100644
--- a/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md
+++ b/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md
@@ -1,5 +1,7 @@
# How to build and run the compiler
+<!-- toc -->
+
The compiler is built using a tool called `x.py`. You will need to
have Python installed to run it.
@@ -22,6 +24,29 @@ git clone https://github.com/rust-lang/rust.git
cd rust
```
+### Shallow clone the repository
+
+Due to the size of the repository, cloning on a slower internet connection can take a long time.
+To sidestep this, you can use the `--depth N` option with the `git clone` command.
+This instructs `git` to perform a "shallow clone", cloning the repository but truncating it to
+the last `N` commits.
+
+Passing `--depth 1` tells `git` to clone the repository but truncate the history to the latest
+commit that is on the `master` branch, which is usually fine for browsing the source code or
+building the compiler.
+
+```bash
+git clone --depth 1 https://github.com/rust-lang/rust.git
+cd rust
+```
+
+> **NOTE**: A shallow clone limits which `git` commands can be run.
+> If you intend to work on and contribute to the compiler, it is
+> generally recommended to fully clone the repository [as shown above](#get-the-source-code).
+>
+> For example, `git bisect` and `git blame` require access to the commit history,
+> so they don't work if the repository was cloned with `--depth 1`.
+
## What is `x.py`?
`x.py` is the build tool for the `rust` repository. It can build docs, run tests, and compile the
@@ -45,14 +70,41 @@ You can install it with `cargo install --path src/tools/x`.
To start, run `./x.py setup`. This will do some initialization and create a
`config.toml` for you with reasonable defaults.
-Alternatively, you can write `config.toml` by hand. See `config.toml.example` for all the available
+Alternatively, you can write `config.toml` by hand. See `config.example.toml` for all the available
settings and explanations of them. See `src/bootstrap/defaults` for common settings to change.
If you have already built `rustc` and you change settings related to LLVM, then you may have to
execute `rm -rf build` for subsequent configuration changes to take effect. Note that `./x.py
clean` will not cause a rebuild of LLVM.
-## Building the compiler
+## Common `x.py` commands
+
+Here are the basic invocations of the `x.py` commands most commonly used when
+working on `rustc`, `std`, `rustdoc`, and other tools.
+
+| Command | When to use it |
+| --- | --- |
+| `./x.py check` | Quick check to see if most things compile; [rust-analyzer can run this automatically for you][rust-analyzer] |
+| `./x.py build` | Builds `rustc`, `std`, and `rustdoc` |
+| `./x.py test` | Runs all tests |
+| `./x.py fmt` | Formats all code |
+
+As written, these commands are reasonable starting points. However, there are
+additional options and arguments for each of them that are worth learning for
+serious development work. In particular, `./x.py build` and `./x.py test`
+provide many ways to compile or test a subset of the code, which can save a lot
+of time.
+
+Also, note that `x.py` supports all kinds of path suffixes for `compiler`, `library`,
+and `src/tools` directories. So, you can simply run `x.py test tidy` instead of
+`x.py test src/tools/tidy`. Or, `x.py build std` instead of `x.py build library/std`.
+
+[rust-analyzer]: ./building/suggested.html#configuring-rust-analyzer-for-rustc
+
+See the chapters on [building](./building/how-to-build-and-run.md),
+[testing](./tests/intro.md), and [rustdoc](./rustdoc.md) for more details.
+
+### Building the compiler
Note that building will require a relatively large amount of storage space.
You may want to have upwards of 10 or 15 gigabytes available to build the compiler.
@@ -98,7 +150,7 @@ build. The **full** `rustc` build (what you get with `./x.py build
You almost never need to do this.
-## Build specific components
+### Build specific components
If you are working on the standard library, you probably don't need to build
the compiler unless you are planning to use a recently added nightly feature.
@@ -188,7 +240,7 @@ Note that building for some targets requires having external dependencies instal
(e.g. building musl targets requires a local copy of musl).
Any target-specific configuration (e.g. the path to a local copy of musl)
will need to be provided by your `config.toml`.
-Please see `config.toml.example` for information on target-specific configuration keys.
+Please see `config.example.toml` for information on target-specific configuration keys.
For examples of the complete configuration necessary to build a target, please visit
[the rustc book](https://doc.rust-lang.org/rustc/platform-support.html),
diff --git a/src/doc/rustc-dev-guide/src/building/new-target.md b/src/doc/rustc-dev-guide/src/building/new-target.md
index f999a9472..629445be6 100644
--- a/src/doc/rustc-dev-guide/src/building/new-target.md
+++ b/src/doc/rustc-dev-guide/src/building/new-target.md
@@ -4,6 +4,8 @@ These are a set of steps to add support for a new target. There are
numerous end states and paths to get there, so not all sections may be
relevant to your desired goal.
+<!-- toc -->
+
## Specifying a new LLVM
For very new targets, you may need to use a different fork of LLVM
diff --git a/src/doc/rustc-dev-guide/src/building/suggested.md b/src/doc/rustc-dev-guide/src/building/suggested.md
index 2e2592094..3049d87db 100644
--- a/src/doc/rustc-dev-guide/src/building/suggested.md
+++ b/src/doc/rustc-dev-guide/src/building/suggested.md
@@ -3,6 +3,8 @@
The full bootstrapping process takes quite a while. Here are some suggestions
to make your life easier.
+<!-- toc -->
+
## Installing a pre-push hook
CI will automatically fail your build if it doesn't pass `tidy`, our
@@ -24,20 +26,20 @@ You can also install the hook as a step of running `./x.py setup`!
`rust-analyzer` can help you check and format your code whenever you save
a file. By default, `rust-analyzer` runs the `cargo check` and `rustfmt`
commands, but you can override these commands to use more adapted versions
-of these tools when hacking on `rustc`. For example, `x.py setup` will prompt
+of these tools when hacking on `rustc`. For example, `x.py setup vscode` will prompt
you to create a `.vscode/settings.json` file which will configure Visual Studio code.
This will ask `rust-analyzer` to use `./x.py check` to check the sources, and the
stage 0 rustfmt to format them.
-The recommended `rust-analyzer` settings live at [`src/etc/vscode_settings.json`].
+The recommended `rust-analyzer` settings live at [`src/etc/rust_analyzer_settings.json`].
If you have enough free disk space and you would like to be able to run `x.py` commands while
rust-analyzer runs in the background, you can also add `--build-dir build-rust-analyzer` to the
`overrideCommand` to avoid x.py locking.
If you're running `coc.nvim`, you can use `:CocLocalConfig` to create a
-`.vim/coc-settings.json` and copy the settings from [`src/etc/vscode_settings.json`].
+`.vim/coc-settings.json` and copy the settings from [`src/etc/rust_analyzer_settings.json`].
-[`src/etc/vscode_settings.json`]: https://github.com/rust-lang/rust/blob/master/src/etc/vscode_settings.json
+[`src/etc/rust_analyzer_settings.json`]: https://github.com/rust-lang/rust/blob/master/src/etc/rust_analyzer_settings.json
If running `./x.py check` on save is inconvenient, in VS Code you can use a [Build
Task] instead:
@@ -259,8 +261,8 @@ If you're using nix, you can use the following nix-shell to work on Rust:
# This file contains a development shell for working on rustc.
let
- # Build configuration for rust-lang/rust. Based on `config.toml.example` from
- # `1bd30ce2aac40c7698aa4a1b9520aa649ff2d1c5`.
+ # Build configuration for rust-lang/rust. Based on `config.example.toml` (then called
+ # `config.toml.example`) from `1bd30ce2aac40c7698aa4a1b9520aa649ff2d1c5`
config = pkgs.writeText "rustc-config" ''
profile = "compiler" # you may want to choose a different profile, like `library` or `tools`
changelog-seen = 2
@@ -289,7 +291,7 @@ let
# Files that are ignored by ripgrep when searching.
ignoreFile = pkgs.writeText "rustc-rgignore" ''
configure
- config.toml.example
+ config.example.toml
x.py
LICENSE-MIT
LICENSE-APACHE
diff --git a/src/doc/rustc-dev-guide/src/compiler-debugging.md b/src/doc/rustc-dev-guide/src/compiler-debugging.md
index 6052ea58a..65c3cadbb 100644
--- a/src/doc/rustc-dev-guide/src/compiler-debugging.md
+++ b/src/doc/rustc-dev-guide/src/compiler-debugging.md
@@ -42,7 +42,7 @@ otherwise you need to disable new symbol-mangling-version in `config.toml`.
new-symbol-mangling = false
```
-> See the comments in `config.toml.example` for more info.
+> See the comments in `config.example.toml` for more info.
You will need to rebuild the compiler after changing any configuration option.
@@ -269,7 +269,7 @@ on *why* it was changed. See [this tutorial][bisect-tutorial] on how to use
it.
[bisect]: https://github.com/rust-lang/cargo-bisect-rustc
-[bisect-tutorial]: https://github.com/rust-lang/cargo-bisect-rustc/blob/master/TUTORIAL.md
+[bisect-tutorial]: https://rust-lang.github.io/cargo-bisect-rustc/tutorial.html
## Downloading Artifacts from Rust's CI
diff --git a/src/doc/rustc-dev-guide/src/const-eval/interpret.md b/src/doc/rustc-dev-guide/src/const-eval/interpret.md
index ee044505e..fbf781b96 100644
--- a/src/doc/rustc-dev-guide/src/const-eval/interpret.md
+++ b/src/doc/rustc-dev-guide/src/const-eval/interpret.md
@@ -82,7 +82,7 @@ The next statement asserts that said boolean is `0`. In case the assertion
fails, its error message is used for reporting a compile-time error.
Since it does not fail, `Operand::Immediate(Immediate::Scalar(Scalar::Raw {
-data: 4054, .. }))` is stored in the virtual memory was allocated before the
+data: 4054, .. }))` is stored in the virtual memory that was allocated before the
evaluation. `_0` always refers to that location directly.
After the evaluation is done, the return value is converted from [`Operand`] to
diff --git a/src/doc/rustc-dev-guide/src/contributing.md b/src/doc/rustc-dev-guide/src/contributing.md
index 383660fc1..d6037c7f1 100644
--- a/src/doc/rustc-dev-guide/src/contributing.md
+++ b/src/doc/rustc-dev-guide/src/contributing.md
@@ -1,32 +1,7 @@
-# Contributing to Rust
-
-Thank you for your interest in contributing to Rust! There are many ways to
-contribute, and we appreciate all of them.
+# Contribution Procedures
<!-- toc -->
-If you have questions, please make a post on [internals.rust-lang.org][internals] or
-hop on the [Rust Discord server][rust-discord] or [Rust Zulip server][rust-zulip].
-
-As a reminder, all contributors are expected to follow our [Code of Conduct][coc].
-
-If this is your first time contributing, the [Getting Started] and
-[walkthrough] chapters can give you a good example of how a typical
-contribution would go.
-
-[internals]: https://internals.rust-lang.org
-[rust-discord]: http://discord.gg/rust-lang
-[rust-zulip]: https://rust-lang.zulipchat.com
-[coc]: https://www.rust-lang.org/conduct.html
-[walkthrough]: ./walkthrough.md
-[Getting Started]: ./getting-started.md
-
-## Feature Requests
-
-Feature requests need to go through a process to be approved by the relevant
-teams. Usually this requires a Final Comment Period (FCP) or even a Request for
-Comments (RFC). See [Getting Started] for more information about these processes.
-
## Bug Reports
While bugs are unfortunate, they're a reality in software. We can't fix what we
@@ -58,6 +33,80 @@ Opening an issue is as easy as following [this
link](https://github.com/rust-lang/rust/issues/new/choose) and filling out the fields
in the appropriate provided template.
+## Bug Fixes or "Normal" code changes
+
+For most PRs, no special procedures are needed. You can just [open a PR][prs], and it
+will be reviewed, approved, and merged. This includes most bug fixes,
+refactorings, and other user-invisible changes. The next few sections talk
+about exceptions to this rule.
+
+Also, note that it is perfectly acceptable to open WIP PRs or GitHub [Draft
+PRs][draft]. Some people prefer to do this so they can get feedback along the
+way or share their code with a collaborator. Others do this so they can utilize
+the CI to build and test their PR (e.g. if you are developing on a laptop).
+
+[prs]: #pull-requests
+[draft]: https://github.blog/2019-02-14-introducing-draft-pull-requests/
+
+## New Features
+
+Rust has strong backwards-compatibility guarantees. Thus, new features can't
+just be implemented directly in stable Rust. Instead, we have 3 release
+channels: stable, beta, and nightly.
+
+- **Stable**: this is the latest stable release for general usage.
+- **Beta**: this is the next release (will be stable within 6 weeks).
+- **Nightly**: follows the `master` branch of the repo. This is the only
+ channel where unstable, incomplete, or experimental features are usable with
+ feature gates.
+
+See [this chapter on implementing new features](./implementing_new_features.md) for more
+information.
+
+### Breaking Changes
+
+Breaking changes have a [dedicated section][breaking-changes] in the dev-guide.
+
+[breaking-changes]: ./bug-fix-procedure.md
+
+### Major Changes
+
+The compiler team has a special process for large changes, whether or not they
+cause breakage. This process is called a Major Change Proposal (MCP). MCP is a
+relatively lightweight mechanism for getting feedback on large changes to the
+compiler (as opposed to a full RFC or a design meeting with the team).
+
+Examples of things that might require MCPs include major refactorings, changes
+to important types, important changes to how the compiler does something, or
+smaller user-facing changes.
+
+**When in doubt, ask on [zulip][z]. It would be a shame to put a lot of work
+into a PR that ends up not getting merged!** [See this document][mcpinfo] for
+more info on MCPs.
+
+[mcpinfo]: https://forge.rust-lang.org/compiler/mcp.html
+[z]: https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler
+
+### Performance
+
+Compiler performance is important. We have put a lot of effort over the last
+few years into [gradually improving it][perfdash].
+
+[perfdash]: https://perf.rust-lang.org/dashboard.html
+
+If you suspect that your change may cause a performance regression (or
+improvement), you can request a "perf run" (your reviewer may also request one
+before approving). This is yet another bot that will compile a collection of
+benchmarks on a compiler with your changes. The numbers are reported
+[here][perf], and you can see a comparison of your changes against the latest
+master.
+
+For an introduction to the performance of Rust code in general
+which would also be useful in rustc development, see [The Rust Performance Book].
+
+[perf]: https://perf.rust-lang.org
+[The Rust Performance Book]: https://nnethercote.github.io/perf-book/
+
## Pull Requests
Pull requests (or PRs for short) are the primary mechanism we use to change Rust.
@@ -96,6 +145,42 @@ For a full list of possible `groupname` check the `adhoc_groups` section at the
or the list of teams in the [rust-lang teams
database](https://github.com/rust-lang/team/tree/master/teams).
+### Waiting for reviews
+
+> NOTE
+>
+> Pull request reviewers are often working at capacity,
+> and many of them are contributing on a volunteer basis.
+> In order to minimize review delays,
+> pull request authors and assigned reviewers should ensure that the review label
+> (`S-waiting-on-review` and `S-waiting-on-author`) stays updated,
+> invoking these commands when appropriate:
+>
+> - `@rustbot author`:
+> the review is finished,
+> and PR author should check the comments and take action accordingly.
+>
+> - `@rustbot review`:
+> the author is ready for a review,
+> and this PR will be queued again in the reviewer's queue.
+
+Please note that the reviewers are humans, who for the most part work on `rustc`
+in their free time. This means that they can take some time to respond and review
+your PR. It also means that reviewers can miss some PRs that are assigned to them.
+
+To try to move PRs forward, the Triage WG regularly goes through all PRs that
+are waiting for review and haven't been discussed for at least 2 weeks. If you
+don't get a review within 2 weeks, feel free to ask the Triage WG on
+Zulip ([#t-release/triage]). They have knowledge of when to ping, who might be
+on vacation, etc.
+
+The reviewer may request some changes using the GitHub code review interface.
+They may also request special procedures (such as a [crater] run; [see
+below][break]) for some PRs.
+
+[r?]: https://github.com/rust-lang/rust/pull/78133#issuecomment-712692371
+[#t-release/triage]: https://rust-lang.zulipchat.com/#narrow/stream/242269-t-release.2Ftriage
+[break]: #breaking-changes
### CI
In addition to being reviewed by a human, pull requests are automatically tested
@@ -104,6 +189,8 @@ a pull request, CI builds the compiler and tests it against the
[compiler test suite][rctd], and also performs other tests such as checking that
your pull request is in compliance with Rust's style guidelines.
+[rctd]: tests/intro.md
+
Running continuous integration tests allows PR authors to catch mistakes early
without going through a first review cycle, and also helps reviewers stay aware
of the status of a particular pull request.
@@ -135,6 +222,8 @@ Changes that are rolled up are tested and merged alongside other PRs, to
speed the process up. Typically only small changes that are expected not to conflict
with one another are marked as "always roll up".
+Be patient; this can take a while and the queue can sometimes be long. PRs are never merged by hand.
+
[@rustbot]: https://github.com/rustbot
[@bors]: https://github.com/bors
[merge-queue]: https://bors.rust-lang.org/queue/rust
@@ -179,161 +268,19 @@ the issue in question.
[labeling]: ./rustbot.md#issue-relabeling
[closing-keywords]: https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue
-### External Dependencies (subtree)
-
-As a developer to this repository, you don't have to treat the following external projects
-differently from other crates that are directly in this repo:
-
-* [Clippy](https://github.com/rust-lang/rust-clippy)
-* [Miri]
-* [rustfmt](https://github.com/rust-lang/rustfmt)
-* [rust-analyzer](https://github.com/rust-lang/rust-analyzer)
-
-In contrast to `submodule` dependencies
-(see below for those), the `subtree` dependencies are just regular files and directories which can
-be updated in tree. However, if possible, enhancements, bug fixes, etc. specific
-to these tools should be filed against the tools directly in their respective
-upstream repositories. The exception is that when rustc changes are required to
-implement a new tool feature or test, that should happen in one collective rustc PR.
-
-#### Synchronizing a subtree
-
-Periodically the changes made to subtree based dependencies need to be synchronized between this
-repository and the upstream tool repositories.
-
-Subtree synchronizations are typically handled by the respective tool maintainers. Other users
-are welcome to submit synchronization PRs, however, in order to do so you will need to modify
-your local git installation and follow a very precise set of instructions.
-These instructions are documented, along with several useful tips and tricks, in the
-[syncing subtree changes][clippy-sync-docs] section in Clippy's Contributing guide.
-The instructions are applicable for use with any subtree based tool, just be sure to
-use the correct corresponding subtree directory and remote repository.
-
-The synchronization process goes in two directions: `subtree push` and `subtree pull`.
-
-A `subtree push` takes all the changes that happened to the copy in this repo and creates commits
-on the remote repo that match the local changes. Every local
-commit that touched the subtree causes a commit on the remote repo, but
-is modified to move the files from the specified directory to the tool repo root.
-
-A `subtree pull` takes all changes since the last `subtree pull`
-from the tool repo and adds these commits to the rustc repo along with a merge commit that moves
-the tool changes into the specified directory in the Rust repository.
-
-It is recommended that you always do a push first and get that merged to the tool master branch.
-Then, when you do a pull, the merge works without conflicts.
-While it's definitely possible to resolve conflicts during a pull, you may have to redo the conflict
-resolution if your PR doesn't get merged fast enough and there are new conflicts. Do not try to
-rebase the result of a `git subtree pull`, rebasing merge commits is a bad idea in general.
-
-You always need to specify the `-P` prefix to the subtree directory and the corresponding remote
-repository. If you specify the wrong directory or repository
-you'll get very fun merges that try to push the wrong directory to the wrong remote repository.
-Luckily you can just abort this without any consequences by throwing away either the pulled commits
-in rustc or the pushed branch on the remote and try again. It is usually fairly obvious
-that this is happening because you suddenly get thousands of commits that want to be synchronized.
-
-[clippy-sync-docs]: https://doc.rust-lang.org/nightly/clippy/development/infrastructure/sync.html
-
-#### Creating a new subtree dependency
-
-If you want to create a new subtree dependency from an existing repository, call (from this
-repository's root directory!)
-
-```
-git subtree add -P src/tools/clippy https://github.com/rust-lang/rust-clippy.git master
-```
-
-This will create a new commit, which you may not rebase under any circumstances! Delete the commit
-and redo the operation if you need to rebase.
-
-Now you're done, the `src/tools/clippy` directory behaves as if Clippy were
-part of the rustc monorepo, so no one but you (or others that synchronize
-subtrees) actually needs to use `git subtree`.
-
-
-### External Dependencies (submodules)
-
-Building Rust will also use external git repositories tracked using [git
-submodules]. The complete list may be found in the [`.gitmodules`] file. Some
-of these projects are required (like `stdarch` for the standard library) and
-some of them are optional (like [Miri]).
-
-Usage of submodules is discussed more in the [Using Git
-chapter](git.md#git-submodules).
-
-Some of the submodules are allowed to be in a "broken" state where they
-either don't build or their tests don't pass, e.g. the documentation books
-like [The Rust Reference]. Maintainers of these projects will be notified
-when the project is in a broken state, and they should fix them as soon
-as possible. The current status is tracked on the [toolstate website].
-More information may be found on the Forge [Toolstate chapter].
-
-Breakage is not allowed in the beta and stable channels, and must be addressed
-before the PR is merged. They are also not allowed to be broken on master in
-the week leading up to the beta cut.
-
-[git submodules]: https://git-scm.com/book/en/v2/Git-Tools-Submodules
-[`.gitmodules`]: https://github.com/rust-lang/rust/blob/master/.gitmodules
-[The Rust Reference]: https://github.com/rust-lang/reference/
-[toolstate website]: https://rust-lang-nursery.github.io/rust-toolstate/
-[Toolstate chapter]: https://forge.rust-lang.org/infra/toolstate.html
-
-#### Breaking Tools Built With The Compiler
-
-Rust's build system builds a number of tools that make use of the internals of
-the compiler and that are hosted in a separate repository, and included in Rust
-via git submodules (such as [Miri]). If these tools break because of your
-changes, you may run into a sort of "chicken and egg" problem. These tools rely
-on the latest compiler to be built so you can't update them (in their own
-repositories) to reflect your changes to the compiler until those changes are
-merged into the compiler. At the same time, you can't get your changes merged
-into the compiler because the rust-lang/rust build won't pass until those tools
-build and pass their tests.
-
-Luckily, a feature was
-[added to Rust's build](https://github.com/rust-lang/rust/issues/45861) to make
-all of this easy to handle. The idea is that we allow these tools to be
-"broken", so that the rust-lang/rust build passes without trying to build them,
-then land the change in the compiler, and go update the tools that you
-broke. Some tools will require waiting for a nightly release before this can
-happen, while others use the builds uploaded after each bors merge and thus can
-be updated immediately (check the tool's documentation for details). Once you're
-done and the tools are working again, you go back in the compiler and update the
-tools so they can be distributed again.
-
-This should avoid a bunch of synchronization dances and is also much easier on contributors as
-there's no need to block on tools changes going upstream.
-
-Here are those same steps in detail:
-
-1. (optional) First, if it doesn't exist already, create a `config.toml` by copying
- `config.toml.example` in the root directory of the Rust repository.
- Set `submodules = false` in the `[build]` section. This will prevent `x.py`
- from resetting to the original branch after you make your changes. If you
- need to [update any submodules to their latest versions](#updating-submodules),
- see the section of this file about that for more information.
-2. (optional) Run `./x.py test src/tools/cargo` (substituting the submodule
- that broke for `cargo`). Fix any errors in the submodule (and possibly others).
-3. (optional) Make commits for your changes and send them to upstream repositories as a PR.
-4. (optional) Maintainers of these submodules will **not** merge the PR. The PR can't be
- merged because CI will be broken. You'll want to write a message on the PR referencing
- your change, and how the PR should be merged once your change makes it into a nightly.
-5. Wait for your PR to merge.
-6. Wait for a nightly.
-7. (optional) Help land your PR on the upstream repository now that your changes are in nightly.
-8. (optional) Send a PR to rust-lang/rust updating the submodule.
+## External Dependencies
+This section has moved to ["Using External Repositories"](./external-repos.md).
## Writing Documentation
Documentation improvements are very welcome. The source of `doc.rust-lang.org`
is located in [`src/doc`] in the tree, and standard API documentation is generated
-from the source code itself (e.g. [`lib.rs`]). Documentation pull requests function
-in the same way as other pull requests.
+from the source code itself (e.g. [`library/std/src/lib.rs`][std-root]). Documentation pull requests
+function in the same way as other pull requests.
[`src/doc`]: https://github.com/rust-lang/rust/tree/master/src/doc
-[`lib.rs`]: https://github.com/rust-lang/rust/blob/master/library/std/src/lib.rs#L1
+[std-root]: https://github.com/rust-lang/rust/blob/master/library/std/src/lib.rs#L1
To find documentation-related issues, sort by the [A-docs label][adocs].
@@ -343,14 +290,11 @@ You can find documentation style guidelines in [RFC 1574][rfc1574].
[rfc1574]: https://github.com/rust-lang/rfcs/blob/master/text/1574-more-api-documentation-conventions.md#appendix-a-full-conventions-text
-In many cases, you don't need a full `./x.py doc --stage 2`, which will build
-the entire stage 2 compiler and compile the various books published on
-[doc.rust-lang.org][docs]. When updating documentation for the standard library,
-first try `./x.py doc library`. If that fails, or if you need to
-see the output from the latest version of `rustdoc`, add `--stage 1`.
-Results should appear in `build/host/doc`.
-
-[docs]: https://doc.rust-lang.org
+To build the standard library documentation, use `x doc --stage 0 library --open`.
+To build the documentation for a book (e.g. the unstable book), use `x doc src/doc/unstable-book`.
+Results should appear in `build/host/doc` and, when `--open` is passed, will also open
+automatically in your default browser.
+See [Building Documentation](./building/compiler-documenting.md#building-documentation) for more
+information.
You can also use `rustdoc` directly to check small fixes. For example,
`rustdoc src/doc/reference.md` will render reference to `doc/reference.html`.
@@ -366,7 +310,7 @@ There are issues for beginners and advanced compiler devs alike!
Just a few things to keep in mind:
- Please limit line length to 100 characters. This is enforced by CI, and you can run the checks
- locally with `ci/check_line_lengths.sh`.
+ locally with `ci/lengthcheck.sh`.
- When contributing text to the guide, please contextualize the information with some time period
and/or a reason so that the reader knows how much to trust or mistrust the information.
@@ -431,122 +375,99 @@ updated sort][lru] is good for finding issues like this.
[Thanks to `@rustbot`][rustbot], anyone can help triage issues by adding
appropriate labels to issues that haven't been triaged yet:
-* Yellow, **A**-prefixed labels state which **area** of the project an issue
- relates to.
-
-* Magenta, **B**-prefixed labels identify bugs which are **blockers**.
-
-* Dark blue, **beta-** labels track changes which need to be backported into
- the beta branches.
-
-* Light purple, **C**-prefixed labels represent the **category** of an issue.
-
-* Green, **E**-prefixed labels explain the level of **experience** necessary
- to fix the issue.
-
-* The dark blue **final-comment-period** label marks bugs that are using the
- RFC signoff functionality of [rfcbot] and are currently in the final
- comment period.
-
-* Red, **I**-prefixed labels indicate the **importance** of the issue. The
- [I-nominated][inom] label indicates that an issue has been nominated for
- discussion at the next meeting of the team tagged using a
- **T**-prefixed label. Similarly, the [I-prioritize][ipri] indicates
- that an issue has been requested to be prioritized by the appropriate
- team.
-
-* The purple **metabug** label marks lists of bugs collected by other
- categories.
-
-* Purple gray, **O**-prefixed labels are the **operating system** or platform
- that this issue is specific to.
-
-* Orange, **P**-prefixed labels indicate a bug's **priority**. These labels
- can be assigned by anyone that understand the issue and is able to
- prioritize it, and replace the [I-prioritize][ipri] label.
-
-* The gray **proposed-final-comment-period** label marks bugs that are using
- the RFC signoff functionality of [rfcbot] and are currently awaiting
- signoff of all team members in order to enter the final comment period.
-
-* Pink, **regression**-prefixed labels track regressions from stable to the
- release channels.
-
-* The light orange **relnotes** label marks issues that should be documented in
- the release notes of the next release.
-
-* Gray, **S**-prefixed labels are used for tracking the **status** of pull
- requests.
-
-* Blue, **T**-prefixed bugs denote which **team** the issue belongs to.
-
-If you're looking for somewhere to start, check out the [E-easy][eeasy] tag.
-
-[rustbot]: ./rustbot.md
-[inom]: https://github.com/rust-lang/rust/issues?q=is%3Aopen+is%3Aissue+label%3AI-nominated
-[ipri]: https://github.com/rust-lang/rust/issues?q=is%3Aopen+is%3Aissue+label%3AI-prioritize
-[eeasy]: https://github.com/rust-lang/rust/issues?q=is%3Aopen+is%3Aissue+label%3AE-easy
[lru]: https://github.com/rust-lang/rust/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-asc
-[rfcbot]: https://github.com/anp/rfcbot-rs/
-
-## Out-of-tree Contributions
-
-There are a number of other ways to contribute to Rust that don't deal with
-rust-lang/rust:
-
-* Answer questions in the _Get Help!_ channels on the [Rust Discord
- server][rust-discord], on [users.rust-lang.org][users], or on
- [StackOverflow][so].
-* Participate in the [RFC process](https://github.com/rust-lang/rfcs).
-* Find a [requested community library][community-library], build it, and publish
- it to [Crates.io](http://crates.io). Easier said than done, but very, very
- valuable!
+[rustbot]: ./rustbot.md
-[rust-discord]: https://discord.gg/rust-lang
-[users]: https://users.rust-lang.org/
-[so]: http://stackoverflow.com/questions/tagged/rust
-[community-library]: https://github.com/rust-lang/rfcs/labels/A-community-library
+<style>
+.label-color {
+ border-radius:0.5em;
+}
+table td:nth-child(2) {
+ white-space: nowrap;
+}
+
+</style>
+
+| Labels | Color | Description |
+|--------|-------|-------------|
+| [A-] | <span class="label-color" style="background-color:#f7e101;">&#x2003;</span>&nbsp;Yellow | The **area** of the project an issue relates to. |
+| [B-] | <span class="label-color" style="background-color:#d304cb;">&#x2003;</span>&nbsp;Magenta | Issues which are **blockers**. |
+| [beta-] | <span class="label-color" style="background-color:#1e76d9;">&#x2003;</span>&nbsp;Dark Blue | Tracks changes which need to be [backported to beta][beta-backport]. |
+| [C-] | <span class="label-color" style="background-color:#f5f1fd;">&#x2003;</span>&nbsp;Light Purple | The **category** of an issue. |
+| [D-] | <span class="label-color" style="background-color:#c9f7a3;">&#x2003;</span>&nbsp;Mossy Green | Issues for **diagnostics**. |
+| [E-] | <span class="label-color" style="background-color:#02e10c;">&#x2003;</span>&nbsp;Green | The **experience** level necessary to fix an issue. |
+| [F-] | <span class="label-color" style="background-color:#f9c0cc;">&#x2003;</span>&nbsp;Peach | Issues for **nightly features**. |
+| [I-] | <span class="label-color" style="background-color:#e10c02;">&#x2003;</span>&nbsp;Red | The **importance** of the issue. |
+| [I-\*-nominated] | <span class="label-color" style="background-color:#e10c02;">&#x2003;</span>&nbsp;Red | The issue has been nominated for discussion at the next meeting of the corresponding team. |
+| [I-prioritize] | <span class="label-color" style="background-color:#e10c02;">&#x2003;</span>&nbsp;Red | The issue has been nominated for prioritization by the team tagged with a **T**-prefixed label. |
+| [metabug] | <span class="label-color" style="background-color:#5319e7;">&#x2003;</span>&nbsp;Purple | Bugs that collect other bugs. |
+| [O-] | <span class="label-color" style="background-color:#6e6ec0;">&#x2003;</span>&nbsp;Purple Grey | The **operating system** or platform that the issue is specific to. |
+| [P-] | <span class="label-color" style="background-color:#eb6420;">&#x2003;</span>&nbsp;Orange | The issue **priority**. These labels can be assigned by anyone who understands the issue and is able to prioritize it; assigning one removes the [I-prioritize] label. |
+| [regression-] | <span class="label-color" style="background-color:#e4008a;">&#x2003;</span>&nbsp;Pink | Tracks regressions from a stable release. |
+| [relnotes] | <span class="label-color" style="background-color:#fad8c7;">&#x2003;</span>&nbsp;Light Orange | Changes that should be documented in the release notes of the next release. |
+| [S-] | <span class="label-color" style="background-color:#d3dddd;">&#x2003;</span>&nbsp;Gray | Tracks the **status** of pull requests. |
+| [S-tracking-] | <span class="label-color" style="background-color:#4682b4;">&#x2003;</span>&nbsp;Steel Blue | Tracks the **status** of [tracking issues]. |
+| [stable-] | <span class="label-color" style="background-color:#00229c;">&#x2003;</span>&nbsp;Dark Blue | Tracks changes which need to be [backported to stable][stable-backport] in anticipation of a point release. |
+| [T-] | <span class="label-color" style="background-color:#bfd4f2;">&#x2003;</span>&nbsp;Blue | Denotes which **team** the issue belongs to. |
+| [WG-] | <span class="label-color" style="background-color:#c2e0c6;">&#x2003;</span>&nbsp;Green | Denotes which **working group** the issue belongs to. |
+
+
+[A-]: https://github.com/rust-lang/rust/labels?q=A
+[B-]: https://github.com/rust-lang/rust/labels?q=B
+[C-]: https://github.com/rust-lang/rust/labels?q=C
+[D-]: https://github.com/rust-lang/rust/labels?q=D
+[E-]: https://github.com/rust-lang/rust/labels?q=E
+[F-]: https://github.com/rust-lang/rust/labels?q=F
+[I-]: https://github.com/rust-lang/rust/labels?q=I
+[O-]: https://github.com/rust-lang/rust/labels?q=O
+[P-]: https://github.com/rust-lang/rust/labels?q=P
+[S-]: https://github.com/rust-lang/rust/labels?q=S
+[T-]: https://github.com/rust-lang/rust/labels?q=T
+[WG-]: https://github.com/rust-lang/rust/labels?q=WG
+[stable-]: https://github.com/rust-lang/rust/labels?q=stable
+[beta-]: https://github.com/rust-lang/rust/labels?q=beta
+[I-\*-nominated]: https://github.com/rust-lang/rust/labels?q=nominated
+[I-prioritize]: https://github.com/rust-lang/rust/labels/I-prioritize
+[tracking issues]: https://github.com/rust-lang/rust/labels/C-tracking-issue
+[beta-backport]: https://forge.rust-lang.org/release/backporting.html#beta-backporting-in-rust-langrust
+[stable-backport]: https://forge.rust-lang.org/release/backporting.html#stable-backporting-in-rust-langrust
+[metabug]: https://github.com/rust-lang/rust/labels/metabug
+[regression-]: https://github.com/rust-lang/rust/labels?q=regression
+[relnotes]: https://github.com/rust-lang/rust/labels/relnotes
+[S-tracking-]: https://github.com/rust-lang/rust/labels?q=s-tracking
+
+### Rfcbot labels
+
+[rfcbot] uses its own labels for tracking the process of coordinating
+asynchronous decisions, such as approving or rejecting a change.
+This is used for [RFCs], issues, and pull requests.
+
+| Labels | Color | Description |
+|--------|-------|-------------|
+| [proposed-final-comment-period] | <span class="label-color" style="background-color:#ededed;">&#x2003;</span>&nbsp;Gray | Currently awaiting signoff of all team members in order to enter the final comment period. |
+| [disposition-merge] | <span class="label-color" style="background-color:#008800;">&#x2003;</span>&nbsp;Green | Indicates the intent is to merge the change. |
+| [disposition-close] | <span class="label-color" style="background-color:#dd0000;">&#x2003;</span>&nbsp;Red | Indicates the intent is to not accept the change and close it. |
+| [disposition-postpone] | <span class="label-color" style="background-color:#ededed;">&#x2003;</span>&nbsp;Gray | Indicates the intent is to not accept the change at this time and postpone it to a later date. |
+| [final-comment-period] | <span class="label-color" style="background-color:#1e76d9;">&#x2003;</span>&nbsp;Blue | Currently soliciting final comments before merging or closing. |
+| [finished-final-comment-period] | <span class="label-color" style="background-color:#f9e189;">&#x2003;</span>&nbsp;Light Yellow | The final comment period has concluded, and the issue will be merged or closed. |
+| [postponed] | <span class="label-color" style="background-color:#fbca04;">&#x2003;</span>&nbsp;Yellow | The issue has been postponed. |
+| [closed] | <span class="label-color" style="background-color:#dd0000;">&#x2003;</span>&nbsp;Red | The issue has been rejected. |
+| [to-announce] | <span class="label-color" style="background-color:#ededed;">&#x2003;</span>&nbsp;Gray | Issues that have finished their final-comment-period and should be publicly announced. Note: the rust-lang/rust repository uses this label differently, to announce issues at the triage meetings. |
+
+[disposition-merge]: https://github.com/rust-lang/rust/labels/disposition-merge
+[disposition-close]: https://github.com/rust-lang/rust/labels/disposition-close
+[disposition-postpone]: https://github.com/rust-lang/rust/labels/disposition-postpone
+[proposed-final-comment-period]: https://github.com/rust-lang/rust/labels/proposed-final-comment-period
+[final-comment-period]: https://github.com/rust-lang/rust/labels/final-comment-period
+[finished-final-comment-period]: https://github.com/rust-lang/rust/labels/finished-final-comment-period
+[postponed]: https://github.com/rust-lang/rfcs/labels/postponed
+[closed]: https://github.com/rust-lang/rfcs/labels/closed
+[to-announce]: https://github.com/rust-lang/rfcs/labels/to-announce
+[rfcbot]: https://github.com/anp/rfcbot-rs/
+[RFCs]: https://github.com/rust-lang/rfcs
## Helpful Links and Information
-For people new to Rust, and just starting to contribute, or even for
-more seasoned developers, some useful places to look for information
-are:
-
-* This guide contains information about how various parts of the
- compiler work and how to contribute to the compiler
-* [Rust Forge][rustforge] contains additional documentation, including
- write-ups of how to achieve common tasks
-* The [Rust Internals forum][rif], a place to ask questions and
- discuss Rust's internals
-* The [generated documentation for Rust's compiler][gdfrustc]
-* The [Rust reference][rr], even though it doesn't specifically talk about
- Rust's internals, is a great resource nonetheless
-* Although out of date, [Tom Lee's great blog article][tlgba] is very helpful
-* [rustaceans.org][ro] is helpful, but mostly dedicated to IRC
-* The [Rust Compiler Testing Docs][rctd]
-* For [@bors], [this cheat sheet][cheatsheet] is helpful
-* Google is always helpful when programming.
- You can [search all Rust documentation][gsearchdocs] (the standard library,
- the compiler, the books, the references, and the guides) to quickly find
- information about the language and compiler.
-* You can also use Rustdoc's built-in search feature to find documentation on
- types and functions within the crates you're looking at. You can also search
- by type signature! For example, searching for `* -> vec` should find all
- functions that return a `Vec<T>`.
- _Hint:_ Find more tips and keyboard shortcuts by typing `?` on any Rustdoc
- page!
-* Don't be afraid to ask! The Rust community is friendly and helpful.
-
-[rustc dev guide]: about-this-guide.md
-[gdfrustc]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/
-[gsearchdocs]: https://www.google.com/search?q=site:doc.rust-lang.org+your+query+here
-[stddocs]: https://doc.rust-lang.org/std
-[rif]: http://internals.rust-lang.org
-[rr]: https://doc.rust-lang.org/book/README.html
-[rustforge]: https://forge.rust-lang.org/
-[tlgba]: https://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/
-[ro]: https://www.rustaceans.org/
-[rctd]: tests/intro.md
-[cheatsheet]: https://bors.rust-lang.org/
-[Miri]: https://github.com/rust-lang/miri
+This section has moved to the ["About this guide"][more-links] chapter.
+
+[more-links]: ./about-this-guide.md#other-places-to-find-information
diff --git a/src/doc/rustc-dev-guide/src/conventions.md b/src/doc/rustc-dev-guide/src/conventions.md
index 4dd0a2da9..521aeb4a5 100644
--- a/src/doc/rustc-dev-guide/src/conventions.md
+++ b/src/doc/rustc-dev-guide/src/conventions.md
@@ -29,7 +29,10 @@ pass the <!-- date-check: nov 2022 --> `--edition=2021` argument yourself when c
<a name="copyright"></a>
+<!-- REUSE-IgnoreStart -->
+<!-- Prevent REUSE from interpreting the heading as a copyright notice -->
### Copyright notice
+<!-- REUSE-IgnoreEnd -->
In the past, files began with a copyright and license notice. Please **omit**
this notice for new files licensed under the standard terms (dual
diff --git a/src/doc/rustc-dev-guide/src/diagnostics.md b/src/doc/rustc-dev-guide/src/diagnostics.md
index d32de068e..daaffba7b 100644
--- a/src/doc/rustc-dev-guide/src/diagnostics.md
+++ b/src/doc/rustc-dev-guide/src/diagnostics.md
@@ -69,17 +69,12 @@ surrounded with backticks:
error: the identifier `foo.bar` is invalid
```
-### Error explanations
+### Error codes and explanations
-Some errors include long form descriptions. They may be viewed with the
-`--explain` flag, or via the [error index]. Each explanation comes with an
-example of how to trigger it and advice on how to fix it.
-
-Please read [RFC 1567] for details on how to format and write long error
-codes.
-
-The descriptions are written in Markdown, and all of them are linked in the
-[`rustc_error_codes`] crate.
+Most errors have an associated error code. Error codes are linked to long-form
+explanations which contains an example of how to trigger the error and in-depth
+details about the error. They may be viewed with the `--explain` flag, or via
+the [error index].
As a general rule, give an error a code (with an associated explanation) if the
explanation would give more information than the error itself. A lot of the time
@@ -89,12 +84,15 @@ triggers to include useful information for all cases in the error, in which case
it's a good idea to add an explanation.[^estebank]
As always, if you are not sure, just ask your reviewer!
+If you decide to add a new error with an associated error code, please read
+[this section][error-codes] for a guide and important details about the
+process.
+
[^estebank]: This rule of thumb was suggested by **@estebank** [here][estebank-comment].
-[`rustc_error_codes`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_error_codes/error_codes/index.html
[error index]: https://doc.rust-lang.org/error-index.html
-[RFC 1567]: https://github.com/rust-lang/rfcs/blob/master/text/1567-long-error-codes-explanation-normalization.md
[estebank-comment]: https://github.com/rust-lang/rustc-dev-guide/pull/967#issuecomment-733218283
+[error-codes]: ./diagnostics/error-codes.md
### Lints versus fixed diagnostics
diff --git a/src/doc/rustc-dev-guide/src/diagnostics/diagnostic-codes.md b/src/doc/rustc-dev-guide/src/diagnostics/diagnostic-codes.md
deleted file mode 100644
index 3618b43cd..000000000
--- a/src/doc/rustc-dev-guide/src/diagnostics/diagnostic-codes.md
+++ /dev/null
@@ -1,79 +0,0 @@
-# Diagnostic codes
-We generally try to assign each error message a unique code like `E0123`. These
-codes are defined in the compiler in the `diagnostics.rs` files found in each
-crate, which basically consist of macros. The codes come in two varieties: those
-that have an extended write-up, and those that do not. Whenever possible, if you
-are making a new code, you should write an extended write-up.
-
-### Allocating a fresh code
-
-Error codes are stored in `compiler/rustc_error_codes`.
-
-To create a new error, you first need to find the next available
-code. You can find it with `tidy`:
-
-```
-./x.py test tidy
-```
-
-This will invoke the tidy script, which generally checks that your code obeys
-our coding conventions. One of those jobs is to check that diagnostic codes are
-indeed unique. Once it is finished with that, tidy will print out the lowest
-unused code:
-
-```
-...
-tidy check (x86_64-apple-darwin)
-* 470 error codes
-* highest error code: E0591
-...
-```
-
-Here we see the highest error code in use is `E0591`, so we _probably_ want
-`E0592`. To be sure, run `rg E0592` and check, you should see no references.
-
-Ideally, you will write an extended description for your error,
-which will go in `rustc_error_codes/src/error_codes/E0592.md`.
-To register the error, open `rustc_error_codes/src/error_codes.rs` and add the
-code (in its proper numerical order) into` register_diagnostics!` macro, like
-this:
-
-```rust
-register_diagnostics! {
- ...
- E0592: include_str!("./error_codes/E0592.md"),
-}
-```
-
-But you can also add it without an extended description:
-
-```rust
-register_diagnostics! {
- ...
- E0592, // put a description here
-}
-```
-
-To actually issue the error, you can use the `struct_span_err!` macro:
-
-```rust
-struct_span_err!(self.tcx.sess, // some path to the session here
- span, // whatever span in the source you want
- E0592, // your new error code
- fluent::example::an_error_message)
- .emit() // actually issue the error
-```
-
-If you want to add notes or other snippets, you can invoke methods before you
-call `.emit()`:
-
-```rust
-struct_span_err!(...)
- .span_label(another_span, fluent::example::example_label)
- .span_note(another_span, fluent::example::separate_note)
- .emit_()
-```
-
-For an example of a PR adding an error code, see [#76143].
-
-[#76143]: https://github.com/rust-lang/rust/pull/76143
diff --git a/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md b/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md
new file mode 100644
index 000000000..2dbdb53fe
--- /dev/null
+++ b/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md
@@ -0,0 +1,95 @@
+# Error codes
+We generally try to assign each error message a unique code like `E0123`. These
+codes are defined in the compiler in the `diagnostics.rs` files found in each
+crate, which basically consist of macros. All error codes have an associated
+explanation: new error codes must include them. Note that not all _historical_
+(no longer emitted) error codes have explanations.
+
+## Error explanations
+
+The explanations are written in Markdown (see the [CommonMark Spec] for
+specifics around syntax), and all of them are linked in the [`rustc_error_codes`]
+crate. Please read [RFC 1567] for details on how to format and write long error
+codes. As of <!-- date-check --> February 2023, there is an
+effort[^new-explanations] to replace this largely outdated RFC with a new more
+flexible standard.
+
+Error explanations should expand on the error message and provide details about
+_why_ the error occurs. It is not helpful for users to copy-paste a quick fix;
+explanations should help users understand why their code cannot be accepted by
+the compiler. Rust prides itself on helpful error messages and long-form
+explanations are no exception. However, before error explanations are
+overhauled[^new-explanations], it is somewhat open how exactly they should be
+written; as always, ask your reviewer or ask around on the Rust Discord or Zulip.
+
+[^new-explanations]: See the draft RFC [here][new-explanations-rfc].
+
+[`rustc_error_codes`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_error_codes/error_codes/index.html
+[CommonMark Spec]: https://spec.commonmark.org/current/
+[RFC 1567]: https://github.com/rust-lang/rfcs/blob/master/text/1567-long-error-codes-explanation-normalization.md
+[new-explanations-rfc]: https://github.com/rust-lang/rfcs/pull/3370
+
+## Allocating a fresh code
+
+Error codes are stored in `compiler/rustc_error_codes`.
+
+To create a new error, you first need to find the next available
+code. You can find it with `tidy`:
+
+```
+./x.py test tidy
+```
+
+This will invoke the tidy script, which generally checks that your code obeys
+our coding conventions. Some of these jobs check error codes and ensure that
+there aren't duplicates, etc. (the tidy check is defined in
+`src/tools/tidy/src/error_codes.rs`). Once it is finished with that, tidy will
+print out the highest used error code:
+
+```
+...
+tidy check
+Found 505 error codes
+Highest error code: `E0591`
+...
+```
+
+Here we see the highest error code in use is `E0591`, so we _probably_ want
+`E0592`. To be sure, run `rg E0592` and check; you should see no references.
+
+You will have to write an extended description for your error,
+which will go in `rustc_error_codes/src/error_codes/E0592.md`.
+To register the error, open `rustc_error_codes/src/error_codes.rs` and add the
+code (in its proper numerical order) into the `register_diagnostics!` macro, like
+this:
+
+```rust
+register_diagnostics! {
+ ...
+ E0592: include_str!("./error_codes/E0592.md"),
+}
+```
+
+To actually issue the error, you can use the `struct_span_err!` macro:
+
+```rust
+struct_span_err!(self.tcx.sess, // some path to the session here
+ span, // whatever span in the source you want
+ E0592, // your new error code
+ fluent::example::an_error_message)
+ .emit() // actually issue the error
+```
+
+If you want to add notes or other snippets, you can invoke methods before you
+call `.emit()`:
+
+```rust
+struct_span_err!(...)
+ .span_label(another_span, fluent::example::example_label)
+ .span_note(another_span, fluent::example::separate_note)
+ .emit()
+```
+
+For an example of a PR adding an error code, see [#76143].
+
+[#76143]: https://github.com/rust-lang/rust/pull/76143
diff --git a/src/doc/rustc-dev-guide/src/external-repos.md b/src/doc/rustc-dev-guide/src/external-repos.md
new file mode 100644
index 000000000..533f7eb5e
--- /dev/null
+++ b/src/doc/rustc-dev-guide/src/external-repos.md
@@ -0,0 +1,113 @@
+# Using External Repositories
+
+The `rust-lang/rust` git repository depends on several other repos in the `rust-lang` organization.
+There are three main ways we use dependencies:
+1. As a Cargo dependency through crates.io (e.g. `rustc-rayon`)
+2. As a git subtree (e.g. `clippy`)
+3. As a git submodule (e.g. `cargo`)
+
+As a general rule, use crates.io for libraries that could be useful for others in the ecosystem; use
+subtrees for tools that depend on compiler internals and need to be updated if there are breaking
+changes; and use submodules for tools that are independent of the compiler.
+
+## External Dependencies (subtree)
+
+As a contributor to this repository, you don't have to treat the following external projects
+differently from other crates that are directly in this repo:
+
+* [Clippy](https://github.com/rust-lang/rust-clippy)
+* [Miri]
+* [rustfmt](https://github.com/rust-lang/rustfmt)
+* [rust-analyzer](https://github.com/rust-lang/rust-analyzer)
+
+[Miri]: https://github.com/rust-lang/miri
+
+In contrast to `submodule` dependencies
+(see below for those), the `subtree` dependencies are just regular files and directories which can
+be updated in tree. However, if possible, enhancements, bug fixes, etc. specific
+to these tools should be filed against the tools directly in their respective
+upstream repositories. The exception is that when rustc changes are required to
+implement a new tool feature or test, that should happen in one collective rustc PR.
+
+### Synchronizing a subtree
+
+Periodically the changes made to subtree based dependencies need to be synchronized between this
+repository and the upstream tool repositories.
+
+Subtree synchronizations are typically handled by the respective tool maintainers. Other users
+are welcome to submit synchronization PRs; however, to do so you will need to modify your local
+git installation and follow a very precise set of instructions.
+These instructions are documented, along with several useful tips and tricks, in the
+[syncing subtree changes][clippy-sync-docs] section in Clippy's Contributing guide.
+The instructions are applicable for use with any subtree based tool, just be sure to
+use the correct corresponding subtree directory and remote repository.
+
+The synchronization process goes in two directions: `subtree push` and `subtree pull`.
+
+A `subtree push` takes all the changes that happened to the copy in this repo and creates commits
+on the remote repo that match the local changes. Every local
+commit that touched the subtree produces a corresponding commit on the remote repo, rewritten to
+move the files from the specified directory to the tool repo root.
+
+A `subtree pull` takes all changes since the last `subtree pull`
+from the tool repo and adds these commits to the rustc repo along with a merge commit that moves
+the tool changes into the specified directory in the Rust repository.
+
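+For illustration only, the two invocations look roughly like this, using Clippy as the example
+subtree. The exact remote URLs, forks, and branch names depend on your workflow and are spelled
+out precisely in the [syncing subtree changes][clippy-sync-docs] documentation:
+
+```
+# push the in-tree changes to a branch on a fork of the tool repository
+git subtree push -P src/tools/clippy git@github.com:your-github-name/rust-clippy sync-from-rust
+# pull the latest tool changes back into rust-lang/rust
+git subtree pull -P src/tools/clippy https://github.com/rust-lang/rust-clippy master
+```
+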
+It is recommended that you always do a push first and get that merged to the tool master branch.
+Then, when you do a pull, the merge works without conflicts.
+While it's definitely possible to resolve conflicts during a pull, you may have to redo the conflict
+resolution if your PR doesn't get merged fast enough and there are new conflicts. Do not try to
+rebase the result of a `git subtree pull`; rebasing merge commits is a bad idea in general.
+
+You always need to specify the `-P` prefix to the subtree directory and the corresponding remote
+repository. If you specify the wrong directory or repository
+you'll get very fun merges that try to push the wrong directory to the wrong remote repository.
+Luckily you can just abort this without any consequences by throwing away either the pulled commits
+in rustc or the pushed branch on the remote and trying again. It is usually fairly obvious
+that this is happening because you suddenly get thousands of commits that want to be synchronized.
+
+[clippy-sync-docs]: https://doc.rust-lang.org/nightly/clippy/development/infrastructure/sync.html
+
+### Creating a new subtree dependency
+
+If you want to create a new subtree dependency from an existing repository, call (from this
+repository's root directory!)
+
+```
+git subtree add -P src/tools/clippy https://github.com/rust-lang/rust-clippy.git master
+```
+
+This will create a new commit, which you may not rebase under any circumstances! Delete the commit
+and redo the operation if you need to rebase.
+
+Now you're done; the `src/tools/clippy` directory behaves as if Clippy were
+part of the rustc monorepo, so no one but you (or others who synchronize
+subtrees) actually needs to use `git subtree`.
+
+
+## External Dependencies (submodules)
+
+Building Rust will also use external git repositories tracked using [git
+submodules]. The complete list may be found in the [`.gitmodules`] file. Some
+of these projects are required (like `stdarch` for the standard library) and
+some of them are optional (like `src/doc/book`).
+
+Usage of submodules is discussed more in the [Using Git chapter](git.md#git-submodules).
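+
+If you ever need to fetch or update the submodules by hand (normally `x.py` does this for you
+automatically), the standard git invocation applies:
+
+```
+git submodule update --init --recursive
+```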
+
+Some of the submodules are allowed to be in a "broken" state where they
+either don't build or their tests don't pass, e.g. the documentation books
+like [The Rust Reference]. Maintainers of these projects will be notified
+when the project is in a broken state, and they should fix them as soon
+as possible. The current status is tracked on the [toolstate website].
+More information may be found on the Forge [Toolstate chapter].
+In practice, it is very rare for documentation to have broken toolstate.
+
+Breakage is not allowed in the beta and stable channels, and must be addressed
+before the PR is merged. Submodules are also not allowed to be broken on master in
+the week leading up to the beta cut.
+
+[git submodules]: https://git-scm.com/book/en/v2/Git-Tools-Submodules
+[`.gitmodules`]: https://github.com/rust-lang/rust/blob/master/.gitmodules
+[The Rust Reference]: https://github.com/rust-lang/reference/
+[toolstate website]: https://rust-lang-nursery.github.io/rust-toolstate/
+[Toolstate chapter]: https://forge.rust-lang.org/infra/toolstate.html
diff --git a/src/doc/rustc-dev-guide/src/fuzzing.md b/src/doc/rustc-dev-guide/src/fuzzing.md
new file mode 100644
index 000000000..3fb1add01
--- /dev/null
+++ b/src/doc/rustc-dev-guide/src/fuzzing.md
@@ -0,0 +1,149 @@
+# Fuzzing
+
+<!-- date-check: Mar 2023 -->
+
+For the purposes of this guide, *fuzzing* is any testing methodology that
+involves compiling a wide variety of programs in an attempt to uncover bugs in
+rustc. Fuzzing is often used to find internal compiler errors (ICEs). Fuzzing
+can be beneficial, because it can find bugs before users run into them and
+provide small, self-contained programs that make the bug easier to track down.
+However, some common mistakes can reduce the helpfulness of fuzzing and end up
+making contributors' lives harder. To maximize your positive impact on the Rust
+project, please read this guide before reporting fuzzer-generated bugs!
+
+## Guidelines
+
+### In a nutshell
+
+*Please do:*
+
+- Ensure the bug is still present on the latest nightly rustc
+- Include a reasonably minimal, standalone example along with any bug report
+- Include all of the information requested in the bug report template
+- Search for existing reports with the same message and query stack
+- Format the test case with `rustfmt`, if it maintains the bug
+- Indicate that the bug was found by fuzzing
+
+*Please don't:*
+
+- Don't report lots of bugs that use internal features, including but not
+ limited to `custom_mir`, `lang_items`, `no_core`, and `rustc_attrs`.
+- Don't seed your fuzzer with inputs that are known to crash rustc (details
+ below).
+
+### Discussion
+
+If you're not sure whether or not an ICE is a duplicate of one that's already
+been reported, please go ahead and report it and link to issues you think might
+be related. In general, ICEs on the same line but with different *query stacks*
+are usually distinct bugs. For example, [#109020][#109020] and [#109129][#109129]
+had similar error messages:
+
+```
+error: internal compiler error: compiler/rustc_middle/src/ty/normalize_erasing_regions.rs:195:90: Failed to normalize <[closure@src/main.rs:36:25: 36:28] as std::ops::FnOnce<(Emplacable<()>,)>>::Output, maybe try to call `try_normalize_erasing_regions` instead
+```
+```
+error: internal compiler error: compiler/rustc_middle/src/ty/normalize_erasing_regions.rs:195:90: Failed to normalize <() as Project>::Assoc, maybe try to call `try_normalize_erasing_regions` instead
+```
+but different query stacks:
+```
+query stack during panic:
+#0 [fn_abi_of_instance] computing call ABI of `<[closure@src/main.rs:36:25: 36:28] as core::ops::function::FnOnce<(Emplacable<()>,)>>::call_once - shim(vtable)`
+end of query stack
+```
+```
+query stack during panic:
+#0 [check_mod_attrs] checking attributes in top-level module
+#1 [analysis] running analysis passes on this crate
+end of query stack
+```
+
+[#109020]: https://github.com/rust-lang/rust/issues/109020
+[#109129]: https://github.com/rust-lang/rust/issues/109129
+
+## Building a corpus
+
+When building a corpus, be sure to avoid collecting tests that are already
+known to crash rustc. A fuzzer that is seeded with such tests is more likely to
+generate bugs with the same root cause, wasting everyone's time. The simplest
+way to avoid this is to loop over each file in the corpus, see if it causes an
+ICE, and remove it if so.
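+
+A minimal sketch of that loop might look like the following (it assumes a `corpus/` directory of
+`.rs` files; adjust the rustc flags to match your fuzzing setup):
+
+```
+mkdir -p /tmp/corpus-check
+for f in corpus/*.rs; do
+    # skip codegen; we only care whether the compiler itself panics
+    rustc --edition=2021 --emit=metadata --out-dir /tmp/corpus-check "$f" >/dev/null 2>&1
+    # rustc exits with code 101 when it panics, i.e. on an ICE
+    if [ "$?" -eq 101 ]; then
+        rm -- "$f"
+    fi
+done
+```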
+
+To build a corpus, you may want to use:
+
+- The rustc/rust-analyzer/clippy test suites (or even source code) --- though avoid
+ tests that are already known to cause failures, which often begin with comments
+ like `// failure-status: 101` or `// known-bug: #NNN`.
+- The already-fixed ICEs in [Glacier][glacier] --- though avoid the unfixed
+ ones in `ices/`!
+
+## Extra credit
+
+Here are a few things you can do to help the Rust project after filing an ICE.
+
+- [Bisect][bisect] the bug to figure out when it was introduced (see the sketch after this list)
+- Fix "distractions": problems with the test case that don't contribute to
+ triggering the ICE, such as syntax errors or borrow-checking errors
+- Minimize the test case (see below)
+- Add the minimal test case to [Glacier][glacier]
+
+[bisect]: https://rust-lang.github.io/cargo-bisect-rustc/
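+
+As a sketch of the bisection step above: [cargo-bisect-rustc][bisect] can automate finding the
+nightly (and often the exact PR) that introduced an ICE. Assuming the ICE reproduces when building
+a small Cargo project containing the test case, an invocation along these lines should work (the
+date range is a placeholder; pick one that brackets the regression):
+
+```
+# run inside the Cargo project that reproduces the ICE
+cargo install cargo-bisect-rustc
+cargo bisect-rustc --start=2023-01-01 --end=2023-03-01 --regress=ice
+```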
+
+## Minimization
+
+It is helpful to carefully *minimize* the fuzzer-generated input. When
+minimizing, be careful to preserve the original error, and avoid introducing
+distracting problems such as syntax, type-checking, or borrow-checking errors.
+
+There are some tools that can help with minimization. If you're not sure how
+to avoid introducing syntax, type-checking, or borrow-checking errors while using
+these tools, post both the complete and minimized test cases. Generally,
+*syntax-aware* tools give the best results in the least amount of time.
+[`treereduce-rust`][treereduce] and [picireny][picireny] are syntax-aware.
+[`halfempty`][halfempty] is not, but is generally a high-quality tool.
+
+[halfempty]: https://github.com/googleprojectzero/halfempty
+[picireny]: https://github.com/renatahodovan/picireny
+[treereduce]: https://github.com/langston-barrett/treereduce
+
+## Effective fuzzing
+
+When fuzzing rustc, you may want to avoid generating machine code, since this
+is mostly done by LLVM. Try `--emit=mir` instead.
+
+A variety of compiler flags can uncover different issues. `-Zmir-opt-level=4`
+will turn on MIR optimization passes that are not run by default, potentially
+uncovering interesting bugs. `-Zvalidate-mir` adds consistency checks to the MIR and can help
+surface such bugs earlier.
+
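+With a nightly or locally built rustc (the `-Z` flags are unstable), an invocation along these
+lines exercises the extra MIR passes and validation without spending time in LLVM; the file name
+is just a placeholder:
+
+```
+rustc --emit=mir -Zmir-opt-level=4 -Zvalidate-mir fuzz-input.rs
+```
+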
+If you're fuzzing a compiler you built, you may want to build it with `-C
+target-cpu=native` or even PGO/BOLT to squeeze out a few more executions per
+second. Of course, it's best to try multiple build configurations and see
+what actually results in superior throughput.
+
+You may want to build rustc from source with debug assertions to find
+additional bugs, though this is a trade-off: it can slow down fuzzing by
+requiring extra work for every execution. To enable debug assertions, add this
+to `config.toml` when compiling rustc:
+
+```toml
+[rust]
+debug-assertions = true
+```
+
+ICEs that require debug assertions to reproduce should be tagged
+[`requires-debug-assertions`][requires-debug-assertions].
+
+[requires-debug-assertions]: https://github.com/rust-lang/rust/labels/requires-debug-assertions
+
+## Existing projects
+
+- [fuzz-rustc][fuzz-rustc] demonstrates how to fuzz rustc with libfuzzer
+- [icemaker][icemaker] runs rustc and other tools on a large number of source
+ files with a variety of flags to catch ICEs
+- [tree-splicer][tree-splicer] generates new source files by combining existing
+ ones while maintaining correct syntax
+
+[glacier]: https://github.com/rust-lang/glacier
+[fuzz-rustc]: https://github.com/dwrensha/fuzz-rustc
+[icemaker]: https://github.com/matthiaskrgr/icemaker/
+[tree-splicer]: https://github.com/langston-barrett/tree-splicer/
\ No newline at end of file
diff --git a/src/doc/rustc-dev-guide/src/generics.md b/src/doc/rustc-dev-guide/src/generics.md
index 0173bee8f..7512b3b47 100644
--- a/src/doc/rustc-dev-guide/src/generics.md
+++ b/src/doc/rustc-dev-guide/src/generics.md
@@ -6,7 +6,7 @@ inference, type checking, and trait solving. Conceptually, during these routines
that one type is equal to another type and want to swap one out for the other and then swap that out
for another type and so on until we eventually get some concrete types (or an error).
-In rustc this is done using the `SubstsRef` that we mentioned above (“substs” = “substitutions”).
+In rustc this is done using [SubstsRef] (“substs” = “substitutions”).
Conceptually, you can think of `SubstsRef` as a list of types that are to be substituted for the
generic type parameters of the ADT.
@@ -18,6 +18,7 @@ is conceptually like a `&'tcx [GenericArgKind<'tcx>]` slice (but it is actually
[list]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.List.html
[`GenericArg`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/subst/struct.GenericArg.html
[`GenericArgKind`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/subst/enum.GenericArgKind.html
+[SubstsRef]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/subst/type.SubstsRef.html
So why do we use this `List` type instead of making it really a slice? It has the length "inline",
so `&List` is only 32 bits. As a consequence, it cannot be "subsliced" (that only works if the
@@ -126,7 +127,7 @@ You may have a couple of followup questions…
**`subst`** How do we actually do the substitutions? There is a function for that too! You use
[`subst`](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/subst/struct.EarlyBinder.html#method.subst) to
-replace a `SubstRef` with another list of types.
+replace a `SubstsRef` with another list of types.
[Here is an example of actually using `subst` in the compiler][substex]. The exact details are not
too important, but in this piece of code, we happen to be converting from the `rustc_hir::Ty` to
diff --git a/src/doc/rustc-dev-guide/src/getting-started.md b/src/doc/rustc-dev-guide/src/getting-started.md
index bc294d1b3..d5c948994 100644
--- a/src/doc/rustc-dev-guide/src/getting-started.md
+++ b/src/doc/rustc-dev-guide/src/getting-started.md
@@ -1,13 +1,37 @@
# Getting Started
+Thank you for your interest in contributing to Rust! There are many ways to
+contribute, and we appreciate all of them.
+
<!-- toc -->
+If this is your first time contributing, the [walkthrough] chapter can give you a good example of
+how a typical contribution would go.
+
This documentation is _not_ intended to be comprehensive; it is meant to be a
quick guide for the most useful things. For more information, [see this
chapter on how to build and run the compiler](./building/how-to-build-and-run.md).
+[internals]: https://internals.rust-lang.org
+[rust-discord]: http://discord.gg/rust-lang
+[rust-zulip]: https://rust-lang.zulipchat.com
+[coc]: https://www.rust-lang.org/conduct.html
+[walkthrough]: ./walkthrough.md
+[Getting Started]: ./getting-started.md
+
## Asking Questions
+If you have questions, please make a post on the [Rust Zulip server][rust-zulip] or
+[internals.rust-lang.org][internals]. If you are contributing to Rustup, be aware that its team is
+not on Zulip; you can ask questions in `#wg-rustup` [on Discord][rust-discord].
+See the [list of teams and working groups][governance] and [the Community page][community] on the
+official website for more resources.
+
+[governance]: https://www.rust-lang.org/governance
+[community]: https://www.rust-lang.org/community
+
+As a reminder, all contributors are expected to follow our [Code of Conduct][coc].
+
The compiler team (or `t-compiler`) usually hangs out in Zulip [in this
"stream"][z]; it will be easiest to get questions answered there.
@@ -30,6 +54,12 @@ compiler, [consult this "experts map"][map].
It's not perfectly complete, though, so please also feel free to ask questions
even if you can't figure out who to ping.
+Another way to find experts for a given part of the compiler is to see who has made recent commits.
+For example, to find people who have recently worked on name resolution since the 1.68.2 release,
+you could run `git shortlog -n 1.68.2.. compiler/rustc_resolve/`. Ignore any commits starting with
+"Rollup merge" or commits by `@bors` (see [CI contribution prodcedures](./contributing.md#ci) for
+more information about these commits).
+
[map]: https://github.com/rust-lang/compiler-team/blob/master/content/experts/map.toml
### Etiquette
@@ -42,91 +72,61 @@ Just pinging someone without providing any context can be a bit annoying and
just create noise, so we ask that you be mindful of the fact that the
`t-compiler` folks get a lot of pings in a day.
-## Cloning and Building
-
-### System Requirements
-
-Internet access is required.
-
-The most notable software requirement is that you will need Python 2 or 3, but
-there are various others.
-
-The following hardware is recommended.
-* 30GB+ of free disk space.
-* 8GB+ RAM
-* 2+ cores
-
-More powerful machines will lead to much faster builds. There are various
-strategies to work around lesser hardware in the following chapters.
-
-See [this chapter][prereqs] for more details about software and hardware prerequisites.
-
-[prereqs]: ./building/prerequisites.md
+## What should I work on?
-### Cloning
+The Rust project is quite large, and it can be difficult to know which parts of the project need
+help or are a good starting place for beginners. Here are some suggested starting places.
-You can just do a normal git clone:
+### Easy or mentored issues
-```sh
-git clone https://github.com/rust-lang/rust.git
-cd rust
-```
+If you're looking for somewhere to start, check out the following [issue
+search][help-wanted-search]. See the [Triage] section for an explanation of these labels. You can
+also try filtering the search to areas you're interested in. For example:
-### `x.py` Intro
+- `repo:rust-lang/rust-clippy` will only show clippy issues
+- `label:T-compiler` will only show issues related to the compiler
+- `label:A-diagnostics` will only show diagnostic issues
-`rustc` is a [bootstrapping] compiler, which makes it more complex than a
-typical Rust program. As a result, you cannot use Cargo to build it. Instead
-you must use the special tool `x.py`. It is used for the things Cargo is
-normally used for: building, testing, creating releases, formatting, etc.
+Not all important or beginner work has issue labels.
+See below for how to find work that isn't labelled.
-[bootstrapping]: ./building/bootstrapping.md
+[help-wanted-search]: https://github.com/issues?q=is%3Aopen+is%3Aissue+org%3Arust-lang+no%3Aassignee+label%3AE-easy%2C%22good+first+issue%22%2Cgood-first-issue%2CE-medium%2CE-help-wanted%2CE-mentor
+[Triage]: ./contributing.md#issue-triage
-### Configuring the compiler
+### Recurring work
-In the top level of the repo:
+Some work is too large to be done by a single person. In this case, it's common to have "Tracking
+issues" to co-ordinate the work between contributors. Here are some example tracking issues where
+it's easy to pick up work without a large time commitment:
-```sh
-$ ./x.py setup
-```
+- [Rustdoc Askama Migration](https://github.com/rust-lang/rust/issues/108868)
+- [Diagnostic Translation](https://github.com/rust-lang/rust/issues/100717)
+- [Move UI tests to subdirectories](https://github.com/rust-lang/rust/issues/73494)
-This will do some initialization and walk you through an interactive setup to
-create `config.toml`, the primary configuration file.
+If you find more recurring work, please feel free to add it here!
-See [this chapter][config] for more info about configuration.
+### Clippy issues
-[config]: ./building/how-to-build-and-run.md#create-a-configtoml
+The [Clippy] project has spent a long time making its contribution process as friendly to newcomers
+as possible. Consider working on it first to get familiar with the process and the compiler
+internals.
-### Common `x.py` commands
+See [the Clippy contribution guide][clippy-contributing] for instructions on getting started.
-Here are the basic invocations of the `x.py` commands most commonly used when
-working on `rustc`, `std`, `rustdoc`, and other tools.
+[Clippy]: https://doc.rust-lang.org/clippy/
+[clippy-contributing]: https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md
-| Command | When to use it |
-| --- | --- |
-| `./x.py check` | Quick check to see if most things compile; [rust-analyzer can run this automatically for you][rust-analyzer] |
-| `./x.py build` | Builds `rustc`, `std`, and `rustdoc` |
-| `./x.py test` | Runs all tests |
-| `./x.py fmt` | Formats all code |
+### Diagnostic issues
-As written, these commands are reasonable starting points. However, there are
-additional options and arguments for each of them that are worth learning for
-serious development work. In particular, `./x.py build` and `./x.py test`
-provide many ways to compile or test a subset of the code, which can save a lot
-of time.
+Many diagnostic issues are self-contained and don't need detailed background knowledge of the
+compiler. You can see a list of diagnostic issues [here][diagnostic-issues].
-Also, note that `x.py` supports all kinds of path suffixes for `compiler`, `library`,
-and `src/tools` directories. So, you can simply run `x.py test tidy` instead of
-`x.py test src/tools/tidy`. Or, `x.py build std` instead of `x.py build library/std`.
-
-[rust-analyzer]: ./building/suggested.html#configuring-rust-analyzer-for-rustc
-
-See the chapters on [building](./building/how-to-build-and-run.md),
-[testing](./tests/intro.md), and [rustdoc](./rustdoc.md) for more details.
+[diagnostic-issues]: https://github.com/rust-lang/rust/issues?q=is%3Aissue+is%3Aopen+label%3AA-diagnostics+no%3Aassignee
### Contributing code to other Rust projects
There are a bunch of other projects that you can contribute to outside of the
-`rust-lang/rust` repo, including `clippy`, `miri`, `chalk`, and many others.
+`rust-lang/rust` repo, including `cargo`, `miri`, `rustup`, and many others.
These repos might have their own contributing guidelines and procedures. Many
of them are owned by working groups (e.g. `chalk` is largely owned by
@@ -146,173 +146,37 @@ incredibly helpful:
- [Writing documentation][wd]: if you are feeling a bit more intrepid, you could try
to read a part of the code and write doc comments for it. This will help you
to learn some part of the compiler while also producing a useful artifact!
+- [Triaging issues][triage]: categorizing, reproducing, and minimizing issues is very helpful to the Rust maintainers.
- [Working groups][wg]: there are a bunch of working groups on a wide variety
of rust-related things.
+- Answer questions in the _Get Help!_ channels on the [Rust Discord
+ server][rust-discord], on [users.rust-lang.org][users], or on
+ [StackOverflow][so].
+- Participate in the [RFC process](https://github.com/rust-lang/rfcs).
+- Find a [requested community library][community-library], build it, and publish
+ it to [Crates.io](http://crates.io). Easier said than done, but very, very
+ valuable!
+
+[rust-discord]: https://discord.gg/rust-lang
+[users]: https://users.rust-lang.org/
+[so]: http://stackoverflow.com/questions/tagged/rust
+[community-library]: https://github.com/rust-lang/rfcs/labels/A-community-library
[iceb]: ./notification-groups/cleanup-crew.md
[wd]: ./contributing.md#writing-documentation
[wg]: https://rust-lang.github.io/compiler-team/working-groups/
+[triage]: ./contributing.md#issue-triage
-## Contributor Procedures
-
-There are some official procedures to know about. This is a tour of the
-highlights, but there are a lot more details, which we will link to below.
-
-### Code Review
-
-When you open a PR on the `rust-lang/rust` repo, a bot called `@rustbot` will
-automatically assign a reviewer to the PR based on which files you changed.
-The reviewer is the person that will approve the PR to be tested and merged.
-If you want a specific reviewer (e.g. a team member you've been working with),
-you can specifically request them by writing `r? @user` (e.g. `r? @jyn514`) in
-either the original post or a followup comment
-(you can see [this comment][r?] for example).
-
-Please note that the reviewers are humans, who for the most part work on `rustc`
-in their free time. This means that they can take some time to respond and review
-your PR. It also means that reviewers can miss some PRs that are assigned to them.
-
-To try to move PRs forward, the Triage WG regularly goes through all PRs that
-are waiting for review and haven't been discussed for at least 2 weeks. If you
-don't get a review within 2 weeks, feel free to ask the Triage WG on
-Zulip ([#t-release/triage]). They have knowledge of when to ping, who might be
-on vacation, etc.
-
-The reviewer may request some changes using the GitHub code review interface.
-They may also request special procedures (such as a [crater] run; [see
-below][break]) for some PRs.
-
-[r?]: https://github.com/rust-lang/rust/pull/78133#issuecomment-712692371
-[#t-release/triage]: https://rust-lang.zulipchat.com/#narrow/stream/242269-t-release.2Ftriage
-[break]: #breaking-changes
-
-When the PR is ready to be merged, the reviewer will issue a command to
-`@bors`, the CI bot. Usually, this is `@bors r+` or `@bors r=user` to approve
-a PR (there are few other commands, but they are less relevant here).
-You can see [this comment][r+] for example. This puts the PR in [bors's queue][bors]
-to be tested and merged. Be patient; this can take a while and the queue can
-sometimes be long. PRs are never merged by hand.
-
-[r+]: https://github.com/rust-lang/rust/pull/78133#issuecomment-712726339
-[bors]: https://bors.rust-lang.org/queue/rust
-
-### Bug Fixes or "Normal" code changes
-
-For most PRs, no special procedures are needed. You can just open a PR, and it
-will be reviewed, approved, and merged. This includes most bug fixes,
-refactorings, and other user-invisible changes. The next few sections talk
-about exceptions to this rule.
-
-Also, note that it is perfectly acceptable to open WIP PRs or GitHub [Draft
-PRs][draft]. Some people prefer to do this so they can get feedback along the
-way or share their code with a collaborator. Others do this so they can utilize
-the CI to build and test their PR (e.g. if you are developing on a laptop).
-
-[draft]: https://github.blog/2019-02-14-introducing-draft-pull-requests/
-
-### New Features
-
-Rust has strong backwards-compatibility guarantees. Thus, new features can't
-just be implemented directly in stable Rust. Instead, we have 3 release
-channels: stable, beta, and nightly.
-
-- **Stable**: this is the latest stable release for general usage.
-- **Beta**: this is the next release (will be stable within 6 weeks).
-- **Nightly**: follows the `master` branch of the repo. This is the only
- channel where unstable, incomplete, or experimental features are usable with
- feature gates.
-
-In order to implement a new feature, usually you will need to go through [the
-RFC process][rfc] to propose a design, have discussions, etc. In some cases,
-small features can be added with only an FCP ([see below][break]). If in doubt, ask the
-compiler, language, or libs team (whichever is most relevant).
-
-[rfc]: https://github.com/rust-lang/rfcs/blob/master/README.md
-
-After a feature is approved to be added, a tracking issue is created on the
-`rust-lang/rust` repo, which tracks the progress towards the implementation of
-the feature, any bugs reported, and eventually stabilization.
-
-The feature then needs to be implemented behind a feature gate, which prevents
-it from being accidentally used.
-
-Finally, somebody may propose stabilizing the feature in an upcoming version of
-Rust. This requires a Final Comment Period ([see below][break]) to get the
-approval of the relevant teams.
-
-After that, the feature gate can be removed and the feature turned on for all users.
-
-For more details on this process, see [this chapter on implementing new
-features.](./implementing_new_features.md)
-
-### Breaking Changes
-
-As mentioned above, Rust has strong backwards-compatibility guarantees. To this
-end, we are reluctant to make breaking changes. However, sometimes they are
-needed to correct compiler bugs (e.g. code that compiled but should not) or
-make progress on some features.
-
-Depending on the scale of the breakage, there are a few different actions that
-can be taken. If the reviewer believes the breakage is very minimal (i.e. very
-unlikely to be actually encountered by users), they may just merge the change.
-More often, they will request a Final Comment Period (FCP), which calls for
-rough consensus among the members of a relevant team. The team members can
-discuss the issue and either accept, reject, or request changes on the PR.
-
-If the scale of breakage is large, a deprecation warning may be needed. This is
-a warning that the compiler will display to users whose code will break in the
-future. After some time, an FCP can be used to move forward with the actual
-breakage.
-
-If the scale of breakage is unknown, a team member or contributor may request a
-[crater] run. This is a bot that will compile all crates.io crates and many
-public github repos with the compiler with your changes. A report will then be
-generated with crates that ceased to compile with or began to compile with your
-changes. Crater runs can take a few days to complete.
-
-[crater]: https://github.com/rust-lang/crater
-
-### Major Changes
-
-The compiler team has a special process for large changes, whether or not they
-cause breakage. This process is called a Major Change Proposal (MCP). MCP is a
-relatively lightweight mechanism for getting feedback on large changes to the
-compiler (as opposed to a full RFC or a design meeting with the team).
-
-Example of things that might require MCPs include major refactorings, changes
-to important types, or important changes to how the compiler does something, or
-smaller user-facing changes.
-
-**When in doubt, ask on [zulip][z]. It would be a shame to put a lot of work
-into a PR that ends up not getting merged!** [See this document][mcpinfo] for
-more info on MCPs.
-
-[mcpinfo]: https://forge.rust-lang.org/compiler/mcp.html
-
-### Performance
-
-Compiler performance is important. We have put a lot of effort over the last
-few years into [gradually improving it][perfdash].
-
-[perfdash]: https://perf.rust-lang.org/dashboard.html
+## Cloning and Building
-If you suspect that your change may cause a performance regression (or
-improvement), you can request a "perf run" (your reviewer may also request one
-before approving). This is yet another bot that will compile a collection of
-benchmarks on a compiler with your changes. The numbers are reported
-[here][perf], and you can see a comparison of your changes against the latest
-master.
+See ["How to build and run the compiler"](./building//how-to-build-and-run.md).
-For an introduction to the performance of Rust code in general
-which would also be useful in rustc development, see [The Rust Performance Book].
+## Contributor Procedures
-[perf]: https://perf.rust-lang.org
-[The Rust Performance Book]: https://nnethercote.github.io/perf-book/
+This section has moved to the ["Contribution Procedures"](./contributing.md) chapter.
## Other Resources
-- This guide: talks about how `rustc` works
-- [The t-compiler zulip][z]
-- [The compiler's documentation (rustdocs)](https://doc.rust-lang.org/nightly/nightly-rustc/)
-- [The Forge](https://forge.rust-lang.org/) has more documentation about various procedures.
-- `#contribute` and `#rustdoc` on [Discord](https://discord.gg/rust-lang).
+This section has moved to the ["About this guide"][more-links] chapter.
+
+[more-links]: ./about-this-guide.md#other-places-to-find-information
diff --git a/src/doc/rustc-dev-guide/src/git.md b/src/doc/rustc-dev-guide/src/git.md
index a426157a2..34f2f101e 100644
--- a/src/doc/rustc-dev-guide/src/git.md
+++ b/src/doc/rustc-dev-guide/src/git.md
@@ -508,6 +508,18 @@ See [the docs for `--color-moved`](https://git-scm.com/docs/git-diff#Documentati
See [the relevant section for PR authors](#git-range-diff). This can be useful for comparing code
that was force-pushed to make sure there are no unexpected changes.
+### Ignoring changes to specific files
+
+Many large files in the repo are autogenerated. To view a diff that ignores changes to those files,
+you can use the following syntax (this example ignores `Cargo.lock`):
+
+```
+git log -p ':!Cargo.lock'
+```
+
+Arbitrary patterns are supported (e.g. `:!compiler/*`). Patterns use the same syntax as
+`.gitignore`, prefixed with `:!` to exclude the matching paths from the diff.
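+
+For example, a `git diff` that skips both the lockfile and everything under `compiler/`
+(the patterns here are just illustrative) might look like:
+
+```
+git diff ':!Cargo.lock' ':!compiler/*'
+```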
+
## Git submodules
**NOTE**: submodules are a nice thing to know about, but it *isn't* an absolute
diff --git a/src/doc/rustc-dev-guide/src/identifiers.md b/src/doc/rustc-dev-guide/src/identifiers.md
index 1b60b3b0b..09e85c019 100644
--- a/src/doc/rustc-dev-guide/src/identifiers.md
+++ b/src/doc/rustc-dev-guide/src/identifiers.md
@@ -65,8 +65,7 @@ See the [HIR chapter][hir-map] for more detailed information.
- [`BasicBlock`] identifies a *basic block*. It points to an instance of
[`BasicBlockData`], which can be retrieved by indexing into
- [`Body::basic_blocks()`] (note that you must call a function; the field is
- private).
+ [`Body.basic_blocks`].
- [`Local`] identifies a local variable in a function. Its associated data is in
[`LocalDecl`], which can be retrieved by indexing into [`Body.local_decls`].
@@ -93,7 +92,7 @@ See the [HIR chapter][hir-map] for more detailed information.
[`BasicBlock`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.BasicBlock.html
[`BasicBlockData`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.BasicBlockData.html
-[`Body::basic_blocks()`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.Body.html#method.basic_blocks
+[`Body.basic_blocks`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.Body.html#structfield.basic_blocks
[`Local`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.Local.html
[`LocalDecl`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.LocalDecl.html
[`Body.local_decls`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/struct.Body.html#structfield.local_decls
diff --git a/src/doc/rustc-dev-guide/src/implementing_new_features.md b/src/doc/rustc-dev-guide/src/implementing_new_features.md
index 9bd853080..946637d29 100644
--- a/src/doc/rustc-dev-guide/src/implementing_new_features.md
+++ b/src/doc/rustc-dev-guide/src/implementing_new_features.md
@@ -1,5 +1,7 @@
# Implementing new features
+<!-- toc -->
+
When you want to implement a new significant feature in the compiler,
you need to go through this process to make sure everything goes
smoothly.
diff --git a/src/doc/rustc-dev-guide/src/mir/index.md b/src/doc/rustc-dev-guide/src/mir/index.md
index 0c00928c0..dc0be167b 100644
--- a/src/doc/rustc-dev-guide/src/mir/index.md
+++ b/src/doc/rustc-dev-guide/src/mir/index.md
@@ -217,7 +217,7 @@ over the overflow checks.)
## MIR data types
The MIR data types are defined in the [`compiler/rustc_middle/src/mir/`][mir]
-module. Each of the key concepts mentioned in the previous section
+module. Each of the key concepts mentioned in the previous section
maps in a fairly straightforward way to a Rust type.
The main MIR data type is [`Body`]. It contains the data for a single
@@ -233,15 +233,14 @@ but [you can read about those below](#promoted)).
- **Terminators** are represented by the [`Terminator`].
- **Locals** are represented by a [newtype'd] index type [`Local`].
The data for a local variable is found in the
- [`Body::local_decls`][localdecls] vector). There is also a special constant
+ [`Body::local_decls`][localdecls] vector. There is also a special constant
[`RETURN_PLACE`] identifying the special "local" representing the return value.
-- **Places** are identified by the enum [`Place`]. There are a few
- variants:
+- **Places** are identified by the struct [`Place`]. There are a few
+ fields:
- Local variables like `_1`
- - Static variables `FOO`
- **Projections**, which are fields or other things that "project
- out" from a base place. These are represented by the type
- [`ProjectionElem`]. So e.g. the place `_1.f` is a projection,
+ out" from a base place. These are represented by the [newtype'd] type
+ [`ProjectionElem`]. So e.g. the place `_1.f` is a projection,
with `f` being the "projection element" and `_1` being the base
path. `*_1` is also a projection, with the `*` being represented
by the [`ProjectionElem::Deref`] element.
diff --git a/src/doc/rustc-dev-guide/src/notification-groups/cleanup-crew.md b/src/doc/rustc-dev-guide/src/notification-groups/cleanup-crew.md
index 436b51fd1..2e7b1766c 100644
--- a/src/doc/rustc-dev-guide/src/notification-groups/cleanup-crew.md
+++ b/src/doc/rustc-dev-guide/src/notification-groups/cleanup-crew.md
@@ -77,38 +77,13 @@ various builds of rustc. For recent regressions, it is even able to
use the builds from our CI to track down the regression to a specific
PR; for older regressions, it will simply identify a nightly.
-To learn to use [cargo-bisect-rustc], check out [this blog
-post][learn], which gives a quick introduction to how it works. You
-can also ask questions at the Zulip stream
-[`#t-compiler/cargo-bisect-rustc`][zcbr], or help in improving the tool.
+To learn to use [cargo-bisect-rustc], check out [this blog post][learn], which
+gives a quick introduction to how it works. Additionally, there is a [Guide]
+which goes into more detail on how to use it. You can also ask questions at
+the Zulip stream [`#t-compiler/cargo-bisect-rustc`][zcbr], or help in
+improving the tool.
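+
+As a quick illustration, a bisection over a range of nightlies typically looks
+something like this (the dates are placeholders; adjust them to bracket your
+regression):
+
+```
+cargo bisect-rustc --start=2023-01-01 --end=2023-03-01
+```
+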
[cargo-bisect-rustc]: https://github.com/rust-lang/cargo-bisect-rustc/
[learn]: https://blog.rust-lang.org/inside-rust/2019/12/18/bisecting-rust-compiler.html
[zcbr]: https://rust-lang.zulipchat.com/#narrow/stream/217417-t-compiler.2Fcargo-bisect-rustc
-
-### identifying the range of PRs in a nightly
-
-If the regression occurred more than 90 days ago, then
-cargo-bisect-rustc will not able to identify the particular PR that
-caused the regression, just the nightly build. In that case, we can
-identify the set of PRs that this corresponds to by using the git
-history.
-
-The command `rustc +nightly -vV` will cause rustc to output a number
-of useful bits of version info, including the `commit-hash`. Given the
-commit-hash of two nightly versions, you can find all of PRs that have
-landed in between by taking the following steps:
-
-1. Go to an update checkout of the [rust-lang/rust] repository
-2. Execute the command `git log --author=bors --format=oneline SHA1..SHA2`
- * This will list out all of the commits by bors, which is our merge bot
- * Each commit corresponds to one PR, and information about the PR should be in the description
-3. Copy and paste that information into the bug report
-
-Often, just eye-balling the PR descriptions (which are included in the
-commit messages) will give you a good idea which one likely caused the
-problem. But if you're unsure feel free to just ping the compiler team
-(`@rust-lang/compiler`) or else to ping the authors of the PR
-themselves.
-
-[rust-lang/rust]: https://github.com/rust-lang/rust/
+[Guide]: https://rust-lang.github.io/cargo-bisect-rustc/
diff --git a/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md b/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md
index 3c2102a50..47b9fb5d9 100644
--- a/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md
+++ b/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md
@@ -7,7 +7,7 @@
To get diagnostics from the compiler,
configure `rustc_interface::Config` to output diagnostic to a buffer,
and run `TyCtxt.analysis`. The following was tested
-with <!-- date-check: Feb 2023 --> `nightly-2023-02-13`:
+with <!-- date-check: mar 2023 --> `nightly-2023-03-27`:
```rust
{{#include ../examples/rustc-driver-getting-diagnostics.rs}}
diff --git a/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md b/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md
index d058a5838..4edbbca00 100644
--- a/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md
+++ b/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md
@@ -5,7 +5,7 @@
## Getting the type of an expression
To get the type of an expression, use the `global_ctxt` to get a `TyCtxt`.
-The following was tested with <!-- date-check: Feb 2023 --> `nightly-2023-02-13`:
+The following was tested with <!-- date-check: mar 2023 --> `nightly-2023-03-27`:
```rust
{{#include ../examples/rustc-driver-interacting-with-the-ast.rs}}
diff --git a/src/doc/rustc-dev-guide/src/rustdoc-internals.md b/src/doc/rustc-dev-guide/src/rustdoc-internals.md
index a8cc0c376..d58c2d280 100644
--- a/src/doc/rustc-dev-guide/src/rustdoc-internals.md
+++ b/src/doc/rustc-dev-guide/src/rustdoc-internals.md
@@ -7,38 +7,61 @@ see the ["Rustdoc overview" chapter](./rustdoc.md).
## From crate to clean
-In `core.rs` are two central items: the `DocContext` struct, and the `run_core`
-function. The latter is where rustdoc calls out to rustc to compile a crate to
-the point where rustdoc can take over. The former is a state container used
-when crawling through a crate to gather its documentation.
+In `core.rs` are two central items: the `DocContext` struct, and the
+`run_global_ctxt` function. The latter is where rustdoc calls out to rustc to
+compile a crate to the point where rustdoc can take over. The former is a state
+container used when crawling through a crate to gather its documentation.
The main process of crate crawling is done in `clean/mod.rs` through several
-implementations of the `Clean` trait defined within. This is a conversion
-trait, which defines one method:
+functions with names that start with `clean_`. Each function accepts an `hir`
+or `ty` data structure, and outputs a `clean` structure used by rustdoc. For
+example, this function for converting lifetimes:
```rust,ignore
-pub trait Clean<T> {
- fn clean(&self, cx: &DocContext) -> T;
+fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) -> Lifetime {
+ let def = cx.tcx.named_bound_var(lifetime.hir_id);
+ if let Some(
+ rbv::ResolvedArg::EarlyBound(node_id)
+ | rbv::ResolvedArg::LateBound(_, _, node_id)
+ | rbv::ResolvedArg::Free(_, node_id),
+ ) = def
+ {
+ if let Some(lt) = cx.substs.get(&node_id).and_then(|p| p.as_lt()).cloned() {
+ return lt;
+ }
+ }
+ Lifetime(lifetime.ident.name)
}
```
`clean/mod.rs` also defines the types for the "cleaned" AST used later on to
-render documentation pages. Each usually accompanies an implementation of
-`Clean` that takes some AST or HIR type from rustc and converts it into the
+render documentation pages. Each usually accompanies a `clean` function
+that takes some AST or HIR type from rustc and converts it into the
appropriate "cleaned" type. "Big" items like modules or associated items may
-have some extra processing in its `Clean` implementation, but for the most part
+have some extra processing in their `clean` function, but for the most part
these impls are straightforward conversions. The "entry point" to this module
-is the `impl Clean<Crate> for visit_ast::RustdocVisitor`, which is called by
-`run_core` above.
-
-You see, I actually lied a little earlier: There's another AST transformation
-that happens before the events in `clean/mod.rs`. In `visit_ast.rs` is the
-type `RustdocVisitor`, which *actually* crawls a `rustc_hir::Crate` to get the first
-intermediate representation, defined in `doctree.rs`. This pass is mainly to
-get a few intermediate wrappers around the HIR types and to process visibility
-and inlining. This is where `#[doc(inline)]`, `#[doc(no_inline)]`, and
-`#[doc(hidden)]` are processed, as well as the logic for whether a `pub use`
-should get the full page or a "Reexport" line in the module page.
+is `clean::krate`, which is called by
+`run_global_ctxt` above.
+
+The first step in `clean::krate` is to invoke `visit_ast::RustdocVisitor` to
+process the module tree into an intermediate `visit_ast::Module`. This is the
+step that actually crawls the `rustc_hir::Crate`, normalizing various aspects
+of name resolution, such as:
+
+ * showing `#[macro_export]`-ed macros at the crate root, regardless of where
+ they're defined
+ * inlining public `use` exports of private items, or showing a "Reexport"
+ line in the module page
+ * inlining items with `#[doc(hidden)]` if the base item is hidden but the
+ reexport is not
+ * handling `#[doc(inline)]` and `#[doc(no_inline)]`
+ * handling import globs and cycles, so there are no duplicates or infinite
+ directory trees
+
+After this step, `clean::krate` invokes `clean_doc_module`, which actually
+converts the HIR items to the cleaned AST. This is also the step where
+cross-crate inlining is performed, which requires converting `rustc_middle`
+data structures into the cleaned AST instead.
The other major thing that happens in `clean/mod.rs` is the collection of doc
comments and `#[doc=""]` attributes into a separate field of the Attributes
@@ -48,41 +71,28 @@ easier to collect this documentation later in the process.
The primary output of this process is a `clean::Crate` with a tree of Items
which describe the publicly-documentable items in the target crate.
-### Hot potato
+### Passes anything but a gas station
+
+(alternate title: [hot potato](https://www.youtube.com/watch?v=WNFBIt5HxdY))
Before moving on to the next major step, a few important "passes" occur over
-the documentation. These do things like combine the separate "attributes" into
-a single string to make the document easier on the markdown parser,
-or drop items that are not public or deliberately hidden with `#[doc(hidden)]`.
+the cleaned AST. Several of these passes are lints and reports, but some of
+them mutate or generate new items.
+
These are all implemented in the `passes/` directory, one file per pass.
By default, all of these passes are run on a crate, but the ones
regarding dropping private/hidden items can be bypassed by passing
`--document-private-items` to rustdoc. Note that unlike the previous set of AST
transformations, the passes are run on the _cleaned_ crate.
-(Strictly speaking, you can fine-tune the passes run and even add your own, but
-[we're trying to deprecate that][44136]. If you need finer-grain control over
-these passes, please let us know!)
-
-[44136]: https://github.com/rust-lang/rust/issues/44136
-
-Here is the list of passes as of <!-- date-check --> November 2022:
+Here is the list of passes as of <!-- date-check --> March 2023:
- `calculate-doc-coverage` calculates information used for the `--show-coverage`
flag.
-- `check-bare-urls` detects links that are not linkified, e.g., in Markdown such as
- `Go to https://example.com/.` It suggests wrapping the link with angle brackets:
- `Go to <https://example.com/>.` to linkify it. This is the code behind the <!--
- date: 2022-05 --> `rustdoc::bare_urls` lint.
-
-- `check-code-block-syntax` validates syntax inside Rust code blocks
- (<code>```rust</code>)
-
-- `check-doc-test-visibility` runs doctest visibility–related lints.
-
-- `check-invalid-html-tags` detects invalid HTML (like an unclosed `<span>`)
- in doc comments.
+- `check-doc-test-visibility` runs doctest visibility–related lints. This pass
+ runs before `strip-private`, which is why it needs to be separate from
+ `run-lints`.
- `collect-intra-doc-links` resolves [intra-doc links](https://doc.rust-lang.org/nightly/rustdoc/write-documentation/linking-to-items-by-name.html).
@@ -92,44 +102,66 @@ Here is the list of passes as of <!-- date-check --> November 2022:
- `propagate-doc-cfg` propagates `#[doc(cfg(...))]` to child items.
+- `run-lints` runs some of rustdoc's lints, defined in `passes/lint`. This is
+ the last pass to run.
+
+ - `bare_urls` detects links that are not linkified, e.g., in Markdown such as
+ `Go to https://example.com/.` It suggests wrapping the link with angle brackets:
+ `Go to <https://example.com/>.` to linkify it. This is the code behind the <!--
+ date-check: may 2022 --> `rustdoc::bare_urls` lint.
+
+ - `check_code_block_syntax` validates syntax inside Rust code blocks
+ (<code>```rust</code>)
+
+ - `html_tags` detects invalid HTML (like an unclosed `<span>`)
+ in doc comments.
+
+- `strip-hidden` and `strip-private` strip all `doc(hidden)` and private items
+ from the output. `strip-private` implies `strip-priv-imports`. Basically, the
+ goal is to remove items that are not relevant for public documentation. This
+ pass is skipped when `--document-hidden-items` is passed.
+
- `strip-priv-imports` strips all private import statements (`use`, `extern
crate`) from a crate. This is necessary because rustdoc will handle *public*
imports by either inlining the item's documentation to the module or creating
a "Reexports" section with the import in it. The pass ensures that all of
- these imports are actually relevant to documentation.
+ these imports are actually relevant to documentation. It is technically
+ only run when `--document-private-items` is passed, but `strip-private`
+ accomplishes the same thing.
-- `strip-hidden` and `strip-private` strip all `doc(hidden)` and private items
- from the output. `strip-private` implies `strip-priv-imports`. Basically, the
- goal is to remove items that are not relevant for public documentation.
+- `strip-private` strips from the crate all private items that cannot be seen
+  externally. This pass is skipped when `--document-private-items` is passed.
There is also a `stripper` module in `passes/`, but it is a collection of
utility functions for the `strip-*` passes and is not a pass itself.
-## From clean to crate
+## From clean to HTML
This is where the "second phase" in rustdoc begins. This phase primarily lives
-in the `html/` folder, and it all starts with `run()` in `html/render.rs`. This
-code is responsible for setting up the `Context`, `SharedContext`, and `Cache`
-which are used during rendering, copying out the static files which live in
-every rendered set of documentation (things like the fonts, CSS, and JavaScript
-that live in `html/static/`), creating the search index, and printing out the
-source code rendering, before beginning the process of rendering all the
-documentation for the crate.
-
-Several functions implemented directly on `Context` take the `clean::Crate` and
-set up some state between rendering items or recursing on a module's child
-items. From here the "page rendering" begins, via an enormous `write!()` call
-in `html/layout.rs`. The parts that actually generate HTML from the items and
-documentation occurs within a series of `std::fmt::Display` implementations and
-functions that pass around a `&mut std::fmt::Formatter`. The top-level
-implementation that writes out the page body is the `impl<'a> fmt::Display for
-Item<'a>` in `html/render.rs`, which switches out to one of several `item_*`
-functions based on the kind of `Item` being rendered.
+in the `formats/` and `html/` folders, and it all starts with
+`formats::run_format`. This code is responsible for setting up a type that
+implements `FormatRenderer`, which for HTML is [`Context`].
+
+This structure contains methods that get called by `run_format` to drive the
+doc rendering, which includes:
+
+* `init` generates `static.files`, as well as the search index and `src/`
+* `item` generates the item HTML files themselves
+* `after_krate` generates other global resources like `all.html`
+
+In `item`, the "page rendering" occurs via a mixture of [Askama] templates
+and manual `write!()` calls, starting in `html/layout.rs`. The parts that have
+not been converted to templates occur within a series of `std::fmt::Display`
+implementations and functions that pass around a `&mut std::fmt::Formatter`.
+
+The parts that actually generate HTML from the items and documentation start
+with `print_item` defined in `html/render/print_item.rs`, which switches out
+to one of several `item_*` functions based on the kind of `Item` being rendered.
Depending on what kind of rendering code you're looking for, you'll probably
-find it either in `html/render.rs` for major items like "what sections should I
-print for a struct page" or `html/format.rs` for smaller component pieces like
-"how should I print a where clause as part of some other item".
+find it either in `html/render/mod.rs` for major items like "what sections
+should I print for a struct page" or `html/format/mod.rs` for smaller
+component pieces like "how should I print a where clause as part of some other item".
Whenever rustdoc comes across an item that should print hand-written
documentation alongside, it calls out to `html/markdown.rs` which interfaces
@@ -148,23 +180,46 @@ to us"][video])
[video]: https://www.youtube.com/watch?v=hOLAGYmUQV0
-It's important to note that the AST cleaning can ask the compiler for
-information (crucially, `DocContext` contains a `TyCtxt`), but page rendering
-cannot. The `clean::Crate` created within `run_core` is passed outside the
-compiler context before being handed to `html::render::run`. This means that a
-lot of the "supplementary data" that isn't immediately available inside an
-item's definition, like which trait is the `Deref` trait used by the language,
-needs to be collected during cleaning, stored in the `DocContext`, and passed
-along to the `SharedContext` during HTML rendering. This manifests as a bunch
-of shared state, context variables, and `RefCell`s.
-
-Also of note is that some items that come from "asking the compiler" don't go
-directly into the `DocContext` - for example, when loading items from a foreign
-crate, rustdoc will ask about trait implementations and generate new `Item`s
-for the impls based on that information. This goes directly into the returned
-`Crate` rather than roundabout through the `DocContext`. This way, these
-implementations can be collected alongside the others, right before rendering
-the HTML.
+It's important to note that rustdoc can ask the compiler for type information
+directly, even during HTML generation. This [didn't use to be the case], and
+a lot of rustdoc's architecture was designed around not doing that, but a
+`TyCtxt` is now passed to `formats::renderer::run_format`, which is used to
+run generation for both HTML and the
+(unstable as of <!-- date-check --> March 2023) JSON format.
+
+[didn't use to be the case]: https://github.com/rust-lang/rust/pull/80090
+
+This change has allowed other changes to remove data from the "clean" AST
+that can be easily derived from `TyCtxt` queries, and we'll usually accept
+PRs that remove fields from "clean" (it's been soft-deprecated), but this
+is complicated by two other constraints that rustdoc runs under:
+
+* Docs can be generated for crates that don't actually pass type checking.
+ This is used for generating docs that cover mutually-exclusive platform
+ configurations, such as `libstd` having a single package of docs that
+  covers all supported operating systems. This means rustdoc has to be able
+ to generate docs from HIR.
+* Docs can inline across crates. Since crate metadata doesn't contain HIR,
+ it must be possible to generate inlined docs from the `rustc_middle` data.
+
+The "clean" AST acts as a common output format for both input formats. There
+is also some data in clean that doesn't correspond directly to HIR, such as
+synthetic `impl`s for auto traits and blanket `impl`s generated by the
+`collect-trait-impls` pass.
+
+Some additional data is stored in
+`html::render::context::{Context, SharedContext}`. These two types serve as
+ways to segregate rustdoc's data for an eventual future with multithreaded doc
+generation, as well as just keeping things organized:
+
+* [`Context`] stores data used for generating the current page, such as its
+ path, a list of HTML IDs that have been used (to avoid duplicate `id=""`),
+ and the pointer to `SharedContext`.
+* [`SharedContext`] stores data that does not vary by page, such as the `tcx`
+ pointer, and a list of all types.
+
+[`Context`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustdoc/html/render/context/struct.Context.html
+[`SharedContext`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustdoc/html/render/context/struct.SharedContext.html
## Other tricks up its sleeve
diff --git a/src/doc/rustc-dev-guide/src/rustdoc.md b/src/doc/rustc-dev-guide/src/rustdoc.md
index d58b27bb7..cbe5e8b1f 100644
--- a/src/doc/rustc-dev-guide/src/rustdoc.md
+++ b/src/doc/rustc-dev-guide/src/rustdoc.md
@@ -1,8 +1,5 @@
# Rustdoc overview
-`rustdoc` uses `rustc` internals (and, of course, the standard library), so you
-will have to build the compiler and `std` once before you can build `rustdoc`.
-
`rustdoc` lives in-tree with the
compiler and standard library. This chapter is about how it works.
For information about Rustdoc's features and how to use them, see
@@ -12,6 +9,11 @@ For more details about how rustdoc works, see the
[Rustdoc internals]: ./rustdoc-internals.md
+<!-- toc -->
+
+`rustdoc` uses `rustc` internals (and, of course, the standard library), so you
+will have to build the compiler and `std` once before you can build `rustdoc`.
+
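+A minimal sketch of that bootstrap sequence, assuming the usual `./x.py` entry point
+(the exact invocation may differ for your checkout):
+
+```bash
+./x.py build library
+./x.py build src/tools/rustdoc
+```
+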
Rustdoc is implemented entirely within the crate [`librustdoc`][rd]. It runs
the compiler up to the point where we have an internal representation of a
crate (HIR) and the ability to run some queries about the types of items. [HIR]
diff --git a/src/doc/rustc-dev-guide/src/tests/compiletest.md b/src/doc/rustc-dev-guide/src/tests/compiletest.md
index 5fc6ba809..f066992dc 100644
--- a/src/doc/rustc-dev-guide/src/tests/compiletest.md
+++ b/src/doc/rustc-dev-guide/src/tests/compiletest.md
@@ -11,6 +11,16 @@ efficient test execution (parallel execution is supported),
and allows the test author to configure behavior and expected results of both
individual and groups of tests.
+> NOTE:
+> For macOS users, `SIP` (System Integrity Protection) [may repeatedly
+> check compiled binaries by sending network requests to Apple][zulip],
+> so you may see a significant performance degradation when running tests.
+>
+> You can resolve this by tweaking the following settings:
+> `Privacy & Security -> Developer Tools -> Add Terminal (or VS Code, etc.)`.
+
+[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/182449-t-compiler.2Fhelp/topic/.E2.9C.94.20Is.20there.20any.20performance.20issue.20for.20MacOS.3F
+
`compiletest` may check test code for success, for runtime failure,
or for compile-time failure.
Tests are typically organized as a Rust source file with annotations in
diff --git a/src/doc/rustc-dev-guide/src/the-parser.md b/src/doc/rustc-dev-guide/src/the-parser.md
index 0d37704e8..f0436350a 100644
--- a/src/doc/rustc-dev-guide/src/the-parser.md
+++ b/src/doc/rustc-dev-guide/src/the-parser.md
@@ -1,8 +1,5 @@
# Lexing and Parsing
-As of <!-- date-check --> January 2021, the lexer and parser are undergoing
-refactoring to allow extracting them into libraries.
-
The very first thing the compiler does is take the program (in Unicode
characters) and turn it into something the compiler can work with more
conveniently than strings. This happens in two stages: Lexing and Parsing.
diff --git a/src/doc/rustc-dev-guide/src/tracing.md b/src/doc/rustc-dev-guide/src/tracing.md
index 0bba73f74..af484ab5f 100644
--- a/src/doc/rustc-dev-guide/src/tracing.md
+++ b/src/doc/rustc-dev-guide/src/tracing.md
@@ -144,6 +144,9 @@ $ RUSTC_LOG=debug rustc +stage1 my-file.rs 2>all-log
# compilers.
$ RUSTC_LOG=rustc_codegen_ssa=info rustc +stage1 my-file.rs
+# This will show all logs in `rustc_codegen_ssa` and `rustc_resolve`.
+$ RUSTC_LOG=rustc_codegen_ssa,rustc_resolve rustc +stage1 my-file.rs
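+
+# (Untested sketch:) the same comma-separated form also accepts per-module levels,
+# e.g. `info` for rustc_codegen_ssa and `debug` for rustc_resolve.
+$ RUSTC_LOG=rustc_codegen_ssa=info,rustc_resolve=debug rustc +stage1 my-file.rs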
+
# This will show the output of all `info!` calls made by rustdoc
# or any rustc library it calls.
$ RUSTDOC_LOG=info rustdoc +stage1 my-file.rs
diff --git a/src/doc/rustc-dev-guide/src/type-inference.md b/src/doc/rustc-dev-guide/src/type-inference.md
index 2bafeb247..4043789d0 100644
--- a/src/doc/rustc-dev-guide/src/type-inference.md
+++ b/src/doc/rustc-dev-guide/src/type-inference.md
@@ -68,7 +68,7 @@ inference works, or perhaps this blog post on
[Unification in the Chalk project]: http://smallcultfollowing.com/babysteps/blog/2017/03/25/unification-in-chalk-part-1/
All told, the inference context stores five kinds of inference variables
-(as of <!-- date-check --> June 2021):
+(as of <!-- date-check --> March 2023):
- Type variables, which come in three varieties:
- General type variables (the most common). These can be unified with any
diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md
index 752f1cc4a..8ded2ee59 100644
--- a/src/doc/rustc/src/SUMMARY.md
+++ b/src/doc/rustc/src/SUMMARY.md
@@ -26,8 +26,10 @@
- [armv7-unknown-linux-uclibceabi](platform-support/armv7-unknown-linux-uclibceabi.md)
- [armv7-unknown-linux-uclibceabihf](platform-support/armv7-unknown-linux-uclibceabihf.md)
- [\*-android and \*-androideabi](platform-support/android.md)
+ - [\*-linux-ohos](platform-support/openharmony.md)
- [\*-unknown-fuchsia](platform-support/fuchsia.md)
- [\*-kmc-solid_\*](platform-support/kmc-solid.md)
+ - [loongarch\*-unknown-linux-\*](platform-support/loongarch-linux.md)
- [m68k-unknown-linux-gnu](platform-support/m68k-unknown-linux-gnu.md)
- [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md)
- [mipsel-sony-psx](platform-support/mipsel-sony-psx.md)
diff --git a/src/doc/rustc/src/codegen-options/index.md b/src/doc/rustc/src/codegen-options/index.md
index c7f120daf..62347f169 100644
--- a/src/doc/rustc/src/codegen-options/index.md
+++ b/src/doc/rustc/src/codegen-options/index.md
@@ -71,9 +71,11 @@ If not specified, debug assertions are automatically enabled only if the
This flag controls the generation of debug information. It takes one of the
following values:
-* `0`: no debug info at all (the default).
-* `1`: line tables only.
-* `2`: full debug info.
+* `0` or `none`: no debug info at all (the default).
+* `line-directives-only`: line info directives only. For the nvptx* targets this enables [profiling](https://reviews.llvm.org/D46061). For other use cases, `line-tables-only` is the better, more compatible choice.
+* `line-tables-only`: line tables only. Generates the minimal amount of debug info for backtraces with filename/line number info, but not anything else, i.e. no variable or function parameter info.
+* `1` or `limited`: debug info without type or variable-level information.
+* `2` or `full`: full debug info.
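+
+For example, a hypothetical invocation that keeps only line tables (the file name is
+just a placeholder):
+
+```sh
+rustc -C debuginfo=line-tables-only main.rs
+```
+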
Note: The [`-g` flag][option-g-debug] is an alias for `-C debuginfo=2`.
diff --git a/src/doc/rustc/src/instrument-coverage.md b/src/doc/rustc/src/instrument-coverage.md
index da91e2559..b0b2f4196 100644
--- a/src/doc/rustc/src/instrument-coverage.md
+++ b/src/doc/rustc/src/instrument-coverage.md
@@ -31,7 +31,7 @@ Rust's source-based code coverage requires the Rust "profiler runtime". Without
The Rust `nightly` distribution channel includes the profiler runtime, by default.
-> **Important**: If you are building the Rust compiler from the source distribution, the profiler runtime is _not_ enabled in the default `config.toml.example`. Edit your `config.toml` file and ensure the `profiler` feature is set it to `true` (either under the `[build]` section, or under the settings for an individual `[target.<triple>]`):
+> **Important**: If you are building the Rust compiler from the source distribution, the profiler runtime is _not_ enabled in the default `config.example.toml`. Edit your `config.toml` file and ensure the `profiler` feature is set to `true` (either under the `[build]` section, or under the settings for an individual `[target.<triple>]`):
>
> ```toml
> # Build the profiler runtime (required when compiling with options that depend
diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md
index 9eafa27e2..c378532db 100644
--- a/src/doc/rustc/src/platform-support.md
+++ b/src/doc/rustc/src/platform-support.md
@@ -218,6 +218,7 @@ target | std | host | notes
[`aarch64-kmc-solid_asp3`](platform-support/kmc-solid.md) | ✓ | | ARM64 SOLID with TOPPERS/ASP3
[`aarch64-nintendo-switch-freestanding`](platform-support/aarch64-nintendo-switch-freestanding.md) | * | | ARM64 Nintendo Switch, Horizon
[`aarch64-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | ✓ |
+[`aarch64-unknown-linux-ohos`](platform-support/openharmony.md) | ✓ | | ARM64 OpenHarmony |
[`aarch64-unknown-nto-qnx710`](platform-support/nto-qnx.md) | ✓ | | ARM64 QNX Neutrino 7.1 RTOS |
`aarch64-unknown-freebsd` | ✓ | ✓ | ARM64 FreeBSD
`aarch64-unknown-hermit` | ✓ | | ARM64 HermitCore
@@ -240,6 +241,7 @@ target | std | host | notes
[`armv6k-nintendo-3ds`](platform-support/armv6k-nintendo-3ds.md) | ? | | ARMv6K Nintendo 3DS, Horizon (Requires devkitARM toolchain)
`armv7-apple-ios` | ✓ | | ARMv7 iOS, Cortex-a8
[`armv7-sony-vita-newlibeabihf`](platform-support/armv7-sony-vita-newlibeabihf.md) | ? | | ARM Cortex-A9 Sony PlayStation Vita (requires VITASDK toolchain)
+[`armv7-unknown-linux-ohos`](platform-support/openharmony.md) | ✓ | | ARMv7 OpenHarmony |
[`armv7-unknown-linux-uclibceabi`](platform-support/armv7-unknown-linux-uclibceabi.md) | ✓ | ✓ | ARMv7 Linux with uClibc, softfloat
[`armv7-unknown-linux-uclibceabihf`](platform-support/armv7-unknown-linux-uclibceabihf.md) | ✓ | ? | ARMv7 Linux with uClibc, hardfloat
`armv7-unknown-freebsd` | ✓ | ✓ | ARMv7 FreeBSD
@@ -255,6 +257,7 @@ target | std | host | notes
`bpfel-unknown-none` | * | | BPF (little endian)
`hexagon-unknown-linux-musl` | ? | |
`i386-apple-ios` | ✓ | | 32-bit x86 iOS
+[`i586-pc-nto-qnx700`](platform-support/nto-qnx.md) | * | | 32-bit x86 QNX Neutrino 7.0 RTOS |
`i686-apple-darwin` | ✓ | ✓ | 32-bit macOS (10.7+, Lion+)
`i686-pc-windows-msvc` | * | | 32-bit Windows XP support
`i686-unknown-haiku` | ✓ | ✓ | 32-bit Haiku
@@ -263,6 +266,7 @@ target | std | host | notes
`i686-uwp-windows-gnu` | ? | |
`i686-uwp-windows-msvc` | ? | |
`i686-wrs-vxworks` | ? | |
+[`loongarch64-unknown-linux-gnu`](platform-support/loongarch-linux.md) | ? | | LoongArch64 Linux (LP64D ABI)
[`m68k-unknown-linux-gnu`](platform-support/m68k-unknown-linux-gnu.md) | ? | | Motorola 680x0 Linux
`mips-unknown-linux-uclibc` | ✓ | | MIPS Linux with uClibc
[`mips64-openwrt-linux-musl`](platform-support/mips64-openwrt-linux-musl.md) | ? | | MIPS64 for OpenWrt Linux MUSL
@@ -295,6 +299,7 @@ target | std | host | notes
[`riscv32imac-unknown-xous-elf`](platform-support/riscv32imac-unknown-xous-elf.md) | ? | | RISC-V Xous (RV32IMAC ISA)
`riscv32imc-esp-espidf` | ✓ | | RISC-V ESP-IDF
`riscv64gc-unknown-freebsd` | | | RISC-V FreeBSD
+`riscv64gc-unknown-fuchsia` | | | RISC-V Fuchsia
`riscv64gc-unknown-linux-musl` | | | RISC-V Linux (kernel 4.20, musl 1.2.0)
[`riscv64gc-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | OpenBSD/riscv64
`s390x-unknown-linux-musl` | | | S390x Linux (kernel 3.2, MUSL)
diff --git a/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md b/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md
index 432e0cfc9..32d3440f1 100644
--- a/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md
+++ b/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md
@@ -26,7 +26,6 @@ Therefore, you can build Rust with support for the target by adding it to the ta
```toml
[llvm]
download-ci-llvm = false
-skip-rebuild = true
optimize = true
ninja = true
targets = "ARM;X86"
diff --git a/src/doc/rustc/src/platform-support/armv4t-none-eabi.md b/src/doc/rustc/src/platform-support/armv4t-none-eabi.md
index cf831e159..a230eba6b 100644
--- a/src/doc/rustc/src/platform-support/armv4t-none-eabi.md
+++ b/src/doc/rustc/src/platform-support/armv4t-none-eabi.md
@@ -17,13 +17,6 @@ specific with this target, so any ARMv4T device should work fine.
The target is cross-compiled, and uses static linking.
-The linker that comes with rustc cannot link for this platform (the platform is
-too old). You will need the `arm-none-eabi-ld` linker from a GNU Binutils
-targeting ARM. This can be obtained for Windows/Mac/Linux from the [ARM
-Developer Website][arm-dev], or possibly from your OS's package manager.
-
-[arm-dev]: https://developer.arm.com/Tools%20and%20Software/GNU%20Toolchain
-
This target doesn't provide a linker script, you'll need to bring your own
according to the specific device you want to target. Pass
`-Clink-arg=-Tyour_script.ld` as a rustc argument to make the linker use
diff --git a/src/doc/rustc/src/platform-support/loongarch-linux.md b/src/doc/rustc/src/platform-support/loongarch-linux.md
new file mode 100644
index 000000000..d7d31d872
--- /dev/null
+++ b/src/doc/rustc/src/platform-support/loongarch-linux.md
@@ -0,0 +1,92 @@
+# loongarch\*-unknown-linux-\*
+
+**Tier: 3**
+
+[LoongArch] is a new RISC ISA developed by Loongson Technology Corporation Limited.
+
+[LoongArch]: https://loongson.github.io/LoongArch-Documentation/README-EN.html
+
+The target name follows this format: `<machine>-<vendor>-<os><fabi-suffix>`, where `<machine>` specifies the CPU family/model, `<vendor>` specifies the vendor and `<os>` the operating system name.
+While the integer base ABI is implied by the machine field, the floating point base ABI type is encoded into the os field of the specifier using the string suffix `<fabi-suffix>`.
+
+| `<fabi-suffix>` | Description                                                  |
+|-----------------|--------------------------------------------------------------|
+| f64             | The base ABI uses 64-bit FPRs for parameter passing. (lp64d) |
+| f32             | The base ABI uses 32-bit FPRs for parameter passing. (lp64f) |
+| sf              | The base ABI uses no FPRs for parameter passing. (lp64s)     |
+
+| ABI type (base ABI/ABI extension) | C library | Kernel | Target tuple                     |
+|-----------------------------------|-----------|--------|----------------------------------|
+| lp64d/base                        | glibc     | linux  | loongarch64-unknown-linux-gnu    |
+| lp64f/base                        | glibc     | linux  | loongarch64-unknown-linux-gnuf32 |
+| lp64s/base                        | glibc     | linux  | loongarch64-unknown-linux-gnusf  |
+| lp64d/base                        | musl libc | linux  | loongarch64-unknown-linux-musl   |
+| lp64f/base                        | musl libc | linux  | loongarch64-unknown-linux-muslf32|
+| lp64s/base                        | musl libc | linux  | loongarch64-unknown-linux-muslsf |
+
+## Target maintainers
+
+- [ZHAI Xiaojuan](https://github.com/zhaixiaojuan) `zhaixiaojuan@loongson.cn`
+- [WANG Rui](https://github.com/heiher) `wangrui@loongson.cn`
+- [ZHAI Xiang](https://github.com/xiangzhai) `zhaixiang@loongson.cn`
+- [WANG Xuerui](https://github.com/xen0n) `git@xen0n.name`
+
+## Requirements
+
+This target is cross-compiled.
+A GNU toolchain for the LoongArch target is required. It can be downloaded from https://github.com/loongson/build-tools/releases, or built from the source code of GCC (12.1.0 or later) and Binutils (2.40 or later).
+
+## Building the target
+
+The target can be built by enabling it for a `rustc` build.
+
+```toml
+[build]
+target = ["loongarch64-unknown-linux-gnu"]
+```
+
+Make sure `loongarch64-unknown-linux-gnu-gcc` can be found in one of the directories specified in `$PATH`. Alternatively, you can use the GNU LoongArch toolchain by adding the following to `config.toml`:
+
+```toml
+[target.loongarch64-unknown-linux-gnu]
+# ADJUST THIS PATH TO POINT AT YOUR TOOLCHAIN
+cc = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc"
+cxx = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-g++"
+ar = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-ar"
+ranlib = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-ranlib"
+linker = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc"
+```
+
+## Cross-compilation
+
+This target can be cross-compiled on a `x86_64-unknown-linux-gnu` host. Cross-compilation on other hosts may work but is not tested.
+
+## Testing
+To test a cross-compiled binary on your build system, install a QEMU binary that supports the LoongArch architecture and execute the following commands.
+```text
+# SET TARGET SYSTEM LIBRARY PATH in the runner's -L argument below
+CC_loongarch64_unknown_linux_gnu=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc \
+CXX_loongarch64_unknown_linux_gnu=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-g++ \
+AR_loongarch64_unknown_linux_gnu=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc-ar \
+CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc \
+CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_RUNNER="qemu-loongarch64 -L /TOOLCHAIN_PATH/TARGET_LIBRARY_PATH" \
+cargo run --target loongarch64-unknown-linux-gnu --release
+```
+This has been tested on the x86 architecture; other architectures have not been tested.
+
+## Building Rust programs
+
+Rust does not yet ship pre-compiled artifacts for this target. To compile for this target, you will either need to build Rust with the target enabled (see "Building the target" above), or build your own copy of `std` by using `build-std` or similar.
+
+If `rustc` has support for that target and the library artifacts are available, then Rust static libraries can be built for that target:
+
+```shell
+$ rustc --target loongarch64-unknown-linux-gnu your-code.rs --crate-type staticlib
+$ ls libyour_code.a
+```
+
+On Rust Nightly it's possible to build without the target artifacts available:
+
+```text
+cargo build -Z build-std --target loongarch64-unknown-linux-gnu
+```
diff --git a/src/doc/rustc/src/platform-support/nto-qnx.md b/src/doc/rustc/src/platform-support/nto-qnx.md
index 38198fe6c..0d815c9b5 100644
--- a/src/doc/rustc/src/platform-support/nto-qnx.md
+++ b/src/doc/rustc/src/platform-support/nto-qnx.md
@@ -16,10 +16,18 @@ and [Blackberry QNX][BlackBerry].
## Requirements
-Currently, only cross-compilation for QNX Neutrino on AArch64 and x86_64 are supported (little endian).
+Currently, the following QNX Neutrino versions and compilation targets are supported:
+
+| QNX Neutrino Version | Target Architecture | Full support | `no_std` support |
+|----------------------|---------------------|:------------:|:----------------:|
+| 7.1 | AArch64 | ✓ | ✓ |
+| 7.1 | x86_64 | ✓ | ✓ |
+| 7.0 | x86 | | ✓ |
+
Adding other architectures that are supported by QNX Neutrino is possible.
-The standard library, including `core` and `alloc` (with default allocator) are supported.
+In the table above, 'full support' indicates support for building Rust applications with the full standard library.
+'`no_std` support' indicates that only `core` and `alloc` are available.
For building or using the Rust toolchain for QNX Neutrino, the
[QNX Software Development Platform (SDP)](https://blackberry.qnx.com/en/products/foundation-software/qnx-software-development-platform)
@@ -70,7 +78,7 @@ fn panic(_panic: &PanicInfo<'_>) -> ! {
pub extern "C" fn rust_eh_personality() {}
```
-The QNX Neutrino support of Rust has been tested with QNX Neutrino 7.1.
+The QNX Neutrino support of Rust has been tested with QNX Neutrino 7.0 and 7.1.
There are no further known requirements.
@@ -80,6 +88,7 @@ For conditional compilation, following QNX Neutrino specific attributes are defi
- `target_os` = `"nto"`
- `target_env` = `"nto71"` (for QNX Neutrino 7.1)
+- `target_env` = `"nto70"` (for QNX Neutrino 7.0)
## Building the target
diff --git a/src/doc/rustc/src/platform-support/openharmony.md b/src/doc/rustc/src/platform-support/openharmony.md
new file mode 100644
index 000000000..a8dcc6443
--- /dev/null
+++ b/src/doc/rustc/src/platform-support/openharmony.md
@@ -0,0 +1,128 @@
+# `*-unknown-linux-ohos`
+
+**Tier: 3**
+
+Targets for the [OpenHarmony](https://gitee.com/openharmony/docs/) operating
+system.
+
+## Target maintainers
+
+- Amanieu d'Antras ([@Amanieu](https://github.com/Amanieu))
+
+## Setup
+
+The OpenHarmony SDK doesn't currently support Rust compilation directly, so
+some setup is required.
+
+First, you must obtain the OpenHarmony SDK from [this page](https://gitee.com/openharmony/docs/tree/master/en/release-notes).
+Select the version of OpenHarmony you are developing for and download the "Public SDK package for the standard system".
+
+Create the following shell scripts that wrap Clang from the OpenHarmony SDK:
+
+`aarch64-unknown-linux-ohos-clang.sh`
+
+```sh
+#!/bin/sh
+exec /path/to/ohos-sdk/linux/native/llvm/bin/clang \
+ -target aarch64-linux-ohos \
+ --sysroot=/path/to/ohos-sdk/linux/native/sysroot \
+ -D__MUSL__ \
+ "$@"
+```
+
+`aarch64-unknown-linux-ohos-clang++.sh`
+
+```sh
+#!/bin/sh
+exec /path/to/ohos-sdk/linux/native/llvm/bin/clang++ \
+ -target aarch64-linux-ohos \
+ --sysroot=/path/to/ohos-sdk/linux/native/sysroot \
+ -D__MUSL__ \
+ "$@"
+```
+
+`armv7-unknown-linux-ohos-clang.sh`
+
+```sh
+#!/bin/sh
+exec /path/to/ohos-sdk/linux/native/llvm/bin/clang \
+ -target arm-linux-ohos \
+ --sysroot=/path/to/ohos-sdk/linux/native/sysroot \
+ -D__MUSL__ \
+ -march=armv7-a \
+ -mfloat-abi=softfp \
+ -mtune=generic-armv7-a \
+ -mthumb \
+ "$@"
+```
+
+`armv7-unknown-linux-ohos-clang++.sh`
+
+```sh
+#!/bin/sh
+exec /path/to/ohos-sdk/linux/native/llvm/bin/clang++ \
+ -target arm-linux-ohos \
+ --sysroot=/path/to/ohos-sdk/linux/native/sysroot \
+ -D__MUSL__ \
+ -march=armv7-a \
+ -mfloat-abi=softfp \
+ -mtune=generic-armv7-a \
+ -mthumb \
+ "$@"
+```
+
+Future versions of the OpenHarmony SDK will avoid the need for this process.
+
+## Building the target
+
+To build a Rust toolchain, create a `config.toml` with the following contents:
+
+```toml
+profile = "compiler"
+changelog-seen = 2
+
+[build]
+sanitizers = true
+profiler = true
+
+[target.aarch64-unknown-linux-ohos]
+cc = "/path/to/aarch64-unknown-linux-ohos-clang.sh"
+cxx = "/path/to/aarch64-unknown-linux-ohos-clang++.sh"
+ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar"
+ranlib = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ranlib"
+linker = "/path/to/aarch64-unknown-linux-ohos-clang.sh"
+
+[target.armv7-unknown-linux-ohos]
+cc = "/path/to/armv7-unknown-linux-ohos-clang.sh"
+cxx = "/path/to/armv7-unknown-linux-ohos-clang++.sh"
+ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar"
+ranlib = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ranlib"
+linker = "/path/to/armv7-unknown-linux-ohos-clang.sh"
+```
+
+## Building Rust programs
+
+Rust does not yet ship pre-compiled artifacts for this target. To compile for
+this target, you will either need to build Rust with the target enabled (see
+"Building the target" above), or build your own copy of `core` by using
+`build-std` or similar.
+
+You will need to configure the linker to use in `~/.cargo/config`:
+```toml
+[target.aarch64-unknown-linux-ohos]
+ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar"
+linker = "/path/to/aarch64-unknown-linux-ohos-clang.sh"
+
+[target.armv7-unknown-linux-ohos]
+ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar"
+linker = "/path/to/armv7-unknown-linux-ohos-clang.sh"
+```
+
+## Testing
+
+Running the Rust testsuite is possible, but currently difficult due to the way
+the OpenHarmony emulator is set up (no networking).
+
+## Cross-compilation toolchains and C code
+
+You can use the shell scripts above to compile C code for the target.
diff --git a/src/doc/rustdoc/src/command-line-arguments.md b/src/doc/rustdoc/src/command-line-arguments.md
index 2a2e51b2f..b46d80eb3 100644
--- a/src/doc/rustdoc/src/command-line-arguments.md
+++ b/src/doc/rustdoc/src/command-line-arguments.md
@@ -179,7 +179,7 @@ $ rustdoc src/lib.rs --test
This flag will run your code examples as tests. For more, see [the chapter
on documentation tests](write-documentation/documentation-tests.md).
-See also `--test-args`.
+See also `--test-args` and `--test-run-directory`.
## `--test-args`: pass options to test runner
@@ -194,6 +194,19 @@ For more, see [the chapter on documentation tests](write-documentation/documenta
See also `--test`.
+## `--test-run-directory`: run code examples in a specific directory
+
+Using this flag looks like this:
+
+```bash
+$ rustdoc src/lib.rs --test --test-run-directory=/path/to/working/directory
+```
+
+This flag will run your code examples in the specified working directory.
+For more, see [the chapter on documentation tests](write-documentation/documentation-tests.md).
+
+See also `--test`.
+
## `--target`: generate documentation for the specified target triple
Using this flag looks like this:
@@ -320,10 +333,7 @@ $ rustdoc src/lib.rs --extend-css extra.css
```
With this flag, the contents of the files you pass are included at the bottom
-of Rustdoc's `theme.css` file.
-
-While this flag is stable, the contents of `theme.css` are not, so be careful!
-Updates may break your theme extensions.
+of the `theme.css` file.
## `--sysroot`: override the system root
diff --git a/src/doc/rustdoc/src/how-to-read-rustdoc.md b/src/doc/rustdoc/src/how-to-read-rustdoc.md
index 28a004a92..56342f65d 100644
--- a/src/doc/rustdoc/src/how-to-read-rustdoc.md
+++ b/src/doc/rustdoc/src/how-to-read-rustdoc.md
@@ -80,13 +80,31 @@ functions, and "In Return Types" shows matches in the return types of functions.
Both are very useful when looking for a function whose name you can't quite
bring to mind when you know the type you have or want.
-When typing in the search bar, you can prefix your search term with a type
-followed by a colon (such as `mod:`) to restrict the results to just that
-kind of item. (The available items are listed in the help popup.)
-
-Searching for `println!` will search for a macro named `println`, just like
+Names in the search interface can be prefixed with an item type followed by a
+colon (such as `mod:`) to restrict the results to just that kind of item. Also,
+searching for `println!` will search for a macro named `println`, just like
searching for `macro:println` does.
+Function signature searches can query generics, wrapped in angle brackets, and
+traits are normalized like types in the search engine. For example, a function
+with the signature `fn my_function<I: Iterator<Item=u32>>(input: I) -> usize`
+can be matched with the following queries:
+
+* `Iterator<u32> -> usize`
+* `trait:Iterator<primitive:u32> -> primitive:usize`
+* `Iterator -> usize`
+
+Generics and function parameters are order-agnostic, but sensitive to nesting
+and number of matches. For example, a function with the signature
+`fn read_all(&mut self: impl Read) -> Result<Vec<u8>, Error>`
+will match these queries:
+
+* `Read -> Result<Vec<u8>, Error>`
+* `Read -> Result<Error, Vec>`
+* `Read -> Result<Vec<u8>>`
+
+But it *does not* match `Result<Vec, u8>` or `Result<u8<Vec>>`.
+
### Changing displayed theme
You can change the displayed theme by opening the settings menu (the gear
diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md
index b8b5014ab..ae180439d 100644
--- a/src/doc/rustdoc/src/unstable-features.md
+++ b/src/doc/rustdoc/src/unstable-features.md
@@ -38,6 +38,15 @@ future.
Attempting to use these error numbers on stable will result in the code sample being interpreted as
plain text.
+### `missing_doc_code_examples` lint
+
+This lint will emit a warning if an item doesn't have a code example in its documentation.
+It can be enabled using:
+
+```rust,ignore (nightly)
+#![deny(rustdoc::missing_doc_code_examples)]
+```
+
## Extensions to the `#[doc]` attribute
These features operate by extending the `#[doc]` attribute, and thus can be caught by the compiler
@@ -177,9 +186,9 @@ Book][unstable-masked] and [its tracking issue][issue-masked].
This is for Rust compiler internal use only.
Since primitive types are defined in the compiler, there's no place to attach documentation
-attributes. The `#[doc(primitive)]` attribute is used by the standard library to provide a way
-to generate documentation for primitive types, and requires `#![feature(rustdoc_internals)]` to
-enable.
+attributes. The `#[rustc_doc_primitive = "..."]` attribute is used by the standard library to
+provide a way to generate documentation for primitive types, and requires `#![feature(rustc_attrs)]`
+to enable.
### Document keywords
diff --git a/src/doc/rustdoc/src/write-documentation/documentation-tests.md b/src/doc/rustdoc/src/write-documentation/documentation-tests.md
index 1cb5b049d..a7d3186fb 100644
--- a/src/doc/rustdoc/src/write-documentation/documentation-tests.md
+++ b/src/doc/rustdoc/src/write-documentation/documentation-tests.md
@@ -443,3 +443,15 @@ pub struct ReadmeDoctests;
This will include your README as documentation on the hidden struct `ReadmeDoctests`, which will
then be tested alongside the rest of your doctests.
+
+## Controlling the compilation and run directories
+
+By default, `rustdoc --test` will compile and run documentation test examples
+from the same working directory.
+The compilation directory is used for compiler diagnostics, the `file!()` macro, and
+the output of the `rustdoc` test runner itself, whereas the run directory affects file-system
+operations within documentation test examples, such as `std::fs::read_to_string`.
+
+The `--test-run-directory` flag allows controlling the run directory separately from the compilation directory.
+This is particularly useful in workspaces, where compiler invocations and thus diagnostics should be
+relative to the workspace directory, but documentation test examples should run relative to the crate directory.
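To make the split concrete, here is a hedged sketch of a doctest whose behaviour depends on the run directory (the fixture path and crate layout are hypothetical, not taken from the patch):

```rust
/// Reads a fixture shipped inside the crate directory.
///
/// ```no_run
/// // This path is resolved against the *run* directory, which
/// // `--test-run-directory` can point at the crate root even when
/// // rustdoc itself is invoked from the workspace root.
/// let data = std::fs::read_to_string("tests/fixture.txt").unwrap();
/// assert!(!data.is_empty());
/// ```
pub fn read_fixture() -> std::io::Result<String> {
    std::fs::read_to_string("tests/fixture.txt")
}
```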
diff --git a/src/doc/rustdoc/src/write-documentation/linking-to-items-by-name.md b/src/doc/rustdoc/src/write-documentation/linking-to-items-by-name.md
index 36bc312b9..eb2285ef9 100644
--- a/src/doc/rustdoc/src/write-documentation/linking-to-items-by-name.md
+++ b/src/doc/rustdoc/src/write-documentation/linking-to-items-by-name.md
@@ -103,6 +103,13 @@ macro_rules! foo {
}
```
+There is one case where disambiguation is performed automatically: when an intra-doc
+link resolves to both a trait and a derive proc-macro at the same time. In that case, it
+always generates a link to the trait and does not emit a "missing disambiguation" warning. A
+good example is a link to the `Clone` trait: a `Clone` proc-macro also exists, but it is
+ignored in this case. If you want to link to the proc-macro instead, use the `macro@`
+disambiguator.
+
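A hedged sketch of the behaviour described above (illustrative only, not part of the patch):

```rust
/// [`Clone`] resolves to the trait automatically, even though a `Clone`
/// derive macro also exists, and no disambiguation warning is emitted.
/// [`macro@Clone`] links to the derive macro explicitly.
pub struct Example;
```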
## Warnings, re-exports, and scoping
Links are resolved in the scope of the module where the item is defined, even
diff --git a/src/doc/rustdoc/src/write-documentation/what-to-include.md b/src/doc/rustdoc/src/write-documentation/what-to-include.md
index e1e09aa4a..16457ed0f 100644
--- a/src/doc/rustdoc/src/write-documentation/what-to-include.md
+++ b/src/doc/rustdoc/src/write-documentation/what-to-include.md
@@ -39,9 +39,7 @@ warning: 1 warning emitted
As a library author, adding the lint `#![deny(missing_docs)]` is a great way to
ensure the project does not drift away from being documented well, and
`#![warn(missing_docs)]` is a good way to move towards comprehensive
-documentation. In addition to docs, `#![deny(missing_doc_code_examples)]`
-ensures each function contains a usage example. In our example above, the
-warning is resolved by adding crate level documentation.
+documentation.
There are more lints in the upcoming chapter [Lints][rustdoc-lints].
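For reference, a minimal sketch of the two lint levels in use (illustrative only):

```rust
#![warn(missing_docs)]
//! Crate-level documentation, which resolves the warning discussed above.

/// A documented item; removing this comment would trigger `missing_docs`.
pub fn documented() {}
```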
diff --git a/src/doc/style-guide/src/expressions.md b/src/doc/style-guide/src/expressions.md
index c7d0446dd..96f66c89c 100644
--- a/src/doc/style-guide/src/expressions.md
+++ b/src/doc/style-guide/src/expressions.md
@@ -643,7 +643,7 @@ Examples:
```rust
match foo {
foo => bar,
- a_very_long_patten | another_pattern if an_expression() => {
+ a_very_long_pattern | another_pattern if an_expression() => {
no_room_for_this_expression()
}
foo => {
diff --git a/src/doc/unstable-book/src/compiler-flags/cf-protection.md b/src/doc/unstable-book/src/compiler-flags/cf-protection.md
index ab698c82b..efe5f5642 100644
--- a/src/doc/unstable-book/src/compiler-flags/cf-protection.md
+++ b/src/doc/unstable-book/src/compiler-flags/cf-protection.md
@@ -1,5 +1,9 @@
# `cf-protection`
+The tracking issue for this feature is: [#93754](https://github.com/rust-lang/rust/issues/93754).
+
+------------------------
+
This option enables control-flow enforcement technology (CET) on x86; a more detailed description of
CET is available [here]. Similar to `clang`, this flag takes one of the following values:
diff --git a/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md b/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md
index a497a7526..05ffdcf20 100644
--- a/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md
+++ b/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md
@@ -3,4 +3,4 @@
--------------------
The `-Z dump-mono-stats-format` compiler flag controls what file format to use for `-Z dump-mono-stats`.
-The default is markdown; currently JSON is also supported. JSON can be useful for programatically manipulating the results (e.g. to find the item that took the longest to compile).
+The default is markdown; currently JSON is also supported. JSON can be useful for programmatically manipulating the results (e.g. to find the item that took the longest to compile).
diff --git a/src/doc/unstable-book/src/compiler-flags/sanitizer.md b/src/doc/unstable-book/src/compiler-flags/sanitizer.md
index 262cef345..b55348b78 100644
--- a/src/doc/unstable-book/src/compiler-flags/sanitizer.md
+++ b/src/doc/unstable-book/src/compiler-flags/sanitizer.md
@@ -213,7 +213,7 @@ See the [Clang ControlFlowIntegrity documentation][clang-cfi] for more details.
## Example
-```text
+```rust,ignore (making doc tests pass cross-platform is hard)
#![feature(naked_functions)]
use std::arch::asm;
@@ -238,7 +238,7 @@ pub extern "C" fn add_two(x: i32) {
nop
nop
nop
- lea rax, [rdi+2]
+ lea eax, [edi+2]
ret
",
options(noreturn)
diff --git a/src/doc/unstable-book/src/language-features/asm-experimental-arch.md b/src/doc/unstable-book/src/language-features/asm-experimental-arch.md
index 0a48eb4f8..1f52ab750 100644
--- a/src/doc/unstable-book/src/language-features/asm-experimental-arch.md
+++ b/src/doc/unstable-book/src/language-features/asm-experimental-arch.md
@@ -16,6 +16,7 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
- SPIR-V
- AVR
- MSP430
+- M68k
## Register classes
@@ -41,6 +42,9 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| AVR | `reg_iw` | `r25r24`, `X`, `Z` | `w` |
| AVR | `reg_ptr` | `X`, `Z` | `e` |
| MSP430 | `reg` | `r[0-15]` | `r` |
+| M68k | `reg` | `d[0-7]`, `a[0-7]` | `r` |
+| M68k | `reg_data` | `d[0-7]` | `d` |
+| M68k | `reg_addr` | `a[0-3]` | `a` |
> **Notes**:
> - NVPTX doesn't have a fixed register set, so named registers are not supported.
@@ -70,6 +74,8 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| AVR | `reg`, `reg_upper` | None | `i8` |
| AVR | `reg_pair`, `reg_iw`, `reg_ptr` | None | `i16` |
| MSP430 | `reg` | None | `i8`, `i16` |
+| M68k | `reg`, `reg_addr` | None | `i16`, `i32` |
+| M68k | `reg_data` | None | `i8`, `i16`, `i32` |
## Register aliases
@@ -88,6 +94,9 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| MSP430 | `r2` | `sr` |
| MSP430 | `r3` | `cg` |
| MSP430 | `r4` | `fp` |
+| M68k | `a5` | `bp` |
+| M68k | `a6` | `fp` |
+| M68k | `a7` | `sp`, `usp`, `ssp`, `isp` |
> **Notes**:
> - TI does not mandate a frame pointer for MSP430, but toolchains are allowed
@@ -98,7 +107,7 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| Architecture | Unsupported register | Reason |
| ------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| All | `sp` | The stack pointer must be restored to its original value at the end of an asm code block. |
-| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR), `r4` (MSP430) | The frame pointer cannot be used as an input or output. |
+| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR), `r4` (MSP430), `a6` (M68k) | The frame pointer cannot be used as an input or output. |
| All | `r19` (Hexagon) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. |
| MIPS | `$0` or `$zero` | This is a constant zero register which can't be modified. |
| MIPS | `$1` or `$at` | Reserved for assembler. |
@@ -108,6 +117,7 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| Hexagon | `lr` | This is the link register which cannot be used as an input or output. |
| AVR | `r0`, `r1`, `r1r0` | Due to an issue in LLVM, the `r0` and `r1` registers cannot be used as inputs or outputs. If modified, they must be restored to their original values before the end of the block. |
|MSP430 | `r0`, `r2`, `r3` | These are the program counter, status register, and constant generator respectively. Neither the status register nor constant generator can be written to. |
+| M68k | `a4`, `a5` | Used internally by LLVM for the base pointer and global base pointer. |
## Template modifiers
@@ -130,3 +140,5 @@ These flags registers must be restored upon exiting the asm block if the `preser
- The status register `SREG`.
- MSP430
- The status register `r2`.
+- M68k
+ - The condition code register `ccr`.
diff --git a/src/doc/unstable-book/src/language-features/box-patterns.md b/src/doc/unstable-book/src/language-features/box-patterns.md
index 584f4295c..a1ac09633 100644
--- a/src/doc/unstable-book/src/language-features/box-patterns.md
+++ b/src/doc/unstable-book/src/language-features/box-patterns.md
@@ -4,8 +4,6 @@ The tracking issue for this feature is: [#29641]
[#29641]: https://github.com/rust-lang/rust/issues/29641
-See also [`box_syntax`](box-syntax.md)
-
------------------------
Box patterns let you match on `Box<T>`s:
diff --git a/src/doc/unstable-book/src/language-features/box-syntax.md b/src/doc/unstable-book/src/language-features/box-syntax.md
deleted file mode 100644
index 9569974d2..000000000
--- a/src/doc/unstable-book/src/language-features/box-syntax.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# `box_syntax`
-
-The tracking issue for this feature is: [#49733]
-
-[#49733]: https://github.com/rust-lang/rust/issues/49733
-
-See also [`box_patterns`](box-patterns.md)
-
-------------------------
-
-Currently the only stable way to create a `Box` is via the `Box::new` method.
-Also it is not possible in stable Rust to destructure a `Box` in a match
-pattern. The unstable `box` keyword can be used to create a `Box`. An example
-usage would be:
-
-```rust
-#![feature(box_syntax)]
-
-fn main() {
- let b = box 5;
-}
-```
diff --git a/src/doc/unstable-book/src/language-features/lang-items.md b/src/doc/unstable-book/src/language-features/lang-items.md
index 39238dffa..6adb3506e 100644
--- a/src/doc/unstable-book/src/language-features/lang-items.md
+++ b/src/doc/unstable-book/src/language-features/lang-items.md
@@ -16,18 +16,26 @@ and one for deallocation. A freestanding program that uses the `Box`
sugar for dynamic allocations via `malloc` and `free`:
```rust,ignore (libc-is-finicky)
-#![feature(lang_items, box_syntax, start, libc, core_intrinsics, rustc_private)]
+#![feature(lang_items, start, libc, core_intrinsics, rustc_private, rustc_attrs)]
#![no_std]
use core::intrinsics;
use core::panic::PanicInfo;
+use core::ptr::NonNull;
extern crate libc;
-struct Unique<T>(*mut T);
+struct Unique<T>(NonNull<T>);
#[lang = "owned_box"]
pub struct Box<T>(Unique<T>);
+impl<T> Box<T> {
+ pub fn new(x: T) -> Self {
+ #[rustc_box]
+ Box::new(x)
+ }
+}
+
#[lang = "exchange_malloc"]
unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
let p = libc::malloc(size as libc::size_t) as *mut u8;
@@ -47,13 +55,13 @@ unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
#[start]
fn main(_argc: isize, _argv: *const *const u8) -> isize {
- let _x = box 1;
+ let _x = Box::new(1);
0
}
#[lang = "eh_personality"] extern fn rust_eh_personality() {}
-#[lang = "panic_impl"] extern fn rust_begin_panic(info: &PanicInfo) -> ! { unsafe { intrinsics::abort() } }
+#[lang = "panic_impl"] extern fn rust_begin_panic(_info: &PanicInfo) -> ! { intrinsics::abort() }
#[no_mangle] pub extern fn rust_eh_register_frames () {}
#[no_mangle] pub extern fn rust_eh_unregister_frames () {}
```
diff --git a/src/doc/unstable-book/src/language-features/plugin.md b/src/doc/unstable-book/src/language-features/plugin.md
index dfbb468d4..1fade6ce9 100644
--- a/src/doc/unstable-book/src/language-features/plugin.md
+++ b/src/doc/unstable-book/src/language-features/plugin.md
@@ -37,7 +37,7 @@ additional checks for code style, safety, etc. Now let's write a plugin
that warns about any item named `lintme`.
```rust,ignore (requires-stage-2)
-#![feature(box_syntax, rustc_private)]
+#![feature(rustc_private)]
extern crate rustc_ast;
@@ -68,7 +68,7 @@ impl EarlyLintPass for Pass {
#[no_mangle]
fn __rustc_plugin_registrar(reg: &mut Registry) {
reg.lint_store.register_lints(&[&TEST_LINT]);
- reg.lint_store.register_early_pass(|| box Pass);
+ reg.lint_store.register_early_pass(|| Box::new(Pass));
}
```
diff --git a/src/doc/unstable-book/src/the-unstable-book.md b/src/doc/unstable-book/src/the-unstable-book.md
index 554c52c3c..9090b134d 100644
--- a/src/doc/unstable-book/src/the-unstable-book.md
+++ b/src/doc/unstable-book/src/the-unstable-book.md
@@ -5,16 +5,31 @@ each one organized by a "feature flag." That is, when using an unstable
feature of Rust, you must use a flag, like this:
```rust
-#![feature(box_syntax)]
+#![feature(generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+use std::pin::Pin;
fn main() {
- let five = box 5;
+ let mut generator = || {
+ yield 1;
+ return "foo"
+ };
+
+ match Pin::new(&mut generator).resume(()) {
+ GeneratorState::Yielded(1) => {}
+ _ => panic!("unexpected value from resume"),
+ }
+ match Pin::new(&mut generator).resume(()) {
+ GeneratorState::Complete("foo") => {}
+ _ => panic!("unexpected value from resume"),
+ }
}
```
-The `box_syntax` feature [has a chapter][box] describing how to use it.
+The `generators` feature [has a chapter][generators] describing how to use it.
-[box]: language-features/box-syntax.md
+[generators]: language-features/generators.md
Because this documentation relates to unstable features, we make no guarantees
that what is contained here is accurate or up to date. It's developed on a
diff --git a/src/etc/installer/msi/rust.wxs b/src/etc/installer/msi/rust.wxs
index 0aa0784e5..f29e1e4d2 100644
--- a/src/etc/installer/msi/rust.wxs
+++ b/src/etc/installer/msi/rust.wxs
@@ -119,7 +119,7 @@
<SetProperty Sequence="ui" Before="CostFinalize"
Id="WixAppFolder" Value="WixPerUserFolder">NOT ALLUSERS</SetProperty>
- <!-- UI sets ALLUSERS per user selection; progagate this choice to MSIINSTALLPERUSER before executing installation actions -->
+ <!-- UI sets ALLUSERS per user selection; propagate this choice to MSIINSTALLPERUSER before executing installation actions -->
<SetProperty Sequence="ui" Before="ExecuteAction"
Id="MSIINSTALLPERUSER" Value="1">NOT ALLUSERS</SetProperty>
@@ -167,7 +167,9 @@
<?if $(env.CFG_MINGW)="1" ?>
<Directory Id="Gcc" Name="." />
<?endif?>
+ <!-- tool-rust-docs-start -->
<Directory Id="Docs" Name="." />
+ <!-- tool-rust-docs-end -->
<Directory Id="Cargo" Name="." />
<Directory Id="Std" Name="." />
</Directory>
@@ -209,6 +211,7 @@
<RegistryValue Root="HKMU" Key="$(var.BaseRegKey)" Name="RustShell" Type="integer" Value="1" KeyPath="yes" />
<RemoveFolder Id="ApplicationProgramsFolder1" On="uninstall" />
</Component>
+ <!-- tool-rust-docs-start -->
<Component Id="DocIndexShortcut" Guid="*">
<Shortcut Id="RustDocs"
Name="$(var.ProductName) Documentation"
@@ -217,6 +220,7 @@
<RegistryValue Root="HKMU" Key="$(var.BaseRegKey)" Name="RustDocs" Type="integer" Value="1" KeyPath="yes" />
<RemoveFolder Id="ApplicationProgramsFolder2" On="uninstall" />
</Component>
+ <!-- tool-rust-docs-end -->
</Directory>
</Directory>
@@ -256,6 +260,7 @@
<ComponentGroupRef Id="GccGroup" />
</Feature>
<?endif?>
+ <!-- tool-rust-docs-start -->
<Feature Id="Docs"
Title="HTML documentation"
Display="5"
@@ -264,6 +269,7 @@
<ComponentGroupRef Id="DocsGroup" />
<ComponentRef Id="DocIndexShortcut" />
</Feature>
+ <!-- tool-rust-docs-end -->
<Feature Id="Path"
Title="Add to PATH"
Description="Add Rust to PATH environment variable"
diff --git a/src/etc/installer/pkg/Distribution.xml b/src/etc/installer/pkg/Distribution.xml
index 64f6bab9b..1643fc836 100644
--- a/src/etc/installer/pkg/Distribution.xml
+++ b/src/etc/installer/pkg/Distribution.xml
@@ -15,7 +15,9 @@
<line choice="rustc"/>
<line choice="rust-std"/>
<line choice="cargo"/>
+ <!-- tool-rust-docs-start -->
<line choice="rust-docs"/>
+ <!-- tool-rust-docs-end -->
</line>
<line choice="uninstall" />
</choices-outline>
@@ -55,15 +57,19 @@
>
<pkg-ref id="org.rust-lang.rust-std"/>
</choice>
+ <!-- tool-rust-docs-start -->
<choice id="rust-docs" visible="true"
title="Documentation" description="HTML documentation."
selected="(!choices.uninstall.selected &amp;&amp; choices['rust-docs'].selected) || (choices.uninstall.selected &amp;&amp; choices.install.selected)"
>
<pkg-ref id="org.rust-lang.rust-docs"/>
</choice>
+ <!-- tool-rust-docs-end -->
<pkg-ref id="org.rust-lang.rustc" version="0" onConclusion="none">rustc.pkg</pkg-ref>
<pkg-ref id="org.rust-lang.cargo" version="0" onConclusion="none">cargo.pkg</pkg-ref>
+ <!-- tool-rust-docs-start -->
<pkg-ref id="org.rust-lang.rust-docs" version="0" onConclusion="none">rust-docs.pkg</pkg-ref>
+ <!-- tool-rust-docs-end -->
<pkg-ref id="org.rust-lang.rust-std" version="0" onConclusion="none">rust-std.pkg</pkg-ref>
<pkg-ref id="org.rust-lang.uninstall" version="0" onConclusion="none">uninstall.pkg</pkg-ref>
<background file="rust-logo.png" mime-type="image/png"
diff --git a/src/etc/rust-gdb b/src/etc/rust-gdb
index b950cea79..9abed30ea 100755
--- a/src/etc/rust-gdb
+++ b/src/etc/rust-gdb
@@ -13,6 +13,8 @@ fi
# Find out where the pretty printer Python module is
RUSTC_SYSROOT="$("$RUSTC" --print=sysroot)"
GDB_PYTHON_MODULE_DIRECTORY="$RUSTC_SYSROOT/lib/rustlib/etc"
+# Get the commit hash for path remapping
+RUSTC_COMMIT_HASH="$("$RUSTC" -vV | sed -n 's/commit-hash: \([a-zA-Z0-9_]*\)/\1/p')"
# Run GDB with the additional arguments that load the pretty printers
# Set the environment variable `RUST_GDB` to overwrite the call to a
@@ -21,4 +23,6 @@ RUST_GDB="${RUST_GDB:-gdb}"
PYTHONPATH="$PYTHONPATH:$GDB_PYTHON_MODULE_DIRECTORY" exec ${RUST_GDB} \
--directory="$GDB_PYTHON_MODULE_DIRECTORY" \
-iex "add-auto-load-safe-path $GDB_PYTHON_MODULE_DIRECTORY" \
+ -iex "set substitute-path /rustc/$RUSTC_COMMIT_HASH $RUSTC_SYSROOT/lib/rustlib/src/rust" \
"$@"
+
diff --git a/src/etc/rust-gdbgui b/src/etc/rust-gdbgui
index 590e488e6..913269316 100755
--- a/src/etc/rust-gdbgui
+++ b/src/etc/rust-gdbgui
@@ -42,6 +42,8 @@ fi
# Find out where the pretty printer Python module is
RUSTC_SYSROOT="$("$RUSTC" --print=sysroot)"
GDB_PYTHON_MODULE_DIRECTORY="$RUSTC_SYSROOT/lib/rustlib/etc"
+# Get the commit hash for path remapping
+RUSTC_COMMIT_HASH="$("$RUSTC" -vV | sed -n 's/commit-hash: \([a-zA-Z0-9_]*\)/\1/p')"
# Set the environment variable `RUST_GDB` to overwrite the call to a
# different/specific command (defaults to `gdb`).
@@ -53,7 +55,9 @@ RUST_GDBGUI="${RUST_GDBGUI:-gdbgui}"
# These arguments get passed through to GDB and make it load the
# Rust pretty printers.
-GDB_ARGS="--directory=\"$GDB_PYTHON_MODULE_DIRECTORY\" -iex \"add-auto-load-safe-path $GDB_PYTHON_MODULE_DIRECTORY\""
+GDB_ARGS="--directory=\"$GDB_PYTHON_MODULE_DIRECTORY\"" \
+ "-iex \"add-auto-load-safe-path $GDB_PYTHON_MODULE_DIRECTORY\"" \
+ "-iex \"set substitute-path /rustc/$RUSTC_COMMIT_HASH $RUSTC_SYSROOT/lib/rustlib/src/rust\""
# Finally we execute gdbgui.
PYTHONPATH="$PYTHONPATH:$GDB_PYTHON_MODULE_DIRECTORY" \
diff --git a/src/etc/vscode_settings.json b/src/etc/rust_analyzer_settings.json
index dd01bfaa7..dd01bfaa7 100644
--- a/src/etc/vscode_settings.json
+++ b/src/etc/rust_analyzer_settings.json
diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml
index c48f7998c..29912b957 100644
--- a/src/librustdoc/Cargo.toml
+++ b/src/librustdoc/Cargo.toml
@@ -8,7 +8,7 @@ path = "lib.rs"
[dependencies]
arrayvec = { version = "0.7", default-features = false }
-askama = { version = "0.11", default-features = false, features = ["config"] }
+askama = { version = "0.12", default-features = false, features = ["config"] }
itertools = "0.10.1"
minifier = "0.2.2"
once_cell = "1.10.0"
@@ -20,15 +20,13 @@ smallvec = "1.8.1"
tempfile = "3"
tracing = "0.1"
tracing-tree = "0.2.0"
+threadpool = "1.8.1"
[dependencies.tracing-subscriber]
version = "0.3.3"
default-features = false
features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"]
-[target.'cfg(windows)'.dependencies]
-rayon = "1.5.1"
-
[dev-dependencies]
expect-test = "1.4.0"
diff --git a/src/librustdoc/askama.toml b/src/librustdoc/askama.toml
index 0c984f637..2732c4bc6 100644
--- a/src/librustdoc/askama.toml
+++ b/src/librustdoc/askama.toml
@@ -1,2 +1,3 @@
[general]
dirs = ["html/templates"]
+whitespace = "suppress"
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
index bcdbbcacc..3a3bf6a7a 100644
--- a/src/librustdoc/clean/blanket_impl.rs
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -1,6 +1,6 @@
use crate::rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
use rustc_hir as hir;
-use rustc_infer::infer::{InferOk, TyCtxtInferExt};
+use rustc_infer::infer::{DefineOpaqueTypes, InferOk, TyCtxtInferExt};
use rustc_infer::traits;
use rustc_middle::ty::ToPredicate;
use rustc_span::DUMMY_SP;
@@ -47,8 +47,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
// Require the type the impl is implemented on to match
// our type, and ignore the impl if there was a mismatch.
- let cause = traits::ObligationCause::dummy();
- let Ok(eq_result) = infcx.at(&cause, param_env).eq(impl_trait_ref.self_ty(), impl_ty) else {
+ let Ok(eq_result) = infcx.at(&traits::ObligationCause::dummy(), param_env).eq(DefineOpaqueTypes::No, impl_trait_ref.self_ty(), impl_ty) else {
continue
};
let InferOk { value: (), obligations } = eq_result;
diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs
index dd58a5b51..5177cffe6 100644
--- a/src/librustdoc/clean/cfg.rs
+++ b/src/librustdoc/clean/cfg.rs
@@ -517,6 +517,7 @@ impl<'a> fmt::Display for Display<'a> {
"aarch64" => "AArch64",
"arm" => "ARM",
"asmjs" => "JavaScript",
+ "loongarch64" => "LoongArch LA64",
"m68k" => "M68k",
"mips" => "MIPS",
"mips64" => "MIPS-64",
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index 148243683..cc5d13808 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -36,15 +36,11 @@ use crate::formats::item_type::ItemType;
///
/// The returned value is `None` if the definition could not be inlined,
/// and `Some` of a vector of items if it was successfully expanded.
-///
-/// `parent_module` refers to the parent of the *re-export*, not the original item.
pub(crate) fn try_inline(
cx: &mut DocContext<'_>,
- parent_module: DefId,
- import_def_id: Option<DefId>,
res: Res,
name: Symbol,
- attrs: Option<&[ast::Attribute]>,
+ attrs: Option<(&[ast::Attribute], Option<DefId>)>,
visited: &mut DefIdSet,
) -> Option<Vec<clean::Item>> {
let did = res.opt_def_id()?;
@@ -55,38 +51,17 @@ pub(crate) fn try_inline(
debug!("attrs={:?}", attrs);
- let attrs_without_docs = attrs.map(|attrs| {
- attrs.into_iter().filter(|a| a.doc_str().is_none()).cloned().collect::<Vec<_>>()
+ let attrs_without_docs = attrs.map(|(attrs, def_id)| {
+ (attrs.into_iter().filter(|a| a.doc_str().is_none()).cloned().collect::<Vec<_>>(), def_id)
});
- // We need this ugly code because:
- //
- // ```
- // attrs_without_docs.map(|a| a.as_slice())
- // ```
- //
- // will fail because it returns a temporary slice and:
- //
- // ```
- // attrs_without_docs.map(|s| {
- // vec = s.as_slice();
- // vec
- // })
- // ```
- //
- // will fail because we're moving an uninitialized variable into a closure.
- let vec;
- let attrs_without_docs = match attrs_without_docs {
- Some(s) => {
- vec = s;
- Some(vec.as_slice())
- }
- None => None,
- };
+ let attrs_without_docs =
+ attrs_without_docs.as_ref().map(|(attrs, def_id)| (&attrs[..], *def_id));
+ let import_def_id = attrs.and_then(|(_, def_id)| def_id);
let kind = match res {
Res::Def(DefKind::Trait, did) => {
record_extern_fqn(cx, did, ItemType::Trait);
- build_impls(cx, Some(parent_module), did, attrs_without_docs, &mut ret);
+ build_impls(cx, did, attrs_without_docs, &mut ret);
clean::TraitItem(Box::new(build_external_trait(cx, did)))
}
Res::Def(DefKind::Fn, did) => {
@@ -95,27 +70,27 @@ pub(crate) fn try_inline(
}
Res::Def(DefKind::Struct, did) => {
record_extern_fqn(cx, did, ItemType::Struct);
- build_impls(cx, Some(parent_module), did, attrs_without_docs, &mut ret);
+ build_impls(cx, did, attrs_without_docs, &mut ret);
clean::StructItem(build_struct(cx, did))
}
Res::Def(DefKind::Union, did) => {
record_extern_fqn(cx, did, ItemType::Union);
- build_impls(cx, Some(parent_module), did, attrs_without_docs, &mut ret);
+ build_impls(cx, did, attrs_without_docs, &mut ret);
clean::UnionItem(build_union(cx, did))
}
Res::Def(DefKind::TyAlias, did) => {
record_extern_fqn(cx, did, ItemType::Typedef);
- build_impls(cx, Some(parent_module), did, attrs_without_docs, &mut ret);
+ build_impls(cx, did, attrs_without_docs, &mut ret);
clean::TypedefItem(build_type_alias(cx, did))
}
Res::Def(DefKind::Enum, did) => {
record_extern_fqn(cx, did, ItemType::Enum);
- build_impls(cx, Some(parent_module), did, attrs_without_docs, &mut ret);
+ build_impls(cx, did, attrs_without_docs, &mut ret);
clean::EnumItem(build_enum(cx, did))
}
Res::Def(DefKind::ForeignTy, did) => {
record_extern_fqn(cx, did, ItemType::ForeignType);
- build_impls(cx, Some(parent_module), did, attrs_without_docs, &mut ret);
+ build_impls(cx, did, attrs_without_docs, &mut ret);
clean::ForeignTypeItem
}
// Never inline enum variants but leave them shown as re-exports.
@@ -136,7 +111,7 @@ pub(crate) fn try_inline(
clean::ConstantItem(build_const(cx, did))
}
Res::Def(DefKind::Macro(kind), did) => {
- let mac = build_macro(cx, did, name, import_def_id);
+ let mac = build_macro(cx, did, name, import_def_id, kind);
let type_kind = match kind {
MacroKind::Bang => ItemType::Macro,
@@ -149,7 +124,7 @@ pub(crate) fn try_inline(
_ => return None,
};
- let (attrs, cfg) = merge_attrs(cx, Some(parent_module), load_attrs(cx, did), attrs);
+ let (attrs, cfg) = merge_attrs(cx, load_attrs(cx, did), attrs);
cx.inlined.insert(did.into());
let mut item =
clean::Item::from_def_id_and_attrs_and_parts(did, Some(name), kind, Box::new(attrs), cfg);
@@ -177,8 +152,7 @@ pub(crate) fn try_inline_glob(
// reexported by the glob, e.g. because they are shadowed by something else.
let reexports = cx
.tcx
- .module_reexports(current_mod)
- .unwrap_or_default()
+ .module_children_reexports(current_mod)
.iter()
.filter_map(|child| child.res.opt_def_id())
.collect();
@@ -316,9 +290,8 @@ fn build_type_alias(cx: &mut DocContext<'_>, did: DefId) -> Box<clean::Typedef>
/// Builds all inherent implementations of an ADT (struct/union/enum) or Trait item/path/reexport.
pub(crate) fn build_impls(
cx: &mut DocContext<'_>,
- parent_module: Option<DefId>,
did: DefId,
- attrs: Option<&[ast::Attribute]>,
+ attrs: Option<(&[ast::Attribute], Option<DefId>)>,
ret: &mut Vec<clean::Item>,
) {
let _prof_timer = cx.tcx.sess.prof.generic_activity("build_inherent_impls");
@@ -326,7 +299,7 @@ pub(crate) fn build_impls(
// for each implementation of an item represented by `did`, build the clean::Item for that impl
for &did in tcx.inherent_impls(did).iter() {
- build_impl(cx, parent_module, did, attrs, ret);
+ build_impl(cx, did, attrs, ret);
}
// This pretty much exists expressly for `dyn Error` traits that exist in the `alloc` crate.
@@ -340,28 +313,26 @@ pub(crate) fn build_impls(
let type_ =
if tcx.is_trait(did) { TraitSimplifiedType(did) } else { AdtSimplifiedType(did) };
for &did in tcx.incoherent_impls(type_) {
- build_impl(cx, parent_module, did, attrs, ret);
+ build_impl(cx, did, attrs, ret);
}
}
}
-/// `parent_module` refers to the parent of the re-export, not the original item
pub(crate) fn merge_attrs(
cx: &mut DocContext<'_>,
- parent_module: Option<DefId>,
old_attrs: &[ast::Attribute],
- new_attrs: Option<&[ast::Attribute]>,
+ new_attrs: Option<(&[ast::Attribute], Option<DefId>)>,
) -> (clean::Attributes, Option<Arc<clean::cfg::Cfg>>) {
// NOTE: If we have additional attributes (from a re-export),
// always insert them first. This ensure that re-export
// doc comments show up before the original doc comments
// when we render them.
- if let Some(inner) = new_attrs {
+ if let Some((inner, item_id)) = new_attrs {
let mut both = inner.to_vec();
both.extend_from_slice(old_attrs);
(
- if let Some(new_id) = parent_module {
- Attributes::from_ast_with_additional(old_attrs, (inner, new_id))
+ if let Some(item_id) = item_id {
+ Attributes::from_ast_with_additional(old_attrs, (inner, item_id))
} else {
Attributes::from_ast(&both)
},
@@ -375,9 +346,8 @@ pub(crate) fn merge_attrs(
/// Inline an `impl`, inherent or of a trait. The `did` must be for an `impl`.
pub(crate) fn build_impl(
cx: &mut DocContext<'_>,
- parent_module: Option<DefId>,
did: DefId,
- attrs: Option<&[ast::Attribute]>,
+ attrs: Option<(&[ast::Attribute], Option<DefId>)>,
ret: &mut Vec<clean::Item>,
) {
if !cx.inlined.insert(did.into()) {
@@ -539,7 +509,7 @@ pub(crate) fn build_impl(
record_extern_trait(cx, did);
}
- let (merged_attrs, cfg) = merge_attrs(cx, parent_module, load_attrs(cx, did), attrs);
+ let (merged_attrs, cfg) = merge_attrs(cx, load_attrs(cx, did), attrs);
trace!("merged_attrs={:?}", merged_attrs);
trace!(
@@ -587,7 +557,7 @@ fn build_module_items(
// If we're re-exporting a re-export it may actually re-export something in
// two namespaces, so the target may be listed twice. Make sure we only
// visit each node at most once.
- for &item in cx.tcx.module_children(did).iter() {
+ for item in cx.tcx.module_children(did).iter() {
if item.vis.is_public() {
let res = item.res.expect_non_local();
if let Some(def_id) = res.opt_def_id()
@@ -635,7 +605,7 @@ fn build_module_items(
cfg: None,
inline_stmt_id: None,
});
- } else if let Some(i) = try_inline(cx, did, None, res, item.ident.name, None, visited) {
+ } else if let Some(i) = try_inline(cx, res, item.ident.name, None, visited) {
items.extend(i)
}
}
@@ -681,18 +651,24 @@ fn build_macro(
def_id: DefId,
name: Symbol,
import_def_id: Option<DefId>,
+ macro_kind: MacroKind,
) -> clean::ItemKind {
match CStore::from_tcx(cx.tcx).load_macro_untracked(def_id, cx.sess()) {
- LoadedMacro::MacroDef(item_def, _) => {
- if let ast::ItemKind::MacroDef(ref def) = item_def.kind {
- let vis = cx.tcx.visibility(import_def_id.unwrap_or(def_id));
- clean::MacroItem(clean::Macro {
- source: utils::display_macro_source(cx, name, def, def_id, vis),
- })
- } else {
- unreachable!()
+ LoadedMacro::MacroDef(item_def, _) => match macro_kind {
+ MacroKind::Bang => {
+ if let ast::ItemKind::MacroDef(ref def) = item_def.kind {
+ let vis = cx.tcx.visibility(import_def_id.unwrap_or(def_id));
+ clean::MacroItem(clean::Macro {
+ source: utils::display_macro_source(cx, name, def, def_id, vis),
+ })
+ } else {
+ unreachable!()
+ }
}
- }
+ MacroKind::Derive | MacroKind::Attr => {
+ clean::ProcMacroItem(clean::ProcMacro { kind: macro_kind, helpers: Vec::new() })
+ }
+ },
LoadedMacro::ProcMacro(ext) => clean::ProcMacroItem(clean::ProcMacro {
kind: ext.macro_kind(),
helpers: ext.helper_attrs,
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 3edc2cd2e..5fa0c120f 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -21,25 +21,24 @@ use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId, LOCAL_CRATE};
use rustc_hir::PredicateOrigin;
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData};
+use rustc_middle::metadata::Reexport;
use rustc_middle::middle::resolve_bound_vars as rbv;
use rustc_middle::ty::fold::TypeFolder;
use rustc_middle::ty::InternalSubsts;
use rustc_middle::ty::TypeVisitableExt;
-use rustc_middle::ty::{self, AdtKind, DefIdTree, EarlyBinder, Ty, TyCtxt};
+use rustc_middle::ty::{self, AdtKind, EarlyBinder, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_span::hygiene::{AstPass, MacroKind};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{self, ExpnKind};
-use std::assert_matches::assert_matches;
+use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
-use std::default::Default;
use std::hash::Hash;
use std::mem;
use thin_vec::ThinVec;
-use crate::clean::inline::merge_attrs;
use crate::core::{self, DocContext, ImplTraitParam};
use crate::formats::item_type::ItemType;
use crate::visit_ast::Module as DocModule;
@@ -270,15 +269,7 @@ fn clean_where_predicate<'tcx>(
let bound_params = wbp
.bound_generic_params
.iter()
- .map(|param| {
- // Higher-ranked params must be lifetimes.
- // Higher-ranked lifetimes can't have bounds.
- assert_matches!(
- param,
- hir::GenericParam { kind: hir::GenericParamKind::Lifetime { .. }, .. }
- );
- Lifetime(param.name.ident().name)
- })
+ .map(|param| clean_generic_param(cx, None, param))
.collect();
WherePredicate::BoundPredicate {
ty: clean_ty(wbp.bounded_ty, cx),
@@ -324,7 +315,7 @@ pub(crate) fn clean_predicate<'tcx>(
ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..)) => None,
ty::PredicateKind::Subtype(..)
- | ty::PredicateKind::AliasEq(..)
+ | ty::PredicateKind::AliasRelate(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
@@ -410,7 +401,7 @@ fn clean_projection_predicate<'tcx>(
.collect_referenced_late_bound_regions(&pred)
.into_iter()
.filter_map(|br| match br {
- ty::BrNamed(_, name) if br.is_named() => Some(Lifetime(name)),
+ ty::BrNamed(_, name) if br.is_named() => Some(GenericParamDef::lifetime(name)),
_ => None,
})
.collect();
@@ -427,7 +418,7 @@ fn clean_projection<'tcx>(
cx: &mut DocContext<'tcx>,
def_id: Option<DefId>,
) -> Type {
- if cx.tcx.def_kind(ty.skip_binder().def_id) == DefKind::ImplTraitPlaceholder {
+ if cx.tcx.is_impl_trait_in_trait(ty.skip_binder().def_id) {
let bounds = cx
.tcx
.explicit_item_bounds(ty.skip_binder().def_id)
@@ -508,7 +499,6 @@ fn clean_generic_param_def<'tcx>(
ty::GenericParamDefKind::Const { has_default } => (
def.name,
GenericParamDefKind::Const {
- did: def.def_id,
ty: Box::new(clean_middle_ty(
ty::Binder::dummy(
cx.tcx
@@ -578,7 +568,6 @@ fn clean_generic_param<'tcx>(
hir::GenericParamKind::Const { ty, default } => (
param.name.ident().name,
GenericParamDefKind::Const {
- did: param.def_id.to_def_id(),
ty: Box::new(clean_ty(ty, cx)),
default: default
.map(|ct| Box::new(ty::Const::from_anon_const(cx.tcx, ct.def_id).to_string())),
@@ -831,7 +820,7 @@ fn clean_ty_generics<'tcx>(
p.get_bound_params()
.into_iter()
.flatten()
- .map(|param| GenericParamDef::lifetime(param.0))
+ .cloned()
.collect(),
));
}
@@ -919,6 +908,38 @@ fn clean_ty_generics<'tcx>(
}
}
+fn clean_proc_macro<'tcx>(
+ item: &hir::Item<'tcx>,
+ name: &mut Symbol,
+ kind: MacroKind,
+ cx: &mut DocContext<'tcx>,
+) -> ItemKind {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ if kind == MacroKind::Derive &&
+ let Some(derive_name) = attrs
+ .lists(sym::proc_macro_derive)
+ .find_map(|mi| mi.ident())
+ {
+ *name = derive_name.name;
+ }
+
+ let mut helpers = Vec::new();
+ for mi in attrs.lists(sym::proc_macro_derive) {
+ if !mi.has_name(sym::attributes) {
+ continue;
+ }
+
+ if let Some(list) = mi.meta_item_list() {
+ for inner_mi in list {
+ if let Some(ident) = inner_mi.ident() {
+ helpers.push(ident.name);
+ }
+ }
+ }
+ }
+ ProcMacroItem(ProcMacro { kind, helpers })
+}
+
fn clean_fn_or_proc_macro<'tcx>(
item: &hir::Item<'tcx>,
sig: &hir::FnSig<'tcx>,
@@ -940,31 +961,7 @@ fn clean_fn_or_proc_macro<'tcx>(
}
});
match macro_kind {
- Some(kind) => {
- if kind == MacroKind::Derive {
- *name = attrs
- .lists(sym::proc_macro_derive)
- .find_map(|mi| mi.ident())
- .expect("proc-macro derives require a name")
- .name;
- }
-
- let mut helpers = Vec::new();
- for mi in attrs.lists(sym::proc_macro_derive) {
- if !mi.has_name(sym::attributes) {
- continue;
- }
-
- if let Some(list) = mi.meta_item_list() {
- for inner_mi in list {
- if let Some(ident) = inner_mi.ident() {
- helpers.push(ident.name);
- }
- }
- }
- }
- ProcMacroItem(ProcMacro { kind, helpers })
- }
+ Some(kind) => clean_proc_macro(item, name, kind, cx),
None => {
let mut func = clean_function(cx, sig, generics, FunctionArgs::Body(body_id));
clean_fn_decl_legacy_const_generics(&mut func, attrs);
@@ -2013,7 +2010,8 @@ fn clean_generic_args<'tcx>(
generic_args: &hir::GenericArgs<'tcx>,
cx: &mut DocContext<'tcx>,
) -> GenericArgs {
- if generic_args.parenthesized {
+ // FIXME(return_type_notation): Fix RTN parens rendering
+ if generic_args.parenthesized == hir::GenericArgsParentheses::ParenSugar {
let output = clean_ty(generic_args.bindings[0].ty(), cx);
let output = if output != Type::Tuple(Vec::new()) { Some(Box::new(output)) } else { None };
let inputs =
@@ -2066,110 +2064,44 @@ fn clean_bare_fn_ty<'tcx>(
BareFunctionDecl { unsafety: bare_fn.unsafety, abi: bare_fn.abi, decl, generic_params }
}
-/// This visitor is used to go through only the "top level" of a item and not enter any sub
-/// item while looking for a given `Ident` which is stored into `item` if found.
-struct OneLevelVisitor<'hir> {
- map: rustc_middle::hir::map::Map<'hir>,
- item: Option<&'hir hir::Item<'hir>>,
- looking_for: Ident,
+pub(crate) fn reexport_chain<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ import_def_id: LocalDefId,
target_def_id: LocalDefId,
-}
-
-impl<'hir> OneLevelVisitor<'hir> {
- fn new(map: rustc_middle::hir::map::Map<'hir>, target_def_id: LocalDefId) -> Self {
- Self { map, item: None, looking_for: Ident::empty(), target_def_id }
- }
-
- fn reset(&mut self, looking_for: Ident) {
- self.looking_for = looking_for;
- self.item = None;
- }
-}
-
-impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
- type NestedFilter = rustc_middle::hir::nested_filter::All;
-
- fn nested_visit_map(&mut self) -> Self::Map {
- self.map
- }
-
- fn visit_item(&mut self, item: &'hir hir::Item<'hir>) {
- if self.item.is_none()
- && item.ident == self.looking_for
- && (matches!(item.kind, hir::ItemKind::Use(_, _))
- || item.owner_id.def_id == self.target_def_id)
+) -> &'tcx [Reexport] {
+ for child in tcx.module_children_reexports(tcx.local_parent(import_def_id)) {
+ if child.res.opt_def_id() == Some(target_def_id.to_def_id())
+ && child.reexport_chain[0].id() == Some(import_def_id.to_def_id())
{
- self.item = Some(item);
+ return &child.reexport_chain;
}
}
+ &[]
}
-/// Because a `Use` item directly links to the imported item, we need to manually go through each
-/// import one by one. To do so, we go to the parent item and look for the `Ident` into it. Then,
-/// if we found the "end item" (the imported one), we stop there because we don't need its
-/// documentation. Otherwise, we repeat the same operation until we find the "end item".
+/// Collect attributes from the whole import chain.
fn get_all_import_attributes<'hir>(
- mut item: &hir::Item<'hir>,
- tcx: TyCtxt<'hir>,
+ cx: &mut DocContext<'hir>,
+ import_def_id: LocalDefId,
target_def_id: LocalDefId,
- attributes: &mut Vec<ast::Attribute>,
is_inline: bool,
-) {
+) -> Vec<(Cow<'hir, ast::Attribute>, Option<DefId>)> {
+ let mut attrs = Vec::new();
let mut first = true;
- let hir_map = tcx.hir();
- let mut visitor = OneLevelVisitor::new(hir_map, target_def_id);
- let mut visited = FxHashSet::default();
-
- // If the item is an import and has at least a path with two parts, we go into it.
- while let hir::ItemKind::Use(path, _) = item.kind && visited.insert(item.hir_id()) {
+ for def_id in reexport_chain(cx.tcx, import_def_id, target_def_id)
+ .iter()
+ .flat_map(|reexport| reexport.id())
+ {
+ let import_attrs = inline::load_attrs(cx, def_id);
if first {
// This is the "original" reexport so we get all its attributes without filtering them.
- attributes.extend_from_slice(hir_map.attrs(item.hir_id()));
+ attrs = import_attrs.iter().map(|attr| (Cow::Borrowed(attr), Some(def_id))).collect();
first = false;
} else {
- add_without_unwanted_attributes(attributes, hir_map.attrs(item.hir_id()), is_inline);
- }
-
- let def_id = if let [.., parent_segment, _] = &path.segments {
- match parent_segment.res {
- hir::def::Res::Def(_, def_id) => def_id,
- _ if parent_segment.ident.name == kw::Crate => {
- // In case the "parent" is the crate, it'll give `Res::Err` so we need to
- // circumvent it this way.
- tcx.parent(item.owner_id.def_id.to_def_id())
- }
- _ => break,
- }
- } else {
- // If the path doesn't have a parent, then the parent is the current module.
- tcx.parent(item.owner_id.def_id.to_def_id())
- };
-
- let Some(parent) = hir_map.get_if_local(def_id) else { break };
-
- // We get the `Ident` we will be looking for into `item`.
- let looking_for = path.segments[path.segments.len() - 1].ident;
- visitor.reset(looking_for);
-
- match parent {
- hir::Node::Item(parent_item) => {
- hir::intravisit::walk_item(&mut visitor, parent_item);
- }
- hir::Node::Crate(m) => {
- hir::intravisit::walk_mod(
- &mut visitor,
- m,
- tcx.local_def_id_to_hir_id(def_id.as_local().unwrap()),
- );
- }
- _ => break,
- }
- if let Some(i) = visitor.item {
- item = i;
- } else {
- break;
+ add_without_unwanted_attributes(&mut attrs, import_attrs, is_inline, Some(def_id));
}
}
+ attrs
}
fn filter_tokens_from_list(
@@ -2215,17 +2147,24 @@ fn filter_tokens_from_list(
/// * `doc(inline)`
/// * `doc(no_inline)`
/// * `doc(hidden)`
-fn add_without_unwanted_attributes(
- attrs: &mut Vec<ast::Attribute>,
- new_attrs: &[ast::Attribute],
+fn add_without_unwanted_attributes<'hir>(
+ attrs: &mut Vec<(Cow<'hir, ast::Attribute>, Option<DefId>)>,
+ new_attrs: &'hir [ast::Attribute],
is_inline: bool,
+ import_parent: Option<DefId>,
) {
- // If it's `#[doc(inline)]`, we don't want all attributes, otherwise we keep everything.
+ // If it's not `#[doc(inline)]`, we don't want all attributes, otherwise we keep everything.
if !is_inline {
- attrs.extend_from_slice(new_attrs);
+ for attr in new_attrs {
+ attrs.push((Cow::Borrowed(attr), import_parent));
+ }
return;
}
for attr in new_attrs {
+ if matches!(attr.kind, ast::AttrKind::DocComment(..)) {
+ attrs.push((Cow::Borrowed(attr), import_parent));
+ continue;
+ }
let mut attr = attr.clone();
match attr.kind {
ast::AttrKind::Normal(ref mut normal) => {
@@ -2252,18 +2191,15 @@ fn add_without_unwanted_attributes(
)
});
args.tokens = TokenStream::new(tokens);
- attrs.push(attr);
+ attrs.push((Cow::Owned(attr), import_parent));
}
ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
- attrs.push(attr);
- continue;
+ attrs.push((Cow::Owned(attr), import_parent));
}
}
}
}
- ast::AttrKind::DocComment(..) => {
- attrs.push(attr);
- }
+ _ => unreachable!(),
}
}
}
@@ -2318,16 +2254,17 @@ fn clean_maybe_renamed_item<'tcx>(
fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(),
}),
ItemKind::Impl(impl_) => return clean_impl(impl_, item.owner_id.def_id, cx),
- // proc macros can have a name set by attributes
- ItemKind::Fn(ref sig, generics, body_id) => {
- clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
- }
- ItemKind::Macro(ref macro_def, _) => {
+ ItemKind::Macro(ref macro_def, MacroKind::Bang) => {
let ty_vis = cx.tcx.visibility(def_id);
MacroItem(Macro {
source: display_macro_source(cx, name, macro_def, def_id, ty_vis),
})
}
+ ItemKind::Macro(_, macro_kind) => clean_proc_macro(item, &mut name, macro_kind, cx),
+ // proc macros can have a name set by attributes
+ ItemKind::Fn(ref sig, generics, body_id) => {
+ clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
+ }
ItemKind::Trait(_, _, generics, bounds, item_ids) => {
let items = item_ids
.iter()
@@ -2350,26 +2287,28 @@ fn clean_maybe_renamed_item<'tcx>(
_ => unreachable!("not yet converted"),
};
- let mut import_attrs = Vec::new();
- let mut target_attrs = Vec::new();
- if let Some(import_id) = import_id &&
- let Some(hir::Node::Item(use_node)) = cx.tcx.hir().find_by_def_id(import_id)
- {
- let is_inline = inline::load_attrs(cx, import_id.to_def_id()).lists(sym::doc).get_word_attr(sym::inline).is_some();
- // Then we get all the various imports' attributes.
- get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut import_attrs, is_inline);
- add_without_unwanted_attributes(&mut target_attrs, inline::load_attrs(cx, def_id), is_inline);
+ let target_attrs = inline::load_attrs(cx, def_id);
+ let attrs = if let Some(import_id) = import_id {
+ let is_inline = inline::load_attrs(cx, import_id.to_def_id())
+ .lists(sym::doc)
+ .get_word_attr(sym::inline)
+ .is_some();
+ let mut attrs =
+ get_all_import_attributes(cx, import_id, item.owner_id.def_id, is_inline);
+ add_without_unwanted_attributes(&mut attrs, target_attrs, is_inline, None);
+ attrs
} else {
// We only keep the item's attributes.
- target_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
- }
+ target_attrs.iter().map(|attr| (Cow::Borrowed(attr), None)).collect()
+ };
- let import_parent = import_id.map(|import_id| cx.tcx.local_parent(import_id).to_def_id());
- let (attrs, cfg) = merge_attrs(cx, import_parent, &target_attrs, Some(&import_attrs));
+ let cfg = attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
+ let attrs =
+ Attributes::from_ast_iter(attrs.iter().map(|(attr, did)| (&**attr, *did)), false);
let mut item =
Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg);
- item.inline_stmt_id = import_id.map(|def_id| def_id.to_def_id());
+ item.inline_stmt_id = import_id.map(|local| local.to_def_id());
vec![item]
})
}
@@ -2450,22 +2389,17 @@ fn clean_extern_crate<'tcx>(
Some(l) => attr::list_contains_name(&l, sym::inline),
None => false,
}
- });
+ })
+ && !cx.output_format.is_json();
let krate_owner_def_id = krate.owner_id.to_def_id();
if please_inline {
- let mut visited = DefIdSet::default();
-
- let res = Res::Def(DefKind::Mod, crate_def_id);
-
if let Some(items) = inline::try_inline(
cx,
- cx.tcx.parent_module(krate.hir_id()).to_def_id(),
- Some(krate_owner_def_id),
- res,
+ Res::Def(DefKind::Mod, crate_def_id),
name,
- Some(attrs),
- &mut visited,
+ Some((attrs, Some(krate_owner_def_id))),
+ &mut Default::default(),
) {
return items;
}
@@ -2589,17 +2523,13 @@ fn clean_use_statement_inner<'tcx>(
denied = true;
}
if !denied {
- let mut visited = DefIdSet::default();
let import_def_id = import.owner_id.to_def_id();
-
if let Some(mut items) = inline::try_inline(
cx,
- cx.tcx.parent_module(import.hir_id()).to_def_id(),
- Some(import_def_id),
path.res,
name,
- Some(attrs),
- &mut visited,
+ Some((attrs, Some(import_def_id))),
+ &mut Default::default(),
) {
items.push(Item::from_def_id_and_parts(
import_def_id,
diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs
index dbbc25739..3c72b0bf9 100644
--- a/src/librustdoc/clean/simplify.rs
+++ b/src/librustdoc/clean/simplify.rs
@@ -49,11 +49,7 @@ pub(crate) fn where_clauses(cx: &DocContext<'_>, clauses: Vec<WP>) -> ThinVec<WP
equalities.retain(|(lhs, rhs, bound_params)| {
let Some((ty, trait_did, name)) = lhs.projection() else { return true; };
let Some((bounds, _)) = tybounds.get_mut(ty) else { return true };
- let bound_params = bound_params
- .into_iter()
- .map(|param| clean::GenericParamDef::lifetime(param.0))
- .collect();
- merge_bounds(cx, bounds, bound_params, trait_did, name, rhs)
+ merge_bounds(cx, bounds, bound_params.clone(), trait_did, name, rhs)
});
// And finally, let's reassemble everything
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
index 27d18aad7..6d2ce9e28 100644
--- a/src/librustdoc/clean/types.rs
+++ b/src/librustdoc/clean/types.rs
@@ -1,5 +1,5 @@
+use std::borrow::Cow;
use std::cell::RefCell;
-use std::default::Default;
use std::hash::Hash;
use std::path::PathBuf;
use std::rc::Rc;
@@ -22,7 +22,7 @@ use rustc_hir::{BodyId, Mutability};
use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety;
use rustc_index::vec::IndexVec;
use rustc_middle::ty::fast_reject::SimplifiedType;
-use rustc_middle::ty::{self, DefIdTree, TyCtxt, Visibility};
+use rustc_middle::ty::{self, TyCtxt, Visibility};
use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, inner_docs, DocFragment};
use rustc_session::Session;
use rustc_span::hygiene::MacroKind;
@@ -231,14 +231,6 @@ impl ExternalCrate {
hir::ItemKind::Mod(_) => {
as_keyword(Res::Def(DefKind::Mod, id.owner_id.to_def_id()))
}
- hir::ItemKind::Use(path, hir::UseKind::Single)
- if tcx.visibility(id.owner_id).is_public() =>
- {
- path.res
- .iter()
- .find_map(|res| as_keyword(res.expect_non_local()))
- .map(|(_, prim)| (id.owner_id.to_def_id(), prim))
- }
_ => None,
}
})
@@ -256,38 +248,24 @@ impl ExternalCrate {
//
// Note that this loop only searches the top-level items of the crate,
// and this is intentional. If we were to search the entire crate for an
- // item tagged with `#[doc(primitive)]` then we would also have to
+ // item tagged with `#[rustc_doc_primitive]` then we would also have to
// search the entirety of external modules for items tagged
- // `#[doc(primitive)]`, which is a pretty inefficient process (decoding
+ // `#[rustc_doc_primitive]`, which is a pretty inefficient process (decoding
// all that metadata unconditionally).
//
// In order to keep the metadata load under control, the
- // `#[doc(primitive)]` feature is explicitly designed to only allow the
+ // `#[rustc_doc_primitive]` feature is explicitly designed to only allow the
// primitive tags to show up as the top level items in a crate.
//
// Also note that this does not attempt to deal with modules tagged
// duplicately for the same primitive. This is handled later on when
// rendering by delegating everything to a hash map.
let as_primitive = |res: Res<!>| {
- if let Res::Def(DefKind::Mod, def_id) = res {
- let mut prim = None;
- let meta_items = tcx
- .get_attrs(def_id, sym::doc)
- .flat_map(|attr| attr.meta_item_list().unwrap_or_default());
- for meta in meta_items {
- if let Some(v) = meta.value_str() {
- if meta.has_name(sym::primitive) {
- prim = PrimitiveType::from_symbol(v);
- if prim.is_some() {
- break;
- }
- // FIXME: should warn on unknown primitives?
- }
- }
- }
- return prim.map(|p| (def_id, p));
- }
- None
+ let Res::Def(DefKind::Mod, def_id) = res else { return None };
+ tcx.get_attrs(def_id, sym::rustc_doc_primitive).find_map(|attr| {
+ // FIXME: should warn on unknown primitives?
+ Some((def_id, PrimitiveType::from_symbol(attr.value_str()?)?))
+ })
};
if root.is_local() {
@@ -301,15 +279,6 @@ impl ExternalCrate {
hir::ItemKind::Mod(_) => {
as_primitive(Res::Def(DefKind::Mod, id.owner_id.to_def_id()))
}
- hir::ItemKind::Use(path, hir::UseKind::Single)
- if tcx.visibility(id.owner_id).is_public() =>
- {
- path.res
- .iter()
- .find_map(|res| as_primitive(res.expect_non_local()))
- // Pretend the primitive is local.
- .map(|(_, prim)| (id.owner_id.to_def_id(), prim))
- }
_ => None,
}
})
@@ -482,10 +451,12 @@ impl Item {
pub(crate) fn links(&self, cx: &Context<'_>) -> Vec<RenderedLink> {
use crate::html::format::{href, link_tooltip};
- cx.cache()
+ let Some(links) = cx.cache()
.intra_doc_links
- .get(&self.item_id)
- .map_or(&[][..], |v| v.as_slice())
+ .get(&self.item_id) else {
+ return vec![]
+ };
+ links
.iter()
.filter_map(|ItemLink { link: s, link_text, page_id: id, ref fragment }| {
debug!(?id);
@@ -513,10 +484,12 @@ impl Item {
/// the link text, but does need to know which `[]`-bracketed names
/// are actually links.
pub(crate) fn link_names(&self, cache: &Cache) -> Vec<RenderedLink> {
- cache
+ let Some(links) = cache
.intra_doc_links
- .get(&self.item_id)
- .map_or(&[][..], |v| v.as_slice())
+ .get(&self.item_id) else {
+ return vec![];
+ };
+ links
.iter()
.map(|ItemLink { link: s, link_text, .. }| RenderedLink {
original_text: s.clone(),
@@ -713,7 +686,7 @@ impl Item {
return None;
}
// Variants always inherit visibility
- VariantItem(..) => return None,
+ VariantItem(..) | ImplItem(..) => return None,
// Trait items inherit the trait's visibility
AssocConstItem(..) | TyAssocConstItem(..) | AssocTypeItem(..) | TyAssocTypeItem(..)
| TyMethodItem(..) | MethodItem(..) => {
@@ -869,28 +842,13 @@ pub(crate) trait AttributesExt {
type AttributeIterator<'a>: Iterator<Item = ast::NestedMetaItem>
where
Self: 'a;
+ type Attributes<'a>: Iterator<Item = &'a ast::Attribute>
+ where
+ Self: 'a;
fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a>;
- fn span(&self) -> Option<rustc_span::Span>;
-
- fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>>;
-}
-
-impl AttributesExt for [ast::Attribute] {
- type AttributeIterator<'a> = impl Iterator<Item = ast::NestedMetaItem> + 'a;
-
- fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a> {
- self.iter()
- .filter(move |attr| attr.has_name(name))
- .filter_map(ast::Attribute::meta_item_list)
- .flatten()
- }
-
- /// Return the span of the first doc-comment, if it exists.
- fn span(&self) -> Option<rustc_span::Span> {
- self.iter().find(|attr| attr.doc_str().is_some()).map(|attr| attr.span)
- }
+ fn iter<'a>(&'a self) -> Self::Attributes<'a>;
fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>> {
let sess = tcx.sess;
@@ -980,11 +938,48 @@ impl AttributesExt for [ast::Attribute] {
}
}
+impl AttributesExt for [ast::Attribute] {
+ type AttributeIterator<'a> = impl Iterator<Item = ast::NestedMetaItem> + 'a;
+ type Attributes<'a> = impl Iterator<Item = &'a ast::Attribute> + 'a;
+
+ fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a> {
+ self.iter()
+ .filter(move |attr| attr.has_name(name))
+ .filter_map(ast::Attribute::meta_item_list)
+ .flatten()
+ }
+
+ fn iter<'a>(&'a self) -> Self::Attributes<'a> {
+ self.into_iter()
+ }
+}
+
+impl AttributesExt for [(Cow<'_, ast::Attribute>, Option<DefId>)] {
+ type AttributeIterator<'a> = impl Iterator<Item = ast::NestedMetaItem> + 'a
+ where Self: 'a;
+ type Attributes<'a> = impl Iterator<Item = &'a ast::Attribute> + 'a
+ where Self: 'a;
+
+ fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a> {
+ AttributesExt::iter(self)
+ .filter(move |attr| attr.has_name(name))
+ .filter_map(ast::Attribute::meta_item_list)
+ .flatten()
+ }
+
+ fn iter<'a>(&'a self) -> Self::Attributes<'a> {
+ self.into_iter().map(move |(attr, _)| match attr {
+ Cow::Borrowed(attr) => *attr,
+ Cow::Owned(attr) => attr,
+ })
+ }
+}
+
pub(crate) trait NestedAttributesExt {
/// Returns `true` if the attribute list contains a specific `word`
fn has_word(self, word: Symbol) -> bool
where
- Self: std::marker::Sized,
+ Self: Sized,
{
<Self as NestedAttributesExt>::get_word_attr(self, word).is_some()
}
@@ -1014,7 +1009,7 @@ pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
/// A link that has not yet been rendered.
///
/// This link will be turned into a rendered link by [`Item::links`].
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct ItemLink {
/// The original link written in the markdown
pub(crate) link: Box<str>,
@@ -1213,9 +1208,9 @@ impl Lifetime {
#[derive(Clone, Debug)]
pub(crate) enum WherePredicate {
- BoundPredicate { ty: Type, bounds: Vec<GenericBound>, bound_params: Vec<Lifetime> },
+ BoundPredicate { ty: Type, bounds: Vec<GenericBound>, bound_params: Vec<GenericParamDef> },
RegionPredicate { lifetime: Lifetime, bounds: Vec<GenericBound> },
- EqPredicate { lhs: Box<Type>, rhs: Box<Term>, bound_params: Vec<Lifetime> },
+ EqPredicate { lhs: Box<Type>, rhs: Box<Term>, bound_params: Vec<GenericParamDef> },
}
impl WherePredicate {
@@ -1227,7 +1222,7 @@ impl WherePredicate {
}
}
- pub(crate) fn get_bound_params(&self) -> Option<&[Lifetime]> {
+ pub(crate) fn get_bound_params(&self) -> Option<&[GenericParamDef]> {
match self {
Self::BoundPredicate { bound_params, .. } | Self::EqPredicate { bound_params, .. } => {
Some(bound_params)
@@ -1241,7 +1236,7 @@ impl WherePredicate {
pub(crate) enum GenericParamDefKind {
Lifetime { outlives: Vec<Lifetime> },
Type { did: DefId, bounds: Vec<GenericBound>, default: Option<Box<Type>>, synthetic: bool },
- Const { did: DefId, ty: Box<Type>, default: Option<Box<String>> },
+ Const { ty: Box<Type>, default: Option<Box<String>> },
}
impl GenericParamDefKind {
@@ -1471,27 +1466,68 @@ impl Type {
result
}
- /// Check if two types are "potentially the same".
+ pub(crate) fn is_borrowed_ref(&self) -> bool {
+ matches!(self, Type::BorrowedRef { .. })
+ }
+
+ /// Check if two types are "the same" for documentation purposes.
+ ///
/// This is different from `Eq`, because it knows that things like
/// `Placeholder` are possible matches for everything.
- pub(crate) fn is_same(&self, other: &Self, cache: &Cache) -> bool {
- match (self, other) {
+ ///
+ /// This relation is not commutative when generics are involved:
+ ///
+ /// ```ignore(private)
+ /// # // see types/tests.rs:is_same_generic for the real test
+ /// use rustdoc::formats::cache::Cache;
+ /// use rustdoc::clean::types::{Type, PrimitiveType};
+ /// let cache = Cache::new(false);
+ /// let generic = Type::Generic(rustc_span::symbol::sym::Any);
+ /// let unit = Type::Primitive(PrimitiveType::Unit);
+ /// assert!(!generic.is_doc_subtype_of(&unit, &cache));
+ /// assert!(unit.is_doc_subtype_of(&generic, &cache));
+ /// ```
+ ///
+ /// An owned type is also the same as its borrowed variants (this is commutative),
+ /// but `&T` is not the same as `&mut T`.
+ pub(crate) fn is_doc_subtype_of(&self, other: &Self, cache: &Cache) -> bool {
+ // Strip the references so that it can compare the actual types, unless both are references.
+ // If both are references, leave them alone and compare the mutabilities later.
+ let (self_cleared, other_cleared) = if !self.is_borrowed_ref() || !other.is_borrowed_ref() {
+ (self.without_borrowed_ref(), other.without_borrowed_ref())
+ } else {
+ (self, other)
+ };
+ match (self_cleared, other_cleared) {
// Recursive cases.
(Type::Tuple(a), Type::Tuple(b)) => {
- a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_same(b, cache))
+ a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_doc_subtype_of(b, cache))
}
- (Type::Slice(a), Type::Slice(b)) => a.is_same(b, cache),
- (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_same(b, cache),
+ (Type::Slice(a), Type::Slice(b)) => a.is_doc_subtype_of(b, cache),
+ (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_doc_subtype_of(b, cache),
(Type::RawPointer(mutability, type_), Type::RawPointer(b_mutability, b_type_)) => {
- mutability == b_mutability && type_.is_same(b_type_, cache)
+ mutability == b_mutability && type_.is_doc_subtype_of(b_type_, cache)
}
(
Type::BorrowedRef { mutability, type_, .. },
Type::BorrowedRef { mutability: b_mutability, type_: b_type_, .. },
- ) => mutability == b_mutability && type_.is_same(b_type_, cache),
- // Placeholders and generics are equal to all other types.
+ ) => mutability == b_mutability && type_.is_doc_subtype_of(b_type_, cache),
+ // Placeholders are equal to all other types.
(Type::Infer, _) | (_, Type::Infer) => true,
- (Type::Generic(_), _) | (_, Type::Generic(_)) => true,
+ // Generics match everything on the right, but not on the left.
+ // If both sides are generic, this returns true.
+ (_, Type::Generic(_)) => true,
+ (Type::Generic(_), _) => false,
+ // Paths account for both the path itself and its generics.
+ (Type::Path { path: a }, Type::Path { path: b }) => {
+ a.def_id() == b.def_id()
+ && a.generics()
+ .zip(b.generics())
+ .map(|(ag, bg)| {
+ ag.iter().zip(bg.iter()).all(|(at, bt)| at.is_doc_subtype_of(bt, cache))
+ })
+ .unwrap_or(true)
+ }
// Other cases, such as primitives, just use recursion.
(a, b) => a
.def_id(cache)
@@ -1782,13 +1818,17 @@ impl PrimitiveType {
}
}
- /// Returns the DefId of the module with `doc(primitive)` for this primitive type.
+ /// Returns the DefId of the module with `rustc_doc_primitive` for this primitive type.
/// Panics if there is no such module.
///
- /// This gives precedence to primitives defined in the current crate, and deprioritizes primitives defined in `core`,
- /// but otherwise, if multiple crates define the same primitive, there is no guarantee of which will be picked.
- /// In particular, if a crate depends on both `std` and another crate that also defines `doc(primitive)`, then
- /// it's entirely random whether `std` or the other crate is picked. (no_std crates are usually fine unless multiple dependencies define a primitive.)
+ /// This gives precedence to primitives defined in the current crate, and deprioritizes
+ /// primitives defined in `core`,
+ /// but otherwise, if multiple crates define the same primitive, there is no guarantee of which
+ /// will be picked.
+ ///
+ /// In particular, if a crate depends on both `std` and another crate that also defines
+ /// `rustc_doc_primitive`, then it's entirely random whether `std` or the other crate is picked.
+ /// (no_std crates are usually fine unless multiple dependencies define a primitive.)
pub(crate) fn primitive_locations(tcx: TyCtxt<'_>) -> &FxHashMap<PrimitiveType, DefId> {
static PRIMITIVE_LOCATIONS: OnceCell<FxHashMap<PrimitiveType, DefId>> = OnceCell::new();
PRIMITIVE_LOCATIONS.get_or_init(|| {
@@ -1978,7 +2018,7 @@ impl Variant {
#[derive(Clone, Debug)]
pub(crate) struct Discriminant {
- // In the case of cross crate re-exports, we don't have the nessesary information
+ // In the case of cross crate re-exports, we don't have the necessary information
// to reconstruct the expression of the discriminant, only the value.
pub(super) expr: Option<BodyId>,
pub(super) value: DefId,
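The `AttributesExt` hunk above splits the trait into two impls that both hand out borrowing iterators through generic associated types: one for plain `[ast::Attribute]` slices, one for `[(Cow<'_, ast::Attribute>, Option<DefId>)]`. A minimal sketch of that shape on stable Rust, with a toy `Attr` type and `u32` standing in for `DefId`, and boxed iterators in place of the unstable `impl Trait` associated types:

```rust
use std::borrow::Cow;

#[derive(Clone, Debug)]
struct Attr {
    name: String,
}

trait AttrsExt {
    // Generic associated type: each implementer picks its own borrowing iterator.
    type Iter<'a>: Iterator<Item = &'a Attr>
    where
        Self: 'a;

    fn iter_attrs<'a>(&'a self) -> Self::Iter<'a>;
}

impl AttrsExt for [Attr] {
    type Iter<'a> = std::slice::Iter<'a, Attr>
    where
        Self: 'a;

    fn iter_attrs<'a>(&'a self) -> Self::Iter<'a> {
        self.iter()
    }
}

// The `Cow` flavour projects out a borrowed attribute whether or not it owns it,
// mirroring the second impl in the hunk above.
impl AttrsExt for [(Cow<'_, Attr>, Option<u32>)] {
    type Iter<'a> = Box<dyn Iterator<Item = &'a Attr> + 'a>
    where
        Self: 'a;

    fn iter_attrs<'a>(&'a self) -> Self::Iter<'a> {
        Box::new(self.iter().map(|(attr, _)| match attr {
            Cow::Borrowed(a) => *a,
            Cow::Owned(a) => a,
        }))
    }
}

fn main() {
    let plain: &[Attr] = &[Attr { name: "doc".into() }, Attr { name: "inline".into() }];
    // `lists`-style filtering built on top of the shared iterator.
    assert_eq!(plain.iter_attrs().filter(|a| a.name == "doc").count(), 1);

    let mixed: Vec<(Cow<'_, Attr>, Option<u32>)> =
        vec![(Cow::Owned(Attr { name: "doc".into() }), None)];
    assert_eq!(mixed.as_slice().iter_attrs().count(), 1);
}
```

The boxed iterator is only there to keep the sketch on stable; the real impls name their iterators with `impl Iterator` in associated-type position.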
diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs
index 20627c2cf..d8c91a968 100644
--- a/src/librustdoc/clean/types/tests.rs
+++ b/src/librustdoc/clean/types/tests.rs
@@ -10,7 +10,7 @@ use rustc_span::symbol::Symbol;
fn create_doc_fragment(s: &str) -> Vec<DocFragment> {
vec![DocFragment {
span: DUMMY_SP,
- parent_module: None,
+ item_id: None,
doc: Symbol::intern(s),
kind: DocFragmentKind::SugaredDoc,
indent: 0,
@@ -69,3 +69,14 @@ fn should_not_trim() {
run_test("\t line1 \n\t line2", "line1 \nline2");
run_test(" \tline1 \n \tline2", "line1 \nline2");
}
+
+#[test]
+fn is_same_generic() {
+ use crate::clean::types::{PrimitiveType, Type};
+ use crate::formats::cache::Cache;
+ let cache = Cache::new(false);
+ let generic = Type::Generic(rustc_span::symbol::sym::Any);
+ let unit = Type::Primitive(PrimitiveType::Unit);
+ assert!(!generic.is_doc_subtype_of(&unit, &cache));
+ assert!(unit.is_doc_subtype_of(&generic, &cache));
+}
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index c9c1c2c45..cca50df0d 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -17,7 +17,7 @@ use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_middle::mir;
use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::ty::subst::{GenericArgKind, SubstsRef};
-use rustc_middle::ty::{self, DefIdTree, TyCtxt};
+use rustc_middle::ty::{self, TyCtxt};
use rustc_span::symbol::{kw, sym, Symbol};
use std::fmt::Write as _;
use std::mem;
@@ -195,12 +195,12 @@ pub(crate) fn build_deref_target_impls(
if let Some(prim) = target.primitive_type() {
let _prof_timer = cx.tcx.sess.prof.generic_activity("build_primitive_inherent_impls");
for did in prim.impls(tcx).filter(|did| !did.is_local()) {
- inline::build_impl(cx, None, did, None, ret);
+ inline::build_impl(cx, did, None, ret);
}
} else if let Type::Path { path } = target {
let did = path.def_id();
if !did.is_local() {
- inline::build_impls(cx, None, did, None, ret);
+ inline::build_impls(cx, did, None, ret);
}
}
}
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
index 2c514a0c8..1be4f364e 100644
--- a/src/librustdoc/config.rs
+++ b/src/librustdoc/config.rs
@@ -1,12 +1,10 @@
use std::collections::BTreeMap;
-use std::convert::TryFrom;
use std::ffi::OsStr;
use std::fmt;
use std::path::PathBuf;
use std::str::FromStr;
use rustc_data_structures::fx::FxHashMap;
-use rustc_driver::print_flag_list;
use rustc_session::config::{
self, parse_crate_types_from_list, parse_externs, parse_target_triple, CrateType,
};
@@ -31,18 +29,13 @@ use crate::passes::{self, Condition};
use crate::scrape_examples::{AllCallLocations, ScrapeExamplesOptions};
use crate::theme;
-#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub(crate) enum OutputFormat {
Json,
+ #[default]
Html,
}
-impl Default for OutputFormat {
- fn default() -> OutputFormat {
- OutputFormat::Html
- }
-}
-
impl OutputFormat {
pub(crate) fn is_json(&self) -> bool {
matches!(self, OutputFormat::Json)
@@ -235,7 +228,7 @@ pub(crate) struct RenderOptions {
pub(crate) extension_css: Option<PathBuf>,
/// A map of crate names to the URL to use instead of querying the crate's `html_root_url`.
pub(crate) extern_html_root_urls: BTreeMap<String, String>,
- /// Whether to give precedence to `html_root_url` or `--exten-html-root-url`.
+ /// Whether to give precedence to `html_root_url` or `--extern-html-root-url`.
pub(crate) extern_html_root_takes_precedence: bool,
/// A map of the default settings (values are as for DOM storage API). Keys should lack the
/// `rustdoc-` prefix.
@@ -333,14 +326,7 @@ impl Options {
return Err(0);
}
- let z_flags = matches.opt_strs("Z");
- if z_flags.iter().any(|x| *x == "help") {
- print_flag_list("-Z", config::Z_OPTIONS);
- return Err(0);
- }
- let c_flags = matches.opt_strs("C");
- if c_flags.iter().any(|x| *x == "help") {
- print_flag_list("-C", config::CG_OPTIONS);
+ if rustc_driver::describe_flag_categories(&matches) {
return Err(0);
}
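The `OutputFormat` cleanup swaps the hand-written `Default` impl for the derived one, where a `#[default]` attribute on a variant picks the default (stable since Rust 1.62). In isolation:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
#[allow(dead_code)] // only `Html` is constructed in this sketch
enum OutputFormat {
    Json,
    #[default]
    Html,
}

fn main() {
    // The derive replaces the hand-written `impl Default` removed in the hunk above.
    assert_eq!(OutputFormat::default(), OutputFormat::Html);
}
```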
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index fbfc58a43..b392ba058 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -217,13 +217,8 @@ pub(crate) fn create_config(
let crate_types =
if proc_macro_crate { vec![CrateType::ProcMacro] } else { vec![CrateType::Rlib] };
- let resolve_doc_links = if *document_private {
- ResolveDocLinks::All
- } else {
- // Should be `ResolveDocLinks::Exported` in theory, but for some reason rustdoc
- // still tries to request resolutions for links on private items.
- ResolveDocLinks::All
- };
+ let resolve_doc_links =
+ if *document_private { ResolveDocLinks::All } else { ResolveDocLinks::Exported };
let test = scrape_examples_options.map(|opts| opts.scrape_tests).unwrap_or(false);
// plays with error output here!
let sessopts = config::Options {
@@ -308,6 +303,9 @@ pub(crate) fn run_global_ctxt(
// HACK(jynelson) this calls an _extremely_ limited subset of `typeck`
// and might break if queries change their assumptions in the future.
+ tcx.sess.time("type_collecting", || {
+ tcx.hir().for_each_module(|module| tcx.ensure().collect_mod_item_types(module))
+ });
// NOTE: This is copy/pasted from typeck/lib.rs and should be kept in sync with those changes.
tcx.sess.time("item_types_checking", || {
diff --git a/src/librustdoc/docfs.rs b/src/librustdoc/docfs.rs
index be066bdaf..d58b8dc6a 100644
--- a/src/librustdoc/docfs.rs
+++ b/src/librustdoc/docfs.rs
@@ -2,18 +2,20 @@
//!
//! On Windows this indirects IO into threads to work around performance issues
//! with Defender (and other similar virus scanners that do blocking operations).
-//! On other platforms this is a thin shim to fs.
//!
//! Only calls needed to permit this workaround have been abstracted: thus
//! fs::read is still done directly via the fs module; if in future rustdoc
//! needs to read-after-write from a file, then it would be added to this
//! abstraction.
+use std::cmp::max;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use std::string::ToString;
use std::sync::mpsc::Sender;
+use std::thread::available_parallelism;
+use threadpool::ThreadPool;
pub(crate) trait PathError {
fn new<S, P: AsRef<Path>>(e: S, path: P) -> Self
@@ -24,11 +26,21 @@ pub(crate) trait PathError {
pub(crate) struct DocFS {
sync_only: bool,
errors: Option<Sender<String>>,
+ pool: ThreadPool,
}
impl DocFS {
pub(crate) fn new(errors: Sender<String>) -> DocFS {
- DocFS { sync_only: false, errors: Some(errors) }
+ const MINIMUM_NB_THREADS: usize = 2;
+ DocFS {
+ sync_only: false,
+ errors: Some(errors),
+ pool: ThreadPool::new(
+ available_parallelism()
+ .map(|nb| max(nb.get(), MINIMUM_NB_THREADS))
+ .unwrap_or(MINIMUM_NB_THREADS),
+ ),
+ }
}
pub(crate) fn set_sync_only(&mut self, sync_only: bool) {
@@ -54,12 +66,11 @@ impl DocFS {
where
E: PathError,
{
- #[cfg(windows)]
if !self.sync_only {
// A possible future enhancement after more detailed profiling would
// be to create the file sync so errors are reported eagerly.
let sender = self.errors.clone().expect("can't write after closing");
- rayon::spawn(move || {
+ self.pool.execute(move || {
fs::write(&path, contents).unwrap_or_else(|e| {
sender.send(format!("\"{}\": {}", path.display(), e)).unwrap_or_else(|_| {
panic!("failed to send error on \"{}\"", path.display())
@@ -70,9 +81,12 @@ impl DocFS {
fs::write(&path, contents).map_err(|e| E::new(e, path))?;
}
- #[cfg(not(windows))]
- fs::write(&path, contents).map_err(|e| E::new(e, path))?;
-
Ok(())
}
}
+
+impl Drop for DocFS {
+ fn drop(&mut self) {
+ self.pool.join();
+ }
+}
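`DocFS` now owns a `threadpool::ThreadPool` sized from `std::thread::available_parallelism()` with a floor of two workers, reports write errors through the existing mpsc channel, and joins the pool in `Drop` so queued writes finish before rustdoc moves on. A minimal sketch of that pattern outside rustdoc, assuming the `threadpool` crate and writing throwaway files under the system temp directory:

```rust
use std::cmp::max;
use std::sync::mpsc::channel;
use std::thread::available_parallelism;

use threadpool::ThreadPool;

fn main() {
    const MINIMUM_NB_THREADS: usize = 2;
    // Same sizing rule as the new `DocFS::new`: all available cores, never fewer than two.
    let workers = available_parallelism()
        .map(|nb| max(nb.get(), MINIMUM_NB_THREADS))
        .unwrap_or(MINIMUM_NB_THREADS);
    let pool = ThreadPool::new(workers);

    // Errors are reported back over a channel instead of panicking in the worker,
    // matching how `DocFS` forwards them to its `errors` sender.
    let (errors_tx, errors_rx) = channel::<String>();
    for i in 0..8 {
        let errors_tx = errors_tx.clone();
        pool.execute(move || {
            let path = std::env::temp_dir().join(format!("docfs-demo-{i}.txt"));
            if let Err(e) = std::fs::write(&path, b"hello") {
                let _ = errors_tx.send(format!("\"{}\": {}", path.display(), e));
            }
        });
    }
    drop(errors_tx);

    // `DocFS::drop` performs the same join so no queued write is lost.
    pool.join();
    for err in errors_rx {
        eprintln!("write failed: {err}");
    }
}
```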
diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs
index 9cf84acc7..daf10e5b8 100644
--- a/src/librustdoc/doctest.rs
+++ b/src/librustdoc/doctest.rs
@@ -398,6 +398,8 @@ fn run_test(
compiler.stdin(Stdio::piped());
compiler.stderr(Stdio::piped());
+ debug!("compiler invocation for doctest: {:?}", compiler);
+
let mut child = compiler.spawn().expect("Failed to spawn rustc process");
{
let stdin = child.stdin.as_mut().expect("Failed to open stdin");
@@ -1057,6 +1059,16 @@ impl Tester for Collector {
Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)),
},
ignore_message: None,
+ #[cfg(not(bootstrap))]
+ source_file: "",
+ #[cfg(not(bootstrap))]
+ start_line: 0,
+ #[cfg(not(bootstrap))]
+ start_col: 0,
+ #[cfg(not(bootstrap))]
+ end_line: 0,
+ #[cfg(not(bootstrap))]
+ end_col: 0,
// compiler failures are test failures
should_panic: test::ShouldPanic::No,
compile_fail: config.compile_fail,
@@ -1229,8 +1241,9 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
if let Some(doc) = attrs.collapsed_doc_value() {
// Use the outermost invocation, so that doctest names come from where the docs were written.
let span = ast_attrs
- .span()
- .map(|span| span.ctxt().outer_expn().expansion_cause().unwrap_or(span))
+ .iter()
+ .find(|attr| attr.doc_str().is_some())
+ .map(|attr| attr.span.ctxt().outer_expn().expansion_cause().unwrap_or(attr.span))
.unwrap_or(DUMMY_SP);
self.collector.set_position(span);
markdown::find_testable_code(
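The added `debug!` line is cheap but useful because `std::process::Command`'s `Debug` impl spells out the whole invocation, so a failing doctest compile can be re-run by hand. A small illustration, with a plain `println!` standing in for the `debug!` call:

```rust
use std::process::{Command, Stdio};

fn main() {
    let mut compiler = Command::new("rustc");
    compiler.arg("--edition=2021").arg("-");
    compiler.stdin(Stdio::piped());
    compiler.stderr(Stdio::piped());

    // `Command`'s `Debug` output quotes the program and every argument,
    // e.g. `"rustc" "--edition=2021" "-"`, which is enough to replay the compile.
    println!("compiler invocation for doctest: {compiler:?}");
}
```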
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
index 8dbfaf4bb..c03291820 100644
--- a/src/librustdoc/formats/cache.rs
+++ b/src/librustdoc/formats/cache.rs
@@ -1,6 +1,6 @@
use std::mem;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet};
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::Symbol;
@@ -118,7 +118,7 @@ pub(crate) struct Cache {
/// All intra-doc links resolved so far.
///
/// Links are indexed by the DefId of the item they document.
- pub(crate) intra_doc_links: FxHashMap<ItemId, Vec<clean::ItemLink>>,
+ pub(crate) intra_doc_links: FxHashMap<ItemId, FxIndexSet<clean::ItemLink>>,
/// Cfg that have been hidden via #![doc(cfg_hide(...))]
pub(crate) hidden_cfg: FxHashSet<clean::cfg::Cfg>,
}
@@ -346,6 +346,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
self.cache,
),
aliases: item.attrs.get_doc_aliases(),
+ deprecation: item.deprecation(self.tcx),
});
}
}
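Storing intra-doc links in an `FxIndexSet` instead of a `Vec` is what the new `Hash` derive on `ItemLink` enables: duplicate resolutions collapse, while iteration order stays the insertion order that rendering relies on. A small sketch of that behaviour, assuming the `indexmap` crate that backs `FxIndexSet` and a toy two-field `ItemLink`:

```rust
use indexmap::IndexSet;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct ItemLink {
    link: String,
    link_text: String,
}

fn main() {
    let mut links = IndexSet::new();
    links.insert(ItemLink { link: "Vec".into(), link_text: "Vec".into() });
    links.insert(ItemLink { link: "String".into(), link_text: "String".into() });
    // A duplicate resolution of the same link is silently collapsed…
    links.insert(ItemLink { link: "Vec".into(), link_text: "Vec".into() });

    // …and iteration still yields links in first-insertion order.
    let order: Vec<(&str, &str)> =
        links.iter().map(|l| (l.link.as_str(), l.link_text.as_str())).collect();
    assert_eq!(order, [("Vec", "Vec"), ("String", "String")]);
}
```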
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
index 0e4c5ed68..1b445b898 100644
--- a/src/librustdoc/html/format.rs
+++ b/src/librustdoc/html/format.rs
@@ -1,13 +1,15 @@
//! HTML formatting module
//!
//! This module contains a large number of `fmt::Display` implementations for
-//! various types in `rustdoc::clean`. These implementations all currently
-//! assume that HTML output is desired, although it may be possible to redesign
-//! them in the future to instead emit any format desired.
+//! various types in `rustdoc::clean`.
+//!
+//! These implementations all emit HTML. As an internal implementation detail,
+//! some of them support an alternate format that emits text, but that should
+//! not be used external to this module.
use std::borrow::Cow;
use std::cell::Cell;
-use std::fmt;
+use std::fmt::{self, Write};
use std::iter::{self, once};
use rustc_ast as ast;
@@ -19,7 +21,6 @@ use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_metadata::creader::{CStore, LoadedMacro};
use rustc_middle::ty;
-use rustc_middle::ty::DefIdTree;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::kw;
use rustc_span::{sym, Symbol};
@@ -127,7 +128,6 @@ impl Buffer {
// the fmt::Result return type imposed by fmt::Write (and avoiding the trait
// import).
pub(crate) fn write_fmt(&mut self, v: fmt::Arguments<'_>) {
- use fmt::Write;
self.buffer.write_fmt(v).unwrap();
}
@@ -136,10 +136,6 @@ impl Buffer {
self.into_inner()
}
- pub(crate) fn is_for_html(&self) -> bool {
- self.for_html
- }
-
pub(crate) fn reserve(&mut self, additional: usize) {
self.buffer.reserve(additional)
}
@@ -280,8 +276,6 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
indent: usize,
ending: Ending,
) -> impl fmt::Display + 'a + Captures<'tcx> {
- use fmt::Write;
-
display_fn(move |f| {
let mut where_predicates = gens.where_predicates.iter().filter(|pred| {
!matches!(pred, clean::WherePredicate::BoundPredicate { bounds, .. } if bounds.is_empty())
@@ -309,13 +303,13 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
write!(
f,
"for<{:#}> {ty_cx:#}: {generic_bounds:#}",
- comma_sep(bound_params.iter().map(|lt| lt.print()), true)
+ comma_sep(bound_params.iter().map(|lt| lt.print(cx)), true)
)
} else {
write!(
f,
"for&lt;{}&gt; {ty_cx}: {generic_bounds}",
- comma_sep(bound_params.iter().map(|lt| lt.print()), true)
+ comma_sep(bound_params.iter().map(|lt| lt.print(cx)), true)
)
}
}
@@ -355,10 +349,10 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
let mut br_with_padding = String::with_capacity(6 * indent + 28);
br_with_padding.push_str("\n");
- let padding_amout =
+ let padding_amount =
if ending == Ending::Newline { indent + 4 } else { indent + "fn where ".len() };
- for _ in 0..padding_amout {
+ for _ in 0..padding_amount {
br_with_padding.push_str(" ");
}
let where_preds = where_preds.to_string().replace('\n', &br_with_padding);
@@ -773,6 +767,12 @@ pub(crate) fn link_tooltip(did: DefId, fragment: &Option<UrlFragment>, cx: &Cont
.or_else(|| cache.external_paths.get(&did))
else { return String::new() };
let mut buf = Buffer::new();
+ let fqp = if *shortty == ItemType::Primitive {
+ // primitives are documented in a crate, but not actually part of it
+ &fqp[fqp.len() - 1..]
+ } else {
+ &fqp
+ };
if let &Some(UrlFragment::Item(id)) = fragment {
write!(buf, "{} ", cx.tcx().def_descr(id));
for component in fqp {
@@ -1138,22 +1138,21 @@ fn fmt_type<'cx>(
// the ugliness comes from inlining across crates where
// everything comes in as a fully resolved QPath (hard to
// look at).
- match href(trait_.def_id(), cx) {
- Ok((ref url, _, ref path)) if !f.alternate() => {
- write!(
- f,
- "<a class=\"associatedtype\" href=\"{url}#{shortty}.{name}\" \
- title=\"type {path}::{name}\">{name}</a>{args}",
- url = url,
- shortty = ItemType::AssocType,
- name = assoc.name,
- path = join_with_double_colon(path),
- args = assoc.args.print(cx),
- )?;
- }
- _ => write!(f, "{}{:#}", assoc.name, assoc.args.print(cx))?,
- }
- Ok(())
+ if !f.alternate() && let Ok((url, _, path)) = href(trait_.def_id(), cx) {
+ write!(
+ f,
+ "<a class=\"associatedtype\" href=\"{url}#{shortty}.{name}\" \
+ title=\"type {path}::{name}\">{name}</a>",
+ shortty = ItemType::AssocType,
+ name = assoc.name,
+ path = join_with_double_colon(&path),
+ )
+ } else {
+ write!(f, "{}", assoc.name)
+ }?;
+
+ // Carry `f.alternate()` into this display w/o branching manually.
+ fmt::Display::fmt(&assoc.args.print(cx), f)
}
}
}
@@ -1307,6 +1306,28 @@ impl clean::BareFunctionDecl {
}
}
+// Implements Write but only counts the bytes "written".
+struct WriteCounter(usize);
+
+impl std::fmt::Write for WriteCounter {
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ self.0 += s.len();
+ Ok(())
+ }
+}
+
+// Implements Display by emitting the given number of spaces.
+struct Indent(usize);
+
+impl fmt::Display for Indent {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (0..self.0).for_each(|_| {
+ f.write_char(' ').unwrap();
+ });
+ Ok(())
+ }
+}
+
impl clean::FnDecl {
pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>(
&'a self,
@@ -1346,95 +1367,80 @@ impl clean::FnDecl {
indent: usize,
cx: &'a Context<'tcx>,
) -> impl fmt::Display + 'a + Captures<'tcx> {
- display_fn(move |f| self.inner_full_print(header_len, indent, f, cx))
+ display_fn(move |f| {
+ // First, generate the text form of the declaration, with no line wrapping, and count the bytes.
+ let mut counter = WriteCounter(0);
+ write!(&mut counter, "{:#}", display_fn(|f| { self.inner_full_print(None, f, cx) }))
+ .unwrap();
+ // If the text form was over 80 characters wide, we will line-wrap our output.
+ let line_wrapping_indent =
+ if header_len + counter.0 > 80 { Some(indent) } else { None };
+ // Generate the final output. This happens to accept `{:#}` formatting to get textual
+ // output but in practice it is only formatted with `{}` to get HTML output.
+ self.inner_full_print(line_wrapping_indent, f, cx)
+ })
}
fn inner_full_print(
&self,
- header_len: usize,
- indent: usize,
+ // For None, the declaration will not be line-wrapped. For Some(n),
+ // the declaration will be line-wrapped, with an indent of n spaces.
+ line_wrapping_indent: Option<usize>,
f: &mut fmt::Formatter<'_>,
cx: &Context<'_>,
) -> fmt::Result {
let amp = if f.alternate() { "&" } else { "&amp;" };
- let mut args = Buffer::html();
- let mut args_plain = Buffer::new();
+
+ write!(f, "(")?;
+ if let Some(n) = line_wrapping_indent {
+ write!(f, "\n{}", Indent(n + 4))?;
+ }
for (i, input) in self.inputs.values.iter().enumerate() {
+ if i > 0 {
+ match line_wrapping_indent {
+ None => write!(f, ", ")?,
+ Some(n) => write!(f, ",\n{}", Indent(n + 4))?,
+ };
+ }
if let Some(selfty) = input.to_self() {
match selfty {
clean::SelfValue => {
- args.push_str("self");
- args_plain.push_str("self");
+ write!(f, "self")?;
}
clean::SelfBorrowed(Some(ref lt), mtbl) => {
- write!(args, "{}{} {}self", amp, lt.print(), mtbl.print_with_space());
- write!(args_plain, "&{} {}self", lt.print(), mtbl.print_with_space());
+ write!(f, "{}{} {}self", amp, lt.print(), mtbl.print_with_space())?;
}
clean::SelfBorrowed(None, mtbl) => {
- write!(args, "{}{}self", amp, mtbl.print_with_space());
- write!(args_plain, "&{}self", mtbl.print_with_space());
+ write!(f, "{}{}self", amp, mtbl.print_with_space())?;
}
clean::SelfExplicit(ref typ) => {
- if f.alternate() {
- write!(args, "self: {:#}", typ.print(cx));
- } else {
- write!(args, "self: {}", typ.print(cx));
- }
- write!(args_plain, "self: {:#}", typ.print(cx));
+ write!(f, "self: ")?;
+ fmt::Display::fmt(&typ.print(cx), f)?;
}
}
} else {
- if i > 0 {
- args.push_str("\n");
- }
if input.is_const {
- args.push_str("const ");
- args_plain.push_str("const ");
- }
- write!(args, "{}: ", input.name);
- write!(args_plain, "{}: ", input.name);
-
- if f.alternate() {
- write!(args, "{:#}", input.type_.print(cx));
- } else {
- write!(args, "{}", input.type_.print(cx));
+ write!(f, "const ")?;
}
- write!(args_plain, "{:#}", input.type_.print(cx));
- }
- if i + 1 < self.inputs.values.len() {
- args.push_str(",");
- args_plain.push_str(",");
+ write!(f, "{}: ", input.name)?;
+ fmt::Display::fmt(&input.type_.print(cx), f)?;
}
}
- let mut args_plain = format!("({})", args_plain.into_inner());
- let mut args = args.into_inner();
-
if self.c_variadic {
- args.push_str(",\n ...");
- args_plain.push_str(", ...");
+ match line_wrapping_indent {
+ None => write!(f, ", ...")?,
+ Some(n) => write!(f, "\n{}...", Indent(n + 4))?,
+ };
}
- let arrow_plain = format!("{:#}", self.output.print(cx));
- let arrow =
- if f.alternate() { arrow_plain.clone() } else { format!("{}", self.output.print(cx)) };
-
- let declaration_len = header_len + args_plain.len() + arrow_plain.len();
- let output = if declaration_len > 80 {
- let full_pad = format!("\n{}", " ".repeat(indent + 4));
- let close_pad = format!("\n{}", " ".repeat(indent));
- format!(
- "({pad}{args}{close}){arrow}",
- pad = if self.inputs.values.is_empty() { "" } else { &full_pad },
- args = args.replace('\n', &full_pad),
- close = close_pad,
- arrow = arrow
- )
- } else {
- format!("({args}){arrow}", args = args.replace('\n', " "), arrow = arrow)
+ match line_wrapping_indent {
+ None => write!(f, ")")?,
+ Some(n) => write!(f, "\n{})", Indent(n))?,
};
- write!(f, "{}", output)
+ fmt::Display::fmt(&self.output.print(cx), f)?;
+ Ok(())
}
}
@@ -1469,7 +1475,7 @@ pub(crate) fn visibility_print_with_space<'a, 'tcx: 'a>(
debug!("path={:?}", path);
// modified from `resolved_path()` to work with `DefPathData`
let last_name = path.data.last().unwrap().data.get_opt_name().unwrap();
- let anchor = anchor(vis_did, last_name, cx).to_string();
+ let anchor = anchor(vis_did, last_name, cx);
let mut s = "pub(in ".to_owned();
for seg in &path.data[..path.data.len() - 1] {
@@ -1491,9 +1497,9 @@ pub(crate) fn visibility_to_src_with_space<'a, 'tcx: 'a>(
tcx: TyCtxt<'tcx>,
item_did: DefId,
) -> impl fmt::Display + 'a + Captures<'tcx> {
- let to_print = match visibility {
- None => String::new(),
- Some(ty::Visibility::Public) => "pub ".to_owned(),
+ let to_print: Cow<'static, str> = match visibility {
+ None => "".into(),
+ Some(ty::Visibility::Public) => "pub ".into(),
Some(ty::Visibility::Restricted(vis_did)) => {
// FIXME(camelid): This may not work correctly if `item_did` is a module.
// However, rustdoc currently never displays a module's
@@ -1501,17 +1507,17 @@ pub(crate) fn visibility_to_src_with_space<'a, 'tcx: 'a>(
let parent_module = find_nearest_parent_module(tcx, item_did);
if vis_did.is_crate_root() {
- "pub(crate) ".to_owned()
+ "pub(crate) ".into()
} else if parent_module == Some(vis_did) {
// `pub(in foo)` where `foo` is the parent module
// is the same as no visibility modifier
- String::new()
+ "".into()
} else if parent_module.and_then(|parent| find_nearest_parent_module(tcx, parent))
== Some(vis_did)
{
- "pub(super) ".to_owned()
+ "pub(super) ".into()
} else {
- format!("pub(in {}) ", tcx.def_path_str(vis_did))
+ format!("pub(in {}) ", tcx.def_path_str(vis_did)).into()
}
}
};
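The rewritten `full_print`/`inner_full_print` replaces the parallel HTML and plain-text buffers with a measure-then-render pass: the declaration is first formatted into a byte-counting `fmt::Write` sink, and only the resulting count decides whether the real output gets line-wrapped. The idea in isolation, with a toy argument list instead of `clean::FnDecl`:

```rust
use std::fmt::{self, Write};

// Counts the bytes "written", like the `WriteCounter` added above.
struct WriteCounter(usize);

impl fmt::Write for WriteCounter {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.0 += s.len();
        Ok(())
    }
}

// Emits `n` spaces, like the `Indent` helper.
struct Indent(usize);

impl fmt::Display for Indent {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (0..self.0).try_for_each(|_| f.write_char(' '))
    }
}

fn render(args: &[&str], header_len: usize, indent: usize) -> String {
    // First pass: count how wide the single-line form would be.
    let mut counter = WriteCounter(0);
    write!(counter, "({})", args.join(", ")).unwrap();
    let wrap = header_len + counter.0 > 80;

    // Second pass: render for real, wrapping only if the measurement says so.
    let mut out = String::new();
    if wrap {
        writeln!(out, "(").unwrap();
        for (i, a) in args.iter().enumerate() {
            let sep = if i + 1 < args.len() { "," } else { "" };
            writeln!(out, "{}{}{}", Indent(indent + 4), a, sep).unwrap();
        }
        write!(out, "{})", Indent(indent)).unwrap();
    } else {
        write!(out, "({})", args.join(", ")).unwrap();
    }
    out
}

fn main() {
    assert_eq!(render(&["x: u8"], 3, 0), "(x: u8)");
    assert!(render(&["long_parameter_name: ImpressivelyLongTypeName<'static, usize>"; 3], 3, 0)
        .contains('\n'));
}
```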
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 2c9fc4e3c..b61dd5714 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -65,23 +65,6 @@ pub(crate) fn render_item_decl_with_highlighting(src: &str, out: &mut Buffer) {
write!(out, "</pre>");
}
-/// Highlights `src` as a source code page, returning the HTML output.
-pub(crate) fn render_source_with_highlighting(
- src: &str,
- out: &mut Buffer,
- line_numbers: Buffer,
- href_context: HrefContext<'_, '_>,
- decoration_info: DecorationInfo,
- extra: Option<&str>,
-) {
- write_header(out, "", Some(line_numbers), Tooltip::None);
- if let Some(extra) = extra {
- out.push_str(extra);
- }
- write_code(out, src, Some(href_context), Some(decoration_info));
- write_footer(out, None);
-}
-
fn write_header(out: &mut Buffer, class: &str, extra_content: Option<Buffer>, tooltip: Tooltip) {
write!(
out,
@@ -143,8 +126,8 @@ fn can_merge(class1: Option<Class>, class2: Option<Class>, text: &str) -> bool {
/// This type is used as a conveniency to prevent having to pass all its fields as arguments into
/// the various functions (which became its methods).
-struct TokenHandler<'a, 'tcx> {
- out: &'a mut Buffer,
+struct TokenHandler<'a, 'tcx, F: Write> {
+ out: &'a mut F,
/// It contains the closing tag and the associated `Class`.
closing_tags: Vec<(&'static str, Class)>,
/// This is used because we don't automatically generate the closing tag on `ExitSpan` in
@@ -159,7 +142,7 @@ struct TokenHandler<'a, 'tcx> {
href_context: Option<HrefContext<'a, 'tcx>>,
}
-impl<'a, 'tcx> TokenHandler<'a, 'tcx> {
+impl<'a, 'tcx, F: Write> TokenHandler<'a, 'tcx, F> {
fn handle_exit_span(&mut self) {
// We can't get the last `closing_tags` element using `pop()` because `closing_tags` is
// being used in `write_pending_elems`.
@@ -194,8 +177,8 @@ impl<'a, 'tcx> TokenHandler<'a, 'tcx> {
} else {
// We only want to "open" the tag ourselves if we have more than one pending and if the
// current parent tag is not the same as our pending content.
- let close_tag = if self.pending_elems.len() > 1 && current_class.is_some() {
- Some(enter_span(self.out, current_class.unwrap(), &self.href_context))
+ let close_tag = if self.pending_elems.len() > 1 && let Some(current_class) = current_class {
+ Some(enter_span(self.out, current_class, &self.href_context))
} else {
None
};
@@ -211,7 +194,7 @@ impl<'a, 'tcx> TokenHandler<'a, 'tcx> {
}
}
-impl<'a, 'tcx> Drop for TokenHandler<'a, 'tcx> {
+impl<'a, 'tcx, F: Write> Drop for TokenHandler<'a, 'tcx, F> {
/// When leaving, we need to flush all pending data to not have missing content.
fn drop(&mut self) {
if self.pending_exit_span.is_some() {
@@ -233,8 +216,8 @@ impl<'a, 'tcx> Drop for TokenHandler<'a, 'tcx> {
/// item definition.
///
/// More explanations about spans and how we use them here are provided in the
-fn write_code(
- out: &mut Buffer,
+pub(super) fn write_code(
+ out: &mut impl Write,
src: &str,
href_context: Option<HrefContext<'_, '_>>,
decoration_info: Option<DecorationInfo>,
@@ -883,7 +866,7 @@ impl<'src> Classifier<'src> {
/// Called when we start processing a span of text that should be highlighted.
/// The `Class` argument specifies how it should be highlighted.
fn enter_span(
- out: &mut Buffer,
+ out: &mut impl Write,
klass: Class,
href_context: &Option<HrefContext<'_, '_>>,
) -> &'static str {
@@ -894,8 +877,8 @@ fn enter_span(
}
/// Called at the end of a span of highlighted text.
-fn exit_span(out: &mut Buffer, closing_tag: &str) {
- out.write_str(closing_tag);
+fn exit_span(out: &mut impl Write, closing_tag: &str) {
+ out.write_str(closing_tag).unwrap();
}
/// Called for a span of text. If the text should be highlighted differently
@@ -915,7 +898,7 @@ fn exit_span(out: &mut Buffer, closing_tag: &str) {
/// will then try to find this `span` in the `span_correspondance_map`. If found, it'll then
/// generate a link for this element (which corresponds to where its definition is located).
fn string<T: Display>(
- out: &mut Buffer,
+ out: &mut impl Write,
text: T,
klass: Option<Class>,
href_context: &Option<HrefContext<'_, '_>>,
@@ -923,7 +906,7 @@ fn string<T: Display>(
) {
if let Some(closing_tag) = string_without_closing_tag(out, text, klass, href_context, open_tag)
{
- out.write_str(closing_tag);
+ out.write_str(closing_tag).unwrap();
}
}
@@ -937,7 +920,7 @@ fn string<T: Display>(
/// in `span_map.rs::collect_spans_and_sources`. If it cannot retrieve the information, then it's
/// the same as the second point (`klass` is `Some` but doesn't have a [`rustc_span::Span`]).
fn string_without_closing_tag<T: Display>(
- out: &mut Buffer,
+ out: &mut impl Write,
text: T,
klass: Option<Class>,
href_context: &Option<HrefContext<'_, '_>>,
@@ -945,16 +928,16 @@ fn string_without_closing_tag<T: Display>(
) -> Option<&'static str> {
let Some(klass) = klass
else {
- write!(out, "{}", text);
+ write!(out, "{}", text).unwrap();
return None;
};
let Some(def_span) = klass.get_span()
else {
if !open_tag {
- write!(out, "{}", text);
+ write!(out, "{}", text).unwrap();
return None;
}
- write!(out, "<span class=\"{}\">{}", klass.as_html(), text);
+ write!(out, "<span class=\"{}\">{}", klass.as_html(), text).unwrap();
return Some("</span>");
};
@@ -1009,28 +992,28 @@ fn string_without_closing_tag<T: Display>(
if !open_tag {
// We're already inside an element which has the same klass, no need to give it
// again.
- write!(out, "<a href=\"{}\">{}", href, text_s);
+ write!(out, "<a href=\"{}\">{}", href, text_s).unwrap();
} else {
let klass_s = klass.as_html();
if klass_s.is_empty() {
- write!(out, "<a href=\"{}\">{}", href, text_s);
+ write!(out, "<a href=\"{}\">{}", href, text_s).unwrap();
} else {
- write!(out, "<a class=\"{}\" href=\"{}\">{}", klass_s, href, text_s);
+ write!(out, "<a class=\"{}\" href=\"{}\">{}", klass_s, href, text_s).unwrap();
}
}
return Some("</a>");
}
}
if !open_tag {
- write!(out, "{}", text_s);
+ write!(out, "{}", text_s).unwrap();
return None;
}
let klass_s = klass.as_html();
if klass_s.is_empty() {
- write!(out, "{}", text_s);
+ out.write_str(&text_s).unwrap();
Some("")
} else {
- write!(out, "<span class=\"{}\">{}", klass_s, text_s);
+ write!(out, "<span class=\"{}\">{}", klass_s, text_s).unwrap();
Some("</span>")
}
}
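Making `write_code` and its helpers generic over `fmt::Write` means the same highlighting routines can emit into a `Buffer`, a `String`, or any other sink; because `fmt::Write` is fallible, the calls gain `.unwrap()`. A stripped-down sketch of the open-tag/closing-tag shape used by `string_without_closing_tag`, with made-up class names:

```rust
use std::fmt::Write;

// Writes the opening tag for a highlighted span and hands back the closing tag.
fn open_span(out: &mut impl Write, class: Option<&str>, text: &str) -> Option<&'static str> {
    match class {
        None => {
            write!(out, "{}", text).unwrap();
            None
        }
        Some(class) => {
            write!(out, "<span class=\"{}\">{}", class, text).unwrap();
            Some("</span>")
        }
    }
}

fn main() {
    // Any `fmt::Write` sink works; a plain `String` here, `Buffer` in rustdoc.
    let mut out = String::new();
    if let Some(closing) = open_span(&mut out, Some("kw"), "fn") {
        out.push_str(closing);
    }
    open_span(&mut out, None, " main");
    assert_eq!(out, "<span class=\"kw\">fn</span> main");
}
```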
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index fe446ae3c..00aadb8e8 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -36,7 +36,6 @@ use rustc_span::{Span, Symbol};
use once_cell::sync::Lazy;
use std::borrow::Cow;
use std::collections::VecDeque;
-use std::default::Default;
use std::fmt::Write;
use std::ops::{ControlFlow, Range};
use std::str;
@@ -556,7 +555,15 @@ fn check_if_allowed_tag(t: &Tag<'_>) -> bool {
}
fn is_forbidden_tag(t: &Tag<'_>) -> bool {
- matches!(t, Tag::CodeBlock(_) | Tag::Table(_) | Tag::TableHead | Tag::TableRow | Tag::TableCell)
+ matches!(
+ t,
+ Tag::CodeBlock(_)
+ | Tag::Table(_)
+ | Tag::TableHead
+ | Tag::TableRow
+ | Tag::TableCell
+ | Tag::FootnoteDefinition(_)
+ )
}
impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
@@ -589,6 +596,10 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
is_start = false;
check_if_allowed_tag(c)
}
+ Event::FootnoteReference(_) => {
+ self.skipped_tags += 1;
+ false
+ }
_ => true,
};
if !is_allowed_tag {
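Treating footnote definitions as forbidden tags and counting skipped `FootnoteReference` events keeps summary lines from rendering dangling footnote markers. A minimal sketch of filtering footnote references out of rendered markdown, assuming the pulldown-cmark 0.9 API that rustdoc builds on here:

```rust
use pulldown_cmark::{html, Event, Options, Parser};

// Render `md` while dropping footnote references and counting how many were skipped,
// in the spirit of the `SummaryLine` change above.
fn render_without_footnote_refs(md: &str) -> (String, usize) {
    let mut skipped = 0;
    let events: Vec<Event<'_>> = Parser::new_ext(md, Options::ENABLE_FOOTNOTES)
        .filter(|ev| {
            if matches!(ev, Event::FootnoteReference(_)) {
                skipped += 1;
                false
            } else {
                true
            }
        })
        .collect();
    let mut out = String::new();
    html::push_html(&mut out, events.into_iter());
    (out, skipped)
}

fn main() {
    let (out, skipped) = render_without_footnote_refs("Summary with a footnote[^1].\n\n[^1]: details");
    assert_eq!(skipped, 1);
    assert!(out.contains("Summary with a footnote"));
}
```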
diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs
index 5e4a59562..ac5054ce1 100644
--- a/src/librustdoc/html/render/context.rs
+++ b/src/librustdoc/html/render/context.rs
@@ -17,10 +17,11 @@ use super::print_item::{full_path, item_path, print_item};
use super::search_index::build_index;
use super::write_shared::write_shared;
use super::{
- collect_spans_and_sources, print_sidebar, scrape_examples_help, sidebar_module_like, AllTypes,
- LinkFromSrc, StylePath,
+ collect_spans_and_sources, scrape_examples_help,
+ sidebar::print_sidebar,
+ sidebar::{sidebar_module_like, Sidebar},
+ AllTypes, LinkFromSrc, StylePath,
};
-
use crate::clean::{self, types::ExternalLocation, ExternalCrate};
use crate::config::{ModuleSorting, RenderOptions};
use crate::docfs::{DocFS, PathError};
@@ -35,6 +36,7 @@ use crate::html::url_parts_builder::UrlPartsBuilder;
use crate::html::{layout, sources, static_files};
use crate::scrape_examples::AllCallLocations;
use crate::try_err;
+use askama::Template;
/// Major driving force in all rustdoc rendering. This contains information
/// about where in the tree-like hierarchy rendering is occurring and controls
@@ -350,7 +352,7 @@ impl<'tcx> Context<'tcx> {
},
);
- path = href.into_inner().to_string_lossy().to_string();
+ path = href.into_inner().to_string_lossy().into_owned();
if let Some(c) = path.as_bytes().last() && *c != b'/' {
path.push('/');
@@ -600,17 +602,18 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
};
let all = shared.all.replace(AllTypes::new());
let mut sidebar = Buffer::html();
- if shared.cache.crate_version.is_some() {
- write!(sidebar, "<h2 class=\"location\">Crate {}</h2>", crate_name)
+
+ let blocks = sidebar_module_like(all.item_sections());
+ let bar = Sidebar {
+ title_prefix: "Crate ",
+ title: crate_name.as_str(),
+ is_crate: false,
+ version: "",
+ blocks: vec![blocks],
+ path: String::new(),
};
- let mut items = Buffer::html();
- sidebar_module_like(&mut items, all.item_sections());
- if !items.is_empty() {
- sidebar.push_str("<div class=\"sidebar-elems\">");
- sidebar.push_buffer(items);
- sidebar.push_str("</div>");
- }
+ bar.render_into(&mut sidebar).unwrap();
let v = layout::render(
&shared.layout,
@@ -649,11 +652,35 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
</noscript>\
<link rel=\"stylesheet\" \
href=\"{static_root_path}{settings_css}\">\
- <script defer src=\"{static_root_path}{settings_js}\"></script>",
+ <script defer src=\"{static_root_path}{settings_js}\"></script>\
+ <link rel=\"preload\" href=\"{static_root_path}{theme_light_css}\" \
+ as=\"style\">\
+ <link rel=\"preload\" href=\"{static_root_path}{theme_dark_css}\" \
+ as=\"style\">\
+ <link rel=\"preload\" href=\"{static_root_path}{theme_ayu_css}\" \
+ as=\"style\">",
static_root_path = page.get_static_root_path(),
settings_css = static_files::STATIC_FILES.settings_css,
settings_js = static_files::STATIC_FILES.settings_js,
- )
+ theme_light_css = static_files::STATIC_FILES.theme_light_css,
+ theme_dark_css = static_files::STATIC_FILES.theme_dark_css,
+ theme_ayu_css = static_files::STATIC_FILES.theme_ayu_css,
+ );
+ // Pre-load all theme CSS files, so that switching feels seamless.
+ //
+ // When loading settings.html as a popover, the equivalent HTML is
+ // generated in main.js.
+ for file in &shared.style_files {
+ if let Ok(theme) = file.basename() {
+ write!(
+ buf,
+ "<link rel=\"preload\" href=\"{root_path}{theme}{suffix}.css\" \
+ as=\"style\">",
+ root_path = page.static_root_path.unwrap_or(""),
+ suffix = page.resource_suffix,
+ );
+ }
+ }
},
&shared.style_files,
);
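The "all items" sidebar is now produced by filling an askama `Sidebar` value and calling `Template::render_into` on the output buffer instead of concatenating HTML by hand. A self-contained sketch with an inline template source, assuming the askama crate the diff imports (the real `Sidebar` fields and its `.html` template live under rustdoc's `templates/` directory):

```rust
use askama::Template;

// Inline template standing in for rustdoc's sidebar template file.
#[derive(Template)]
#[template(
    source = "<h2 class=\"location\">{{ title_prefix }}{{ title }}</h2>",
    ext = "html"
)]
struct Sidebar<'a> {
    title_prefix: &'a str,
    title: &'a str,
}

fn main() {
    let bar = Sidebar { title_prefix: "Crate ", title: "example" };
    // `render_into` writes into any `fmt::Write` sink: a `String` here, `Buffer` in rustdoc.
    let mut out = String::new();
    bar.render_into(&mut out).unwrap();
    assert_eq!(out, "<h2 class=\"location\">Crate example</h2>");
}
```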
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index e6a040d02..463184aca 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -30,6 +30,7 @@ mod tests;
mod context;
mod print_item;
+mod sidebar;
mod span_map;
mod write_shared;
@@ -37,7 +38,6 @@ pub(crate) use self::context::*;
pub(crate) use self::span_map::{collect_spans_and_sources, LinkFromSrc};
use std::collections::VecDeque;
-use std::default::Default;
use std::fmt::{self, Write};
use std::fs;
use std::iter::Peekable;
@@ -46,14 +46,14 @@ use std::rc::Rc;
use std::str;
use std::string::ToString;
+use askama::Template;
use rustc_ast_pretty::pprust;
use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
+use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def::CtorKind;
use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_hir::Mutability;
use rustc_middle::middle::stability;
-use rustc_middle::ty;
use rustc_middle::ty::TyCtxt;
use rustc_span::{
symbol::{sym, Symbol},
@@ -69,7 +69,7 @@ use crate::formats::item_type::ItemType;
use crate::formats::{AssocItemRender, Impl, RenderMode};
use crate::html::escape::Escape;
use crate::html::format::{
- href, join_with_double_colon, print_abi_with_space, print_constness_with_space,
+ display_fn, href, join_with_double_colon, print_abi_with_space, print_constness_with_space,
print_default_space, print_generic_bounds, print_where_clause, visibility_print_with_space,
Buffer, Ending, HrefError, PrintWithSpace,
};
@@ -104,6 +104,7 @@ pub(crate) struct IndexItem {
pub(crate) parent_idx: Option<usize>,
pub(crate) search_type: Option<IndexItemFunctionType>,
pub(crate) aliases: Box<[Symbol]>,
+ pub(crate) deprecation: Option<Deprecation>,
}
/// A type used for the search index.
@@ -407,149 +408,153 @@ fn scrape_examples_help(shared: &SharedContext<'_>) -> String {
)
}
-fn document(
- w: &mut Buffer,
- cx: &mut Context<'_>,
- item: &clean::Item,
- parent: Option<&clean::Item>,
+fn document<'a, 'cx: 'a>(
+ cx: &'a mut Context<'cx>,
+ item: &'a clean::Item,
+ parent: Option<&'a clean::Item>,
heading_offset: HeadingOffset,
-) {
+) -> impl fmt::Display + 'a + Captures<'cx> {
if let Some(ref name) = item.name {
info!("Documenting {}", name);
}
- document_item_info(w, cx, item, parent);
- if parent.is_none() {
- document_full_collapsible(w, item, cx, heading_offset);
- } else {
- document_full(w, item, cx, heading_offset);
- }
+
+ display_fn(move |f| {
+ document_item_info(cx, item, parent).render_into(f).unwrap();
+ if parent.is_none() {
+ write!(f, "{}", document_full_collapsible(item, cx, heading_offset))?;
+ } else {
+ write!(f, "{}", document_full(item, cx, heading_offset))?;
+ }
+ Ok(())
+ })
}
/// Render md_text as markdown.
-fn render_markdown(
- w: &mut Buffer,
- cx: &mut Context<'_>,
- md_text: &str,
+fn render_markdown<'a, 'cx: 'a>(
+ cx: &'a mut Context<'cx>,
+ md_text: &'a str,
links: Vec<RenderedLink>,
heading_offset: HeadingOffset,
-) {
- write!(
- w,
- "<div class=\"docblock\">{}</div>",
- Markdown {
- content: md_text,
- links: &links,
- ids: &mut cx.id_map,
- error_codes: cx.shared.codes,
- edition: cx.shared.edition(),
- playground: &cx.shared.playground,
- heading_offset,
- }
- .into_string()
- )
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ display_fn(move |f| {
+ write!(
+ f,
+ "<div class=\"docblock\">{}</div>",
+ Markdown {
+ content: md_text,
+ links: &links,
+ ids: &mut cx.id_map,
+ error_codes: cx.shared.codes,
+ edition: cx.shared.edition(),
+ playground: &cx.shared.playground,
+ heading_offset,
+ }
+ .into_string()
+ )
+ })
}
/// Writes a documentation block containing only the first paragraph of the documentation. If the
/// docs are longer, a "Read more" link is appended to the end.
-fn document_short(
- w: &mut Buffer,
- item: &clean::Item,
- cx: &mut Context<'_>,
- link: AssocItemLink<'_>,
- parent: &clean::Item,
+fn document_short<'a, 'cx: 'a>(
+ item: &'a clean::Item,
+ cx: &'a mut Context<'cx>,
+ link: AssocItemLink<'a>,
+ parent: &'a clean::Item,
show_def_docs: bool,
-) {
- document_item_info(w, cx, item, Some(parent));
- if !show_def_docs {
- return;
- }
- if let Some(s) = item.doc_value() {
- let (mut summary_html, has_more_content) =
- MarkdownSummaryLine(&s, &item.links(cx)).into_string_with_has_more_content();
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ display_fn(move |f| {
+ document_item_info(cx, item, Some(parent)).render_into(f).unwrap();
+ if !show_def_docs {
+ return Ok(());
+ }
+ if let Some(s) = item.doc_value() {
+ let (mut summary_html, has_more_content) =
+ MarkdownSummaryLine(&s, &item.links(cx)).into_string_with_has_more_content();
- if has_more_content {
- let link = format!(r#" <a{}>Read more</a>"#, assoc_href_attr(item, link, cx));
+ if has_more_content {
+ let link = format!(r#" <a{}>Read more</a>"#, assoc_href_attr(item, link, cx));
- if let Some(idx) = summary_html.rfind("</p>") {
- summary_html.insert_str(idx, &link);
- } else {
- summary_html.push_str(&link);
+ if let Some(idx) = summary_html.rfind("</p>") {
+ summary_html.insert_str(idx, &link);
+ } else {
+ summary_html.push_str(&link);
+ }
}
- }
- write!(w, "<div class='docblock'>{}</div>", summary_html,);
- }
+ write!(f, "<div class='docblock'>{}</div>", summary_html)?;
+ }
+ Ok(())
+ })
}
-fn document_full_collapsible(
- w: &mut Buffer,
- item: &clean::Item,
- cx: &mut Context<'_>,
+fn document_full_collapsible<'a, 'cx: 'a>(
+ item: &'a clean::Item,
+ cx: &'a mut Context<'cx>,
heading_offset: HeadingOffset,
-) {
- document_full_inner(w, item, cx, true, heading_offset);
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ document_full_inner(item, cx, true, heading_offset)
}
-fn document_full(
- w: &mut Buffer,
- item: &clean::Item,
- cx: &mut Context<'_>,
+fn document_full<'a, 'cx: 'a>(
+ item: &'a clean::Item,
+ cx: &'a mut Context<'cx>,
heading_offset: HeadingOffset,
-) {
- document_full_inner(w, item, cx, false, heading_offset);
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ document_full_inner(item, cx, false, heading_offset)
}
-fn document_full_inner(
- w: &mut Buffer,
- item: &clean::Item,
- cx: &mut Context<'_>,
+fn document_full_inner<'a, 'cx: 'a>(
+ item: &'a clean::Item,
+ cx: &'a mut Context<'cx>,
is_collapsible: bool,
heading_offset: HeadingOffset,
-) {
- if let Some(s) = item.collapsed_doc_value() {
- debug!("Doc block: =====\n{}\n=====", s);
- if is_collapsible {
- w.write_str(
- "<details class=\"toggle top-doc\" open>\
- <summary class=\"hideme\">\
- <span>Expand description</span>\
- </summary>",
- );
- render_markdown(w, cx, &s, item.links(cx), heading_offset);
- w.write_str("</details>");
- } else {
- render_markdown(w, cx, &s, item.links(cx), heading_offset);
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ display_fn(move |f| {
+ if let Some(s) = item.collapsed_doc_value() {
+ debug!("Doc block: =====\n{}\n=====", s);
+ if is_collapsible {
+ write!(
+ f,
+ "<details class=\"toggle top-doc\" open>\
+ <summary class=\"hideme\">\
+ <span>Expand description</span>\
+ </summary>{}</details>",
+ render_markdown(cx, &s, item.links(cx), heading_offset)
+ )?;
+ } else {
+ write!(f, "{}", render_markdown(cx, &s, item.links(cx), heading_offset))?;
+ }
}
- }
- let kind = match &*item.kind {
- clean::ItemKind::StrippedItem(box kind) | kind => kind,
- };
+ let kind = match &*item.kind {
+ clean::ItemKind::StrippedItem(box kind) | kind => kind,
+ };
- if let clean::ItemKind::FunctionItem(..) | clean::ItemKind::MethodItem(..) = kind {
- render_call_locations(w, cx, item);
- }
+ if let clean::ItemKind::FunctionItem(..) | clean::ItemKind::MethodItem(..) = kind {
+ render_call_locations(f, cx, item);
+ }
+ Ok(())
+ })
}
+#[derive(Template)]
+#[template(path = "item_info.html")]
+struct ItemInfo {
+ items: Vec<ShortItemInfo>,
+}
/// Add extra information about an item such as:
///
/// * Stability
/// * Deprecated
/// * Required features (through the `doc_cfg` feature)
fn document_item_info(
- w: &mut Buffer,
cx: &mut Context<'_>,
item: &clean::Item,
parent: Option<&clean::Item>,
-) {
- let item_infos = short_item_info(item, cx, parent);
- if !item_infos.is_empty() {
- w.write_str("<span class=\"item-info\">");
- for info in item_infos {
- w.write_str(&info);
- }
- w.write_str("</span>");
- }
+) -> ItemInfo {
+ let items = short_item_info(item, cx, parent);
+ ItemInfo { items }
}
fn portability(item: &clean::Item, parent: Option<&clean::Item>) -> Option<String> {
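The bulk of this hunk converts `document`, `render_markdown`, `document_short` and friends from "write into a `Buffer`" functions into functions returning `impl fmt::Display`, built on rustdoc's `display_fn` closure adapter; nothing is rendered until the caller formats the value. The adapter pattern in miniature, with a toy `document` helper:

```rust
use std::cell::Cell;
use std::fmt;

// A minimal version of rustdoc's `display_fn`: wrap a `FnOnce(&mut Formatter)` closure
// in a type that implements `Display`, so helpers can *return* renderable values.
fn display_fn(f: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display {
    struct WithFormatter<F>(Cell<Option<F>>);

    impl<F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result> fmt::Display for WithFormatter<F> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            (self.0.take().unwrap())(f)
        }
    }

    WithFormatter(Cell::new(Some(f)))
}

// A `document`-style helper: lazy until the caller formats the result.
fn document<'a>(name: &'a str, body: &'a str) -> impl fmt::Display + 'a {
    display_fn(move |f| write!(f, "<h1>{}</h1><div class=\"docblock\">{}</div>", name, body))
}

fn main() {
    let rendered = format!("{}", document("Example", "Hello"));
    assert_eq!(rendered, "<h1>Example</h1><div class=\"docblock\">Hello</div>");
}
```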
@@ -567,7 +572,25 @@ fn portability(item: &clean::Item, parent: Option<&clean::Item>) -> Option<Strin
cfg
);
- Some(format!("<div class=\"stab portability\">{}</div>", cfg?.render_long_html()))
+ Some(cfg?.render_long_html())
+}
+
+#[derive(Template)]
+#[template(path = "short_item_info.html")]
+enum ShortItemInfo {
+ /// A message describing the deprecation of this item
+ Deprecation {
+ message: String,
+ },
+ /// The feature corresponding to an unstable item, and optionally
+ /// a tracking issue URL and number.
+ Unstable {
+ feature: String,
+ tracking: Option<(String, u32)>,
+ },
+ Portability {
+ message: String,
+ },
}
/// Render the stability, deprecation and portability information that is displayed at the top of
@@ -576,7 +599,7 @@ fn short_item_info(
item: &clean::Item,
cx: &mut Context<'_>,
parent: Option<&clean::Item>,
-) -> Vec<String> {
+) -> Vec<ShortItemInfo> {
let mut extra_info = vec![];
if let Some(depr @ Deprecation { note, since, is_since_rustc_version: _, suggestion: _ }) =
@@ -602,15 +625,10 @@ fn short_item_info(
if let Some(note) = note {
let note = note.as_str();
let html = MarkdownItemInfo(note, &mut cx.id_map);
- message.push_str(&format!(": {}", html.into_string()));
- }
- extra_info.push(format!(
- "<div class=\"stab deprecated\">\
- <span class=\"emoji\">👎</span>\
- <span>{}</span>\
- </div>",
- message,
- ));
+ message.push_str(": ");
+ message.push_str(&html.into_string());
+ }
+ extra_info.push(ShortItemInfo::Deprecation { message });
}
// Render unstable items. But don't render "rustc_private" crates (internal compiler crates).
@@ -621,26 +639,17 @@ fn short_item_info(
.filter(|stab| stab.feature != sym::rustc_private)
.map(|stab| (stab.level, stab.feature))
{
- let mut message = "<span class=\"emoji\">🔬</span>\
- <span>This is a nightly-only experimental API."
- .to_owned();
-
- let mut feature = format!("<code>{}</code>", Escape(feature.as_str()));
- if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) {
- feature.push_str(&format!(
- "&nbsp;<a href=\"{url}{issue}\">#{issue}</a>",
- url = url,
- issue = issue
- ));
- }
-
- message.push_str(&format!(" ({})</span>", feature));
-
- extra_info.push(format!("<div class=\"stab unstable\">{}</div>", message));
+ let tracking = if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue)
+ {
+ Some((url.clone(), issue.get()))
+ } else {
+ None
+ };
+ extra_info.push(ShortItemInfo::Unstable { feature: feature.to_string(), tracking });
}
- if let Some(portability) = portability(item, parent) {
- extra_info.push(portability);
+ if let Some(message) = portability(item, parent) {
+ extra_info.push(ShortItemInfo::Portability { message });
}
extra_info
@@ -650,7 +659,7 @@ fn short_item_info(
// "Auto Trait Implementations," "Blanket Trait Implementations" (on struct/enum pages).
pub(crate) fn render_impls(
cx: &mut Context<'_>,
- w: &mut Buffer,
+ mut w: impl Write,
impls: &[&Impl],
containing_item: &clean::Item,
toggle_open_by_default: bool,
@@ -662,7 +671,7 @@ pub(crate) fn render_impls(
let did = i.trait_did().unwrap();
let provided_trait_methods = i.inner_impl().provided_trait_methods(tcx);
let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_trait_methods);
- let mut buffer = if w.is_for_html() { Buffer::html() } else { Buffer::new() };
+ let mut buffer = Buffer::new();
render_impl(
&mut buffer,
cx,
@@ -683,7 +692,7 @@ pub(crate) fn render_impls(
})
.collect::<Vec<_>>();
rendered_impls.sort();
- w.write_str(&rendered_impls.join(""));
+ w.write_str(&rendered_impls.join("")).unwrap();
}
/// Build a (possibly empty) `href` attribute (a key-value pair) for the given associated item.
@@ -839,7 +848,7 @@ fn assoc_method(
let (indent, indent_str, end_newline) = if parent == ItemType::Trait {
header_len += 4;
let indent_str = " ";
- render_attributes_in_pre(w, meth, indent_str);
+ write!(w, "{}", render_attributes_in_pre(meth, indent_str));
(4, indent_str, Ending::NoNewline)
} else {
render_attributes_in_code(w, meth);
@@ -1035,10 +1044,16 @@ fn attributes(it: &clean::Item) -> Vec<String> {
// When an attribute is rendered inside a `<pre>` tag, it is formatted using
// a whitespace prefix and newline.
-fn render_attributes_in_pre(w: &mut Buffer, it: &clean::Item, prefix: &str) {
- for a in attributes(it) {
- writeln!(w, "{}{}", prefix, a);
- }
+fn render_attributes_in_pre<'a>(
+ it: &'a clean::Item,
+ prefix: &'a str,
+) -> impl fmt::Display + Captures<'a> {
+ crate::html::format::display_fn(move |f| {
+ for a in attributes(it) {
+ writeln!(f, "{}{}", prefix, a)?;
+ }
+ Ok(())
+ })
}
// When an attribute is rendered inside a <code> tag, it is formatted using
@@ -1064,61 +1079,68 @@ impl<'a> AssocItemLink<'a> {
}
}
-fn write_impl_section_heading(w: &mut Buffer, title: &str, id: &str) {
+fn write_impl_section_heading(mut w: impl fmt::Write, title: &str, id: &str) {
write!(
w,
"<h2 id=\"{id}\" class=\"small-section-header\">\
{title}\
<a href=\"#{id}\" class=\"anchor\">§</a>\
</h2>"
- );
+ )
+ .unwrap();
}
pub(crate) fn render_all_impls(
- w: &mut Buffer,
+ mut w: impl Write,
cx: &mut Context<'_>,
containing_item: &clean::Item,
concrete: &[&Impl],
synthetic: &[&Impl],
blanket_impl: &[&Impl],
) {
- let mut impls = Buffer::empty_from(w);
+ let mut impls = Buffer::html();
render_impls(cx, &mut impls, concrete, containing_item, true);
let impls = impls.into_inner();
if !impls.is_empty() {
- write_impl_section_heading(w, "Trait Implementations", "trait-implementations");
- write!(w, "<div id=\"trait-implementations-list\">{}</div>", impls);
+ write_impl_section_heading(&mut w, "Trait Implementations", "trait-implementations");
+ write!(w, "<div id=\"trait-implementations-list\">{}</div>", impls).unwrap();
}
if !synthetic.is_empty() {
- write_impl_section_heading(w, "Auto Trait Implementations", "synthetic-implementations");
- w.write_str("<div id=\"synthetic-implementations-list\">");
- render_impls(cx, w, synthetic, containing_item, false);
- w.write_str("</div>");
+ write_impl_section_heading(
+ &mut w,
+ "Auto Trait Implementations",
+ "synthetic-implementations",
+ );
+ w.write_str("<div id=\"synthetic-implementations-list\">").unwrap();
+ render_impls(cx, &mut w, synthetic, containing_item, false);
+ w.write_str("</div>").unwrap();
}
if !blanket_impl.is_empty() {
- write_impl_section_heading(w, "Blanket Implementations", "blanket-implementations");
- w.write_str("<div id=\"blanket-implementations-list\">");
- render_impls(cx, w, blanket_impl, containing_item, false);
- w.write_str("</div>");
+ write_impl_section_heading(&mut w, "Blanket Implementations", "blanket-implementations");
+ w.write_str("<div id=\"blanket-implementations-list\">").unwrap();
+ render_impls(cx, &mut w, blanket_impl, containing_item, false);
+ w.write_str("</div>").unwrap();
}
}
-fn render_assoc_items(
- w: &mut Buffer,
- cx: &mut Context<'_>,
- containing_item: &clean::Item,
+fn render_assoc_items<'a, 'cx: 'a>(
+ cx: &'a mut Context<'cx>,
+ containing_item: &'a clean::Item,
it: DefId,
- what: AssocItemRender<'_>,
-) {
+ what: AssocItemRender<'a>,
+) -> impl fmt::Display + 'a + Captures<'cx> {
let mut derefs = DefIdSet::default();
derefs.insert(it);
- render_assoc_items_inner(w, cx, containing_item, it, what, &mut derefs)
+ display_fn(move |f| {
+ render_assoc_items_inner(f, cx, containing_item, it, what, &mut derefs);
+ Ok(())
+ })
}
fn render_assoc_items_inner(
- w: &mut Buffer,
+ mut w: &mut dyn fmt::Write,
cx: &mut Context<'_>,
containing_item: &clean::Item,
it: DefId,
@@ -1131,7 +1153,7 @@ fn render_assoc_items_inner(
let Some(v) = cache.impls.get(&it) else { return };
let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none());
if !non_trait.is_empty() {
- let mut tmp_buf = Buffer::empty_from(w);
+ let mut tmp_buf = Buffer::html();
let (render_mode, id) = match what {
AssocItemRender::All => {
write_impl_section_heading(&mut tmp_buf, "Implementations", "implementations");
@@ -1155,7 +1177,7 @@ fn render_assoc_items_inner(
(RenderMode::ForDeref { mut_: deref_mut_ }, cx.derive_id(id))
}
};
- let mut impls_buf = Buffer::empty_from(w);
+ let mut impls_buf = Buffer::html();
for i in &non_trait {
render_impl(
&mut impls_buf,
@@ -1175,10 +1197,10 @@ fn render_assoc_items_inner(
);
}
if !impls_buf.is_empty() {
- w.push_buffer(tmp_buf);
- write!(w, "<div id=\"{}\">", id);
- w.push_buffer(impls_buf);
- w.write_str("</div>");
+ write!(w, "{}", tmp_buf.into_inner()).unwrap();
+ write!(w, "<div id=\"{}\">", id).unwrap();
+ write!(w, "{}", impls_buf.into_inner()).unwrap();
+ w.write_str("</div>").unwrap();
}
}
@@ -1188,7 +1210,7 @@ fn render_assoc_items_inner(
if let Some(impl_) = deref_impl {
let has_deref_mut =
traits.iter().any(|t| t.trait_did() == cx.tcx().lang_items().deref_mut_trait());
- render_deref_methods(w, cx, impl_, containing_item, has_deref_mut, derefs);
+ render_deref_methods(&mut w, cx, impl_, containing_item, has_deref_mut, derefs);
}
// If we were already one level into rendering deref methods, we don't want to render
@@ -1207,7 +1229,7 @@ fn render_assoc_items_inner(
}
fn render_deref_methods(
- w: &mut Buffer,
+ mut w: impl Write,
cx: &mut Context<'_>,
impl_: &Impl,
container_item: &clean::Item,
@@ -1239,10 +1261,10 @@ fn render_deref_methods(
return;
}
}
- render_assoc_items_inner(w, cx, container_item, did, what, derefs);
+ render_assoc_items_inner(&mut w, cx, container_item, did, what, derefs);
} else if let Some(prim) = target.primitive_type() {
if let Some(&did) = cache.primitive_locations.get(&prim) {
- render_assoc_items_inner(w, cx, container_item, did, what, derefs);
+ render_assoc_items_inner(&mut w, cx, container_item, did, what, derefs);
}
}
}
@@ -1291,7 +1313,7 @@ pub(crate) fn notable_traits_button(ty: &clean::Type, cx: &mut Context<'_>) -> O
if let Some(impls) = cx.cache().impls.get(&did) {
for i in impls {
let impl_ = i.inner_impl();
- if !impl_.for_.without_borrowed_ref().is_same(ty.without_borrowed_ref(), cx.cache()) {
+ if !ty.is_doc_subtype_of(&impl_.for_, cx.cache()) {
// Two different types might have the same did,
// without actually being the same.
continue;
@@ -1327,7 +1349,7 @@ fn notable_traits_decl(ty: &clean::Type, cx: &Context<'_>) -> (String, String) {
for i in impls {
let impl_ = i.inner_impl();
- if !impl_.for_.without_borrowed_ref().is_same(ty.without_borrowed_ref(), cx.cache()) {
+ if !ty.is_doc_subtype_of(&impl_.for_, cx.cache()) {
// Two different types might have the same did,
// without actually being the same.
continue;
@@ -1472,37 +1494,45 @@ fn render_impl(
// We need the stability of the item from the trait
// because impls can't have a stability.
if item.doc_value().is_some() {
- document_item_info(&mut info_buffer, cx, it, Some(parent));
- document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
+ document_item_info(cx, it, Some(parent))
+ .render_into(&mut info_buffer)
+ .unwrap();
+ write!(
+ &mut doc_buffer,
+ "{}",
+ document_full(item, cx, HeadingOffset::H5)
+ );
short_documented = false;
} else {
// In case the item isn't documented,
// provide short documentation from the trait.
- document_short(
+ write!(
&mut doc_buffer,
- it,
- cx,
- link,
- parent,
- rendering_params.show_def_docs,
+ "{}",
+ document_short(
+ it,
+ cx,
+ link,
+ parent,
+ rendering_params.show_def_docs,
+ )
);
}
}
} else {
- document_item_info(&mut info_buffer, cx, item, Some(parent));
+ document_item_info(cx, item, Some(parent))
+ .render_into(&mut info_buffer)
+ .unwrap();
if rendering_params.show_def_docs {
- document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
+ write!(&mut doc_buffer, "{}", document_full(item, cx, HeadingOffset::H5));
short_documented = false;
}
}
} else {
- document_short(
+ write!(
&mut doc_buffer,
- item,
- cx,
- link,
- parent,
- rendering_params.show_def_docs,
+ "{}",
+ document_short(item, cx, link, parent, rendering_params.show_def_docs,)
);
}
}
@@ -1862,161 +1892,17 @@ pub(crate) fn render_impl_summary(
let is_trait = inner_impl.trait_.is_some();
if is_trait {
if let Some(portability) = portability(&i.impl_item, Some(parent)) {
- write!(w, "<span class=\"item-info\">{}</span>", portability);
+ write!(
+ w,
+ "<span class=\"item-info\"><div class=\"stab portability\">{}</div></span>",
+ portability
+ );
}
}
w.write_str("</section>");
}
-fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) {
- if it.is_struct()
- || it.is_trait()
- || it.is_primitive()
- || it.is_union()
- || it.is_enum()
- || it.is_mod()
- || it.is_typedef()
- {
- write!(
- buffer,
- "<h2 class=\"location\"><a href=\"#\">{}{}</a></h2>",
- match *it.kind {
- clean::ModuleItem(..) =>
- if it.is_crate() {
- "Crate "
- } else {
- "Module "
- },
- _ => "",
- },
- it.name.as_ref().unwrap()
- );
- }
-
- buffer.write_str("<div class=\"sidebar-elems\">");
- if it.is_crate() {
- write!(buffer, "<ul class=\"block\">");
- if let Some(ref version) = cx.cache().crate_version {
- write!(buffer, "<li class=\"version\">Version {}</li>", Escape(version));
- }
- write!(buffer, "<li><a id=\"all-types\" href=\"all.html\">All Items</a></li>");
- buffer.write_str("</ul>");
- }
-
- match *it.kind {
- clean::StructItem(ref s) => sidebar_struct(cx, buffer, it, s),
- clean::TraitItem(ref t) => sidebar_trait(cx, buffer, it, t),
- clean::PrimitiveItem(_) => sidebar_primitive(cx, buffer, it),
- clean::UnionItem(ref u) => sidebar_union(cx, buffer, it, u),
- clean::EnumItem(ref e) => sidebar_enum(cx, buffer, it, e),
- clean::TypedefItem(_) => sidebar_typedef(cx, buffer, it),
- clean::ModuleItem(ref m) => sidebar_module(buffer, &m.items),
- clean::ForeignTypeItem => sidebar_foreign_type(cx, buffer, it),
- _ => {}
- }
-
- // The sidebar is designed to display sibling functions, modules and
- // other miscellaneous information. since there are lots of sibling
- // items (and that causes quadratic growth in large modules),
- // we refactor common parts into a shared JavaScript file per module.
- // still, we don't move everything into JS because we want to preserve
- // as much HTML as possible in order to allow non-JS-enabled browsers
- // to navigate the documentation (though slightly inefficiently).
-
- if !it.is_mod() {
- let path: String = cx.current.iter().map(|s| s.as_str()).intersperse("::").collect();
-
- write!(buffer, "<h2><a href=\"index.html\">In {}</a></h2>", path);
- }
-
- // Closes sidebar-elems div.
- buffer.write_str("</div>");
-}
-
-fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
- if used_links.insert(url.clone()) {
- return url;
- }
- let mut add = 1;
- while !used_links.insert(format!("{}-{}", url, add)) {
- add += 1;
- }
- format!("{}-{}", url, add)
-}
-
-struct SidebarLink {
- name: Symbol,
- url: String,
-}
-
-impl fmt::Display for SidebarLink {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "<a href=\"#{}\">{}</a>", self.url, self.name)
- }
-}
-
-impl PartialEq for SidebarLink {
- fn eq(&self, other: &Self) -> bool {
- self.url == other.url
- }
-}
-
-impl Eq for SidebarLink {}
-
-impl PartialOrd for SidebarLink {
- fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for SidebarLink {
- fn cmp(&self, other: &Self) -> std::cmp::Ordering {
- self.url.cmp(&other.url)
- }
-}
-
-fn get_methods(
- i: &clean::Impl,
- for_deref: bool,
- used_links: &mut FxHashSet<String>,
- deref_mut: bool,
- tcx: TyCtxt<'_>,
-) -> Vec<SidebarLink> {
- i.items
- .iter()
- .filter_map(|item| match item.name {
- Some(name) if !name.is_empty() && item.is_method() => {
- if !for_deref || should_render_item(item, deref_mut, tcx) {
- Some(SidebarLink {
- name,
- url: get_next_url(used_links, format!("{}.{}", ItemType::Method, name)),
- })
- } else {
- None
- }
- }
- _ => None,
- })
- .collect::<Vec<_>>()
-}
-
-fn get_associated_constants(
- i: &clean::Impl,
- used_links: &mut FxHashSet<String>,
-) -> Vec<SidebarLink> {
- i.items
- .iter()
- .filter_map(|item| match item.name {
- Some(name) if !name.is_empty() && item.is_associated_const() => Some(SidebarLink {
- name,
- url: get_next_url(used_links, format!("{}.{}", ItemType::AssocConst, name)),
- }),
- _ => None,
- })
- .collect::<Vec<_>>()
-}
-
pub(crate) fn small_url_encode(s: String) -> String {
// These characters don't need to be escaped in a URI.
// See https://url.spec.whatwg.org/#query-percent-encode-set
@@ -2082,232 +1968,6 @@ pub(crate) fn small_url_encode(s: String) -> String {
}
}
-pub(crate) fn sidebar_render_assoc_items(
- cx: &Context<'_>,
- out: &mut Buffer,
- id_map: &mut IdMap,
- concrete: Vec<&Impl>,
- synthetic: Vec<&Impl>,
- blanket_impl: Vec<&Impl>,
-) {
- let format_impls = |impls: Vec<&Impl>, id_map: &mut IdMap| {
- let mut links = FxHashSet::default();
-
- let mut ret = impls
- .iter()
- .filter_map(|it| {
- let trait_ = it.inner_impl().trait_.as_ref()?;
- let encoded =
- id_map.derive(get_id_for_impl(&it.inner_impl().for_, Some(trait_), cx));
-
- let i_display = format!("{:#}", trait_.print(cx));
- let out = Escape(&i_display);
- let prefix = match it.inner_impl().polarity {
- ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "",
- ty::ImplPolarity::Negative => "!",
- };
- let generated = format!("<a href=\"#{}\">{}{}</a>", encoded, prefix, out);
- if links.insert(generated.clone()) { Some(generated) } else { None }
- })
- .collect::<Vec<String>>();
- ret.sort();
- ret
- };
-
- let concrete_format = format_impls(concrete, id_map);
- let synthetic_format = format_impls(synthetic, id_map);
- let blanket_format = format_impls(blanket_impl, id_map);
-
- if !concrete_format.is_empty() {
- print_sidebar_block(
- out,
- "trait-implementations",
- "Trait Implementations",
- concrete_format.iter(),
- );
- }
-
- if !synthetic_format.is_empty() {
- print_sidebar_block(
- out,
- "synthetic-implementations",
- "Auto Trait Implementations",
- synthetic_format.iter(),
- );
- }
-
- if !blanket_format.is_empty() {
- print_sidebar_block(
- out,
- "blanket-implementations",
- "Blanket Implementations",
- blanket_format.iter(),
- );
- }
-}
-
-fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
- let did = it.item_id.expect_def_id();
- let cache = cx.cache();
-
- if let Some(v) = cache.impls.get(&did) {
- let mut used_links = FxHashSet::default();
- let mut id_map = IdMap::new();
-
- {
- let used_links_bor = &mut used_links;
- let mut assoc_consts = v
- .iter()
- .filter(|i| i.inner_impl().trait_.is_none())
- .flat_map(|i| get_associated_constants(i.inner_impl(), used_links_bor))
- .collect::<Vec<_>>();
- if !assoc_consts.is_empty() {
- // We want links' order to be reproducible so we don't use unstable sort.
- assoc_consts.sort();
-
- print_sidebar_block(
- out,
- "implementations",
- "Associated Constants",
- assoc_consts.iter(),
- );
- }
- let mut methods = v
- .iter()
- .filter(|i| i.inner_impl().trait_.is_none())
- .flat_map(|i| get_methods(i.inner_impl(), false, used_links_bor, false, cx.tcx()))
- .collect::<Vec<_>>();
- if !methods.is_empty() {
- // We want links' order to be reproducible so we don't use unstable sort.
- methods.sort();
-
- print_sidebar_block(out, "implementations", "Methods", methods.iter());
- }
- }
-
- if v.iter().any(|i| i.inner_impl().trait_.is_some()) {
- if let Some(impl_) =
- v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait())
- {
- let mut derefs = DefIdSet::default();
- derefs.insert(did);
- sidebar_deref_methods(cx, out, impl_, v, &mut derefs, &mut used_links);
- }
-
- let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) =
- v.iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_auto());
- let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) =
- concrete.into_iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_blanket());
-
- sidebar_render_assoc_items(cx, out, &mut id_map, concrete, synthetic, blanket_impl);
- }
- }
-}
-
-fn sidebar_deref_methods(
- cx: &Context<'_>,
- out: &mut Buffer,
- impl_: &Impl,
- v: &[Impl],
- derefs: &mut DefIdSet,
- used_links: &mut FxHashSet<String>,
-) {
- let c = cx.cache();
-
- debug!("found Deref: {:?}", impl_);
- if let Some((target, real_target)) =
- impl_.inner_impl().items.iter().find_map(|item| match *item.kind {
- clean::AssocTypeItem(box ref t, _) => Some(match *t {
- clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
- _ => (&t.type_, &t.type_),
- }),
- _ => None,
- })
- {
- debug!("found target, real_target: {:?} {:?}", target, real_target);
- if let Some(did) = target.def_id(c) &&
- let Some(type_did) = impl_.inner_impl().for_.def_id(c) &&
- // `impl Deref<Target = S> for S`
- (did == type_did || !derefs.insert(did))
- {
- // Avoid infinite cycles
- return;
- }
- let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait());
- let inner_impl = target
- .def_id(c)
- .or_else(|| {
- target.primitive_type().and_then(|prim| c.primitive_locations.get(&prim).cloned())
- })
- .and_then(|did| c.impls.get(&did));
- if let Some(impls) = inner_impl {
- debug!("found inner_impl: {:?}", impls);
- let mut ret = impls
- .iter()
- .filter(|i| i.inner_impl().trait_.is_none())
- .flat_map(|i| get_methods(i.inner_impl(), true, used_links, deref_mut, cx.tcx()))
- .collect::<Vec<_>>();
- if !ret.is_empty() {
- let id = if let Some(target_def_id) = real_target.def_id(c) {
- cx.deref_id_map.get(&target_def_id).expect("Deref section without derived id")
- } else {
- "deref-methods"
- };
- let title = format!(
- "Methods from {}&lt;Target={}&gt;",
- Escape(&format!("{:#}", impl_.inner_impl().trait_.as_ref().unwrap().print(cx))),
- Escape(&format!("{:#}", real_target.print(cx))),
- );
- // We want links' order to be reproducible so we don't use unstable sort.
- ret.sort();
- print_sidebar_block(out, id, &title, ret.iter());
- }
- }
-
- // Recurse into any further impls that might exist for `target`
- if let Some(target_did) = target.def_id(c) &&
- let Some(target_impls) = c.impls.get(&target_did) &&
- let Some(target_deref_impl) = target_impls.iter().find(|i| {
- i.inner_impl()
- .trait_
- .as_ref()
- .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
- .unwrap_or(false)
- })
- {
- sidebar_deref_methods(
- cx,
- out,
- target_deref_impl,
- target_impls,
- derefs,
- used_links,
- );
- }
- }
-}
-
-fn sidebar_struct(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, s: &clean::Struct) {
- let mut sidebar = Buffer::new();
- let fields = get_struct_fields_name(&s.fields);
-
- if !fields.is_empty() {
- match s.ctor_kind {
- None => {
- print_sidebar_block(&mut sidebar, "fields", "Fields", fields.iter());
- }
- Some(CtorKind::Fn) => print_sidebar_title(&mut sidebar, "fields", "Tuple Fields"),
- Some(CtorKind::Const) => {}
- }
- }
-
- sidebar_assoc_items(cx, &mut sidebar, it);
-
- if !sidebar.is_empty() {
- write!(buf, "<section>{}</section>", sidebar.into_inner());
- }
-}
-
fn get_id_for_impl(for_: &clean::Type, trait_: Option<&clean::Path>, cx: &Context<'_>) -> String {
match trait_ {
Some(t) => small_url_encode(format!("impl-{:#}-for-{:#}", t.print(cx), for_.print(cx))),
@@ -2328,131 +1988,6 @@ fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String
}
}
-fn print_sidebar_title(buf: &mut Buffer, id: &str, title: &str) {
- write!(buf, "<h3><a href=\"#{}\">{}</a></h3>", id, title);
-}
-
-fn print_sidebar_block(
- buf: &mut Buffer,
- id: &str,
- title: &str,
- items: impl Iterator<Item = impl fmt::Display>,
-) {
- print_sidebar_title(buf, id, title);
- buf.push_str("<ul class=\"block\">");
- for item in items {
- write!(buf, "<li>{}</li>", item);
- }
- buf.push_str("</ul>");
-}
-
-fn sidebar_trait(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, t: &clean::Trait) {
- buf.write_str("<section>");
-
- fn print_sidebar_section(
- out: &mut Buffer,
- items: &[clean::Item],
- id: &str,
- title: &str,
- filter: impl Fn(&clean::Item) -> bool,
- mapper: impl Fn(&str) -> String,
- ) {
- let mut items: Vec<&str> = items
- .iter()
- .filter_map(|m| match m.name {
- Some(ref name) if filter(m) => Some(name.as_str()),
- _ => None,
- })
- .collect::<Vec<_>>();
-
- if !items.is_empty() {
- items.sort_unstable();
- print_sidebar_block(out, id, title, items.into_iter().map(mapper));
- }
- }
-
- print_sidebar_section(
- buf,
- &t.items,
- "required-associated-types",
- "Required Associated Types",
- |m| m.is_ty_associated_type(),
- |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocType),
- );
-
- print_sidebar_section(
- buf,
- &t.items,
- "provided-associated-types",
- "Provided Associated Types",
- |m| m.is_associated_type(),
- |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocType),
- );
-
- print_sidebar_section(
- buf,
- &t.items,
- "required-associated-consts",
- "Required Associated Constants",
- |m| m.is_ty_associated_const(),
- |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocConst),
- );
-
- print_sidebar_section(
- buf,
- &t.items,
- "provided-associated-consts",
- "Provided Associated Constants",
- |m| m.is_associated_const(),
- |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocConst),
- );
-
- print_sidebar_section(
- buf,
- &t.items,
- "required-methods",
- "Required Methods",
- |m| m.is_ty_method(),
- |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::TyMethod),
- );
-
- print_sidebar_section(
- buf,
- &t.items,
- "provided-methods",
- "Provided Methods",
- |m| m.is_method(),
- |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::Method),
- );
-
- if let Some(implementors) = cx.cache().implementors.get(&it.item_id.expect_def_id()) {
- let mut res = implementors
- .iter()
- .filter(|i| !i.is_on_local_type(cx))
- .filter_map(|i| extract_for_impl_name(&i.impl_item, cx))
- .collect::<Vec<_>>();
-
- if !res.is_empty() {
- res.sort();
- print_sidebar_block(
- buf,
- "foreign-impls",
- "Implementations on Foreign Types",
- res.iter().map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(name))),
- );
- }
- }
-
- sidebar_assoc_items(cx, buf, it);
-
- print_sidebar_title(buf, "implementors", "Implementors");
- if t.is_auto(cx.tcx()) {
- print_sidebar_title(buf, "synthetic-implementors", "Auto Implementors");
- }
-
- buf.push_str("</section>")
-}
-
/// Returns the list of implementations for the primitive reference type, filtering out any
/// implementations that are on concrete or partially generic types, only keeping implementations
/// of the form `impl<T> Trait for &T`.
@@ -2483,89 +2018,6 @@ pub(crate) fn get_filtered_impls_for_reference<'a>(
(concrete, synthetic, blanket_impl)
}
-fn sidebar_primitive(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
- let mut sidebar = Buffer::new();
-
- if it.name.map(|n| n.as_str() != "reference").unwrap_or(false) {
- sidebar_assoc_items(cx, &mut sidebar, it);
- } else {
- let shared = Rc::clone(&cx.shared);
- let (concrete, synthetic, blanket_impl) = get_filtered_impls_for_reference(&shared, it);
-
- sidebar_render_assoc_items(
- cx,
- &mut sidebar,
- &mut IdMap::new(),
- concrete,
- synthetic,
- blanket_impl,
- );
- }
-
- if !sidebar.is_empty() {
- write!(buf, "<section>{}</section>", sidebar.into_inner());
- }
-}
-
-fn sidebar_typedef(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
- let mut sidebar = Buffer::new();
- sidebar_assoc_items(cx, &mut sidebar, it);
-
- if !sidebar.is_empty() {
- write!(buf, "<section>{}</section>", sidebar.into_inner());
- }
-}
-
-fn get_struct_fields_name(fields: &[clean::Item]) -> Vec<String> {
- let mut fields = fields
- .iter()
- .filter(|f| matches!(*f.kind, clean::StructFieldItem(..)))
- .filter_map(|f| {
- f.name.map(|name| format!("<a href=\"#structfield.{name}\">{name}</a>", name = name))
- })
- .collect::<Vec<_>>();
- fields.sort();
- fields
-}
-
-fn sidebar_union(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, u: &clean::Union) {
- let mut sidebar = Buffer::new();
- let fields = get_struct_fields_name(&u.fields);
-
- if !fields.is_empty() {
- print_sidebar_block(&mut sidebar, "fields", "Fields", fields.iter());
- }
-
- sidebar_assoc_items(cx, &mut sidebar, it);
-
- if !sidebar.is_empty() {
- write!(buf, "<section>{}</section>", sidebar.into_inner());
- }
-}
-
-fn sidebar_enum(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, e: &clean::Enum) {
- let mut sidebar = Buffer::new();
-
- let mut variants = e
- .variants()
- .filter_map(|v| {
- v.name
- .as_ref()
- .map(|name| format!("<a href=\"#variant.{name}\">{name}</a>", name = name))
- })
- .collect::<Vec<_>>();
- if !variants.is_empty() {
- variants.sort_unstable();
- print_sidebar_block(&mut sidebar, "variants", "Variants", variants.iter());
- }
-
- sidebar_assoc_items(cx, &mut sidebar, it);
-
- if !sidebar.is_empty() {
- write!(buf, "<section>{}</section>", sidebar.into_inner());
- }
-}
-
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub(crate) enum ItemSection {
Reexports,
@@ -2719,54 +2171,6 @@ fn item_ty_to_section(ty: ItemType) -> ItemSection {
}
}
-pub(crate) fn sidebar_module_like(buf: &mut Buffer, item_sections_in_use: FxHashSet<ItemSection>) {
- use std::fmt::Write as _;
-
- let mut sidebar = String::new();
-
- for &sec in ItemSection::ALL.iter().filter(|sec| item_sections_in_use.contains(sec)) {
- let _ = write!(sidebar, "<li><a href=\"#{}\">{}</a></li>", sec.id(), sec.name());
- }
-
- if !sidebar.is_empty() {
- write!(
- buf,
- "<section>\
- <ul class=\"block\">{}</ul>\
- </section>",
- sidebar
- );
- }
-}
-
-fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
- let item_sections_in_use: FxHashSet<_> = items
- .iter()
- .filter(|it| {
- !it.is_stripped()
- && it
- .name
- .or_else(|| {
- if let clean::ImportItem(ref i) = *it.kind &&
- let clean::ImportKind::Simple(s) = i.kind { Some(s) } else { None }
- })
- .is_some()
- })
- .map(|it| item_ty_to_section(it.type_()))
- .collect();
-
- sidebar_module_like(buf, item_sections_in_use);
-}
-
-fn sidebar_foreign_type(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
- let mut sidebar = Buffer::new();
- sidebar_assoc_items(cx, &mut sidebar, it);
-
- if !sidebar.is_empty() {
- write!(buf, "<section>{}</section>", sidebar.into_inner());
- }
-}
-
/// Returns a list of all paths used in the type.
/// This is used to help deduplicate imported impls
/// for reexported types. If any of the contained
@@ -2825,7 +2229,7 @@ const MAX_FULL_EXAMPLES: usize = 5;
const NUM_VISIBLE_LINES: usize = 10;
/// Generates the HTML for example call locations generated via the --scrape-examples flag.
-fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item) {
+fn render_call_locations<W: fmt::Write>(mut w: W, cx: &mut Context<'_>, item: &clean::Item) {
let tcx = cx.tcx();
let def_id = item.item_id.expect_def_id();
let key = tcx.def_path_hash(def_id);
@@ -2834,7 +2238,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
// Generate a unique ID so users can link to this section for a given method
let id = cx.id_map.derive("scraped-examples");
write!(
- w,
+ &mut w,
"<div class=\"docblock scraped-example-list\">\
<span></span>\
<h5 id=\"{id}\">\
@@ -2843,7 +2247,8 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
</h5>",
root_path = cx.root_path(),
id = id
- );
+ )
+ .unwrap();
// Create a URL to a particular location in a reverse-dependency's source file
let link_to_loc = |call_data: &CallData, loc: &CallLocation| -> (String, String) {
@@ -2861,7 +2266,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
};
// Generate the HTML for a single example, being the title and code block
- let write_example = |w: &mut Buffer, (path, call_data): (&PathBuf, &CallData)| -> bool {
+ let write_example = |mut w: &mut W, (path, call_data): (&PathBuf, &CallData)| -> bool {
let contents = match fs::read_to_string(&path) {
Ok(contents) => contents,
Err(err) => {
@@ -2909,7 +2314,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
let locations_encoded = serde_json::to_string(&line_ranges).unwrap();
write!(
- w,
+ &mut w,
"<div class=\"scraped-example {expanded_cls}\" data-locs=\"{locations}\">\
<div class=\"scraped-example-title\">\
{name} (<a href=\"{url}\">{title}</a>)\
@@ -2922,10 +2327,12 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
// The locations are encoded as a data attribute, so they can be read
// later by the JS for interactions.
locations = Escape(&locations_encoded)
- );
+ )
+ .unwrap();
if line_ranges.len() > 1 {
- write!(w, r#"<button class="prev">&pr;</button> <button class="next">&sc;</button>"#);
+ write!(w, r#"<button class="prev">&pr;</button> <button class="next">&sc;</button>"#)
+ .unwrap();
}
// Look for the example file in the source map if it exists, otherwise return a dummy span
@@ -2952,7 +2359,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
decoration_info.insert("highlight", byte_ranges);
sources::print_src(
- w,
+ &mut w,
contents_subset,
file_span,
cx,
@@ -2960,7 +2367,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
highlight::DecorationInfo(decoration_info),
sources::SourceContext::Embedded { offset: line_min, needs_expansion },
);
- write!(w, "</div></div>");
+ write!(w, "</div></div>").unwrap();
true
};
@@ -2994,7 +2401,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
// An example may fail to write if its source can't be read for some reason, so this method
// continues iterating until a write succeeds
- let write_and_skip_failure = |w: &mut Buffer, it: &mut Peekable<_>| {
+ let write_and_skip_failure = |w: &mut W, it: &mut Peekable<_>| {
while let Some(example) = it.next() {
if write_example(&mut *w, example) {
break;
@@ -3003,7 +2410,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
};
// Write just one example that's visible by default in the method's description.
- write_and_skip_failure(w, &mut it);
+ write_and_skip_failure(&mut w, &mut it);
// Then add the remaining examples in a hidden section.
if it.peek().is_some() {
@@ -3016,17 +2423,19 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
<div class=\"hide-more\">Hide additional examples</div>\
<div class=\"more-scraped-examples\">\
<div class=\"toggle-line\"><div class=\"toggle-line-inner\"></div></div>"
- );
+ )
+ .unwrap();
// Only generate inline code for MAX_FULL_EXAMPLES number of examples. Otherwise we could
// make the page arbitrarily huge!
for _ in 0..MAX_FULL_EXAMPLES {
- write_and_skip_failure(w, &mut it);
+ write_and_skip_failure(&mut w, &mut it);
}
// For the remaining examples, generate a <ul> containing links to the source files.
if it.peek().is_some() {
- write!(w, r#"<div class="example-links">Additional examples can be found in:<br><ul>"#);
+ write!(w, r#"<div class="example-links">Additional examples can be found in:<br><ul>"#)
+ .unwrap();
it.for_each(|(_, call_data)| {
let (url, _) = link_to_loc(call_data, &call_data.locations[0]);
write!(
@@ -3034,13 +2443,14 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
r#"<li><a href="{url}">{name}</a></li>"#,
url = url,
name = call_data.display_name
- );
+ )
+ .unwrap();
});
- write!(w, "</ul></div>");
+ write!(w, "</ul></div>").unwrap();
}
- write!(w, "</div></details>");
+ write!(w, "</div></details>").unwrap();
}
- write!(w, "</div>");
+ write!(w, "</div>").unwrap();
}
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
index 2869a3961..9a968e48b 100644
--- a/src/librustdoc/html/render/print_item.rs
+++ b/src/librustdoc/html/render/print_item.rs
@@ -1,5 +1,6 @@
use clean::AttributesExt;
+use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_hir::def::CtorKind;
@@ -28,8 +29,8 @@ use crate::formats::item_type::ItemType;
use crate::formats::{AssocItemRender, Impl, RenderMode};
use crate::html::escape::Escape;
use crate::html::format::{
- join_with_double_colon, print_abi_with_space, print_constness_with_space, print_where_clause,
- visibility_print_with_space, Buffer, Ending, PrintWithSpace,
+ display_fn, join_with_double_colon, print_abi_with_space, print_constness_with_space,
+ print_where_clause, visibility_print_with_space, Buffer, Ending, PrintWithSpace,
};
use crate::html::layout::Page;
use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
@@ -201,7 +202,7 @@ fn should_hide_fields(n_fields: usize) -> bool {
n_fields > 12
}
-fn toggle_open(w: &mut Buffer, text: impl fmt::Display) {
+fn toggle_open(mut w: impl fmt::Write, text: impl fmt::Display) {
write!(
w,
"<details class=\"toggle type-contents-toggle\">\
@@ -209,15 +210,16 @@ fn toggle_open(w: &mut Buffer, text: impl fmt::Display) {
<span>Show {}</span>\
</summary>",
text
- );
+ )
+ .unwrap();
}
-fn toggle_close(w: &mut Buffer) {
- w.write_str("</details>");
+fn toggle_close(mut w: impl fmt::Write) {
+ w.write_str("</details>").unwrap();
}
fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: &[clean::Item]) {
- document(w, cx, item, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, item, None, HeadingOffset::H2));
let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::<Vec<usize>>();
@@ -367,7 +369,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
..myitem.clone()
};
- let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()));
+ let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()).to_string());
stab_tags
} else {
None
@@ -461,41 +463,62 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
/// Render the stability, deprecation and portability tags that are displayed in the item's summary
/// at the module level.
-fn extra_info_tags(item: &clean::Item, parent: &clean::Item, tcx: TyCtxt<'_>) -> String {
- let mut tags = String::new();
-
- fn tag_html(class: &str, title: &str, contents: &str) -> String {
- format!(r#"<span class="stab {}" title="{}">{}</span>"#, class, Escape(title), contents)
- }
-
- // The trailing space after each tag is to space it properly against the rest of the docs.
- if let Some(depr) = &item.deprecation(tcx) {
- let mut message = "Deprecated";
- if !stability::deprecation_in_effect(depr) {
- message = "Deprecation planned";
+fn extra_info_tags<'a, 'tcx: 'a>(
+ item: &'a clean::Item,
+ parent: &'a clean::Item,
+ tcx: TyCtxt<'tcx>,
+) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ fn tag_html<'a>(
+ class: &'a str,
+ title: &'a str,
+ contents: &'a str,
+ ) -> impl fmt::Display + 'a {
+ display_fn(move |f| {
+ write!(
+ f,
+ r#"<span class="stab {}" title="{}">{}</span>"#,
+ class,
+ Escape(title),
+ contents
+ )
+ })
}
- tags += &tag_html("deprecated", "", message);
- }
- // The "rustc_private" crates are permanently unstable so it makes no sense
- // to render "unstable" everywhere.
- if item.stability(tcx).as_ref().map(|s| s.is_unstable() && s.feature != sym::rustc_private)
- == Some(true)
- {
- tags += &tag_html("unstable", "", "Experimental");
- }
+ // The trailing space after each tag is to space it properly against the rest of the docs.
+ if let Some(depr) = &item.deprecation(tcx) {
+ let message = if stability::deprecation_in_effect(depr) {
+ "Deprecated"
+ } else {
+ "Deprecation planned"
+ };
+ write!(f, "{}", tag_html("deprecated", "", message))?;
+ }
- let cfg = match (&item.cfg, parent.cfg.as_ref()) {
- (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
- (cfg, _) => cfg.as_deref().cloned(),
- };
+ // The "rustc_private" crates are permanently unstable so it makes no sense
+ // to render "unstable" everywhere.
+ if item.stability(tcx).as_ref().map(|s| s.is_unstable() && s.feature != sym::rustc_private)
+ == Some(true)
+ {
+ write!(f, "{}", tag_html("unstable", "", "Experimental"))?;
+ }
- debug!("Portability name={:?} {:?} - {:?} = {:?}", item.name, item.cfg, parent.cfg, cfg);
- if let Some(ref cfg) = cfg {
- tags += &tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html());
- }
+ let cfg = match (&item.cfg, parent.cfg.as_ref()) {
+ (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
+ (cfg, _) => cfg.as_deref().cloned(),
+ };
- tags
+ debug!("Portability name={:?} {:?} - {:?} = {:?}", item.name, item.cfg, parent.cfg, cfg);
+ if let Some(ref cfg) = cfg {
+ write!(
+ f,
+ "{}",
+ tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html())
+ )
+ } else {
+ Ok(())
+ }
+ })
}
fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &clean::Function) {
@@ -522,12 +545,12 @@ fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &cle
f.decl.output.as_return().and_then(|output| notable_traits_button(output, cx));
wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
w.reserve(header_len);
write!(
w,
- "{vis}{constness}{asyncness}{unsafety}{abi}fn \
+ "{attrs}{vis}{constness}{asyncness}{unsafety}{abi}fn \
{name}{generics}{decl}{notable_traits}{where_clause}",
+ attrs = render_attributes_in_pre(it, ""),
vis = visibility,
constness = constness,
asyncness = asyncness,
@@ -540,7 +563,7 @@ fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &cle
notable_traits = notable_traits.unwrap_or_default(),
);
});
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
}
fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Trait) {
@@ -558,17 +581,17 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
let must_implement_one_of_functions = tcx.trait_def(t.def_id).must_implement_one_of.clone();
// Output the trait definition
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
+ wrap_item(w, |mut w| {
write!(
w,
- "{}{}{}trait {}{}{}",
+ "{attrs}{}{}{}trait {}{}{}",
visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
t.unsafety(tcx).print_with_space(),
if t.is_auto(tcx) { "auto " } else { "" },
it.name.unwrap(),
t.generics.print(cx),
- bounds
+ bounds,
+ attrs = render_attributes_in_pre(it, ""),
);
if !t.generics.where_predicates.is_empty() {
@@ -588,7 +611,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
if should_hide_fields(count_types) {
toggle = true;
toggle_open(
- w,
+ &mut w,
format_args!("{} associated items", count_types + count_consts + count_methods),
);
}
@@ -612,7 +635,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
if !toggle && should_hide_fields(count_types + count_consts) {
toggle = true;
toggle_open(
- w,
+ &mut w,
format_args!(
"{} associated constant{} and {} method{}",
count_consts,
@@ -640,7 +663,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
}
if !toggle && should_hide_fields(count_methods) {
toggle = true;
- toggle_open(w, format_args!("{} methods", count_methods));
+ toggle_open(&mut w, format_args!("{} methods", count_methods));
}
if count_consts != 0 && count_methods != 0 {
w.write_str("\n");
@@ -688,14 +711,14 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
}
}
if toggle {
- toggle_close(w);
+ toggle_close(&mut w);
}
w.write_str("}");
}
});
// Trait documentation
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
fn write_small_section_header(w: &mut Buffer, id: &str, title: &str, extra_content: &str) {
write!(
@@ -713,7 +736,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
let item_type = m.type_();
let id = cx.derive_id(format!("{}.{}", item_type, name));
let mut content = Buffer::empty_from(w);
- document(&mut content, cx, m, Some(t), HeadingOffset::H5);
+ write!(&mut content, "{}", document(cx, m, Some(t), HeadingOffset::H5));
let toggled = !content.is_empty();
if toggled {
let method_toggle_class = if item_type.is_method() { " method-toggle" } else { "" };
@@ -825,7 +848,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
}
// If there are methods directly on this trait object, render them here.
- render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All);
+ write!(w, "{}", render_assoc_items(cx, it, it.item_id.expect_def_id(), AssocItemRender::All));
let cloned_shared = Rc::clone(&cx.shared);
let cache = &cloned_shared.cache;
@@ -858,8 +881,8 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
let (mut synthetic, mut concrete): (Vec<&&Impl>, Vec<&&Impl>) =
local.iter().partition(|i| i.inner_impl().kind.is_auto());
- synthetic.sort_by(|a, b| compare_impl(a, b, cx));
- concrete.sort_by(|a, b| compare_impl(a, b, cx));
+ synthetic.sort_by_cached_key(|i| ImplString::new(i, cx));
+ concrete.sort_by_cached_key(|i| ImplString::new(i, cx));
if !foreign.is_empty() {
write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", "");
@@ -1035,147 +1058,201 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::TraitAlias) {
wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
write!(
w,
- "trait {}{}{} = {};",
+ "{attrs}trait {}{}{} = {};",
it.name.unwrap(),
t.generics.print(cx),
print_where_clause(&t.generics, cx, 0, Ending::Newline),
- bounds(&t.bounds, true, cx)
+ bounds(&t.bounds, true, cx),
+ attrs = render_attributes_in_pre(it, ""),
);
});
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
// Render any items associated directly to this alias, as otherwise they
// won't be visible anywhere in the docs. It would be nice to also show
// associated items from the aliased type (see discussion in #32077), but
// we need #14072 to make sense of the generics.
- render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+ write!(w, "{}", render_assoc_items(cx, it, it.item_id.expect_def_id(), AssocItemRender::All))
}
fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::OpaqueTy) {
wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
write!(
w,
- "type {}{}{where_clause} = impl {bounds};",
+ "{attrs}type {}{}{where_clause} = impl {bounds};",
it.name.unwrap(),
t.generics.print(cx),
where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
bounds = bounds(&t.bounds, false, cx),
+ attrs = render_attributes_in_pre(it, ""),
);
});
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
// Render any items associated directly to this alias, as otherwise they
// won't be visible anywhere in the docs. It would be nice to also show
// associated items from the aliased type (see discussion in #32077), but
// we need #14072 to make sense of the generics.
- render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+ write!(w, "{}", render_assoc_items(cx, it, it.item_id.expect_def_id(), AssocItemRender::All))
}
fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Typedef) {
fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) {
wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
write!(
w,
- "{}type {}{}{where_clause} = {type_};",
+ "{attrs}{}type {}{}{where_clause} = {type_};",
visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
it.name.unwrap(),
t.generics.print(cx),
where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
type_ = t.type_.print(cx),
+ attrs = render_attributes_in_pre(it, ""),
);
});
}
write_content(w, cx, it, t);
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
let def_id = it.item_id.expect_def_id();
// Render any items associated directly to this alias, as otherwise they
// won't be visible anywhere in the docs. It would be nice to also show
// associated items from the aliased type (see discussion in #32077), but
// we need #14072 to make sense of the generics.
- render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
- document_type_layout(w, cx, def_id);
+ write!(w, "{}", render_assoc_items(cx, it, def_id, AssocItemRender::All));
+ write!(w, "{}", document_type_layout(cx, def_id));
}
fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- render_union(w, it, Some(&s.generics), &s.fields, cx);
- });
+ #[derive(Template)]
+ #[template(path = "item_union.html")]
+ struct ItemUnion<'a, 'cx> {
+ cx: std::cell::RefCell<&'a mut Context<'cx>>,
+ it: &'a clean::Item,
+ s: &'a clean::Union,
+ }
- document(w, cx, it, None, HeadingOffset::H2);
+ impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> {
+ fn render_assoc_items<'b>(
+ &'b self,
+ ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let def_id = self.it.item_id.expect_def_id();
+ let mut cx = self.cx.borrow_mut();
+ let v = render_assoc_items(*cx, self.it, def_id, AssocItemRender::All);
+ write!(f, "{v}")
+ })
+ }
+ fn document_type_layout<'b>(
+ &'b self,
+ ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let def_id = self.it.item_id.expect_def_id();
+ let cx = self.cx.borrow_mut();
+ let v = document_type_layout(*cx, def_id);
+ write!(f, "{v}")
+ })
+ }
+ fn render_union<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let cx = self.cx.borrow_mut();
+ let v = render_union(self.it, Some(&self.s.generics), &self.s.fields, *cx);
+ write!(f, "{v}")
+ })
+ }
+ fn render_attributes_in_pre<'b>(
+ &'b self,
+ ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let v = render_attributes_in_pre(self.it, "");
+ write!(f, "{v}")
+ })
+ }
+ fn document<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let mut cx = self.cx.borrow_mut();
+ let v = document(*cx, self.it, None, HeadingOffset::H2);
+ write!(f, "{v}")
+ })
+ }
+ fn document_field<'b>(
+ &'b self,
+ field: &'a clean::Item,
+ ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let mut cx = self.cx.borrow_mut();
+ let v = document(*cx, field, Some(self.it), HeadingOffset::H3);
+ write!(f, "{v}")
+ })
+ }
+ fn stability_field(&self, field: &clean::Item) -> Option<String> {
+ let cx = self.cx.borrow();
+ field.stability_class(cx.tcx())
+ }
+ fn print_ty<'b>(
+ &'b self,
+ ty: &'a clean::Type,
+ ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let cx = self.cx.borrow();
+ let v = ty.print(*cx);
+ write!(f, "{v}")
+ })
+ }
- let mut fields = s
- .fields
- .iter()
- .filter_map(|f| match *f.kind {
- clean::StructFieldItem(ref ty) => Some((f, ty)),
- _ => None,
- })
- .peekable();
- if fields.peek().is_some() {
- write!(
- w,
- "<h2 id=\"fields\" class=\"fields small-section-header\">\
- Fields<a href=\"#fields\" class=\"anchor\">§</a>\
- </h2>"
- );
- for (field, ty) in fields {
- let name = field.name.expect("union field name");
- let id = format!("{}.{}", ItemType::StructField, name);
- write!(
- w,
- "<span id=\"{id}\" class=\"{shortty} small-section-header\">\
- <a href=\"#{id}\" class=\"anchor field\">§</a>\
- <code>{name}: {ty}</code>\
- </span>",
- shortty = ItemType::StructField,
- ty = ty.print(cx),
- );
- if let Some(stability_class) = field.stability_class(cx.tcx()) {
- write!(w, "<span class=\"stab {stability_class}\"></span>");
- }
- document(w, cx, field, Some(it), HeadingOffset::H3);
+ fn fields_iter(
+ &self,
+ ) -> std::iter::Peekable<impl Iterator<Item = (&'a clean::Item, &'a clean::Type)>> {
+ self.s
+ .fields
+ .iter()
+ .filter_map(|f| match *f.kind {
+ clean::StructFieldItem(ref ty) => Some((f, ty)),
+ _ => None,
+ })
+ .peekable()
}
}
- let def_id = it.item_id.expect_def_id();
- render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
- document_type_layout(w, cx, def_id);
+
+ ItemUnion { cx: std::cell::RefCell::new(cx), it, s }.render_into(w).unwrap();
}
-fn print_tuple_struct_fields(w: &mut Buffer, cx: &Context<'_>, s: &[clean::Item]) {
- for (i, ty) in s.iter().enumerate() {
- if i > 0 {
- w.write_str(", ");
- }
- match *ty.kind {
- clean::StrippedItem(box clean::StructFieldItem(_)) => w.write_str("_"),
- clean::StructFieldItem(ref ty) => write!(w, "{}", ty.print(cx)),
- _ => unreachable!(),
+fn print_tuple_struct_fields<'a, 'cx: 'a>(
+ cx: &'a Context<'cx>,
+ s: &'a [clean::Item],
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ display_fn(|f| {
+ for (i, ty) in s.iter().enumerate() {
+ if i > 0 {
+ f.write_str(", ")?;
+ }
+ match *ty.kind {
+ clean::StrippedItem(box clean::StructFieldItem(_)) => f.write_str("_")?,
+ clean::StructFieldItem(ref ty) => write!(f, "{}", ty.print(cx))?,
+ _ => unreachable!(),
+ }
}
- }
+ Ok(())
+ })
}
fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::Enum) {
let tcx = cx.tcx();
let count_variants = e.variants().count();
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
+ wrap_item(w, |mut w| {
write!(
w,
- "{}enum {}{}",
+ "{attrs}{}enum {}{}",
visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
it.name.unwrap(),
e.generics.print(cx),
+ attrs = render_attributes_in_pre(it, ""),
);
if !print_where_clause_and_check(w, &e.generics, cx) {
// If there wasn't a `where` clause, we add a whitespace.
@@ -1189,7 +1266,7 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
w.write_str("{\n");
let toggle = should_hide_fields(count_variants);
if toggle {
- toggle_open(w, format_args!("{} variants", count_variants));
+ toggle_open(&mut w, format_args!("{} variants", count_variants));
}
for v in e.variants() {
w.write_str(" ");
@@ -1199,9 +1276,7 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
clean::VariantItem(ref var) => match var.kind {
clean::VariantKind::CLike => write!(w, "{}", name),
clean::VariantKind::Tuple(ref s) => {
- write!(w, "{}(", name);
- print_tuple_struct_fields(w, cx, s);
- w.write_str(")");
+ write!(w, "{name}({})", print_tuple_struct_fields(cx, s),);
}
clean::VariantKind::Struct(ref s) => {
render_struct(w, v, None, None, &s.fields, " ", false, cx);
@@ -1212,28 +1287,29 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
w.write_str(",\n");
}
- if variants_stripped {
+ if variants_stripped && !it.is_non_exhaustive() {
w.write_str(" // some variants omitted\n");
}
if toggle {
- toggle_close(w);
+ toggle_close(&mut w);
}
w.write_str("}");
}
});
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
if count_variants != 0 {
write!(
w,
"<h2 id=\"variants\" class=\"variants small-section-header\">\
Variants{}<a href=\"#variants\" class=\"anchor\">§</a>\
- </h2>",
- document_non_exhaustive_header(it)
+ </h2>\
+ {}\
+ <div class=\"variants\">",
+ document_non_exhaustive_header(it),
+ document_non_exhaustive(it)
);
- document_non_exhaustive(w, it);
- write!(w, "<div class=\"variants\">");
for variant in e.variants() {
let id = cx.derive_id(format!("{}.{}", ItemType::Variant, variant.name.unwrap()));
write!(
@@ -1254,9 +1330,7 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
let clean::VariantItem(variant_data) = &*variant.kind else { unreachable!() };
if let clean::VariantKind::Tuple(ref s) = variant_data.kind {
- w.write_str("(");
- print_tuple_struct_fields(w, cx, s);
- w.write_str(")");
+ write!(w, "({})", print_tuple_struct_fields(cx, s),);
}
w.write_str("</h3></section>");
@@ -1280,9 +1354,10 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
write!(
w,
"<div class=\"sub-variant\" id=\"{variant_id}\">\
- <h4>{heading}</h4>",
+ <h4>{heading}</h4>\
+ {}",
+ document_non_exhaustive(variant)
);
- document_non_exhaustive(w, variant);
for field in fields {
match *field.kind {
clean::StrippedItem(box clean::StructFieldItem(_)) => {}
@@ -1300,10 +1375,13 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
<code>{f}: {t}</code>\
</span>",
f = field.name.unwrap(),
- t = ty.print(cx)
+ t = ty.print(cx),
+ );
+ write!(
+ w,
+ "{}</div>",
+ document(cx, field, Some(variant), HeadingOffset::H5)
);
- document(w, cx, field, Some(variant), HeadingOffset::H5);
- write!(w, "</div>");
}
_ => unreachable!(),
}
@@ -1311,18 +1389,18 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
w.write_str("</div>");
}
- document(w, cx, variant, Some(it), HeadingOffset::H4);
+ write!(w, "{}", document(cx, variant, Some(it), HeadingOffset::H4));
}
write!(w, "</div>");
}
let def_id = it.item_id.expect_def_id();
- render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
- document_type_layout(w, cx, def_id);
+ write!(w, "{}", render_assoc_items(cx, it, def_id, AssocItemRender::All));
+ write!(w, "{}", document_type_layout(cx, def_id));
}
fn item_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Macro) {
highlight::render_item_decl_with_highlighting(&t.source, w);
- document(w, cx, it, None, HeadingOffset::H2)
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
}
fn item_proc_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, m: &clean::ProcMacro) {
@@ -1348,14 +1426,14 @@ fn item_proc_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, m: &c
}
}
});
- document(w, cx, it, None, HeadingOffset::H2)
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
}
fn item_primitive(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
let def_id = it.item_id.expect_def_id();
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
if it.name.map(|n| n.as_str() != "reference").unwrap_or(false) {
- render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
+ write!(w, "{}", render_assoc_items(cx, it, def_id, AssocItemRender::All));
} else {
// We handle the "reference" primitive type on its own because we only want to list
// implementations on generic types.
@@ -1411,7 +1489,7 @@ fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &cle
}
});
- document(w, cx, it, None, HeadingOffset::H2)
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
}
fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) {
@@ -1420,7 +1498,7 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx);
});
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
let mut fields = s
.fields
@@ -1436,11 +1514,12 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
w,
"<h2 id=\"fields\" class=\"fields small-section-header\">\
{}{}<a href=\"#fields\" class=\"anchor\">§</a>\
- </h2>",
+ </h2>\
+ {}",
if s.ctor_kind.is_none() { "Fields" } else { "Tuple Fields" },
- document_non_exhaustive_header(it)
+ document_non_exhaustive_header(it),
+ document_non_exhaustive(it)
);
- document_non_exhaustive(w, it);
for (index, (field, ty)) in fields.enumerate() {
let field_name =
field.name.map_or_else(|| index.to_string(), |sym| sym.as_str().to_string());
@@ -1454,13 +1533,13 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
item_type = ItemType::StructField,
ty = ty.print(cx)
);
- document(w, cx, field, Some(it), HeadingOffset::H3);
+ write!(w, "{}", document(cx, field, Some(it), HeadingOffset::H3));
}
}
}
let def_id = it.item_id.expect_def_id();
- render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
- document_type_layout(w, cx, def_id);
+ write!(w, "{}", render_assoc_items(cx, it, def_id, AssocItemRender::All));
+ write!(w, "{}", document_type_layout(cx, def_id));
}
fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
@@ -1475,7 +1554,7 @@ fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
typ = s.type_.print(cx)
);
});
- document(w, cx, it, None, HeadingOffset::H2)
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
}
fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
@@ -1490,13 +1569,13 @@ fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
);
});
- document(w, cx, it, None, HeadingOffset::H2);
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
- render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+ write!(w, "{}", render_assoc_items(cx, it, it.item_id.expect_def_id(), AssocItemRender::All))
}
fn item_keyword(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
- document(w, cx, it, None, HeadingOffset::H2)
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
}
/// Compare two strings treating multi-digit numbers as single units (i.e. natural sort order).
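
The next hunk removes `compare_impl` in favor of an `ImplString` key (used with `sort_by_cached_key` earlier in this patch), whose `Ord` delegates to `compare_names`, the natural-sort comparison the doc comment above describes. The sketch below is illustrative only and is not the actual `compare_names`; it just shows the ordering behavior that comment refers to.

use std::cmp::Ordering;

// Minimal natural-order comparison: digit runs are compared as whole numbers,
// so "item2" sorts before "item10" instead of after it.
fn natural_cmp(a: &str, b: &str) -> Ordering {
    // Split into alternating runs of digits and non-digits, tagging digit runs.
    fn runs(s: &str) -> Vec<(bool, &str)> {
        let mut out = Vec::new();
        let mut start = 0;
        let mut is_digit = None;
        for (i, c) in s.char_indices() {
            let d = c.is_ascii_digit();
            if is_digit != Some(d) {
                if i > start {
                    out.push((is_digit.unwrap(), &s[start..i]));
                }
                start = i;
                is_digit = Some(d);
            }
        }
        if start < s.len() {
            out.push((is_digit.unwrap(), &s[start..]));
        }
        out
    }

    for ((a_digit, x), (b_digit, y)) in runs(a).into_iter().zip(runs(b)) {
        let ord = if a_digit && b_digit {
            // Compare digit runs by numeric value (saturating on overflow).
            x.parse::<u128>().unwrap_or(u128::MAX).cmp(&y.parse::<u128>().unwrap_or(u128::MAX))
        } else {
            x.cmp(y)
        };
        if ord != Ordering::Equal {
            return ord;
        }
    }
    // All shared runs were equal: the shorter string sorts first.
    a.len().cmp(&b.len())
}

fn main() {
    assert_eq!(natural_cmp("item2", "item10"), Ordering::Less);
    assert_eq!(natural_cmp("item10", "item2"), Ordering::Greater);
    assert_eq!(natural_cmp("item2", "item2"), Ordering::Equal);
}

Because `ImplString`'s `Ord` delegates to `compare_names`, the cached keys preserve this ordering while each impl is formatted only once, rather than on every comparison as with the removed `compare_impl`.
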
@@ -1575,12 +1654,25 @@ where
w.write_str("</code></pre>");
}
-fn compare_impl<'a, 'b>(lhs: &'a &&Impl, rhs: &'b &&Impl, cx: &Context<'_>) -> Ordering {
- let lhss = format!("{}", lhs.inner_impl().print(false, cx));
- let rhss = format!("{}", rhs.inner_impl().print(false, cx));
+#[derive(PartialEq, Eq)]
+struct ImplString(String);
- // lhs and rhs are formatted as HTML, which may be unnecessary
- compare_names(&lhss, &rhss)
+impl ImplString {
+ fn new(i: &Impl, cx: &Context<'_>) -> ImplString {
+ ImplString(format!("{}", i.inner_impl().print(false, cx)))
+ }
+}
+
+impl PartialOrd for ImplString {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(Ord::cmp(self, other))
+ }
+}
+
+impl Ord for ImplString {
+ fn cmp(&self, other: &Self) -> Ordering {
+ compare_names(&self.0, &other.0)
+ }
}
fn render_implementor(
@@ -1620,64 +1712,69 @@ fn render_implementor(
);
}
-fn render_union(
- w: &mut Buffer,
- it: &clean::Item,
- g: Option<&clean::Generics>,
- fields: &[clean::Item],
- cx: &Context<'_>,
-) {
- let tcx = cx.tcx();
- write!(
- w,
- "{}union {}",
- visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
- it.name.unwrap(),
- );
-
- let where_displayed = g
- .map(|g| {
- write!(w, "{}", g.print(cx));
- print_where_clause_and_check(w, g, cx)
- })
- .unwrap_or(false);
+fn render_union<'a, 'cx: 'a>(
+ it: &'a clean::Item,
+ g: Option<&'a clean::Generics>,
+ fields: &'a [clean::Item],
+ cx: &'a Context<'cx>,
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ display_fn(move |mut f| {
+ let tcx = cx.tcx();
+ write!(
+ f,
+ "{}union {}",
+ visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
+ it.name.unwrap(),
+ )?;
+
+ let where_displayed = g
+ .map(|g| {
+ let mut buf = Buffer::html();
+ write!(buf, "{}", g.print(cx));
+ let where_displayed = print_where_clause_and_check(&mut buf, g, cx);
+ write!(f, "{buf}", buf = buf.into_inner()).unwrap();
+ where_displayed
+ })
+ .unwrap_or(false);
- // If there wasn't a `where` clause, we add a whitespace.
- if !where_displayed {
- w.write_str(" ");
- }
+ // If there wasn't a `where` clause, we add a whitespace.
+ if !where_displayed {
+ f.write_str(" ")?;
+ }
- write!(w, "{{\n");
- let count_fields =
- fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count();
- let toggle = should_hide_fields(count_fields);
- if toggle {
- toggle_open(w, format_args!("{} fields", count_fields));
- }
+ write!(f, "{{\n")?;
+ let count_fields =
+ fields.iter().filter(|field| matches!(*field.kind, clean::StructFieldItem(..))).count();
+ let toggle = should_hide_fields(count_fields);
+ if toggle {
+ toggle_open(&mut f, format_args!("{} fields", count_fields));
+ }
- for field in fields {
- if let clean::StructFieldItem(ref ty) = *field.kind {
- write!(
- w,
- " {}{}: {},\n",
- visibility_print_with_space(field.visibility(tcx), field.item_id, cx),
- field.name.unwrap(),
- ty.print(cx)
- );
+ for field in fields {
+ if let clean::StructFieldItem(ref ty) = *field.kind {
+ write!(
+ f,
+ " {}{}: {},\n",
+ visibility_print_with_space(field.visibility(tcx), field.item_id, cx),
+ field.name.unwrap(),
+ ty.print(cx)
+ )?;
+ }
}
- }
- if it.has_stripped_entries().unwrap() {
- write!(w, " /* private fields */\n");
- }
- if toggle {
- toggle_close(w);
- }
- w.write_str("}");
+ if it.has_stripped_entries().unwrap() {
+ write!(f, " /* private fields */\n")?;
+ }
+ if toggle {
+ toggle_close(&mut f);
+ }
+ f.write_str("}").unwrap();
+ Ok(())
+ })
}
fn render_struct(
- w: &mut Buffer,
+ mut w: &mut Buffer,
it: &clean::Item,
g: Option<&clean::Generics>,
ty: Option<CtorKind>,
@@ -1699,10 +1796,11 @@ fn render_struct(
}
match ty {
None => {
- let where_diplayed = g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);
+ let where_displayed =
+ g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);
// If there wasn't a `where` clause, we add a whitespace.
- if !where_diplayed {
+ if !where_displayed {
w.write_str(" {");
} else {
w.write_str("{");
@@ -1712,7 +1810,7 @@ fn render_struct(
let has_visible_fields = count_fields > 0;
let toggle = should_hide_fields(count_fields);
if toggle {
- toggle_open(w, format_args!("{} fields", count_fields));
+ toggle_open(&mut w, format_args!("{} fields", count_fields));
}
for field in fields {
if let clean::StructFieldItem(ref ty) = *field.kind {
@@ -1736,7 +1834,7 @@ fn render_struct(
write!(w, " /* private fields */ ");
}
if toggle {
- toggle_close(w);
+ toggle_close(&mut w);
}
w.write_str("}");
}
@@ -1782,155 +1880,169 @@ fn document_non_exhaustive_header(item: &clean::Item) -> &str {
if item.is_non_exhaustive() { " (Non-exhaustive)" } else { "" }
}
-fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) {
- if item.is_non_exhaustive() {
- write!(
- w,
- "<details class=\"toggle non-exhaustive\">\
- <summary class=\"hideme\"><span>{}</span></summary>\
- <div class=\"docblock\">",
- {
- if item.is_struct() {
- "This struct is marked as non-exhaustive"
- } else if item.is_enum() {
- "This enum is marked as non-exhaustive"
- } else if item.is_variant() {
- "This variant is marked as non-exhaustive"
- } else {
- "This type is marked as non-exhaustive"
+fn document_non_exhaustive<'a>(item: &'a clean::Item) -> impl fmt::Display + 'a {
+ display_fn(|f| {
+ if item.is_non_exhaustive() {
+ write!(
+ f,
+ "<details class=\"toggle non-exhaustive\">\
+ <summary class=\"hideme\"><span>{}</span></summary>\
+ <div class=\"docblock\">",
+ {
+ if item.is_struct() {
+ "This struct is marked as non-exhaustive"
+ } else if item.is_enum() {
+ "This enum is marked as non-exhaustive"
+ } else if item.is_variant() {
+ "This variant is marked as non-exhaustive"
+ } else {
+ "This type is marked as non-exhaustive"
+ }
}
+ )?;
+
+ if item.is_struct() {
+ f.write_str(
+ "Non-exhaustive structs could have additional fields added in future. \
+ Therefore, non-exhaustive structs cannot be constructed in external crates \
+ using the traditional <code>Struct { .. }</code> syntax; cannot be \
+ matched against without a wildcard <code>..</code>; and \
+ struct update syntax will not work.",
+ )?;
+ } else if item.is_enum() {
+ f.write_str(
+ "Non-exhaustive enums could have additional variants added in future. \
+ Therefore, when matching against variants of non-exhaustive enums, an \
+ extra wildcard arm must be added to account for any future variants.",
+ )?;
+ } else if item.is_variant() {
+ f.write_str(
+ "Non-exhaustive enum variants could have additional fields added in future. \
+ Therefore, non-exhaustive enum variants cannot be constructed in external \
+ crates and cannot be matched against.",
+ )?;
+ } else {
+ f.write_str(
+ "This type will require a wildcard arm in any match statements or constructors.",
+ )?;
}
- );
- if item.is_struct() {
- w.write_str(
- "Non-exhaustive structs could have additional fields added in future. \
- Therefore, non-exhaustive structs cannot be constructed in external crates \
- using the traditional <code>Struct { .. }</code> syntax; cannot be \
- matched against without a wildcard <code>..</code>; and \
- struct update syntax will not work.",
- );
- } else if item.is_enum() {
- w.write_str(
- "Non-exhaustive enums could have additional variants added in future. \
- Therefore, when matching against variants of non-exhaustive enums, an \
- extra wildcard arm must be added to account for any future variants.",
- );
- } else if item.is_variant() {
- w.write_str(
- "Non-exhaustive enum variants could have additional fields added in future. \
- Therefore, non-exhaustive enum variants cannot be constructed in external \
- crates and cannot be matched against.",
- );
- } else {
- w.write_str(
- "This type will require a wildcard arm in any match statements or constructors.",
- );
+ f.write_str("</div></details>")?;
}
-
- w.write_str("</div></details>");
- }
+ Ok(())
+ })
}
-fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
- fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) {
+fn document_type_layout<'a, 'cx: 'a>(
+ cx: &'a Context<'cx>,
+ ty_def_id: DefId,
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ fn write_size_of_layout(mut w: impl fmt::Write, layout: &LayoutS, tag_size: u64) {
if layout.abi.is_unsized() {
- write!(w, "(unsized)");
+ write!(w, "(unsized)").unwrap();
} else {
let size = layout.size.bytes() - tag_size;
- write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" },);
+ write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" }).unwrap();
+ if layout.abi.is_uninhabited() {
+ write!(
+ w,
+ " (<a href=\"https://doc.rust-lang.org/stable/reference/glossary.html#uninhabited\">uninhabited</a>)"
+ ).unwrap();
+ }
}
}
- if !cx.shared.show_type_layout {
- return;
- }
-
- writeln!(
- w,
- "<h2 id=\"layout\" class=\"small-section-header\"> \
- Layout<a href=\"#layout\" class=\"anchor\">§</a></h2>"
- );
- writeln!(w, "<div class=\"docblock\">");
-
- let tcx = cx.tcx();
- let param_env = tcx.param_env(ty_def_id);
- let ty = tcx.type_of(ty_def_id).subst_identity();
- match tcx.layout_of(param_env.and(ty)) {
- Ok(ty_layout) => {
- writeln!(
- w,
- "<div class=\"warning\"><p><strong>Note:</strong> Most layout information is \
- <strong>completely unstable</strong> and may even differ between compilations. \
- The only exception is types with certain <code>repr(...)</code> attributes. \
- Please see the Rust Reference’s \
- <a href=\"https://doc.rust-lang.org/reference/type-layout.html\">“Type Layout”</a> \
- chapter for details on type layout guarantees.</p></div>"
- );
- w.write_str("<p><strong>Size:</strong> ");
- write_size_of_layout(w, &ty_layout.layout.0, 0);
- writeln!(w, "</p>");
- if let Variants::Multiple { variants, tag, tag_encoding, .. } =
- &ty_layout.layout.variants()
- {
- if !variants.is_empty() {
- w.write_str(
- "<p><strong>Size for each variant:</strong></p>\
- <ul>",
- );
-
- let Adt(adt, _) = ty_layout.ty.kind() else {
- span_bug!(tcx.def_span(ty_def_id), "not an adt")
- };
+ display_fn(move |mut f| {
+ if !cx.shared.show_type_layout {
+ return Ok(());
+ }
- let tag_size = if let TagEncoding::Niche { .. } = tag_encoding {
- 0
- } else if let Primitive::Int(i, _) = tag.primitive() {
- i.size().bytes()
- } else {
- span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
- };
+ writeln!(
+ f,
+ "<h2 id=\"layout\" class=\"small-section-header\"> \
+ Layout<a href=\"#layout\" class=\"anchor\">§</a></h2>"
+ )?;
+ writeln!(f, "<div class=\"docblock\">")?;
- for (index, layout) in variants.iter_enumerated() {
- let name = adt.variant(index).name;
- write!(w, "<li><code>{name}</code>: ");
- write_size_of_layout(w, layout, tag_size);
- writeln!(w, "</li>");
+ let tcx = cx.tcx();
+ let param_env = tcx.param_env(ty_def_id);
+ let ty = tcx.type_of(ty_def_id).subst_identity();
+ match tcx.layout_of(param_env.and(ty)) {
+ Ok(ty_layout) => {
+ writeln!(
+ f,
+ "<div class=\"warning\"><p><strong>Note:</strong> Most layout information is \
+ <strong>completely unstable</strong> and may even differ between compilations. \
+ The only exception is types with certain <code>repr(...)</code> attributes. \
+ Please see the Rust Reference’s \
+ <a href=\"https://doc.rust-lang.org/reference/type-layout.html\">“Type Layout”</a> \
+ chapter for details on type layout guarantees.</p></div>"
+ )?;
+ f.write_str("<p><strong>Size:</strong> ")?;
+ write_size_of_layout(&mut f, &ty_layout.layout.0, 0);
+ writeln!(f, "</p>")?;
+ if let Variants::Multiple { variants, tag, tag_encoding, .. } =
+ &ty_layout.layout.variants()
+ {
+ if !variants.is_empty() {
+ f.write_str(
+ "<p><strong>Size for each variant:</strong></p>\
+ <ul>",
+ )?;
+
+ let Adt(adt, _) = ty_layout.ty.kind() else {
+ span_bug!(tcx.def_span(ty_def_id), "not an adt")
+ };
+
+ let tag_size = if let TagEncoding::Niche { .. } = tag_encoding {
+ 0
+ } else if let Primitive::Int(i, _) = tag.primitive() {
+ i.size().bytes()
+ } else {
+ span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
+ };
+
+ for (index, layout) in variants.iter_enumerated() {
+ let name = adt.variant(index).name;
+ write!(&mut f, "<li><code>{name}</code>: ")?;
+ write_size_of_layout(&mut f, layout, tag_size);
+ writeln!(&mut f, "</li>")?;
+ }
+ f.write_str("</ul>")?;
}
- w.write_str("</ul>");
}
}
+ // This kind of layout error can occur with valid code, e.g. if you try to
+ // get the layout of a generic type such as `Vec<T>`.
+ Err(LayoutError::Unknown(_)) => {
+ writeln!(
+ f,
+ "<p><strong>Note:</strong> Unable to compute type layout, \
+ possibly due to this type having generic parameters. \
+ Layout can only be computed for concrete, fully-instantiated types.</p>"
+ )?;
+ }
+ // This kind of error probably can't happen with valid code, but we don't
+ // want to panic and prevent the docs from building, so we just let the
+ // user know that we couldn't compute the layout.
+ Err(LayoutError::SizeOverflow(_)) => {
+ writeln!(
+ f,
+ "<p><strong>Note:</strong> Encountered an error during type layout; \
+ the type was too big.</p>"
+ )?;
+ }
+ Err(LayoutError::NormalizationFailure(_, _)) => {
+ writeln!(
+ f,
+ "<p><strong>Note:</strong> Encountered an error during type layout; \
+ the type failed to be normalized.</p>"
+ )?;
+ }
}
- // This kind of layout error can occur with valid code, e.g. if you try to
- // get the layout of a generic type such as `Vec<T>`.
- Err(LayoutError::Unknown(_)) => {
- writeln!(
- w,
- "<p><strong>Note:</strong> Unable to compute type layout, \
- possibly due to this type having generic parameters. \
- Layout can only be computed for concrete, fully-instantiated types.</p>"
- );
- }
- // This kind of error probably can't happen with valid code, but we don't
- // want to panic and prevent the docs from building, so we just let the
- // user know that we couldn't compute the layout.
- Err(LayoutError::SizeOverflow(_)) => {
- writeln!(
- w,
- "<p><strong>Note:</strong> Encountered an error during type layout; \
- the type was too big.</p>"
- );
- }
- Err(LayoutError::NormalizationFailure(_, _)) => {
- writeln!(
- w,
- "<p><strong>Note:</strong> Encountered an error during type layout; \
- the type failed to be normalized.</p>"
- )
- }
- }
- writeln!(w, "</div>");
+ writeln!(f, "</div>")
+ })
}
fn pluralize(count: usize) -> &'static str {
diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs
index 090ea2cb1..f5b4a3f5a 100644
--- a/src/librustdoc/html/render/search_index.rs
+++ b/src/librustdoc/html/render/search_index.rs
@@ -7,9 +7,7 @@ use rustc_span::symbol::Symbol;
use serde::ser::{Serialize, SerializeStruct, Serializer};
use crate::clean;
-use crate::clean::types::{
- FnRetTy, Function, GenericBound, Generics, ItemId, Type, WherePredicate,
-};
+use crate::clean::types::{FnRetTy, Function, Generics, ItemId, Type, WherePredicate};
use crate::formats::cache::{Cache, OrphanImplItem};
use crate::formats::item_type::ItemType;
use crate::html::format::join_with_double_colon;
@@ -42,6 +40,7 @@ pub(crate) fn build_index<'tcx>(
parent_idx: None,
search_type: get_function_type_for_search(item, tcx, impl_generics.as_ref(), cache),
aliases: item.attrs.get_doc_aliases(),
+ deprecation: item.deprecation(tcx),
});
}
}
@@ -60,7 +59,7 @@ pub(crate) fn build_index<'tcx>(
// `sort_unstable_by_key` produces lifetime errors
let k1 = (&k1.path, k1.name.as_str(), &k1.ty, &k1.parent);
let k2 = (&k2.path, k2.name.as_str(), &k2.ty, &k2.parent);
- std::cmp::Ord::cmp(&k1, &k2)
+ Ord::cmp(&k1, &k2)
});
// Set up alias indexes.
@@ -253,7 +252,17 @@ pub(crate) fn build_index<'tcx>(
)?;
crate_data.serialize_field(
"q",
- &self.items.iter().map(|item| &item.path).collect::<Vec<_>>(),
+ &self
+ .items
+ .iter()
+ .enumerate()
+ // Serialize as an array of item indices and full paths
+ .filter_map(
+ |(index, item)| {
+ if item.path.is_empty() { None } else { Some((index, &item.path)) }
+ },
+ )
+ .collect::<Vec<_>>(),
)?;
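+            // With the filter above, "q" is a sparse list of `[index, path]` pairs
+            // rather than one path string per item, which should keep the serialized
+            // index smaller when most paths are empty.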
crate_data.serialize_field(
"d",
@@ -307,6 +316,16 @@ pub(crate) fn build_index<'tcx>(
.collect::<Vec<_>>(),
)?;
crate_data.serialize_field(
+ "c",
+ &self
+ .items
+ .iter()
+ .enumerate()
+ // Serialize as an array of deprecated item indices
+ .filter_map(|(index, item)| item.deprecation.map(|_| index))
+ .collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
"p",
&self.paths.iter().map(|(it, s)| (it, s.as_str())).collect::<Vec<_>>(),
)?;
@@ -467,7 +486,7 @@ fn add_generics_and_bounds_as_types<'tcx, 'a>(
}
// First, check if it's "Self".
- let arg = if let Some(self_) = self_ {
+ let mut arg = if let Some(self_) = self_ {
match &*arg {
Type::BorrowedRef { type_, .. } if type_.is_self_type() => self_,
type_ if type_.is_self_type() => self_,
@@ -477,34 +496,33 @@ fn add_generics_and_bounds_as_types<'tcx, 'a>(
arg
};
+    // Strip references from the argument type.
+ while let Type::BorrowedRef { type_, .. } = &*arg {
+ arg = &*type_;
+ }
+
// If this argument is a type parameter and not a trait bound or a type, we need to look
// for its bounds.
if let Type::Generic(arg_s) = *arg {
// First we check if the bounds are in a `where` predicate...
- if let Some(where_pred) = generics.where_predicates.iter().find(|g| match g {
- WherePredicate::BoundPredicate { ty, .. } => ty.def_id(cache) == arg.def_id(cache),
+ for where_pred in generics.where_predicates.iter().filter(|g| match g {
+ WherePredicate::BoundPredicate { ty: Type::Generic(ty_s), .. } => *ty_s == arg_s,
_ => false,
}) {
let mut ty_generics = Vec::new();
let bounds = where_pred.get_bounds().unwrap_or_else(|| &[]);
for bound in bounds.iter() {
- if let GenericBound::TraitBound(poly_trait, _) = bound {
- for param_def in poly_trait.generic_params.iter() {
- match &param_def.kind {
- clean::GenericParamDefKind::Type { default: Some(ty), .. } => {
- add_generics_and_bounds_as_types(
- self_,
- generics,
- ty,
- tcx,
- recurse + 1,
- &mut ty_generics,
- cache,
- )
- }
- _ => {}
- }
- }
+ if let Some(path) = bound.get_trait_path() {
+ let ty = Type::Path { path };
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ );
}
}
insert_ty(res, arg.clone(), ty_generics);
diff --git a/src/librustdoc/html/render/sidebar.rs b/src/librustdoc/html/render/sidebar.rs
new file mode 100644
index 000000000..455b4e9ae
--- /dev/null
+++ b/src/librustdoc/html/render/sidebar.rs
@@ -0,0 +1,558 @@
+use std::{borrow::Cow, rc::Rc};
+
+use askama::Template;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::{def::CtorKind, def_id::DefIdSet};
+use rustc_middle::ty::{self, TyCtxt};
+
+use crate::{
+ clean,
+ formats::{item_type::ItemType, Impl},
+ html::{format::Buffer, markdown::IdMap},
+};
+
+use super::{item_ty_to_section, Context, ItemSection};
+
+#[derive(Template)]
+#[template(path = "sidebar.html")]
+pub(super) struct Sidebar<'a> {
+ pub(super) title_prefix: &'static str,
+ pub(super) title: &'a str,
+ pub(super) is_crate: bool,
+ pub(super) version: &'a str,
+ pub(super) blocks: Vec<LinkBlock<'a>>,
+ pub(super) path: String,
+}
+
+impl<'a> Sidebar<'a> {
+ /// Only create a `<section>` if there are any blocks
+ /// which should actually be rendered.
+ pub fn should_render_blocks(&self) -> bool {
+ self.blocks.iter().any(LinkBlock::should_render)
+ }
+}
+
+/// A sidebar section such as 'Methods'.
+pub(crate) struct LinkBlock<'a> {
+ /// The name of this section, e.g. 'Methods'
+ /// as well as the link to it, e.g. `#implementations`.
+ /// Will be rendered inside an `<h3>` tag
+ heading: Link<'a>,
+ links: Vec<Link<'a>>,
+ /// Render the heading even if there are no links
+ force_render: bool,
+}
+
+impl<'a> LinkBlock<'a> {
+ pub fn new(heading: Link<'a>, links: Vec<Link<'a>>) -> Self {
+ Self { heading, links, force_render: false }
+ }
+
+ pub fn forced(heading: Link<'a>) -> Self {
+ Self { heading, links: vec![], force_render: true }
+ }
+
+ pub fn should_render(&self) -> bool {
+ self.force_render || !self.links.is_empty()
+ }
+}
+
+/// A link to an item. Content should not be escaped.
+#[derive(PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
+pub(crate) struct Link<'a> {
+ /// The content for the anchor tag
+ name: Cow<'a, str>,
+ /// The id of an anchor within the page (without a `#` prefix)
+ href: Cow<'a, str>,
+}
+
+impl<'a> Link<'a> {
+ pub fn new(href: impl Into<Cow<'a, str>>, name: impl Into<Cow<'a, str>>) -> Self {
+ Self { href: href.into(), name: name.into() }
+ }
+ pub fn empty() -> Link<'static> {
+ Link::new("", "")
+ }
+}
+
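+/// Renders the sidebar for `it` into `buffer` using the `sidebar.html` Askama template.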
+pub(super) fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) {
+ let blocks: Vec<LinkBlock<'_>> = match *it.kind {
+ clean::StructItem(ref s) => sidebar_struct(cx, it, s),
+ clean::TraitItem(ref t) => sidebar_trait(cx, it, t),
+ clean::PrimitiveItem(_) => sidebar_primitive(cx, it),
+ clean::UnionItem(ref u) => sidebar_union(cx, it, u),
+ clean::EnumItem(ref e) => sidebar_enum(cx, it, e),
+ clean::TypedefItem(_) => sidebar_typedef(cx, it),
+ clean::ModuleItem(ref m) => vec![sidebar_module(&m.items)],
+ clean::ForeignTypeItem => sidebar_foreign_type(cx, it),
+ _ => vec![],
+ };
+ // The sidebar is designed to display sibling functions, modules and
+    // other miscellaneous information. Since there are lots of sibling
+    // items (and that causes quadratic growth in large modules),
+    // we refactor common parts into a shared JavaScript file per module.
+    // Still, we don't move everything into JS because we want to preserve
+ // as much HTML as possible in order to allow non-JS-enabled browsers
+ // to navigate the documentation (though slightly inefficiently).
+ let (title_prefix, title) = if it.is_struct()
+ || it.is_trait()
+ || it.is_primitive()
+ || it.is_union()
+ || it.is_enum()
+ || it.is_mod()
+ || it.is_typedef()
+ {
+ (
+ match *it.kind {
+ clean::ModuleItem(..) if it.is_crate() => "Crate ",
+ clean::ModuleItem(..) => "Module ",
+ _ => "",
+ },
+ it.name.as_ref().unwrap().as_str(),
+ )
+ } else {
+ ("", "")
+ };
+ let version =
+ if it.is_crate() { cx.cache().crate_version.as_deref().unwrap_or_default() } else { "" };
+ let path: String = if !it.is_mod() {
+ cx.current.iter().map(|s| s.as_str()).intersperse("::").collect()
+ } else {
+ "".into()
+ };
+ let sidebar = Sidebar { title_prefix, title, is_crate: it.is_crate(), version, blocks, path };
+ sidebar.render_into(buffer).unwrap();
+}
+
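+/// Collects `structfield.<name>` links for the named fields of a struct or union,
+/// sorted by field name.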
+fn get_struct_fields_name<'a>(fields: &'a [clean::Item]) -> Vec<Link<'a>> {
+ let mut fields = fields
+ .iter()
+ .filter(|f| matches!(*f.kind, clean::StructFieldItem(..)))
+ .filter_map(|f| {
+ f.name.as_ref().map(|name| Link::new(format!("structfield.{name}"), name.as_str()))
+ })
+ .collect::<Vec<Link<'a>>>();
+ fields.sort();
+ fields
+}
+
+fn sidebar_struct<'a>(
+ cx: &'a Context<'_>,
+ it: &'a clean::Item,
+ s: &'a clean::Struct,
+) -> Vec<LinkBlock<'a>> {
+ let fields = get_struct_fields_name(&s.fields);
+ let field_name = match s.ctor_kind {
+ Some(CtorKind::Fn) => Some("Tuple Fields"),
+ None => Some("Fields"),
+ _ => None,
+ };
+ let mut items = vec![];
+ if let Some(name) = field_name {
+ items.push(LinkBlock::new(Link::new("fields", name), fields));
+ }
+ sidebar_assoc_items(cx, it, &mut items);
+ items
+}
+
+fn sidebar_trait<'a>(
+ cx: &'a Context<'_>,
+ it: &'a clean::Item,
+ t: &'a clean::Trait,
+) -> Vec<LinkBlock<'a>> {
+ fn filter_items<'a>(
+ items: &'a [clean::Item],
+ filt: impl Fn(&clean::Item) -> bool,
+ ty: &str,
+ ) -> Vec<Link<'a>> {
+ let mut res = items
+ .iter()
+ .filter_map(|m: &clean::Item| match m.name {
+ Some(ref name) if filt(m) => Some(Link::new(format!("{ty}.{name}"), name.as_str())),
+ _ => None,
+ })
+ .collect::<Vec<Link<'a>>>();
+ res.sort();
+ res
+ }
+
+ let req_assoc = filter_items(&t.items, |m| m.is_ty_associated_type(), "associatedtype");
+ let prov_assoc = filter_items(&t.items, |m| m.is_associated_type(), "associatedtype");
+ let req_assoc_const =
+ filter_items(&t.items, |m| m.is_ty_associated_const(), "associatedconstant");
+ let prov_assoc_const =
+ filter_items(&t.items, |m| m.is_associated_const(), "associatedconstant");
+ let req_method = filter_items(&t.items, |m| m.is_ty_method(), "tymethod");
+ let prov_method = filter_items(&t.items, |m| m.is_method(), "method");
+ let mut foreign_impls = vec![];
+ if let Some(implementors) = cx.cache().implementors.get(&it.item_id.expect_def_id()) {
+ foreign_impls.extend(
+ implementors
+ .iter()
+ .filter(|i| !i.is_on_local_type(cx))
+ .filter_map(|i| super::extract_for_impl_name(&i.impl_item, cx))
+ .map(|(name, id)| Link::new(id, name)),
+ );
+ foreign_impls.sort();
+ }
+
+ let mut blocks: Vec<LinkBlock<'_>> = [
+ ("required-associated-types", "Required Associated Types", req_assoc),
+ ("provided-associated-types", "Provided Associated Types", prov_assoc),
+ ("required-associated-consts", "Required Associated Constants", req_assoc_const),
+ ("provided-associated-consts", "Provided Associated Constants", prov_assoc_const),
+ ("required-methods", "Required Methods", req_method),
+ ("provided-methods", "Provided Methods", prov_method),
+ ("foreign-impls", "Implementations on Foreign Types", foreign_impls),
+ ]
+ .into_iter()
+ .map(|(id, title, items)| LinkBlock::new(Link::new(id, title), items))
+ .collect();
+ sidebar_assoc_items(cx, it, &mut blocks);
+ blocks.push(LinkBlock::forced(Link::new("implementors", "Implementors")));
+ if t.is_auto(cx.tcx()) {
+ blocks.push(LinkBlock::forced(Link::new("synthetic-implementors", "Auto Implementors")));
+ }
+ blocks
+}
+
+fn sidebar_primitive<'a>(cx: &'a Context<'_>, it: &'a clean::Item) -> Vec<LinkBlock<'a>> {
+ if it.name.map(|n| n.as_str() != "reference").unwrap_or(false) {
+ let mut items = vec![];
+ sidebar_assoc_items(cx, it, &mut items);
+ items
+ } else {
+ let shared = Rc::clone(&cx.shared);
+ let (concrete, synthetic, blanket_impl) =
+ super::get_filtered_impls_for_reference(&shared, it);
+
+ sidebar_render_assoc_items(cx, &mut IdMap::new(), concrete, synthetic, blanket_impl).into()
+ }
+}
+
+fn sidebar_typedef<'a>(cx: &'a Context<'_>, it: &'a clean::Item) -> Vec<LinkBlock<'a>> {
+ let mut items = vec![];
+ sidebar_assoc_items(cx, it, &mut items);
+ items
+}
+
+fn sidebar_union<'a>(
+ cx: &'a Context<'_>,
+ it: &'a clean::Item,
+ u: &'a clean::Union,
+) -> Vec<LinkBlock<'a>> {
+ let fields = get_struct_fields_name(&u.fields);
+ let mut items = vec![LinkBlock::new(Link::new("fields", "Fields"), fields)];
+ sidebar_assoc_items(cx, it, &mut items);
+ items
+}
+
+/// Adds trait implementations into the blocks of links
+fn sidebar_assoc_items<'a>(
+ cx: &'a Context<'_>,
+ it: &'a clean::Item,
+ links: &mut Vec<LinkBlock<'a>>,
+) {
+ let did = it.item_id.expect_def_id();
+ let cache = cx.cache();
+
+ let mut assoc_consts = Vec::new();
+ let mut methods = Vec::new();
+ if let Some(v) = cache.impls.get(&did) {
+ let mut used_links = FxHashSet::default();
+ let mut id_map = IdMap::new();
+
+ {
+ let used_links_bor = &mut used_links;
+ assoc_consts.extend(
+ v.iter()
+ .filter(|i| i.inner_impl().trait_.is_none())
+ .flat_map(|i| get_associated_constants(i.inner_impl(), used_links_bor)),
+ );
+ // We want links' order to be reproducible so we don't use unstable sort.
+ assoc_consts.sort();
+
+ #[rustfmt::skip] // rustfmt makes the pipeline less readable
+ methods.extend(
+ v.iter()
+ .filter(|i| i.inner_impl().trait_.is_none())
+ .flat_map(|i| get_methods(i.inner_impl(), false, used_links_bor, false, cx.tcx())),
+ );
+
+ // We want links' order to be reproducible so we don't use unstable sort.
+ methods.sort();
+ }
+
+ let mut deref_methods = Vec::new();
+ let [concrete, synthetic, blanket] = if v.iter().any(|i| i.inner_impl().trait_.is_some()) {
+ if let Some(impl_) =
+ v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait())
+ {
+ let mut derefs = DefIdSet::default();
+ derefs.insert(did);
+ sidebar_deref_methods(
+ cx,
+ &mut deref_methods,
+ impl_,
+ v,
+ &mut derefs,
+ &mut used_links,
+ );
+ }
+
+ let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) =
+ v.iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_auto());
+ let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) =
+ concrete.into_iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_blanket());
+
+ sidebar_render_assoc_items(cx, &mut id_map, concrete, synthetic, blanket_impl)
+ } else {
+ std::array::from_fn(|_| LinkBlock::new(Link::empty(), vec![]))
+ };
+
+ let mut blocks = vec![
+ LinkBlock::new(Link::new("implementations", "Associated Constants"), assoc_consts),
+ LinkBlock::new(Link::new("implementations", "Methods"), methods),
+ ];
+ blocks.append(&mut deref_methods);
+ blocks.extend([concrete, synthetic, blanket]);
+ links.append(&mut blocks);
+ }
+}
+
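+/// Adds a "Methods from Deref<Target = ...>" block for `impl_` and then recurses
+/// into further `Deref` impls on the target type, using `derefs` to avoid cycles.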
+fn sidebar_deref_methods<'a>(
+ cx: &'a Context<'_>,
+ out: &mut Vec<LinkBlock<'a>>,
+ impl_: &Impl,
+ v: &[Impl],
+ derefs: &mut DefIdSet,
+ used_links: &mut FxHashSet<String>,
+) {
+ let c = cx.cache();
+
+ debug!("found Deref: {:?}", impl_);
+ if let Some((target, real_target)) =
+ impl_.inner_impl().items.iter().find_map(|item| match *item.kind {
+ clean::AssocTypeItem(box ref t, _) => Some(match *t {
+ clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
+ _ => (&t.type_, &t.type_),
+ }),
+ _ => None,
+ })
+ {
+ debug!("found target, real_target: {:?} {:?}", target, real_target);
+ if let Some(did) = target.def_id(c) &&
+ let Some(type_did) = impl_.inner_impl().for_.def_id(c) &&
+ // `impl Deref<Target = S> for S`
+ (did == type_did || !derefs.insert(did))
+ {
+ // Avoid infinite cycles
+ return;
+ }
+ let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait());
+ let inner_impl = target
+ .def_id(c)
+ .or_else(|| {
+ target.primitive_type().and_then(|prim| c.primitive_locations.get(&prim).cloned())
+ })
+ .and_then(|did| c.impls.get(&did));
+ if let Some(impls) = inner_impl {
+ debug!("found inner_impl: {:?}", impls);
+ let mut ret = impls
+ .iter()
+ .filter(|i| i.inner_impl().trait_.is_none())
+ .flat_map(|i| get_methods(i.inner_impl(), true, used_links, deref_mut, cx.tcx()))
+ .collect::<Vec<_>>();
+ if !ret.is_empty() {
+ let id = if let Some(target_def_id) = real_target.def_id(c) {
+ Cow::Borrowed(
+ cx.deref_id_map
+ .get(&target_def_id)
+ .expect("Deref section without derived id")
+ .as_str(),
+ )
+ } else {
+ Cow::Borrowed("deref-methods")
+ };
+ let title = format!(
+ "Methods from {:#}<Target={:#}>",
+ impl_.inner_impl().trait_.as_ref().unwrap().print(cx),
+ real_target.print(cx),
+ );
+ // We want links' order to be reproducible so we don't use unstable sort.
+ ret.sort();
+ out.push(LinkBlock::new(Link::new(id, title), ret));
+ }
+ }
+
+ // Recurse into any further impls that might exist for `target`
+ if let Some(target_did) = target.def_id(c) &&
+ let Some(target_impls) = c.impls.get(&target_did) &&
+ let Some(target_deref_impl) = target_impls.iter().find(|i| {
+ i.inner_impl()
+ .trait_
+ .as_ref()
+ .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
+ .unwrap_or(false)
+ })
+ {
+ sidebar_deref_methods(
+ cx,
+ out,
+ target_deref_impl,
+ target_impls,
+ derefs,
+ used_links,
+ );
+ }
+ }
+}
+
+fn sidebar_enum<'a>(
+ cx: &'a Context<'_>,
+ it: &'a clean::Item,
+ e: &'a clean::Enum,
+) -> Vec<LinkBlock<'a>> {
+ let mut variants = e
+ .variants()
+ .filter_map(|v| v.name)
+ .map(|name| Link::new(format!("variant.{name}"), name.to_string()))
+ .collect::<Vec<_>>();
+ variants.sort_unstable();
+
+ let mut items = vec![LinkBlock::new(Link::new("variants", "Variants"), variants)];
+ sidebar_assoc_items(cx, it, &mut items);
+ items
+}
+
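+/// Builds the single block of section links (Structs, Enums, Functions, ...) shown
+/// in the sidebar of module-like pages, keeping only the sections actually in use.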
+pub(crate) fn sidebar_module_like(
+ item_sections_in_use: FxHashSet<ItemSection>,
+) -> LinkBlock<'static> {
+ let item_sections = ItemSection::ALL
+ .iter()
+ .copied()
+ .filter(|sec| item_sections_in_use.contains(sec))
+ .map(|sec| Link::new(sec.id(), sec.name()))
+ .collect();
+ LinkBlock::new(Link::empty(), item_sections)
+}
+
+fn sidebar_module(items: &[clean::Item]) -> LinkBlock<'static> {
+ let item_sections_in_use: FxHashSet<_> = items
+ .iter()
+ .filter(|it| {
+ !it.is_stripped()
+ && it
+ .name
+ .or_else(|| {
+ if let clean::ImportItem(ref i) = *it.kind &&
+ let clean::ImportKind::Simple(s) = i.kind { Some(s) } else { None }
+ })
+ .is_some()
+ })
+ .map(|it| item_ty_to_section(it.type_()))
+ .collect();
+
+ sidebar_module_like(item_sections_in_use)
+}
+
+fn sidebar_foreign_type<'a>(cx: &'a Context<'_>, it: &'a clean::Item) -> Vec<LinkBlock<'a>> {
+ let mut items = vec![];
+ sidebar_assoc_items(cx, it, &mut items);
+ items
+}
+
+/// Renders the trait implementations for this type
+fn sidebar_render_assoc_items(
+ cx: &Context<'_>,
+ id_map: &mut IdMap,
+ concrete: Vec<&Impl>,
+ synthetic: Vec<&Impl>,
+ blanket_impl: Vec<&Impl>,
+) -> [LinkBlock<'static>; 3] {
+ let format_impls = |impls: Vec<&Impl>, id_map: &mut IdMap| {
+ let mut links = FxHashSet::default();
+
+ let mut ret = impls
+ .iter()
+ .filter_map(|it| {
+ let trait_ = it.inner_impl().trait_.as_ref()?;
+ let encoded =
+ id_map.derive(super::get_id_for_impl(&it.inner_impl().for_, Some(trait_), cx));
+
+ let prefix = match it.inner_impl().polarity {
+ ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "",
+ ty::ImplPolarity::Negative => "!",
+ };
+ let generated = Link::new(encoded, format!("{prefix}{:#}", trait_.print(cx)));
+ if links.insert(generated.clone()) { Some(generated) } else { None }
+ })
+ .collect::<Vec<Link<'static>>>();
+ ret.sort();
+ ret
+ };
+
+ let concrete = format_impls(concrete, id_map);
+ let synthetic = format_impls(synthetic, id_map);
+ let blanket = format_impls(blanket_impl, id_map);
+ [
+ LinkBlock::new(Link::new("trait-implementations", "Trait Implementations"), concrete),
+ LinkBlock::new(
+ Link::new("synthetic-implementations", "Auto Trait Implementations"),
+ synthetic,
+ ),
+ LinkBlock::new(Link::new("blanket-implementations", "Blanket Implementations"), blanket),
+ ]
+}
+
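+/// Returns `url` if it has not been used yet; otherwise appends `-1`, `-2`, ...
+/// until the result is unique (e.g. `method.len`, then `method.len-1`).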
+fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
+ if used_links.insert(url.clone()) {
+ return url;
+ }
+ let mut add = 1;
+ while !used_links.insert(format!("{}-{}", url, add)) {
+ add += 1;
+ }
+ format!("{}-{}", url, add)
+}
+
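+/// Collects sidebar links for an impl's methods, giving each a unique
+/// `method.<name>` anchor via `get_next_url`.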
+fn get_methods<'a>(
+ i: &'a clean::Impl,
+ for_deref: bool,
+ used_links: &mut FxHashSet<String>,
+ deref_mut: bool,
+ tcx: TyCtxt<'_>,
+) -> Vec<Link<'a>> {
+ i.items
+ .iter()
+ .filter_map(|item| match item.name {
+ Some(ref name) if !name.is_empty() && item.is_method() => {
+ if !for_deref || super::should_render_item(item, deref_mut, tcx) {
+ Some(Link::new(
+ get_next_url(used_links, format!("{}.{}", ItemType::Method, name)),
+ name.as_str(),
+ ))
+ } else {
+ None
+ }
+ }
+ _ => None,
+ })
+ .collect::<Vec<_>>()
+}
+
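+/// Collects sidebar links for an impl's associated constants, using
+/// `associatedconstant.<name>` anchors.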
+fn get_associated_constants<'a>(
+ i: &'a clean::Impl,
+ used_links: &mut FxHashSet<String>,
+) -> Vec<Link<'a>> {
+ i.items
+ .iter()
+ .filter_map(|item| match item.name {
+ Some(ref name) if !name.is_empty() && item.is_associated_const() => Some(Link::new(
+ get_next_url(used_links, format!("{}.{}", ItemType::AssocConst, name)),
+ name.as_str(),
+ )),
+ _ => None,
+ })
+ .collect::<Vec<_>>()
+}
diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs
index 4514894ca..eb9262f47 100644
--- a/src/librustdoc/html/render/span_map.rs
+++ b/src/librustdoc/html/render/span_map.rs
@@ -29,12 +29,12 @@ pub(crate) enum LinkFromSrc {
/// This function will do at most two things:
///
-/// 1. Generate a `span` correspondance map which links an item `span` to its definition `span`.
+/// 1. Generate a `span` correspondence map which links an item `span` to its definition `span`.
/// 2. Collect the source code files.
///
-/// It returns the `krate`, the source code files and the `span` correspondance map.
+/// It returns the `krate`, the source code files and the `span` correspondence map.
///
-/// Note about the `span` correspondance map: the keys are actually `(lo, hi)` of `span`s. We don't
+/// Note about the `span` correspondence map: the keys are actually `(lo, hi)` of `span`s. We don't
/// need the `span` context later on, only their position, so instead of keep a whole `Span`, we
/// only keep the `lo` and `hi`.
pub(crate) fn collect_spans_and_sources(
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
index 2c90bf4fa..c8397967c 100644
--- a/src/librustdoc/html/sources.rs
+++ b/src/librustdoc/html/sources.rs
@@ -1,12 +1,13 @@
use crate::clean;
use crate::docfs::PathError;
use crate::error::Error;
-use crate::html::format::Buffer;
+use crate::html::format;
use crate::html::highlight;
use crate::html::layout;
use crate::html::render::Context;
use crate::visit::DocVisitor;
+use askama::Template;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::ty::TyCtxt;
@@ -15,7 +16,9 @@ use rustc_span::source_map::FileName;
use std::cell::RefCell;
use std::ffi::OsStr;
+use std::fmt;
use std::fs;
+use std::ops::RangeInclusive;
use std::path::{Component, Path, PathBuf};
use std::rc::Rc;
@@ -85,7 +88,7 @@ impl LocalSourcesCollector<'_, '_> {
},
);
- let mut href = href.into_inner().to_string_lossy().to_string();
+ let mut href = href.into_inner().to_string_lossy().into_owned();
if let Some(c) = href.as_bytes().last() && *c != b'/' {
href.push('/');
}
@@ -291,7 +294,7 @@ pub(crate) enum SourceContext {
/// Wrapper struct to render the source code of a file. This will do things like
/// adding line numbers to the left-hand side.
pub(crate) fn print_src(
- buf: &mut Buffer,
+ mut writer: impl fmt::Write,
s: &str,
file_span: rustc_span::Span,
context: &Context<'_>,
@@ -299,39 +302,32 @@ pub(crate) fn print_src(
decoration_info: highlight::DecorationInfo,
source_context: SourceContext,
) {
+ #[derive(Template)]
+ #[template(path = "source.html")]
+ struct Source<Code: std::fmt::Display> {
+ embedded: bool,
+ needs_expansion: bool,
+ lines: RangeInclusive<usize>,
+ code_html: Code,
+ }
let lines = s.lines().count();
- let mut line_numbers = Buffer::empty_from(buf);
- let extra;
- line_numbers.write_str("<pre class=\"src-line-numbers\">");
+ let (embedded, needs_expansion, lines) = match source_context {
+ SourceContext::Standalone => (false, false, 1..=lines),
+ SourceContext::Embedded { offset, needs_expansion } => {
+ (true, needs_expansion, (1 + offset)..=(lines + offset))
+ }
+ };
let current_href = context
.href_from_span(clean::Span::new(file_span), false)
.expect("only local crates should have sources emitted");
- match source_context {
- SourceContext::Standalone => {
- extra = None;
- for line in 1..=lines {
- writeln!(line_numbers, "<a href=\"#{line}\" id=\"{line}\">{line}</a>")
- }
- }
- SourceContext::Embedded { offset, needs_expansion } => {
- extra = if needs_expansion {
- Some(r#"<button class="expand">&varr;</button>"#)
- } else {
- None
- };
- for line_number in 1..=lines {
- let line = line_number + offset;
- writeln!(line_numbers, "<span>{line}</span>")
- }
- }
- }
- line_numbers.write_str("</pre>");
- highlight::render_source_with_highlighting(
- s,
- buf,
- line_numbers,
- highlight::HrefContext { context, file_span, root_path, current_href },
- decoration_info,
- extra,
- );
+ let code = format::display_fn(move |fmt| {
+ highlight::write_code(
+ fmt,
+ s,
+ Some(highlight::HrefContext { context, file_span, root_path, current_href }),
+ Some(decoration_info),
+ );
+ Ok(())
+ });
+ Source { embedded, needs_expansion, lines, code_html: code }.render_into(&mut writer).unwrap();
}
diff --git a/src/librustdoc/html/static/COPYRIGHT.txt b/src/librustdoc/html/static/COPYRIGHT.txt
index 34e48134c..1447df792 100644
--- a/src/librustdoc/html/static/COPYRIGHT.txt
+++ b/src/librustdoc/html/static/COPYRIGHT.txt
@@ -1,3 +1,5 @@
+# REUSE-IgnoreStart
+
These documentation pages include resources by third parties. This copyright
file applies only to those resources. The following third party resources are
included, and carry their own copyright notices and license terms:
@@ -44,3 +46,5 @@ included, and carry their own copyright notices and license terms:
See SourceSerif4-LICENSE.md.
This copyright file is intended to be distributed with rustdoc output.
+
+# REUSE-IgnoreEnd
diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css
index 95528e70e..6fbb45086 100644
--- a/src/librustdoc/html/static/css/rustdoc.css
+++ b/src/librustdoc/html/static/css/rustdoc.css
@@ -6,6 +6,10 @@
3. Copy the filenames with updated suffixes from the directory.
*/
+:root {
+ --nav-sub-mobile-padding: 8px;
+}
+
/* See FiraSans-LICENSE.txt for the Fira Sans license. */
@font-face {
font-family: 'Fira Sans';
@@ -87,21 +91,6 @@
box-sizing: border-box;
}
-/* This part handles the "default" theme being used depending on the system one. */
-html {
- content: "";
-}
-@media (prefers-color-scheme: light) {
- html {
- content: "light";
- }
-}
-@media (prefers-color-scheme: dark) {
- html {
- content: "dark";
- }
-}
-
/* General structure and fonts */
body {
@@ -217,7 +206,7 @@ ul.all-items {
a.anchor,
.small-section-header a,
#source-sidebar a,
-pre.rust a,
+.rust a,
.sidebar h2 a,
.sidebar h3 a,
.mobile-topbar h2 a,
@@ -228,43 +217,43 @@ h1 a,
color: var(--main-color);
}
-.content span.enum, .content a.enum,
-.content span.struct, .content a.struct,
-.content span.union, .content a.union,
-.content span.primitive, .content a.primitive,
-.content span.type, .content a.type,
-.content span.foreigntype, .content a.foreigntype {
+span.enum, a.enum,
+span.struct, a.struct,
+span.union, a.union,
+span.primitive, a.primitive,
+span.type, a.type,
+span.foreigntype, a.foreigntype {
color: var(--type-link-color);
}
-.content span.trait, .content a.trait,
-.content span.traitalias, .content a.traitalias {
+span.trait, a.trait,
+span.traitalias, a.traitalias {
color: var(--trait-link-color);
}
-.content span.associatedtype, .content a.associatedtype,
-.content span.constant, .content a.constant,
-.content span.static, .content a.static {
+span.associatedtype, a.associatedtype,
+span.constant, a.constant,
+span.static, a.static {
color: var(--assoc-item-link-color);
}
-.content span.fn, .content a.fn,
-.content span.method, .content a.method,
-.content span.tymethod, .content a.tymethod {
+span.fn, a.fn,
+span.method, a.method,
+span.tymethod, a.tymethod {
color: var(--function-link-color);
}
-.content span.attr, .content a.attr,
-.content span.derive, .content a.derive,
-.content span.macro, .content a.macro {
+span.attr, a.attr,
+span.derive, a.derive,
+span.macro, a.macro {
color: var(--macro-link-color);
}
-.content span.mod, .content a.mod {
+span.mod, a.mod {
color: var(--mod-link-color);
}
-.content span.keyword, .content a.keyword {
+span.keyword, a.keyword {
color: var(--keyword-link-color);
}
@@ -363,7 +352,7 @@ pre.item-decl {
.source .content pre {
padding: 20px;
}
-.rustdoc.source .example-wrap > pre.src-line-numbers {
+.rustdoc.source .example-wrap pre.src-line-numbers {
padding: 20px 0 20px 4px;
}
@@ -395,6 +384,7 @@ img {
font-size: 0.875rem;
flex: 0 0 200px;
overflow-y: scroll;
+ overscroll-behavior: contain;
position: sticky;
height: 100vh;
top: 0;
@@ -407,6 +397,7 @@ img {
overflow-x: hidden;
/* The sidebar is by default hidden */
overflow-y: hidden;
+ z-index: 1;
}
.sidebar, .mobile-topbar, .sidebar-menu-toggle,
@@ -547,14 +538,17 @@ ul.block, .block li {
margin-bottom: 0px;
}
-.rustdoc .example-wrap > pre {
+.rustdoc .example-wrap pre {
margin: 0;
flex-grow: 1;
+}
+
+.rustdoc:not(.source) .example-wrap pre {
overflow: auto hidden;
}
-.rustdoc .example-wrap > pre.example-line-numbers,
-.rustdoc .example-wrap > pre.src-line-numbers {
+.rustdoc .example-wrap pre.example-line-numbers,
+.rustdoc .example-wrap pre.src-line-numbers {
flex-grow: 0;
min-width: fit-content; /* prevent collapsing into nothing in truncated scraped examples */
overflow: initial;
@@ -565,7 +559,7 @@ ul.block, .block li {
color: var(--src-line-numbers-span-color);
}
-.rustdoc .example-wrap > pre.src-line-numbers {
+.rustdoc .example-wrap pre.src-line-numbers {
padding: 14px 0;
}
.src-line-numbers a, .src-line-numbers span {
@@ -713,7 +707,7 @@ h2.small-section-header > .anchor {
}
.main-heading a:hover,
-.example-wrap > pre.rust a:hover,
+.example-wrap .rust a:hover,
.all-items a:hover,
.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover,
.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover,
@@ -1538,7 +1532,7 @@ However, it's not needed with smaller screen width because the doc/code block is
/*
WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
If you update this line, then you also need to update the line with the same warning
-in storage.js
+in source-script.js
*/
@media (max-width: 700px) {
/* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
@@ -1737,7 +1731,7 @@ in storage.js
.source nav.sub {
margin: 0;
- padding: 8px;
+ padding: var(--nav-sub-mobile-padding);
}
}
@@ -1794,6 +1788,7 @@ in storage.js
.sub-logo-container > img {
height: 35px;
width: 35px;
+ margin-bottom: var(--nav-sub-mobile-padding);
}
}
diff --git a/src/librustdoc/html/static/css/settings.css b/src/librustdoc/html/static/css/settings.css
index 920f45c4b..d13c783d2 100644
--- a/src/librustdoc/html/static/css/settings.css
+++ b/src/librustdoc/html/static/css/settings.css
@@ -8,7 +8,7 @@
height: 1.2rem;
width: 1.2rem;
color: inherit;
- border: 1px solid currentColor;
+ border: 2px solid var(--settings-input-border-color);
outline: none;
-webkit-appearance: none;
cursor: pointer;
@@ -52,6 +52,7 @@
}
.setting-check input:checked {
background-color: var(--settings-input-color);
+ border-width: 1px;
}
.setting-radio input:focus, .setting-check input:focus {
box-shadow: 0 0 1px 1px var(--settings-input-color);
diff --git a/src/librustdoc/html/static/css/themes/ayu.css b/src/librustdoc/html/static/css/themes/ayu.css
index 90cf689ad..7145baad2 100644
--- a/src/librustdoc/html/static/css/themes/ayu.css
+++ b/src/librustdoc/html/static/css/themes/ayu.css
@@ -7,6 +7,7 @@ Original by Dempfi (https://github.com/dempfi/ayu)
--main-background-color: #0f1419;
--main-color: #c5c5c5;
--settings-input-color: #ffb454;
+ --settings-input-border-color: #999;
--settings-button-color: #fff;
--settings-button-border-focus: #e0e0e0;
--sidebar-background-color: #14191f;
diff --git a/src/librustdoc/html/static/css/themes/dark.css b/src/librustdoc/html/static/css/themes/dark.css
index e8cd06931..3c1186a56 100644
--- a/src/librustdoc/html/static/css/themes/dark.css
+++ b/src/librustdoc/html/static/css/themes/dark.css
@@ -2,6 +2,7 @@
--main-background-color: #353535;
--main-color: #ddd;
--settings-input-color: #2196f3;
+ --settings-input-border-color: #999;
--settings-button-color: #000;
--settings-button-border-focus: #ffb900;
--sidebar-background-color: #505050;
diff --git a/src/librustdoc/html/static/css/themes/light.css b/src/librustdoc/html/static/css/themes/light.css
index 5e3f14e48..f8c287137 100644
--- a/src/librustdoc/html/static/css/themes/light.css
+++ b/src/librustdoc/html/static/css/themes/light.css
@@ -2,6 +2,7 @@
--main-background-color: white;
--main-color: black;
--settings-input-color: #2196f3;
+ --settings-input-border-color: #717171;
--settings-button-color: #000;
--settings-button-border-focus: #717171;
--sidebar-background-color: #F5F5F5;
diff --git a/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt b/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt
index ff9afab06..d7e9c149b 100644
--- a/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt
+++ b/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt
@@ -1,3 +1,5 @@
+// REUSE-IgnoreStart
+
Digitized data copyright (c) 2012-2015, The Mozilla Foundation and Telefonica S.A.
with Reserved Font Name < Fira >,
@@ -92,3 +94,5 @@ INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
+
+// REUSE-IgnoreEnd
diff --git a/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt b/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt
index 0bf46682b..4b3edc29e 100644
--- a/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt
+++ b/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt
@@ -1,3 +1,5 @@
+// REUSE-IgnoreStart
+
Copyright (c) 2010, NAVER Corporation (https://www.navercorp.com/),
with Reserved Font Name Nanum, Naver Nanum, NanumGothic, Naver NanumGothic,
@@ -97,3 +99,5 @@ INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
+
+// REUSE-IgnoreEnd
diff --git a/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt b/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt
index 07542572e..0d2941e14 100644
--- a/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt
+++ b/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt
@@ -1,3 +1,5 @@
+// REUSE-IgnoreStart
+
Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe Systems Incorporated in the United States and/or other countries.
This Font Software is licensed under the SIL Open Font License, Version 1.1.
@@ -91,3 +93,5 @@ INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
+
+// REUSE-IgnoreEnd
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
index 5871e1f3d..175fa4f47 100644
--- a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
@@ -1,3 +1,6 @@
+<!-- REUSE-IgnoreStart -->
+
+Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.
Copyright 2014 - 2023 Adobe (http://www.adobe.com/), with Reserved Font Name ‘Source’. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.
This Font Software is licensed under the SIL Open Font License, Version 1.1.
@@ -91,3 +94,5 @@ INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
+
+<!-- REUSE-IgnoreEnd -->
diff --git a/src/librustdoc/html/static/js/externs.js b/src/librustdoc/html/static/js/externs.js
index ecbe15a59..4c81a0979 100644
--- a/src/librustdoc/html/static/js/externs.js
+++ b/src/librustdoc/html/static/js/externs.js
@@ -66,6 +66,11 @@ let Row;
let ResultsTable;
/**
+ * @typedef {Map<String, ResultObject>}
+ */
+let Results;
+
+/**
* @typedef {{
* desc: string,
* displayPath: string,
@@ -80,7 +85,7 @@ let ResultsTable;
* ty: number,
* }}
*/
-let Results;
+let ResultObject;
/**
* A pair of [inputs, outputs], or 0 for null. This is stored in the search index.
diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js
index 5e8c0e8d1..6f5987e68 100644
--- a/src/librustdoc/html/static/js/main.js
+++ b/src/librustdoc/html/static/js/main.js
@@ -1,20 +1,9 @@
// Local js definitions:
/* global addClass, getSettingValue, hasClass, searchState */
-/* global onEach, onEachLazy, removeClass */
+/* global onEach, onEachLazy, removeClass, getVar */
"use strict";
-// Get a value from the rustdoc-vars div, which is used to convey data from
-// Rust to the JS. If there is no such element, return null.
-function getVar(name) {
- const el = document.getElementById("rustdoc-vars");
- if (el) {
- return el.attributes["data-" + name].value;
- } else {
- return null;
- }
-}
-
// Given a basename (e.g. "storage") and an extension (e.g. ".js"), return a URL
// for a resource under the root-path, with the resource-suffix.
function resourcePath(basename, extension) {
@@ -187,6 +176,15 @@ function loadCss(cssUrl) {
document.getElementsByTagName("head")[0].appendChild(link);
}
+function preLoadCss(cssUrl) {
+ // https://developer.mozilla.org/en-US/docs/Web/HTML/Link_types/preload
+ const link = document.createElement("link");
+ link.href = cssUrl;
+ link.rel = "preload";
+ link.as = "style";
+ document.getElementsByTagName("head")[0].appendChild(link);
+}
+
(function() {
const isHelpPage = window.location.pathname.endsWith("/help.html");
@@ -207,6 +205,23 @@ function loadCss(cssUrl) {
// hopefully be loaded when the JS will generate the settings content.
loadCss(getVar("static-root-path") + getVar("settings-css"));
loadScript(getVar("static-root-path") + getVar("settings-js"));
+ preLoadCss(getVar("static-root-path") + getVar("theme-light-css"));
+ preLoadCss(getVar("static-root-path") + getVar("theme-dark-css"));
+ preLoadCss(getVar("static-root-path") + getVar("theme-ayu-css"));
+ // Pre-load all theme CSS files, so that switching feels seamless.
+ //
+ // When loading settings.html as a standalone page, the equivalent HTML is
+ // generated in context.rs.
+ setTimeout(() => {
+ const themes = getVar("themes").split(",");
+ for (const theme of themes) {
+ // if there are no themes, do nothing
+ // "".split(",") == [""]
+ if (theme !== "") {
+ preLoadCss(getVar("root-path") + theme + ".css");
+ }
+ }
+ }, 0);
};
window.searchState = {
@@ -311,16 +326,6 @@ function loadCss(cssUrl) {
},
};
- function getPageId() {
- if (window.location.hash) {
- const tmp = window.location.hash.replace(/^#/, "");
- if (tmp.length > 0) {
- return tmp;
- }
- }
- return null;
- }
-
const toggleAllDocsId = "toggle-all-docs";
let savedHash = "";
@@ -341,12 +346,12 @@ function loadCss(cssUrl) {
}
}
// This part is used in case an element is not visible.
- if (savedHash !== window.location.hash) {
- savedHash = window.location.hash;
- if (savedHash.length === 0) {
- return;
+ const pageId = window.location.hash.replace(/^#/, "");
+ if (savedHash !== pageId) {
+ savedHash = pageId;
+ if (pageId !== "") {
+ expandSection(pageId);
}
- expandSection(savedHash.slice(1)); // we remove the '#'
}
}
@@ -685,11 +690,6 @@ function loadCss(cssUrl) {
}
});
-
- const pageId = getPageId();
- if (pageId !== null) {
- expandSection(pageId);
- }
}());
window.rustdoc_add_line_numbers_to_examples = () => {
@@ -725,65 +725,18 @@ function loadCss(cssUrl) {
window.rustdoc_add_line_numbers_to_examples();
}
- let oldSidebarScrollPosition = null;
-
- // Scroll locking used both here and in source-script.js
-
- window.rustdocMobileScrollLock = function() {
- const mobile_topbar = document.querySelector(".mobile-topbar");
- if (window.innerWidth <= window.RUSTDOC_MOBILE_BREAKPOINT) {
- // This is to keep the scroll position on mobile.
- oldSidebarScrollPosition = window.scrollY;
- document.body.style.width = `${document.body.offsetWidth}px`;
- document.body.style.position = "fixed";
- document.body.style.top = `-${oldSidebarScrollPosition}px`;
- if (mobile_topbar) {
- mobile_topbar.style.top = `${oldSidebarScrollPosition}px`;
- mobile_topbar.style.position = "relative";
- }
- } else {
- oldSidebarScrollPosition = null;
- }
- };
-
- window.rustdocMobileScrollUnlock = function() {
- const mobile_topbar = document.querySelector(".mobile-topbar");
- if (oldSidebarScrollPosition !== null) {
- // This is to keep the scroll position on mobile.
- document.body.style.width = "";
- document.body.style.position = "";
- document.body.style.top = "";
- if (mobile_topbar) {
- mobile_topbar.style.top = "";
- mobile_topbar.style.position = "";
- }
- // The scroll position is lost when resetting the style, hence why we store it in
- // `oldSidebarScrollPosition`.
- window.scrollTo(0, oldSidebarScrollPosition);
- oldSidebarScrollPosition = null;
- }
- };
-
function showSidebar() {
window.hideAllModals(false);
- window.rustdocMobileScrollLock();
const sidebar = document.getElementsByClassName("sidebar")[0];
addClass(sidebar, "shown");
}
function hideSidebar() {
- window.rustdocMobileScrollUnlock();
const sidebar = document.getElementsByClassName("sidebar")[0];
removeClass(sidebar, "shown");
}
window.addEventListener("resize", () => {
- if (window.innerWidth > window.RUSTDOC_MOBILE_BREAKPOINT &&
- oldSidebarScrollPosition !== null) {
- // If the user opens the sidebar in "mobile" mode, and then grows the browser window,
- // we need to switch away from mobile mode and make the main content area scrollable.
- hideSidebar();
- }
if (window.CURRENT_TOOLTIP_ELEMENT) {
// As a workaround to the behavior of `contains: layout` used in doc togglers,
// tooltip popovers are positioned using javascript.
@@ -996,9 +949,7 @@ function loadCss(cssUrl) {
<code>enum</code>, <code>trait</code>, <code>type</code>, <code>macro</code>, \
and <code>const</code>.",
"Search functions by type signature (e.g., <code>vec -&gt; usize</code> or \
- <code>-&gt; vec</code>)",
- "Search multiple things at once by splitting your query with comma (e.g., \
- <code>str,u8</code> or <code>String,struct:Vec,test</code>)",
+ <code>-&gt; vec</code> or <code>String, enum:Cow -&gt; bool</code>)",
"You can look for items with an exact name by putting double quotes around \
your request: <code>\"string\"</code>",
"Look for items inside another one by searching for a path: <code>vec::Vec</code>",
diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js
index b98bced41..929dae81c 100644
--- a/src/librustdoc/html/static/js/search.js
+++ b/src/librustdoc/html/static/js/search.js
@@ -76,39 +76,111 @@ function printTab(nb) {
}
/**
- * A function to compute the Levenshtein distance between two strings
- * Licensed under the Creative Commons Attribution-ShareAlike 3.0 Unported
- * Full License can be found at http://creativecommons.org/licenses/by-sa/3.0/legalcode
- * This code is an unmodified version of the code written by Marco de Wit
- * and was found at https://stackoverflow.com/a/18514751/745719
+ * The [edit distance] is a metric for measuring the difference between two strings.
+ *
+ * [edit distance]: https://en.wikipedia.org/wiki/Edit_distance
*/
-const levenshtein_row2 = [];
-function levenshtein(s1, s2) {
- if (s1 === s2) {
- return 0;
- }
- const s1_len = s1.length, s2_len = s2.length;
- if (s1_len && s2_len) {
- let i1 = 0, i2 = 0, a, b, c, c2;
- const row = levenshtein_row2;
- while (i1 < s1_len) {
- row[i1] = ++i1;
- }
- while (i2 < s2_len) {
- c2 = s2.charCodeAt(i2);
- a = i2;
- ++i2;
- b = i2;
- for (i1 = 0; i1 < s1_len; ++i1) {
- c = a + (s1.charCodeAt(i1) !== c2 ? 1 : 0);
- a = row[i1];
- b = b < a ? (b < c ? b + 1 : c) : (a < c ? a + 1 : c);
- row[i1] = b;
- }
- }
- return b;
- }
- return s1_len + s2_len;
+
+/*
+ * This function was translated, mostly line-for-line, from
+ * https://github.com/rust-lang/rust/blob/ff4b772f805ec1e/compiler/rustc_span/src/edit_distance.rs
+ *
+ * The current implementation is the restricted Damerau-Levenshtein algorithm. It is restricted
+ * because it does not permit modifying characters that have already been transposed. The specific
+ * algorithm should not matter to the caller of the methods, which is why it is not noted in the
+ * documentation.
+ */
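+// For example, "foo" vs. "fob" gives 1 (one substitution) and "abcd" vs. "acbd"
+// gives 1 (one transposition, which plain Levenshtein would count as 2); any
+// distance above `limit` is reported as `limit + 1` so callers can discard it.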
+const editDistanceState = {
+ current: [],
+ prev: [],
+ prevPrev: [],
+ calculate: function calculate(a, b, limit) {
+ // Ensure that `b` is the shorter string, minimizing memory use.
+ if (a.length < b.length) {
+ const aTmp = a;
+ a = b;
+ b = aTmp;
+ }
+
+ const minDist = a.length - b.length;
+ // If we know the limit will be exceeded, we can return early.
+ if (minDist > limit) {
+ return limit + 1;
+ }
+
+ // Strip common prefix.
+ // We know that `b` is the shorter string, so we don't need to check
+ // `a.length`.
+ while (b.length > 0 && b[0] === a[0]) {
+ a = a.substring(1);
+ b = b.substring(1);
+ }
+ // Strip common suffix.
+ while (b.length > 0 && b[b.length - 1] === a[a.length - 1]) {
+ a = a.substring(0, a.length - 1);
+ b = b.substring(0, b.length - 1);
+ }
+
+ // If either string is empty, the distance is the length of the other.
+ // We know that `b` is the shorter string, so we don't need to check `a`.
+ if (b.length === 0) {
+ return minDist;
+ }
+
+ const aLength = a.length;
+ const bLength = b.length;
+
+ for (let i = 0; i <= bLength; ++i) {
+ this.current[i] = 0;
+ this.prev[i] = i;
+ this.prevPrev[i] = Number.MAX_VALUE;
+ }
+
+ // row by row
+ for (let i = 1; i <= aLength; ++i) {
+ this.current[0] = i;
+ const aIdx = i - 1;
+
+ // column by column
+ for (let j = 1; j <= bLength; ++j) {
+ const bIdx = j - 1;
+
+ // There is no cost to substitute a character with itself.
+ const substitutionCost = a[aIdx] === b[bIdx] ? 0 : 1;
+
+ this.current[j] = Math.min(
+ // deletion
+ this.prev[j] + 1,
+ // insertion
+ this.current[j - 1] + 1,
+ // substitution
+ this.prev[j - 1] + substitutionCost
+ );
+
+ if ((i > 1) && (j > 1) && (a[aIdx] === b[bIdx - 1]) && (a[aIdx - 1] === b[bIdx])) {
+ // transposition
+ this.current[j] = Math.min(
+ this.current[j],
+ this.prevPrev[j - 2] + 1
+ );
+ }
+ }
+
+ // Rotate the buffers, reusing the memory
+ const prevPrevTmp = this.prevPrev;
+ this.prevPrev = this.prev;
+ this.prev = this.current;
+ this.current = prevPrevTmp;
+ }
+
+ // `prev` because we already rotated the buffers.
+ const distance = this.prev[bLength];
+ return distance <= limit ? distance : (limit + 1);
+ },
+};
+
+function editDistance(a, b, limit) {
+ return editDistanceState.calculate(a, b, limit);
}
function initSearch(rawSearchIndex) {
@@ -119,7 +191,7 @@ function initSearch(rawSearchIndex) {
*/
let searchIndex;
let currentResults;
- const ALIASES = Object.create(null);
+ const ALIASES = new Map();
function isWhitespace(c) {
return " \t\n\r".indexOf(c) !== -1;
@@ -282,12 +354,15 @@ function initSearch(rawSearchIndex) {
if (isInGenerics) {
parserState.genericsElems += 1;
}
+ const typeFilter = parserState.typeFilter;
+ parserState.typeFilter = null;
return {
name: name,
fullPath: pathSegments,
pathWithoutLast: pathSegments.slice(0, pathSegments.length - 1),
pathLast: pathSegments[pathSegments.length - 1],
generics: generics,
+ typeFilter,
};
}
@@ -386,9 +461,7 @@ function initSearch(rawSearchIndex) {
if (parserState.pos < parserState.length &&
parserState.userQuery[parserState.pos] === "<"
) {
- if (isInGenerics) {
- throw ["Unexpected ", "<", " after ", "<"];
- } else if (start >= end) {
+ if (start >= end) {
throw ["Found generics without a path"];
}
parserState.pos += 1;
@@ -423,6 +496,11 @@ function initSearch(rawSearchIndex) {
*/
function getItemsBefore(query, parserState, elems, endChar) {
let foundStopChar = true;
+ let start = parserState.pos;
+
+ // If this is a generic, keep the outer item's type filter around.
+ const oldTypeFilter = parserState.typeFilter;
+ parserState.typeFilter = null;
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
@@ -434,7 +512,25 @@ function initSearch(rawSearchIndex) {
continue;
} else if (c === ":" && isPathStart(parserState)) {
throw ["Unexpected ", "::", ": paths cannot start with ", "::"];
- } else if (c === ":" || isEndCharacter(c)) {
+ } else if (c === ":") {
+ if (parserState.typeFilter !== null) {
+ throw ["Unexpected ", ":"];
+ }
+ if (elems.length === 0) {
+ throw ["Expected type filter before ", ":"];
+ } else if (query.literalSearch) {
+ throw ["You cannot use quotes on type filter"];
+ }
+ // The type filter doesn't count as an element since it's a modifier.
+ const typeFilterElem = elems.pop();
+ checkExtraTypeFilterCharacters(start, parserState);
+ parserState.typeFilter = typeFilterElem.name;
+ parserState.pos += 1;
+ parserState.totalElems -= 1;
+ query.literalSearch = false;
+ foundStopChar = true;
+ continue;
+ } else if (isEndCharacter(c)) {
let extra = "";
if (endChar === ">") {
extra = "<";
@@ -468,15 +564,10 @@ function initSearch(rawSearchIndex) {
];
}
const posBefore = parserState.pos;
+ start = parserState.pos;
getNextElem(query, parserState, elems, endChar === ">");
- if (endChar !== "") {
- if (parserState.pos >= parserState.length) {
- throw ["Unclosed ", "<"];
- }
- const c2 = parserState.userQuery[parserState.pos];
- if (!isSeparatorCharacter(c2) && c2 !== endChar) {
- throw ["Expected ", endChar, ", found ", c2];
- }
+ if (endChar !== "" && parserState.pos >= parserState.length) {
+ throw ["Unclosed ", "<"];
}
// This case can be encountered if `getNextElem` encountered a "stop character" right
// from the start. For example if you have `,,` or `<>`. In this case, we simply move up
@@ -492,6 +583,8 @@ function initSearch(rawSearchIndex) {
// We are either at the end of the string or on the `endChar` character, let's move forward
// in any case.
parserState.pos += 1;
+
+ parserState.typeFilter = oldTypeFilter;
}
/**
@@ -500,10 +593,10 @@ function initSearch(rawSearchIndex) {
*
* @param {ParserState} parserState
*/
- function checkExtraTypeFilterCharacters(parserState) {
+ function checkExtraTypeFilterCharacters(start, parserState) {
const query = parserState.userQuery;
- for (let pos = 0; pos < parserState.pos; ++pos) {
+ for (let pos = start; pos < parserState.pos; ++pos) {
if (!isIdentCharacter(query[pos]) && !isWhitespaceCharacter(query[pos])) {
throw ["Unexpected ", query[pos], " in type filter"];
}
@@ -519,6 +612,7 @@ function initSearch(rawSearchIndex) {
*/
function parseInput(query, parserState) {
let foundStopChar = true;
+ let start = parserState.pos;
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
@@ -540,16 +634,15 @@ function initSearch(rawSearchIndex) {
}
if (query.elems.length === 0) {
throw ["Expected type filter before ", ":"];
- } else if (query.elems.length !== 1 || parserState.totalElems !== 1) {
- throw ["Unexpected ", ":"];
} else if (query.literalSearch) {
throw ["You cannot use quotes on type filter"];
}
- checkExtraTypeFilterCharacters(parserState);
// The type filter doesn't count as an element since it's a modifier.
- parserState.typeFilter = query.elems.pop().name;
+ const typeFilterElem = query.elems.pop();
+ checkExtraTypeFilterCharacters(start, parserState);
+ parserState.typeFilter = typeFilterElem.name;
parserState.pos += 1;
- parserState.totalElems = 0;
+ parserState.totalElems -= 1;
query.literalSearch = false;
foundStopChar = true;
continue;
@@ -581,6 +674,7 @@ function initSearch(rawSearchIndex) {
];
}
const before = query.elems.length;
+ start = parserState.pos;
getNextElem(query, parserState, query.elems, false);
if (query.elems.length === before) {
// Nothing was added, weird... Let's increase the position to not remain stuck.
@@ -588,6 +682,9 @@ function initSearch(rawSearchIndex) {
}
foundStopChar = false;
}
+ if (parserState.typeFilter !== null) {
+ throw ["Unexpected ", ":", " (expected path after type filter)"];
+ }
while (parserState.pos < parserState.length) {
if (isReturnArrow(parserState)) {
parserState.pos += 2;
@@ -615,7 +712,6 @@ function initSearch(rawSearchIndex) {
return {
original: userQuery,
userQuery: userQuery.toLowerCase(),
- typeFilter: NO_TYPE_FILTER,
elems: [],
returned: [],
// Total number of "top" elements (does not include generics).
@@ -666,18 +762,15 @@ function initSearch(rawSearchIndex) {
*
* ident = *(ALPHA / DIGIT / "_")
* path = ident *(DOUBLE-COLON ident) [!]
- * arg = path [generics]
- * arg-without-generic = path
+ * arg = [type-filter *WS COLON *WS] path [generics]
* type-sep = COMMA/WS *(COMMA/WS)
* nonempty-arg-list = *(type-sep) arg *(type-sep arg) *(type-sep)
- * nonempty-arg-list-without-generics = *(type-sep) arg-without-generic
- * *(type-sep arg-without-generic) *(type-sep)
- * generics = OPEN-ANGLE-BRACKET [ nonempty-arg-list-without-generics ] *(type-sep)
- * CLOSE-ANGLE-BRACKET/EOF
+ * generics = OPEN-ANGLE-BRACKET [ nonempty-arg-list ] *(type-sep)
+ * CLOSE-ANGLE-BRACKET
* return-args = RETURN-ARROW *(type-sep) nonempty-arg-list
*
* exact-search = [type-filter *WS COLON] [ RETURN-ARROW ] *WS QUOTE ident QUOTE [ generics ]
- * type-search = [type-filter *WS COLON] [ nonempty-arg-list ] [ return-args ]
+ * type-search = [ nonempty-arg-list ] [ return-args ]
*
* query = *WS (exact-search / type-search) *WS
*
@@ -726,6 +819,20 @@ function initSearch(rawSearchIndex) {
* @return {ParsedQuery} - The parsed query
*/
function parseQuery(userQuery) {
+ function convertTypeFilterOnElem(elem) {
+ if (elem.typeFilter !== null) {
+ let typeFilter = elem.typeFilter;
+ if (typeFilter === "const") {
+ typeFilter = "constant";
+ }
+ elem.typeFilter = itemTypeFromName(typeFilter);
+ } else {
+ elem.typeFilter = NO_TYPE_FILTER;
+ }
+ for (const elem2 of elem.generics) {
+ convertTypeFilterOnElem(elem2);
+ }
+ }
userQuery = userQuery.trim();
const parserState = {
length: userQuery.length,
@@ -740,17 +847,15 @@ function initSearch(rawSearchIndex) {
try {
parseInput(query, parserState);
- if (parserState.typeFilter !== null) {
- let typeFilter = parserState.typeFilter;
- if (typeFilter === "const") {
- typeFilter = "constant";
- }
- query.typeFilter = itemTypeFromName(typeFilter);
+ for (const elem of query.elems) {
+ convertTypeFilterOnElem(elem);
+ }
+ for (const elem of query.returned) {
+ convertTypeFilterOnElem(elem);
}
} catch (err) {
query = newParsedQuery(userQuery);
query.error = err;
- query.typeFilter = -1;
return query;
}
@@ -793,26 +898,34 @@ function initSearch(rawSearchIndex) {
* @return {ResultsTable}
*/
function execQuery(parsedQuery, searchWords, filterCrates, currentCrate) {
- const results_others = {}, results_in_args = {}, results_returned = {};
+ const results_others = new Map(), results_in_args = new Map(),
+ results_returned = new Map();
+ /**
+ * Add extra data to result objects, and filter items that have been
+ * marked for removal.
+ *
+ * @param {[ResultObject]} results
+ * @returns {[ResultObject]}
+ */
function transformResults(results) {
- const duplicates = {};
+ const duplicates = new Set();
const out = [];
for (const result of results) {
if (result.id > -1) {
const obj = searchIndex[result.id];
- obj.lev = result.lev;
+ obj.dist = result.dist;
const res = buildHrefAndPath(obj);
obj.displayPath = pathSplitter(res[0]);
obj.fullPath = obj.displayPath + obj.name;
// To make sure some items aren't considered duplicates.
obj.fullPath += "|" + obj.ty;
- if (duplicates[obj.fullPath]) {
+ if (duplicates.has(obj.fullPath)) {
continue;
}
- duplicates[obj.fullPath] = true;
+ duplicates.add(obj.fullPath);
obj.href = res[1];
out.push(obj);
@@ -824,24 +937,30 @@ function initSearch(rawSearchIndex) {
return out;
}
+ /**
+ * This function takes a result map, and sorts it by various criteria, including edit
+ * distance, substring match, and the crate it comes from.
+ *
+ * @param {Results} results
+ * @param {boolean} isType
+ * @param {string} preferredCrate
+ * @returns {[ResultObject]}
+ */
function sortResults(results, isType, preferredCrate) {
- const userQuery = parsedQuery.userQuery;
- const ar = [];
- for (const entry in results) {
- if (hasOwnPropertyRustdoc(results, entry)) {
- const result = results[entry];
- result.word = searchWords[result.id];
- result.item = searchIndex[result.id] || {};
- ar.push(result);
- }
- }
- results = ar;
// if there are no results then return to default and fail
- if (results.length === 0) {
+ if (results.size === 0) {
return [];
}
- results.sort((aaa, bbb) => {
+ const userQuery = parsedQuery.userQuery;
+ const result_list = [];
+ for (const result of results.values()) {
+ result.word = searchWords[result.id];
+ result.item = searchIndex[result.id] || {};
+ result_list.push(result);
+ }
+
+ result_list.sort((aaa, bbb) => {
let a, b;
// sort by exact match with regard to the last word (mismatch goes later)
@@ -860,8 +979,8 @@ function initSearch(rawSearchIndex) {
// Sort by distance in the path part, if specified
// (less changes required to match means higher rankings)
- a = aaa.path_lev;
- b = bbb.path_lev;
+ a = aaa.path_dist;
+ b = bbb.path_dist;
if (a !== b) {
return a - b;
}
@@ -875,8 +994,15 @@ function initSearch(rawSearchIndex) {
// Sort by distance in the name part, the last part of the path
// (less changes required to match means higher rankings)
- a = (aaa.lev);
- b = (bbb.lev);
+ a = (aaa.dist);
+ b = (bbb.dist);
+ if (a !== b) {
+ return a - b;
+ }
+
+ // sort deprecated items later
+ a = aaa.item.deprecated;
+ b = bbb.item.deprecated;
if (a !== b) {
return a - b;
}
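The new tie-breaker above compares `aaa.item.deprecated` with `bbb.item.deprecated`; those values are booleans (or null for crate rows), so the shared `a - b` return relies on JavaScript coercing false to 0 and true to 1, sorting deprecated items later when every earlier criterion ties. A minimal sketch with invented sample data:

const sample = [
    {name: "join", deprecated: true},
    {name: "join", deprecated: false},
];
sample.sort((x, y) => x.deprecated - y.deprecated);
console.log(sample.map(r => r.deprecated)); // [false, true]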
@@ -943,7 +1069,7 @@ function initSearch(rawSearchIndex) {
nameSplit = hasPath ? null : parsedQuery.elems[0].path;
}
- for (const result of results) {
+ for (const result of result_list) {
// this validation does not make sense when searching by types
if (result.dontValidate) {
continue;
@@ -956,72 +1082,87 @@ function initSearch(rawSearchIndex) {
result.id = -1;
}
}
- return transformResults(results);
+ return transformResults(result_list);
}
/**
* This function checks if the object (`row`) generics match the given type (`elem`)
- * generics. If there are no generics on `row`, `defaultLev` is returned.
+ * generics. If there are no generics on `row`, `defaultDistance` is returned.
*
- * @param {Row} row - The object to check.
- * @param {QueryElement} elem - The element from the parsed query.
- * @param {integer} defaultLev - This is the value to return in case there are no generics.
+ * @param {Row} row - The object to check.
+ * @param {QueryElement} elem - The element from the parsed query.
+ * @param {integer} defaultDistance - This is the value to return in case there are no
+ * generics.
*
- * @return {integer} - Returns the best match (if any) or `maxLevDistance + 1`.
+ * @return {integer} - Returns the best match (if any) or `maxEditDistance + 1`.
*/
- function checkGenerics(row, elem, defaultLev, maxLevDistance) {
+ function checkGenerics(row, elem, defaultDistance, maxEditDistance) {
if (row.generics.length === 0) {
- return elem.generics.length === 0 ? defaultLev : maxLevDistance + 1;
+ return elem.generics.length === 0 ? defaultDistance : maxEditDistance + 1;
} else if (row.generics.length > 0 && row.generics[0].name === null) {
- return checkGenerics(row.generics[0], elem, defaultLev, maxLevDistance);
+ return checkGenerics(row.generics[0], elem, defaultDistance, maxEditDistance);
}
// The names match, but we need to be sure that all generics kinda
// match as well.
- let elem_name;
if (elem.generics.length > 0 && row.generics.length >= elem.generics.length) {
- const elems = Object.create(null);
+ const elems = new Map();
for (const entry of row.generics) {
- elem_name = entry.name;
- if (elem_name === "") {
+ if (entry.name === "") {
// Pure generic, needs to check into it.
- if (checkGenerics(entry, elem, maxLevDistance + 1, maxLevDistance) !== 0) {
- return maxLevDistance + 1;
+ if (checkGenerics(entry, elem, maxEditDistance + 1, maxEditDistance)
+ !== 0) {
+ return maxEditDistance + 1;
}
continue;
}
- if (elems[elem_name] === undefined) {
- elems[elem_name] = 0;
+ let currentEntryElems;
+ if (elems.has(entry.name)) {
+ currentEntryElems = elems.get(entry.name);
+ } else {
+ currentEntryElems = [];
+ elems.set(entry.name, currentEntryElems);
}
- elems[elem_name] += 1;
+ currentEntryElems.push(entry);
}
// We need to find the type that matches the most to remove it in order
// to move forward.
- for (const generic of elem.generics) {
- let match = null;
- if (elems[generic.name]) {
- match = generic.name;
- } else {
- for (elem_name in elems) {
- if (!hasOwnPropertyRustdoc(elems, elem_name)) {
- continue;
- }
- if (elem_name === generic) {
- match = elem_name;
- break;
- }
+ const handleGeneric = generic => {
+ if (!elems.has(generic.name)) {
+ return false;
+ }
+ const matchElems = elems.get(generic.name);
+ const matchIdx = matchElems.findIndex(tmp_elem => {
+ if (checkGenerics(tmp_elem, generic, 0, maxEditDistance) !== 0) {
+ return false;
}
+ return typePassesFilter(generic.typeFilter, tmp_elem.ty);
+ });
+ if (matchIdx === -1) {
+ return false;
+ }
+ matchElems.splice(matchIdx, 1);
+ if (matchElems.length === 0) {
+ elems.delete(generic.name);
}
- if (match === null) {
- return maxLevDistance + 1;
+ return true;
+ };
+ // To do the right thing with type filters, we first process generics
+ // that have them, removing matching ones from the "bag," then do the
+ // ones with no type filter, which can match any entry regardless of its
+ // own type.
+ for (const generic of elem.generics) {
+ if (generic.typeFilter !== -1 && !handleGeneric(generic)) {
+ return maxEditDistance + 1;
}
- elems[match] -= 1;
- if (elems[match] === 0) {
- delete elems[match];
+ }
+ for (const generic of elem.generics) {
+ if (generic.typeFilter === -1 && !handleGeneric(generic)) {
+ return maxEditDistance + 1;
}
}
return 0;
}
- return maxLevDistance + 1;
+ return maxEditDistance + 1;
}
/**
@@ -1031,17 +1172,17 @@ function initSearch(rawSearchIndex) {
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
*
- * @return {integer} - Returns a Levenshtein distance to the best match.
+ * @return {integer} - Returns an edit distance to the best match.
*/
- function checkIfInGenerics(row, elem, maxLevDistance) {
- let lev = maxLevDistance + 1;
+ function checkIfInGenerics(row, elem, maxEditDistance) {
+ let dist = maxEditDistance + 1;
for (const entry of row.generics) {
- lev = Math.min(checkType(entry, elem, true, maxLevDistance), lev);
- if (lev === 0) {
+ dist = Math.min(checkType(entry, elem, true, maxEditDistance), dist);
+ if (dist === 0) {
break;
}
}
- return lev;
+ return dist;
}
/**
@@ -1052,67 +1193,73 @@ function initSearch(rawSearchIndex) {
* @param {QueryElement} elem - The element from the parsed query.
* @param {boolean} literalSearch
*
- * @return {integer} - Returns a Levenshtein distance to the best match. If there is
- * no match, returns `maxLevDistance + 1`.
+ * @return {integer} - Returns an edit distance to the best match. If there is
+ * no match, returns `maxEditDistance + 1`.
*/
- function checkType(row, elem, literalSearch, maxLevDistance) {
+ function checkType(row, elem, literalSearch, maxEditDistance) {
if (row.name === null) {
// This is a pure "generic" search, no need to run other checks.
if (row.generics.length > 0) {
- return checkIfInGenerics(row, elem, maxLevDistance);
+ return checkIfInGenerics(row, elem, maxEditDistance);
}
- return maxLevDistance + 1;
+ return maxEditDistance + 1;
}
- let lev = levenshtein(row.name, elem.name);
+ let dist;
+ if (typePassesFilter(elem.typeFilter, row.ty)) {
+ dist = editDistance(row.name, elem.name, maxEditDistance);
+ } else {
+ dist = maxEditDistance + 1;
+ }
if (literalSearch) {
- if (lev !== 0) {
+ if (dist !== 0) {
// The name didn't match, let's try to check if the generics do.
if (elem.generics.length === 0) {
const checkGeneric = row.generics.length > 0;
if (checkGeneric && row.generics
- .findIndex(tmp_elem => tmp_elem.name === elem.name) !== -1) {
+ .findIndex(tmp_elem => tmp_elem.name === elem.name &&
+ typePassesFilter(elem.typeFilter, tmp_elem.ty)) !== -1) {
return 0;
}
}
- return maxLevDistance + 1;
+ return maxEditDistance + 1;
} else if (elem.generics.length > 0) {
- return checkGenerics(row, elem, maxLevDistance + 1, maxLevDistance);
+ return checkGenerics(row, elem, maxEditDistance + 1, maxEditDistance);
}
return 0;
} else if (row.generics.length > 0) {
if (elem.generics.length === 0) {
- if (lev === 0) {
+ if (dist === 0) {
return 0;
}
// The name didn't match so we now check if the type we're looking for is inside
// the generics!
- lev = Math.min(lev, checkIfInGenerics(row, elem, maxLevDistance));
- return lev;
- } else if (lev > maxLevDistance) {
+ dist = Math.min(dist, checkIfInGenerics(row, elem, maxEditDistance));
+ return dist;
+ } else if (dist > maxEditDistance) {
// So our item's name doesn't match at all and has generics.
//
// Maybe it's present in a sub generic? For example "f<A<B<C>>>()", if we're
// looking for "B<C>", we'll need to go down.
- return checkIfInGenerics(row, elem, maxLevDistance);
+ return checkIfInGenerics(row, elem, maxEditDistance);
} else {
// At this point, the name kinda match and we have generics to check, so
// let's go!
- const tmp_lev = checkGenerics(row, elem, lev, maxLevDistance);
- if (tmp_lev > maxLevDistance) {
- return maxLevDistance + 1;
+ const tmp_dist = checkGenerics(row, elem, dist, maxEditDistance);
+ if (tmp_dist > maxEditDistance) {
+ return maxEditDistance + 1;
}
// We compute the median value of both checks and return it.
- return (tmp_lev + lev) / 2;
+ return (tmp_dist + dist) / 2;
}
} else if (elem.generics.length > 0) {
// In this case, we were expecting generics but there isn't so we simply reject this
// one.
- return maxLevDistance + 1;
+ return maxEditDistance + 1;
}
// No generics on our query or on the target type so we can return without doing
// anything else.
- return lev;
+ return dist;
}
/**
@@ -1120,29 +1267,42 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
- * @param {integer} typeFilter
+ * @param {integer} maxEditDistance
+ * @param {Array<integer>} skipPositions - Do not return one of these positions.
*
- * @return {integer} - Returns a Levenshtein distance to the best match. If there is no
- * match, returns `maxLevDistance + 1`.
+ * @return {dist: integer, position: integer} - Returns an edit distance to the best match.
+ * If there is no match, returns
+ * `maxEditDistance + 1` and position: -1.
*/
- function findArg(row, elem, typeFilter, maxLevDistance) {
- let lev = maxLevDistance + 1;
+ function findArg(row, elem, maxEditDistance, skipPositions) {
+ let dist = maxEditDistance + 1;
+ let position = -1;
if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
+ let i = 0;
for (const input of row.type.inputs) {
- if (!typePassesFilter(typeFilter, input.ty)) {
+ if (skipPositions.indexOf(i) !== -1) {
+ i += 1;
continue;
}
- lev = Math.min(
- lev,
- checkType(input, elem, parsedQuery.literalSearch, maxLevDistance)
+ const typeDist = checkType(
+ input,
+ elem,
+ parsedQuery.literalSearch,
+ maxEditDistance
);
- if (lev === 0) {
- return 0;
+ if (typeDist === 0) {
+ return {dist: 0, position: i};
}
+ if (typeDist < dist) {
+ dist = typeDist;
+ position = i;
+ }
+ i += 1;
}
}
- return parsedQuery.literalSearch ? maxLevDistance + 1 : lev;
+ dist = parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
+ return {dist, position};
}
/**
@@ -1150,37 +1310,50 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
- * @param {integer} typeFilter
+ * @param {integer} maxEditDistance
+ * @param {Array<integer>} skipPositions - Do not return one of these positions.
*
- * @return {integer} - Returns a Levenshtein distance to the best match. If there is no
- * match, returns `maxLevDistance + 1`.
+ * @return {dist: integer, position: integer} - Returns an edit distance to the best match.
+ * If there is no match, returns
+ * `maxEditDistance + 1` and position: -1.
*/
- function checkReturned(row, elem, typeFilter, maxLevDistance) {
- let lev = maxLevDistance + 1;
+ function checkReturned(row, elem, maxEditDistance, skipPositions) {
+ let dist = maxEditDistance + 1;
+ let position = -1;
if (row && row.type && row.type.output.length > 0) {
const ret = row.type.output;
+ let i = 0;
for (const ret_ty of ret) {
- if (!typePassesFilter(typeFilter, ret_ty.ty)) {
+ if (skipPositions.indexOf(i) !== -1) {
+ i += 1;
continue;
}
- lev = Math.min(
- lev,
- checkType(ret_ty, elem, parsedQuery.literalSearch, maxLevDistance)
+ const typeDist = checkType(
+ ret_ty,
+ elem,
+ parsedQuery.literalSearch,
+ maxEditDistance
);
- if (lev === 0) {
- return 0;
+ if (typeDist === 0) {
+ return {dist: 0, position: i};
}
+ if (typeDist < dist) {
+ dist = typeDist;
+ position = i;
+ }
+ i += 1;
}
}
- return parsedQuery.literalSearch ? maxLevDistance + 1 : lev;
+ dist = parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
+ return {dist, position};
}
- function checkPath(contains, ty, maxLevDistance) {
+ function checkPath(contains, ty, maxEditDistance) {
if (contains.length === 0) {
return 0;
}
- let ret_lev = maxLevDistance + 1;
+ let ret_dist = maxEditDistance + 1;
const path = ty.path.split("::");
if (ty.parent && ty.parent.name) {
@@ -1190,27 +1363,27 @@ function initSearch(rawSearchIndex) {
const length = path.length;
const clength = contains.length;
if (clength > length) {
- return maxLevDistance + 1;
+ return maxEditDistance + 1;
}
for (let i = 0; i < length; ++i) {
if (i + clength > length) {
break;
}
- let lev_total = 0;
+ let dist_total = 0;
let aborted = false;
for (let x = 0; x < clength; ++x) {
- const lev = levenshtein(path[i + x], contains[x]);
- if (lev > maxLevDistance) {
+ const dist = editDistance(path[i + x], contains[x], maxEditDistance);
+ if (dist > maxEditDistance) {
aborted = true;
break;
}
- lev_total += lev;
+ dist_total += dist;
}
if (!aborted) {
- ret_lev = Math.min(ret_lev, Math.round(lev_total / clength));
+ ret_dist = Math.min(ret_dist, Math.round(dist_total / clength));
}
}
- return ret_lev;
+ return ret_dist;
}
function typePassesFilter(filter, type) {
@@ -1244,6 +1417,7 @@ function initSearch(rawSearchIndex) {
parent: item.parent,
type: item.type,
is_alias: true,
+ deprecated: item.deprecated,
};
}
@@ -1254,22 +1428,22 @@ function initSearch(rawSearchIndex) {
const aliases = [];
const crateAliases = [];
if (filterCrates !== null) {
- if (ALIASES[filterCrates] && ALIASES[filterCrates][lowerQuery]) {
- const query_aliases = ALIASES[filterCrates][lowerQuery];
+ if (ALIASES.has(filterCrates) && ALIASES.get(filterCrates).has(lowerQuery)) {
+ const query_aliases = ALIASES.get(filterCrates).get(lowerQuery);
for (const alias of query_aliases) {
aliases.push(createAliasFromItem(searchIndex[alias]));
}
}
} else {
- Object.keys(ALIASES).forEach(crate => {
- if (ALIASES[crate][lowerQuery]) {
+ for (const [crate, crateAliasesIndex] of ALIASES) {
+ if (crateAliasesIndex.has(lowerQuery)) {
const pushTo = crate === currentCrate ? crateAliases : aliases;
- const query_aliases = ALIASES[crate][lowerQuery];
+ const query_aliases = crateAliasesIndex.get(lowerQuery);
for (const alias of query_aliases) {
pushTo.push(createAliasFromItem(searchIndex[alias]));
}
}
- });
+ }
}
const sortFunc = (aaa, bbb) => {
@@ -1304,41 +1478,41 @@ function initSearch(rawSearchIndex) {
* This function adds the given result into the provided `results` map if it matches the
* following condition:
*
- * * If it is a "literal search" (`parsedQuery.literalSearch`), then `lev` must be 0.
- * * If it is not a "literal search", `lev` must be <= `maxLevDistance`.
+ * * If it is a "literal search" (`parsedQuery.literalSearch`), then `dist` must be 0.
+ * * If it is not a "literal search", `dist` must be <= `maxEditDistance`.
*
* The `results` map contains information which will be used to sort the search results:
*
* * `fullId` is a `string` used as the key of the object we use for the `results` map.
* * `id` is the index in both `searchWords` and `searchIndex` arrays for this element.
* * `index` is an `integer` used to sort by the position of the word in the item's name.
- * * `lev` is the main metric used to sort the search results.
- * * `path_lev` is zero if a single-component search query is used, otherwise it's the
+ * * `dist` is the main metric used to sort the search results.
+ * * `path_dist` is zero if a single-component search query is used, otherwise it's the
* distance computed for everything other than the last path component.
*
* @param {Results} results
* @param {string} fullId
* @param {integer} id
* @param {integer} index
- * @param {integer} lev
- * @param {integer} path_lev
+ * @param {integer} dist
+ * @param {integer} path_dist
*/
- function addIntoResults(results, fullId, id, index, lev, path_lev, maxLevDistance) {
- const inBounds = lev <= maxLevDistance || index !== -1;
- if (lev === 0 || (!parsedQuery.literalSearch && inBounds)) {
- if (results[fullId] !== undefined) {
- const result = results[fullId];
- if (result.dontValidate || result.lev <= lev) {
+ function addIntoResults(results, fullId, id, index, dist, path_dist, maxEditDistance) {
+ const inBounds = dist <= maxEditDistance || index !== -1;
+ if (dist === 0 || (!parsedQuery.literalSearch && inBounds)) {
+ if (results.has(fullId)) {
+ const result = results.get(fullId);
+ if (result.dontValidate || result.dist <= dist) {
return;
}
}
- results[fullId] = {
+ results.set(fullId, {
id: id,
index: index,
dontValidate: parsedQuery.literalSearch,
- lev: lev,
- path_lev: path_lev,
- };
+ dist: dist,
+ path_dist: path_dist,
+ });
}
}
@@ -1346,7 +1520,7 @@ function initSearch(rawSearchIndex) {
* This function is called in case the query is only one element (with or without generics).
* This element will be compared to arguments' and returned values' items and also to items.
*
- * Other important thing to note: since there is only one element, we use levenshtein
+ * Another important thing to note: since there is only one element, we use edit
* distance for name comparisons.
*
* @param {Row} row
@@ -1364,24 +1538,24 @@ function initSearch(rawSearchIndex) {
results_others,
results_in_args,
results_returned,
- maxLevDistance
+ maxEditDistance
) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
}
- let lev, index = -1, path_lev = 0;
+ let dist, index = -1, path_dist = 0;
const fullId = row.id;
const searchWord = searchWords[pos];
- const in_args = findArg(row, elem, parsedQuery.typeFilter, maxLevDistance);
- const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxLevDistance);
+ const in_args = findArg(row, elem, maxEditDistance, []);
+ const returned = checkReturned(row, elem, maxEditDistance, []);
- // path_lev is 0 because no parent path information is currently stored
+ // path_dist is 0 because no parent path information is currently stored
// in the search index
- addIntoResults(results_in_args, fullId, pos, -1, in_args, 0, maxLevDistance);
- addIntoResults(results_returned, fullId, pos, -1, returned, 0, maxLevDistance);
+ addIntoResults(results_in_args, fullId, pos, -1, in_args.dist, 0, maxEditDistance);
+ addIntoResults(results_returned, fullId, pos, -1, returned.dist, 0, maxEditDistance);
- if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
+ if (!typePassesFilter(elem.typeFilter, row.ty)) {
return;
}
@@ -1403,34 +1577,34 @@ function initSearch(rawSearchIndex) {
// No need to check anything else if it's a "pure" generics search.
if (elem.name.length === 0) {
if (row.type !== null) {
- lev = checkGenerics(row.type, elem, maxLevDistance + 1, maxLevDistance);
- // path_lev is 0 because we know it's empty
- addIntoResults(results_others, fullId, pos, index, lev, 0, maxLevDistance);
+ dist = checkGenerics(row.type, elem, maxEditDistance + 1, maxEditDistance);
+ // path_dist is 0 because we know it's empty
+ addIntoResults(results_others, fullId, pos, index, dist, 0, maxEditDistance);
}
return;
}
if (elem.fullPath.length > 1) {
- path_lev = checkPath(elem.pathWithoutLast, row, maxLevDistance);
- if (path_lev > maxLevDistance) {
+ path_dist = checkPath(elem.pathWithoutLast, row, maxEditDistance);
+ if (path_dist > maxEditDistance) {
return;
}
}
if (parsedQuery.literalSearch) {
if (searchWord === elem.name) {
- addIntoResults(results_others, fullId, pos, index, 0, path_lev);
+ addIntoResults(results_others, fullId, pos, index, 0, path_dist);
}
return;
}
- lev = levenshtein(searchWord, elem.pathLast);
+ dist = editDistance(searchWord, elem.pathLast, maxEditDistance);
- if (index === -1 && lev + path_lev > maxLevDistance) {
+ if (index === -1 && dist + path_dist > maxEditDistance) {
return;
}
- addIntoResults(results_others, fullId, pos, index, lev, path_lev, maxLevDistance);
+ addIntoResults(results_others, fullId, pos, index, dist, path_dist, maxEditDistance);
}
/**
@@ -1442,22 +1616,29 @@ function initSearch(rawSearchIndex) {
* @param {integer} pos - Position in the `searchIndex`.
* @param {Object} results
*/
- function handleArgs(row, pos, results, maxLevDistance) {
+ function handleArgs(row, pos, results, maxEditDistance) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
}
- let totalLev = 0;
- let nbLev = 0;
+ let totalDist = 0;
+ let nbDist = 0;
// If the result is too "bad", we return false and it ends this search.
function checkArgs(elems, callback) {
+ const skipPositions = [];
for (const elem of elems) {
// There is more than one parameter to the query so all checks should be "exact"
- const lev = callback(row, elem, NO_TYPE_FILTER, maxLevDistance);
- if (lev <= 1) {
- nbLev += 1;
- totalLev += lev;
+ const { dist, position } = callback(
+ row,
+ elem,
+ maxEditDistance,
+ skipPositions
+ );
+ if (dist <= 1) {
+ nbDist += 1;
+ totalDist += dist;
+ skipPositions.push(position);
} else {
return false;
}
@@ -1471,11 +1652,11 @@ function initSearch(rawSearchIndex) {
return;
}
- if (nbLev === 0) {
+ if (nbDist === 0) {
return;
}
- const lev = Math.round(totalLev / nbLev);
- addIntoResults(results, row.id, pos, 0, lev, 0, maxLevDistance);
+ const dist = Math.round(totalDist / nbDist);
+ addIntoResults(results, row.id, pos, 0, dist, 0, maxEditDistance);
}
function innerRunQuery() {
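checkArgs above now records which argument or return position each query element matched (`skipPositions`) and passes that list to later callback calls, so two query elements cannot both claim the same position. A hedged illustration with plain strings instead of Row/QueryElement objects; `closestUnclaimed` and the sample data are invented for the example, and `editDistance` is the helper defined earlier in this file:

function closestUnclaimed(query, args, skipPositions, limit) {
    let best = {dist: limit + 1, position: -1};
    args.forEach((arg, i) => {
        if (skipPositions.includes(i)) {
            return; // already claimed by an earlier query element
        }
        const dist = editDistance(arg, query, limit);
        if (dist < best.dist) {
            best = {dist, position: i};
        }
    });
    return best;
}

const args = ["usize", "str", "string"];
const skipPositions = [];
for (const query of ["string", "str"]) {
    const {dist, position} = closestUnclaimed(query, args, skipPositions, 2);
    if (dist <= 1) {
        skipPositions.push(position);
    }
}
console.log(skipPositions); // [2, 1]: "string" claims index 2, so "str" falls back to index 1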
@@ -1488,7 +1669,7 @@ function initSearch(rawSearchIndex) {
for (const elem of parsedQuery.returned) {
queryLen += elem.name.length;
}
- const maxLevDistance = Math.floor(queryLen / 3);
+ const maxEditDistance = Math.floor(queryLen / 3);
if (parsedQuery.foundElems === 1) {
if (parsedQuery.elems.length === 1) {
@@ -1503,7 +1684,7 @@ function initSearch(rawSearchIndex) {
results_others,
results_in_args,
results_returned,
- maxLevDistance
+ maxEditDistance
);
}
} else if (parsedQuery.returned.length === 1) {
@@ -1514,15 +1695,22 @@ function initSearch(rawSearchIndex) {
in_returned = checkReturned(
row,
elem,
- parsedQuery.typeFilter,
- maxLevDistance
+ maxEditDistance,
+ []
+ );
+ addIntoResults(
+ results_others,
+ row.id,
+ i,
+ -1,
+ in_returned.dist,
+ maxEditDistance
);
- addIntoResults(results_others, row.id, i, -1, in_returned, maxLevDistance);
}
}
} else if (parsedQuery.foundElems > 0) {
for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
- handleArgs(searchIndex[i], i, results_others, maxLevDistance);
+ handleArgs(searchIndex[i], i, results_others, maxEditDistance);
}
}
}
@@ -1560,7 +1748,7 @@ function initSearch(rawSearchIndex) {
*
* @return {boolean} - Whether the result is valid or not
*/
- function validateResult(name, path, keys, parent, maxLevDistance) {
+ function validateResult(name, path, keys, parent, maxEditDistance) {
if (!keys || !keys.length) {
return true;
}
@@ -1574,8 +1762,8 @@ function initSearch(rawSearchIndex) {
// next if there is a parent, check for exact parent match
(parent !== undefined && parent.name !== undefined &&
parent.name.toLowerCase().indexOf(key) > -1) ||
- // lastly check to see if the name was a levenshtein match
- levenshtein(name, key) <= maxLevDistance)) {
+ // lastly check to see if the name was an editDistance match
+ editDistance(name, key, maxEditDistance) <= maxEditDistance)) {
return false;
}
}
@@ -1762,11 +1950,7 @@ function initSearch(rawSearchIndex) {
function showResults(results, go_to_first, filterCrates) {
const search = searchState.outputElement();
if (go_to_first || (results.others.length === 1
- && getSettingValue("go-to-only-result") === "true"
- // By default, the search DOM element is "empty" (meaning it has no children not
- // text content). Once a search has been run, it won't be empty, even if you press
- // ESC or empty the search input (which also "cancels" the search).
- && (!search.firstChild || search.firstChild.innerText !== searchState.loadingText))
+ && getSettingValue("go-to-only-result") === "true")
) {
const elem = document.createElement("a");
elem.href = results.others[0].href;
@@ -2064,10 +2248,11 @@ function initSearch(rawSearchIndex) {
* n: Array<string>,
* t: String,
* d: Array<string>,
- * q: Array<string>,
+ * q: Array<[Number, string]>,
* i: Array<Number>,
* f: Array<RawFunctionSearchType>,
* p: Array<Object>,
+ * c: Array<Number>
* }}
*/
const crateCorpus = rawSearchIndex[crate];
@@ -2086,6 +2271,7 @@ function initSearch(rawSearchIndex) {
type: null,
id: id,
normalizedName: crate.indexOf("_") === -1 ? crate : crate.replace(/_/g, ""),
+ deprecated: null,
};
id += 1;
searchIndex.push(crateRow);
@@ -2095,14 +2281,20 @@ function initSearch(rawSearchIndex) {
const itemTypes = crateCorpus.t;
// an array of (String) item names
const itemNames = crateCorpus.n;
- // an array of (String) full paths (or empty string for previous path)
- const itemPaths = crateCorpus.q;
+ // an array of [(Number) item index,
+ // (String) full path]
+ // an item whose index is not present will fall back to the previous present path
+ // i.e. if indices 4 and 11 are present, but 5-10 and 12-13 are not present,
+ // 5-10 will fall back to the path for 4 and 12-13 will fall back to the path for 11
+ const itemPaths = new Map(crateCorpus.q);
// an array of (String) descriptions
const itemDescs = crateCorpus.d;
// an array of (Number) the parent path index + 1 to `paths`, or 0 if none
const itemParentIdxs = crateCorpus.i;
// an array of (Object | null) the type of the function, if any
const itemFunctionSearchTypes = crateCorpus.f;
+ // an array of (Number) indices for the deprecated items
+ const deprecatedItems = new Set(crateCorpus.c);
// an array of [(Number) item type,
// (String) name]
const paths = crateCorpus.p;
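As the comment above describes, `crateCorpus.q` is now a sparse list of `[index, path]` pairs; any item whose index is absent reuses the most recent stored path. A small decoding sketch with invented data:

const q = [[0, "std::vec"], [3, "std::collections"]];
const itemPaths = new Map(q);
let lastPath = "";
const decoded = [];
for (let i = 0; i < 5; ++i) {
    lastPath = itemPaths.has(i) ? itemPaths.get(i) : lastPath;
    decoded.push(lastPath);
}
console.log(decoded);
// ["std::vec", "std::vec", "std::vec", "std::collections", "std::collections"]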
@@ -2142,12 +2334,13 @@ function initSearch(rawSearchIndex) {
crate: crate,
ty: itemTypes.charCodeAt(i) - charA,
name: itemNames[i],
- path: itemPaths[i] ? itemPaths[i] : lastPath,
+ path: itemPaths.has(i) ? itemPaths.get(i) : lastPath,
desc: itemDescs[i],
parent: itemParentIdxs[i] > 0 ? paths[itemParentIdxs[i] - 1] : undefined,
type: buildFunctionSearchType(itemFunctionSearchTypes[i], lowercasePaths),
id: id,
normalizedName: word.indexOf("_") === -1 ? word : word.replace(/_/g, ""),
+ deprecated: deprecatedItems.has(i),
};
id += 1;
searchIndex.push(row);
@@ -2156,17 +2349,22 @@ function initSearch(rawSearchIndex) {
}
if (aliases) {
- ALIASES[crate] = Object.create(null);
+ const currentCrateAliases = new Map();
+ ALIASES.set(crate, currentCrateAliases);
for (const alias_name in aliases) {
if (!hasOwnPropertyRustdoc(aliases, alias_name)) {
continue;
}
- if (!hasOwnPropertyRustdoc(ALIASES[crate], alias_name)) {
- ALIASES[crate][alias_name] = [];
+ let currentNameAliases;
+ if (currentCrateAliases.has(alias_name)) {
+ currentNameAliases = currentCrateAliases.get(alias_name);
+ } else {
+ currentNameAliases = [];
+ currentCrateAliases.set(alias_name, currentNameAliases);
}
for (const local_alias of aliases[alias_name]) {
- ALIASES[crate][alias_name].push(local_alias + currentIndex);
+ currentNameAliases.push(local_alias + currentIndex);
}
}
}
diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js
index 1cd552e7f..ebbe6c1ca 100644
--- a/src/librustdoc/html/static/js/settings.js
+++ b/src/librustdoc/html/static/js/settings.js
@@ -86,12 +86,8 @@
if (settingId === "theme") {
const useSystem = getSettingValue("use-system-theme");
if (useSystem === "true" || settingValue === null) {
- if (useSystem !== "false") {
- settingValue = "system preference";
- } else {
- // This is the default theme.
- settingValue = "light";
- }
+ // "light" is the default theme
+ settingValue = useSystem === "false" ? "light" : "system preference";
}
}
if (settingValue !== null && settingValue !== "null") {
diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js
index 6c0f03b5b..9aa755173 100644
--- a/src/librustdoc/html/static/js/source-script.js
+++ b/src/librustdoc/html/static/js/source-script.js
@@ -15,8 +15,13 @@ const NAME_OFFSET = 0;
const DIRS_OFFSET = 1;
const FILES_OFFSET = 2;
+// WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
+// If you update this line, then you also need to update the media query with the same
+// warning in rustdoc.css
+const RUSTDOC_MOBILE_BREAKPOINT = 700;
+
function closeSidebarIfMobile() {
- if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) {
+ if (window.innerWidth < RUSTDOC_MOBILE_BREAKPOINT) {
updateLocalStorage("source-sidebar-show", "false");
}
}
@@ -69,12 +74,10 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) {
function toggleSidebar() {
const child = this.parentNode.children[0];
if (child.innerText === ">") {
- window.rustdocMobileScrollLock();
addClass(document.documentElement, "source-sidebar-expanded");
child.innerText = "<";
updateLocalStorage("source-sidebar-show", "true");
} else {
- window.rustdocMobileScrollUnlock();
removeClass(document.documentElement, "source-sidebar-expanded");
child.innerText = ">";
updateLocalStorage("source-sidebar-show", "false");
diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js
index c72ac254f..93979a944 100644
--- a/src/librustdoc/html/static/js/storage.js
+++ b/src/librustdoc/html/static/js/storage.js
@@ -7,31 +7,15 @@
const darkThemes = ["dark", "ayu"];
window.currentTheme = document.getElementById("themeStyle");
-window.mainTheme = document.getElementById("mainThemeStyle");
-
-// WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
-// If you update this line, then you also need to update the media query with the same
-// warning in rustdoc.css
-window.RUSTDOC_MOBILE_BREAKPOINT = 700;
const settingsDataset = (function() {
const settingsElement = document.getElementById("default-settings");
- if (settingsElement === null) {
- return null;
- }
- const dataset = settingsElement.dataset;
- if (dataset === undefined) {
- return null;
- }
- return dataset;
+ return settingsElement && settingsElement.dataset ? settingsElement.dataset : null;
})();
function getSettingValue(settingName) {
const current = getCurrentValue(settingName);
- if (current !== null) {
- return current;
- }
- if (settingsDataset !== null) {
+ if (current === null && settingsDataset !== null) {
// See the comment for `default_settings.into_iter()` etc. in
// `Options::from_matches` in `librustdoc/config.rs`.
const def = settingsDataset[settingName.replace(/-/g,"_")];
@@ -39,31 +23,27 @@ function getSettingValue(settingName) {
return def;
}
}
- return null;
+ return current;
}
const localStoredTheme = getSettingValue("theme");
-const savedHref = [];
-
// eslint-disable-next-line no-unused-vars
function hasClass(elem, className) {
return elem && elem.classList && elem.classList.contains(className);
}
function addClass(elem, className) {
- if (!elem || !elem.classList) {
- return;
+ if (elem && elem.classList) {
+ elem.classList.add(className);
}
- elem.classList.add(className);
}
// eslint-disable-next-line no-unused-vars
function removeClass(elem, className) {
- if (!elem || !elem.classList) {
- return;
+ if (elem && elem.classList) {
+ elem.classList.remove(className);
}
- elem.classList.remove(className);
}
/**
@@ -73,10 +53,9 @@ function removeClass(elem, className) {
* @param {boolean} [reversed] - Whether to iterate in reverse
*/
function onEach(arr, func, reversed) {
- if (arr && arr.length > 0 && func) {
+ if (arr && arr.length > 0) {
if (reversed) {
- const length = arr.length;
- for (let i = length - 1; i >= 0; --i) {
+ for (let i = arr.length - 1; i >= 0; --i) {
if (func(arr[i])) {
return true;
}
@@ -102,6 +81,7 @@ function onEach(arr, func, reversed) {
* @param {function(?)} func - The callback
* @param {boolean} [reversed] - Whether to iterate in reverse
*/
+// eslint-disable-next-line no-unused-vars
function onEachLazy(lazyArray, func, reversed) {
return onEach(
Array.prototype.slice.call(lazyArray),
@@ -125,34 +105,40 @@ function getCurrentValue(name) {
}
}
-function switchTheme(styleElem, mainStyleElem, newThemeName, saveTheme) {
+// Get a value from the rustdoc-vars div, which is used to convey data from
+// Rust to the JS. If there is no such element, return null.
+const getVar = (function getVar(name) {
+ const el = document.getElementById("rustdoc-vars");
+ return el ? el.attributes["data-" + name].value : null;
+});
+
+function switchTheme(newThemeName, saveTheme) {
// If this new value comes from a system setting or from the previously
// saved theme, no need to save it.
if (saveTheme) {
updateLocalStorage("theme", newThemeName);
}
- if (savedHref.length === 0) {
- onEachLazy(document.getElementsByTagName("link"), el => {
- savedHref.push(el.href);
- });
+ let newHref;
+
+ if (newThemeName === "light" || newThemeName === "dark" || newThemeName === "ayu") {
+ newHref = getVar("static-root-path") + getVar("theme-" + newThemeName + "-css");
+ } else {
+ newHref = getVar("root-path") + newThemeName + getVar("resource-suffix") + ".css";
}
- const newHref = savedHref.find(url => {
- const m = url.match(/static\.files\/(.*)-[a-f0-9]{16}\.css$/);
- if (m && m[1] === newThemeName) {
- return true;
- }
- const m2 = url.match(/\/([^/]*)\.css$/);
- if (m2 && m2[1].startsWith(newThemeName)) {
- return true;
- }
- });
- if (newHref && newHref !== styleElem.href) {
- styleElem.href = newHref;
+
+ if (!window.currentTheme) {
+ document.write(`<link rel="stylesheet" id="themeStyle" href="${newHref}">`);
+ window.currentTheme = document.getElementById("themeStyle");
+ } else if (newHref !== window.currentTheme.href) {
+ window.currentTheme.href = newHref;
}
}
const updateTheme = (function() {
+ // only listen to (prefers-color-scheme: dark) because light is the default
+ const mql = window.matchMedia("(prefers-color-scheme: dark)");
+
/**
* Update the current theme to match whatever the current combination of
* * the preference for using the system theme
@@ -163,60 +149,23 @@ const updateTheme = (function() {
* … dictates that it should be.
*/
function updateTheme() {
- const use = (theme, saveTheme) => {
- switchTheme(window.currentTheme, window.mainTheme, theme, saveTheme);
- };
-
// maybe the user has disabled the setting in the meantime!
if (getSettingValue("use-system-theme") !== "false") {
const lightTheme = getSettingValue("preferred-light-theme") || "light";
const darkTheme = getSettingValue("preferred-dark-theme") || "dark";
+ updateLocalStorage("use-system-theme", "true");
- if (isDarkMode()) {
- use(darkTheme, true);
- } else {
- // prefers a light theme, or has no preference
- use(lightTheme, true);
- }
+ // use light theme if user prefers it, or has no preference
+ switchTheme(mql.matches ? darkTheme : lightTheme, true);
// note: we save the theme so that it doesn't suddenly change when
// the user disables "use-system-theme" and reloads the page or
// navigates to another page
} else {
- use(getSettingValue("theme"), false);
+ switchTheme(getSettingValue("theme"), false);
}
}
- // This is always updated below to a function () => bool.
- let isDarkMode;
-
- // Determine the function for isDarkMode, and if we have
- // `window.matchMedia`, set up an event listener on the preferred color
- // scheme.
- //
- // Otherwise, fall back to the prefers-color-scheme value CSS captured in
- // the "content" property.
- if (window.matchMedia) {
- // only listen to (prefers-color-scheme: dark) because light is the default
- const mql = window.matchMedia("(prefers-color-scheme: dark)");
-
- isDarkMode = () => mql.matches;
-
- if (mql.addEventListener) {
- mql.addEventListener("change", updateTheme);
- } else {
- // This is deprecated, see:
- // https://developer.mozilla.org/en-US/docs/Web/API/MediaQueryList/addListener
- mql.addListener(updateTheme);
- }
- } else {
- // fallback to the CSS computed value
- const cssContent = getComputedStyle(document.documentElement)
- .getPropertyValue("content");
- // (Note: the double-quotes come from that this is a CSS value, which
- // might be a length, string, etc.)
- const cssColorScheme = cssContent || "\"light\"";
- isDarkMode = () => (cssColorScheme === "\"dark\"");
- }
+ mql.addEventListener("change", updateTheme);
return updateTheme;
})();
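With `window.mainTheme` and the saved-href scan removed, switchTheme above derives stylesheet URLs from the `#rustdoc-vars` element that page.html renders (see the template change further down). A minimal sketch of that lookup, assuming it runs on a rustdoc page that includes the element; the hashed file name is invented for illustration:

// Mirrors getVar(): read build-time values straight from the data attributes.
const vars = document.getElementById("rustdoc-vars");
const staticRoot = vars.attributes["data-static-root-path"].value;
const darkCss = vars.attributes["data-theme-dark-css"].value;
console.log(staticRoot + darkCss); // e.g. "../static.files/dark-0123456789abcdef.css"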
diff --git a/src/librustdoc/html/templates/STYLE.md b/src/librustdoc/html/templates/STYLE.md
index fff65e3b5..0281b1c47 100644
--- a/src/librustdoc/html/templates/STYLE.md
+++ b/src/librustdoc/html/templates/STYLE.md
@@ -1,37 +1,38 @@
# Style for Templates
-This directory has templates in the [Tera templating language](teradoc), which is very
-similar to [Jinja2](jinjadoc) and [Django](djangodoc) templates, and also to [Askama](askamadoc).
+This directory has templates in the [Tera templating language][teradoc], which is very
+similar to [Jinja2][jinjadoc] and [Django][djangodoc] templates, and also to [Askama][askamadoc].
[teradoc]: https://tera.netlify.app/docs/#templates
-[jinjadoc]: https://jinja.palletsprojects.com/en/3.0.x/templates/
-[djangodoc]: https://docs.djangoproject.com/en/3.2/topics/templates/
-[askamadoc]: https://docs.rs/askama/0.10.5/askama/
+[jinjadoc]: https://jinja.palletsprojects.com/en/3.1.x/templates/
+[djangodoc]: https://docs.djangoproject.com/en/4.1/topics/templates/
+[askamadoc]: https://docs.rs/askama/latest/askama/
We want our rendered output to have as little unnecessary whitespace as
possible, so that pages load quickly. To achieve that we use Tera's
-[whitespace control] features. At the end of most lines, we put an empty comment
-tag with the whitespace control characters: `{#- -#}`. This causes all
-whitespace between the end of the line and the beginning of the next, including
-indentation, to be omitted on render. Sometimes we want to preserve a single
-space. In those cases we put the space at the end of the line, followed by
-`{# -#}`, which is a directive to remove following whitespace but not preceding.
-We also use the whitespace control characters in most instances of tags with
-control flow, for example `{%- if foo -%}`.
+[whitespace control] features. By default, whitespace characters are removed
+around Jinja tags (`{% %}` for example). At the end of most lines, we put an
+empty comment tag: `{# #}`. This causes all whitespace between the end of the
+line and the beginning of the next, including indentation, to be omitted on
+render. Sometimes we want to preserve a single space. In those cases we put the
+space at the end of the line, followed by `{#+ #}`, which is a directive to
+remove following whitespace but not preceding. We also use the whitespace
+control characters in most instances of tags with control flow, for example
+`{% if foo %}`.
[whitespace control]: https://tera.netlify.app/docs/#whitespace-control
We want our templates to be readable, so we use indentation and newlines
-liberally. We indent by four spaces after opening an HTML tag _or_ a Tera
+liberally. We indent by four spaces after opening an HTML tag _or_ a Jinja
tag. In most cases an HTML tag should be followed by a newline, but if the
tag has simple contents and fits with its close tag on a single line, the
contents don't necessarily need a new line.
-Tera templates support quite sophisticated control flow. To keep our templates
+Askama templates support quite sophisticated control flow. To keep our templates
simple and understandable, we use only a subset: `if` and `for`. In particular
-we avoid [assignments in the template logic](assignments) and [Tera
-macros](macros). This also may make things easier if we switch to a different
+we avoid [assignments in the template logic][assignments] and [Askama
+macros][macros]. This may also make things easier if we switch to a different
Jinja-style template system in the future.
-[assignments]: https://tera.netlify.app/docs/#assignments
-[macros]: https://tera.netlify.app/docs/#macros
+[assignments]: https://djc.github.io/askama/template_syntax.html#assignments
+[macros]: https://djc.github.io/askama/template_syntax.html#macros
diff --git a/src/librustdoc/html/templates/item_info.html b/src/librustdoc/html/templates/item_info.html
new file mode 100644
index 000000000..d2ea9bdae
--- /dev/null
+++ b/src/librustdoc/html/templates/item_info.html
@@ -0,0 +1,7 @@
+{% if !items.is_empty() %}
+ <span class="item-info"> {# #}
+ {% for item in items %}
+ {{item|safe}} {# #}
+ {% endfor %}
+ </span>
+{% endif %}
diff --git a/src/librustdoc/html/templates/item_union.html b/src/librustdoc/html/templates/item_union.html
new file mode 100644
index 000000000..a01457971
--- /dev/null
+++ b/src/librustdoc/html/templates/item_union.html
@@ -0,0 +1,23 @@
+<pre class="rust item-decl"><code>
+ {{ self.render_attributes_in_pre() | safe }}
+ {{ self.render_union() | safe }}
+</code></pre>
+{{ self.document() | safe }}
+{% if self.fields_iter().peek().is_some() %}
+ <h2 id="fields" class="fields small-section-header">
+ Fields<a href="#fields" class="anchor">§</a>
+ </h2>
+ {% for (field, ty) in self.fields_iter() %}
+ {% let name = field.name.expect("union field name") %}
+ <span id="structfield.{{ name }}" class="{{ ItemType::StructField }} small-section-header">
+ <a href="#structfield.{{ name }}" class="anchor field">§</a>
+ <code>{{ name }}: {{ self.print_ty(ty) | safe }}</code>
+ </span>
+ {% if let Some(stability_class) = self.stability_field(field) %}
+ <span class="stab {{ stability_class }}"></span>
+ {% endif %}
+ {{ self.document_field(field) | safe }}
+ {% endfor %}
+{% endif %}
+{{ self.render_assoc_items() | safe }}
+{{ self.document_type_layout() | safe }}
diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html
index 7690d8f25..9133f899a 100644
--- a/src/librustdoc/html/templates/page.html
+++ b/src/librustdoc/html/templates/page.html
@@ -1,148 +1,151 @@
-<!DOCTYPE html> {#- -#}
-<html lang="en"> {#- -#}
-<head> {#- -#}
- <meta charset="utf-8"> {#- -#}
- <meta name="viewport" content="width=device-width, initial-scale=1.0"> {#- -#}
- <meta name="generator" content="rustdoc"> {#- -#}
- <meta name="description" content="{{page.description}}"> {#- -#}
- <title>{{page.title}}</title> {#- -#}
- <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_serif_4_regular}}"> {#- -#}
- <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.fira_sans_regular}}"> {#- -#}
- <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.fira_sans_medium}}"> {#- -#}
- <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_code_pro_regular}}"> {#- -#}
- <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_serif_4_bold}}"> {#- -#}
- <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_code_pro_semibold}}"> {#- -#}
- <link rel="stylesheet" {# -#}
- href="{{static_root_path|safe}}{{files.normalize_css}}"> {#- -#}
- <link rel="stylesheet" {# -#}
- href="{{static_root_path|safe}}{{files.rustdoc_css}}" {# -#}
- id="mainThemeStyle"> {#- -#}
- <link rel="stylesheet" id="themeStyle" href="{{static_root_path|safe}}{{files.theme_light_css}}"> {#- -#}
- <link rel="stylesheet" disabled href="{{static_root_path|safe}}{{files.theme_dark_css}}"> {#- -#}
- <link rel="stylesheet" disabled href="{{static_root_path|safe}}{{files.theme_ayu_css}}"> {#- -#}
- {%- for theme in themes -%}
- <link rel="stylesheet" disabled href="{{page.root_path|safe}}{{theme}}{{page.resource_suffix}}.css"> {#- -#}
- {%- endfor -%}
- {%- if !layout.default_settings.is_empty() -%}
- <script id="default-settings" {# -#}
- {% for (k, v) in layout.default_settings %}
+<!DOCTYPE html> {# #}
+<html lang="en"> {# #}
+<head> {# #}
+ <meta charset="utf-8"> {# #}
+ <meta name="viewport" content="width=device-width, initial-scale=1.0"> {# #}
+ <meta name="generator" content="rustdoc"> {# #}
+ <meta name="description" content="{{page.description}}"> {# #}
+ <title>{{page.title}}</title> {# #}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_serif_4_regular}}"> {# #}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.fira_sans_regular}}"> {# #}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.fira_sans_medium}}"> {# #}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_code_pro_regular}}"> {# #}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_serif_4_bold}}"> {# #}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_code_pro_semibold}}"> {# #}
+ <link rel="stylesheet" {#+ #}
+ href="{{static_root_path|safe}}{{files.normalize_css}}"> {# #}
+ <link rel="stylesheet" {#+ #}
+ href="{{static_root_path|safe}}{{files.rustdoc_css}}" {#+ #}
+ id="mainThemeStyle"> {# #}
+ {% if !layout.default_settings.is_empty() %}
+ <script id="default-settings" {#+ #}
+ {%~ for (k, v) in layout.default_settings ~%}
data-{{k}}="{{v}}"
- {%- endfor -%}
- ></script> {#- -#}
- {%- endif -%}
- <script src="{{static_root_path|safe}}{{files.storage_js}}"></script> {#- -#}
- {%- if page.css_class.contains("crate") -%}
- <script defer src="{{page.root_path|safe}}crates{{page.resource_suffix}}.js"></script> {#- -#}
- {%- else if page.css_class == "source" -%}
- <script defer src="{{static_root_path|safe}}{{files.source_script_js}}"></script> {#- -#}
- <script defer src="{{page.root_path|safe}}source-files{{page.resource_suffix}}.js"></script> {#- -#}
- {%- else if !page.css_class.contains("mod") -%}
- <script defer src="sidebar-items{{page.resource_suffix}}.js"></script> {#- -#}
- {%- endif -%}
- <script defer src="{{static_root_path|safe}}{{files.main_js}}"></script> {#- -#}
- {%- if layout.scrape_examples_extension -%}
- <script defer src="{{static_root_path|safe}}{{files.scrape_examples_js}}"></script> {#- -#}
- {%- endif -%}
- <noscript> {#- -#}
- <link rel="stylesheet" {# -#}
- href="{{static_root_path|safe}}{{files.noscript_css}}"> {#- -#}
- </noscript> {#- -#}
- {%- if layout.css_file_extension.is_some() -%}
- <link rel="stylesheet" {# -#}
- href="{{static_root_path|safe}}theme{{page.resource_suffix}}.css"> {#- -#}
- {%- endif -%}
- {%- if !layout.favicon.is_empty() -%}
- <link rel="icon" href="{{layout.favicon}}"> {#- -#}
- {%- else -%}
- <link rel="alternate icon" type="image/png" {# -#}
- href="{{static_root_path|safe}}{{files.rust_favicon_png_16}}"> {#- -#}
- <link rel="alternate icon" type="image/png" {# -#}
- href="{{static_root_path|safe}}{{files.rust_favicon_png_32}}"> {#- -#}
- <link rel="icon" type="image/svg+xml" {# -#}
- href="{{static_root_path|safe}}{{files.rust_favicon_svg}}"> {#- -#}
- {%- endif -%}
- {{- layout.external_html.in_header|safe -}}
-</head> {#- -#}
-<body class="rustdoc {{page.css_class}}"> {#- -#}
- <!--[if lte IE 11]> {#- -#}
- <div class="warning"> {#- -#}
- This old browser is unsupported and will most likely display funky things. {#- -#}
- </div> {#- -#}
- <![endif]--> {#- -#}
- {{- layout.external_html.before_content|safe -}}
- {%- if page.css_class != "source" -%}
- <nav class="mobile-topbar"> {#- -#}
- <button class="sidebar-menu-toggle">&#9776;</button> {#- -#}
- <a class="logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {#- -#}
- {%- if !layout.logo.is_empty() -%}
- <img src="{{layout.logo}}" alt="logo"> {#- -#}
- {%- else -%}
- <img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="logo"> {#- -#}
- {%- endif -%}
- </a> {#- -#}
- <h2></h2> {#- -#}
- </nav> {#- -#}
- {%- endif -%}
- <nav class="sidebar"> {#- -#}
- {%- if page.css_class != "source" -%}
- <a class="logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {#- -#}
- {%- if !layout.logo.is_empty() %}
- <img src="{{layout.logo}}" alt="logo"> {#- -#}
- {%- else -%}
- <img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="logo"> {#- -#}
- {%- endif -%}
- </a> {#- -#}
- {%- endif -%}
- {{- sidebar|safe -}}
- </nav> {#- -#}
- <main> {#- -#}
- {%- if page.css_class != "source" -%}<div class="width-limiter">{%- endif -%}
- <nav class="sub"> {#- -#}
- {%- if page.css_class == "source" -%}
- <a class="sub-logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {#- -#}
- {%- if !layout.logo.is_empty() %}
- <img src="{{layout.logo}}" alt="logo"> {#- -#}
- {%- else -%}
- <img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="logo"> {#- -#}
- {%- endif -%}
- </a> {#- -#}
- {%- endif -%}
- <form class="search-form"> {#- -#}
- <span></span> {#- This empty span is a hacky fix for Safari - See #93184 -#}
- <input {# -#}
- class="search-input" {# -#}
- name="search" {# -#}
- aria-label="Run search in the documentation" {# -#}
- autocomplete="off" {# -#}
- spellcheck="false" {# -#}
- placeholder="Click or press ‘S’ to search, ‘?’ for more options…" {# -#}
- type="search"> {#- -#}
- <div id="help-button" title="help" tabindex="-1"> {#- -#}
- <a href="{{page.root_path|safe}}help.html">?</a> {#- -#}
- </div> {#- -#}
- <div id="settings-menu" tabindex="-1"> {#- -#}
- <a href="{{page.root_path|safe}}settings.html" title="settings"> {#- -#}
- <img width="22" height="22" alt="Change settings" {# -#}
- src="{{static_root_path|safe}}{{files.wheel_svg}}"> {#- -#}
- </a> {#- -#}
- </div> {#- -#}
- </form> {#- -#}
- </nav> {#- -#}
- <section id="main-content" class="content">{{- content|safe -}}</section> {#- -#}
- {%- if page.css_class != "source" -%}</div>{%- endif -%}
- </main> {#- -#}
- {{- layout.external_html.after_content|safe -}}
- <div id="rustdoc-vars" {# -#}
- data-root-path="{{page.root_path|safe}}" {# -#}
- data-static-root-path="{{static_root_path|safe}}" {# -#}
- data-current-crate="{{layout.krate}}" {# -#}
- data-themes="{{themes|join(",") }}" {# -#}
- data-resource-suffix="{{page.resource_suffix}}" {# -#}
- data-rustdoc-version="{{rustdoc_version}}" {# -#}
- data-search-js="{{files.search_js}}" {# -#}
- data-settings-js="{{files.settings_js}}" {# -#}
- data-settings-css="{{files.settings_css}}" {# -#}
- > {#- -#}
- </div> {#- -#}
-</body> {#- -#}
-</html> {#- -#}
+ {% endfor %}
+ ></script> {# #}
+ {% endif %}
+ <div id="rustdoc-vars" {#+ #}
+ data-root-path="{{page.root_path|safe}}" {#+ #}
+ data-static-root-path="{{static_root_path|safe}}" {#+ #}
+ data-current-crate="{{layout.krate}}" {#+ #}
+ data-themes="{{themes|join(",") }}" {#+ #}
+ data-resource-suffix="{{page.resource_suffix}}" {#+ #}
+ data-rustdoc-version="{{rustdoc_version}}" {#+ #}
+ data-search-js="{{files.search_js}}" {#+ #}
+ data-settings-js="{{files.settings_js}}" {#+ #}
+ data-settings-css="{{files.settings_css}}" {#+ #}
+ data-theme-light-css="{{files.theme_light_css}}" {#+ #}
+ data-theme-dark-css="{{files.theme_dark_css}}" {#+ #}
+ data-theme-ayu-css="{{files.theme_ayu_css}}" {#+ #}
+ > {# #}
+ </div> {# #}
+ <script src="{{static_root_path|safe}}{{files.storage_js}}"></script> {# #}
+ {% if page.css_class.contains("crate") %}
+ <script defer src="{{page.root_path|safe}}crates{{page.resource_suffix}}.js"></script> {# #}
+ {% else if page.css_class == "source" %}
+ <script defer src="{{static_root_path|safe}}{{files.source_script_js}}"></script> {# #}
+ <script defer src="{{page.root_path|safe}}source-files{{page.resource_suffix}}.js"></script> {# #}
+ {% else if !page.css_class.contains("mod") %}
+ <script defer src="sidebar-items{{page.resource_suffix}}.js"></script> {# #}
+ {% endif %}
+ <script defer src="{{static_root_path|safe}}{{files.main_js}}"></script> {# #}
+ {% if layout.scrape_examples_extension %}
+ <script defer src="{{static_root_path|safe}}{{files.scrape_examples_js}}"></script> {# #}
+ {% endif %}
+ <noscript> {# #}
+ <link rel="stylesheet" {#+ #}
+ media="(prefers-color-scheme:light)" {#+ #}
+ href="{{static_root_path|safe}}{{files.theme_light_css}}"> {# #}
+ <link rel="stylesheet" {#+ #}
+ media="(prefers-color-scheme:dark)" {#+ #}
+ href="{{static_root_path|safe}}{{files.theme_dark_css}}"> {# #}
+ <link rel="stylesheet" {#+ #}
+ href="{{static_root_path|safe}}{{files.noscript_css}}"> {# #}
+ </noscript> {# #}
+ {% if layout.css_file_extension.is_some() %}
+ <link rel="stylesheet" {#+ #}
+ href="{{page.root_path|safe}}theme{{page.resource_suffix}}.css"> {# #}
+ {% endif %}
+ {% if !layout.favicon.is_empty() %}
+ <link rel="icon" href="{{layout.favicon}}"> {# #}
+ {% else %}
+ <link rel="alternate icon" type="image/png" {#+ #}
+ href="{{static_root_path|safe}}{{files.rust_favicon_png_16}}"> {# #}
+ <link rel="alternate icon" type="image/png" {#+ #}
+ href="{{static_root_path|safe}}{{files.rust_favicon_png_32}}"> {# #}
+ <link rel="icon" type="image/svg+xml" {#+ #}
+ href="{{static_root_path|safe}}{{files.rust_favicon_svg}}"> {# #}
+ {% endif %}
+ {{ layout.external_html.in_header|safe }}
+</head> {# #}
+<body class="rustdoc {{+page.css_class}}"> {# #}
+ <!--[if lte IE 11]> {# #}
+ <div class="warning"> {# #}
+ This old browser is unsupported and will most likely display funky things. {# #}
+ </div> {# #}
+ <![endif]--> {# #}
+ {{ layout.external_html.before_content|safe }}
+ {% if page.css_class != "source" %}
+ <nav class="mobile-topbar"> {# #}
+ <button class="sidebar-menu-toggle">&#9776;</button> {# #}
+ <a class="logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {# #}
+ {% if !layout.logo.is_empty() %}
+ <img src="{{layout.logo}}" alt="logo"> {# #}
+ {% else %}
+ <img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="logo"> {# #}
+ {% endif %}
+ </a> {# #}
+ <h2></h2> {# #}
+ </nav> {# #}
+ {% endif %}
+ <nav class="sidebar"> {# #}
+ {% if page.css_class != "source" %}
+ <a class="logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {# #}
+ {% if !layout.logo.is_empty() %}
+ <img src="{{layout.logo}}" alt="logo"> {# #}
+ {% else %}
+ <img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="logo"> {# #}
+ {% endif %}
+ </a> {# #}
+ {% endif %}
+ {{ sidebar|safe }}
+ </nav> {# #}
+ <main> {# #}
+ {% if page.css_class != "source" %}<div class="width-limiter">{% endif %}
+ <nav class="sub"> {# #}
+ {% if page.css_class == "source" %}
+ <a class="sub-logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {# #}
+ {% if !layout.logo.is_empty() %}
+ <img src="{{layout.logo}}" alt="logo"> {# #}
+ {% else %}
+ <img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="logo"> {# #}
+ {% endif %}
+ </a> {# #}
+ {% endif %}
+ <form class="search-form"> {# #}
+ <span></span> {# This empty span is a hacky fix for Safari - See #93184 #}
+ <input {#+ #}
+ class="search-input" {#+ #}
+ name="search" {#+ #}
+ aria-label="Run search in the documentation" {#+ #}
+ autocomplete="off" {#+ #}
+ spellcheck="false" {#+ #}
+ placeholder="Click or press ‘S’ to search, ‘?’ for more options…" {#+ #}
+ type="search"> {# #}
+ <div id="help-button" title="help" tabindex="-1"> {# #}
+ <a href="{{page.root_path|safe}}help.html">?</a> {# #}
+ </div> {# #}
+ <div id="settings-menu" tabindex="-1"> {# #}
+ <a href="{{page.root_path|safe}}settings.html" title="settings"> {# #}
+ <img width="22" height="22" alt="Change settings" {#+ #}
+ src="{{static_root_path|safe}}{{files.wheel_svg}}"> {# #}
+ </a> {# #}
+ </div> {# #}
+ </form> {# #}
+ </nav> {# #}
+ <section id="main-content" class="content">{{ content|safe }}</section> {# #}
+ {% if page.css_class != "source" %}</div>{% endif %}
+ </main> {# #}
+ {{ layout.external_html.after_content|safe }}
+</body> {# #}
+</html> {# #}
diff --git a/src/librustdoc/html/templates/print_item.html b/src/librustdoc/html/templates/print_item.html
index 3a1867b7f..edabac9a0 100644
--- a/src/librustdoc/html/templates/print_item.html
+++ b/src/librustdoc/html/templates/print_item.html
@@ -1,28 +1,28 @@
-<div class="main-heading"> {#- -#}
- <h1> {#- -#}
- {{-typ-}}
- {#- The breadcrumbs of the item path, like std::string -#}
- {%- for component in path_components -%}
- <a href="{{component.path|safe}}index.html">{{component.name}}</a>::<wbr>
- {%- endfor -%}
- <a class="{{item_type}}" href="#">{{name}}</a> {#- -#}
- <button id="copy-path" title="Copy item path to clipboard"> {#- -#}
- <img src="{{static_root_path|safe}}{{clipboard_svg}}" {# -#}
- width="19" height="18" {# -#}
- alt="Copy item path"> {#- -#}
- </button> {#- -#}
- </h1> {#- -#}
- <span class="out-of-band"> {#- -#}
+<div class="main-heading"> {# #}
+ <h1> {# #}
+ {{typ}}
+ {# The breadcrumbs of the item path, like std::string #}
+ {% for component in path_components %}
+ <a href="{{component.path|safe}}index.html">{{component.name}}</a>::<wbr>
+ {% endfor %}
+ <a class="{{item_type}}" href="#">{{name}}</a> {# #}
+ <button id="copy-path" title="Copy item path to clipboard"> {# #}
+ <img src="{{static_root_path|safe}}{{clipboard_svg}}" {#+ #}
+ width="19" height="18" {#+ #}
+ alt="Copy item path"> {# #}
+ </button> {# #}
+ </h1> {# #}
+ <span class="out-of-band"> {# #}
{% if !stability_since_raw.is_empty() %}
- {{- stability_since_raw|safe }} · {# -#}
+ {{ stability_since_raw|safe +}} · {#+ #}
{% endif %}
- {%- match src_href -%}
- {%- when Some with (href) -%}
- <a class="srclink" href="{{href|safe}}">source</a> · {# -#}
- {%- else -%}
- {%- endmatch -%}
- <button id="toggle-all-docs" title="collapse all docs"> {#- -#}
- [<span>&#x2212;</span>] {#- -#}
- </button> {#- -#}
- </span> {#- -#}
-</div> {#- -#}
+ {% match src_href %}
+ {% when Some with (href) %}
+ <a class="srclink" href="{{href|safe}}">source</a> · {#+ #}
+ {% else %}
+ {% endmatch %}
+ <button id="toggle-all-docs" title="collapse all docs"> {# #}
+ [<span>&#x2212;</span>] {# #}
+ </button> {# #}
+ </span> {# #}
+</div> {# #}
diff --git a/src/librustdoc/html/templates/short_item_info.html b/src/librustdoc/html/templates/short_item_info.html
new file mode 100644
index 000000000..75d155e91
--- /dev/null
+++ b/src/librustdoc/html/templates/short_item_info.html
@@ -0,0 +1,23 @@
+{% match self %}
+ {% when Self::Deprecation with { message } %}
+ <div class="stab deprecated"> {# #}
+ <span class="emoji">👎</span> {# #}
+ <span>{{message|safe}}</span> {# #}
+ </div> {# #}
+ {% when Self::Unstable with { feature, tracking } %}
+ <div class="stab unstable"> {# #}
+ <span class="emoji">🔬</span> {# #}
+ <span> {# #}
+ This is a nightly-only experimental API. ({# #}
+ <code>{{feature}}</code> {# #}
+ {% match tracking %}
+ {% when Some with ((url, num)) %}
+ &nbsp;<a href="{{url}}{{num}}">#{{num}}</a> {# #}
+ {% when None %}
+ {% endmatch %}
+ ) {# #}
+ </span> {# #}
+ </div> {# #}
+ {% when Self::Portability with { message } %}
+ <div class="stab portability">{{message|safe}}</div> {# #}
+{% endmatch %}
diff --git a/src/librustdoc/html/templates/sidebar.html b/src/librustdoc/html/templates/sidebar.html
new file mode 100644
index 000000000..01d476ad2
--- /dev/null
+++ b/src/librustdoc/html/templates/sidebar.html
@@ -0,0 +1,37 @@
+{% if !title.is_empty() %}
+ <h2 class="location"> {# #}
+ <a href="#">{{title_prefix}}{{title}}</a> {# #}
+ </h2>
+{% endif %}
+<div class="sidebar-elems">
+ {% if is_crate %}
+ <ul class="block">
+ {% if !version.is_empty() %}
+ <li class="version">Version {{+ version}}</li>
+ {% endif %}
+ <li><a id="all-types" href="all.html">All Items</a></li> {# #}
+ </ul>
+ {% endif %}
+
+ {% if self.should_render_blocks() %}
+ <section>
+ {% for block in blocks %}
+ {% if block.should_render() %}
+ {% if !block.heading.name.is_empty() %}
+ <h3><a href="#{{block.heading.href|safe}}">{{block.heading.name}}</a></h3>
+ {% endif %}
+ {% if !block.links.is_empty() %}
+ <ul class="block">
+ {% for link in block.links %}
+ <li><a href="#{{link.href|safe}}">{{link.name}}</a></li>
+ {% endfor %}
+ </ul>
+ {% endif %}
+ {% endif %}
+ {% endfor %}
+ </section>
+ {% endif %}
+ {% if !path.is_empty() %}
+ <h2><a href="index.html">In {{+ path}}</a></h2>
+ {% endif %}
+</div>
diff --git a/src/librustdoc/html/templates/source.html b/src/librustdoc/html/templates/source.html
new file mode 100644
index 000000000..42d01277d
--- /dev/null
+++ b/src/librustdoc/html/templates/source.html
@@ -0,0 +1,21 @@
+<div class="example-wrap"> {# #}
+ {# https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#data-nosnippet-attr
+ Do not show "1 2 3 4 5 ..." in web search results. #}
+ <div data-nosnippet><pre class="src-line-numbers">
+ {% for line in lines.clone() %}
+ {% if embedded %}
+ <span>{{line|safe}}</span>
+ {%~ else %}
+ <a href="#{{line|safe}}" id="{{line|safe}}">{{line|safe}}</a>
+ {%~ endif %}
+ {% endfor %}
+ </pre></div> {# #}
+ <pre class="rust"> {# #}
+ <code>
+ {% if needs_expansion %}
+ <button class="expand">&varr;</button>
+ {% endif %}
+ {{code_html|safe}}
+ </code> {# #}
+ </pre> {# #}
+</div>
diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs
index 18c45fd69..cd6509607 100644
--- a/src/librustdoc/json/conversions.rs
+++ b/src/librustdoc/json/conversions.rs
@@ -4,7 +4,6 @@
#![allow(rustc::default_hash_types)]
-use std::convert::From;
use std::fmt;
use rustc_ast::ast;
@@ -249,9 +248,7 @@ pub(crate) fn id_from_item_inner(
// instead, we directly get the primitive symbol and convert it to u32 to
// generate the ID.
if matches!(tcx.def_kind(def_id), DefKind::Mod) &&
- let Some(prim) = tcx.get_attrs(*def_id, sym::doc)
- .flat_map(|attr| attr.meta_item_list().unwrap_or_default())
- .filter(|attr| attr.has_name(sym::primitive))
+ let Some(prim) = tcx.get_attrs(*def_id, sym::rustc_doc_primitive)
.find_map(|attr| attr.value_str()) {
format!(":{}", prim.as_u32())
} else {
@@ -456,7 +453,7 @@ impl FromWithTcx<clean::GenericParamDefKind> for GenericParamDefKind {
default: default.map(|x| (*x).into_tcx(tcx)),
synthetic,
},
- Const { did: _, ty, default } => GenericParamDefKind::Const {
+ Const { ty, default } => GenericParamDefKind::Const {
type_: (*ty).into_tcx(tcx),
default: default.map(|x| *x),
},
@@ -473,9 +470,35 @@ impl FromWithTcx<clean::WherePredicate> for WherePredicate {
bounds: bounds.into_tcx(tcx),
generic_params: bound_params
.into_iter()
- .map(|x| GenericParamDef {
- name: x.0.to_string(),
- kind: GenericParamDefKind::Lifetime { outlives: vec![] },
+ .map(|x| {
+ let name = x.name.to_string();
+ let kind = match x.kind {
+ clean::GenericParamDefKind::Lifetime { outlives } => {
+ GenericParamDefKind::Lifetime {
+ outlives: outlives.iter().map(|lt| lt.0.to_string()).collect(),
+ }
+ }
+ clean::GenericParamDefKind::Type {
+ did: _,
+ bounds,
+ default,
+ synthetic,
+ } => GenericParamDefKind::Type {
+ bounds: bounds
+ .into_iter()
+ .map(|bound| bound.into_tcx(tcx))
+ .collect(),
+ default: default.map(|ty| (*ty).into_tcx(tcx)),
+ synthetic,
+ },
+ clean::GenericParamDefKind::Const { ty, default } => {
+ GenericParamDefKind::Const {
+ type_: (*ty).into_tcx(tcx),
+ default: default.map(|d| *d),
+ }
+ }
+ };
+ GenericParamDef { name, kind }
})
.collect(),
},
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
index 08bceb59c..d6da6e099 100644
--- a/src/librustdoc/json/mod.rs
+++ b/src/librustdoc/json/mod.rs
@@ -78,7 +78,7 @@ impl<'tcx> JsonRenderer<'tcx> {
// HACK(hkmatsumoto): For impls of primitive types, we index them
// regardless of whether they're local. This is because users can
// document primitive items in an arbitrary crate by using
- // `doc(primitive)`.
+ // `rustc_doc_primitive`.
let mut is_primitive_impl = false;
if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind &&
impl_.trait_.is_none() &&
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index 4fcf08736..4a88dc525 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -7,14 +7,14 @@
#![feature(assert_matches)]
#![feature(box_patterns)]
#![feature(drain_filter)]
-#![feature(is_terminal)]
#![feature(let_chains)]
#![feature(test)]
#![feature(never_type)]
-#![feature(once_cell)]
+#![feature(lazy_cell)]
#![feature(type_ascription)]
#![feature(iter_intersperse)]
#![feature(type_alias_impl_trait)]
+#![cfg_attr(not(bootstrap), feature(impl_trait_in_assoc_type))]
#![recursion_limit = "256"]
#![warn(rustc::internal)]
#![allow(clippy::collapsible_if, clippy::collapsible_else_if)]
@@ -69,7 +69,6 @@ extern crate test;
#[cfg(feature = "jemalloc")]
extern crate jemalloc_sys;
-use std::default::Default;
use std::env::{self, VarError};
use std::io::{self, IsTerminal};
use std::process;
@@ -203,7 +202,7 @@ fn init_logging() {
.with_verbose_exit(true)
.with_verbose_entry(true)
.with_indent_amount(2);
- #[cfg(parallel_compiler)]
+ #[cfg(all(parallel_compiler, debug_assertions))]
let layer = layer.with_thread_ids(true).with_thread_names(true);
use tracing_subscriber::layer::SubscriberExt;
@@ -284,7 +283,7 @@ fn opts() -> Vec<RustcOptGroup> {
stable("test-args", |o| {
o.optmulti("", "test-args", "arguments to pass to the test runner", "ARGS")
}),
- unstable("test-run-directory", |o| {
+ stable("test-run-directory", |o| {
o.optopt(
"",
"test-run-directory",
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
index 0b22f943d..be5286b24 100644
--- a/src/librustdoc/passes/calculate_doc_coverage.rs
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -8,7 +8,6 @@ use crate::visit::DocVisitor;
use rustc_hir as hir;
use rustc_lint::builtin::MISSING_DOCS;
use rustc_middle::lint::LintLevelSource;
-use rustc_middle::ty::DefIdTree;
use rustc_session::lint;
use rustc_span::FileName;
use serde::Serialize;
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index a39d57d42..6b13e6c95 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -14,7 +14,6 @@ use crate::visit::DocVisitor;
use crate::visit_ast::inherits_doc_hidden;
use rustc_hir as hir;
use rustc_middle::lint::LintLevelSource;
-use rustc_middle::ty::DefIdTree;
use rustc_session::lint;
pub(crate) const CHECK_DOC_TEST_VISIBILITY: Pass = Pass {
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index cbfc58138..2cd9c8a87 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -13,10 +13,10 @@ use rustc_hir::def::Namespace::*;
use rustc_hir::def::{DefKind, Namespace, PerNS};
use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
use rustc_hir::Mutability;
-use rustc_middle::ty::{DefIdTree, Ty, TyCtxt};
+use rustc_middle::ty::{fast_reject::TreatProjections, Ty, TyCtxt};
use rustc_middle::{bug, ty};
-use rustc_resolve::rustdoc::MalformedGenerics;
-use rustc_resolve::rustdoc::{prepare_to_doc_link_resolution, strip_generics_from_path};
+use rustc_resolve::rustdoc::{has_primitive_or_keyword_docs, prepare_to_doc_link_resolution};
+use rustc_resolve::rustdoc::{strip_generics_from_path, MalformedGenerics};
use rustc_session::lint::Lint;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{sym, Ident, Symbol};
@@ -28,7 +28,7 @@ use std::mem;
use std::ops::Range;
use crate::clean::{self, utils::find_nearest_parent_module};
-use crate::clean::{Crate, Item, ItemId, ItemLink, PrimitiveType};
+use crate::clean::{Crate, Item, ItemLink, PrimitiveType};
use crate::core::DocContext;
use crate::html::markdown::{markdown_links, MarkdownLink};
use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
@@ -42,13 +42,23 @@ pub(crate) const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
};
fn collect_intra_doc_links(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
- let mut collector =
- LinkCollector { cx, mod_ids: Vec::new(), visited_links: FxHashMap::default() };
+ let mut collector = LinkCollector { cx, visited_links: FxHashMap::default() };
collector.visit_crate(&krate);
krate
}
-#[derive(Copy, Clone, Debug, Hash)]
+fn filter_assoc_items_by_name_and_namespace<'a>(
+ tcx: TyCtxt<'a>,
+ assoc_items_of: DefId,
+ ident: Ident,
+ ns: Namespace,
+) -> impl Iterator<Item = &ty::AssocItem> + 'a {
+ tcx.associated_items(assoc_items_of).filter_by_name_unhygienic(ident.name).filter(move |item| {
+ item.kind.namespace() == ns && tcx.hygienic_eq(ident, item.ident(tcx), assoc_items_of)
+ })
+}
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq)]
enum Res {
Def(DefKind, DefId),
Primitive(PrimitiveType),
@@ -60,7 +70,7 @@ impl Res {
fn descr(self) -> &'static str {
match self {
Res::Def(kind, id) => ResolveRes::Def(kind, id).descr(),
- Res::Primitive(_) => "builtin type",
+ Res::Primitive(_) => "primitive type",
}
}
@@ -149,7 +159,7 @@ impl TryFrom<ResolveRes> for Res {
#[derive(Debug)]
struct UnresolvedPath<'a> {
/// Item on which the link is resolved, used for resolving `Self`.
- item_id: ItemId,
+ item_id: DefId,
/// The scope the link was resolved in.
module_id: DefId,
/// If part of the link resolved, this has the `Res`.
@@ -225,7 +235,7 @@ impl UrlFragment {
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
struct ResolutionInfo {
- item_id: ItemId,
+ item_id: DefId,
module_id: DefId,
dis: Option<Disambiguator>,
path_str: Box<str>,
@@ -242,11 +252,6 @@ struct DiagnosticInfo<'a> {
struct LinkCollector<'a, 'tcx> {
cx: &'a mut DocContext<'tcx>,
- /// A stack of modules used to decide what scope to resolve in.
- ///
- /// The last module will be used if the parent scope of the current item is
- /// unknown.
- mod_ids: Vec<DefId>,
/// Cache the resolved links so we can avoid resolving (and emitting errors for) the same link.
/// The link will be `None` if it could not be resolved (i.e. the error was cached).
visited_links: FxHashMap<ResolutionInfo, Option<(Res, Option<UrlFragment>)>>,
@@ -262,7 +267,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
fn variant_field<'path>(
&self,
path_str: &'path str,
- item_id: ItemId,
+ item_id: DefId,
module_id: DefId,
) -> Result<(Res, DefId), UnresolvedPath<'path>> {
let tcx = self.cx.tcx;
@@ -286,7 +291,6 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
split.next().map(|f| Symbol::intern(f)).ok_or_else(no_res)?;
let path = split
.next()
- .map(|f| f.to_owned())
// If there's no third component, we saw `[a::b]` before and it failed to resolve.
// So there's no partial res.
.ok_or_else(no_res)?;
@@ -324,45 +328,50 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
prim_ty: PrimitiveType,
ns: Namespace,
item_name: Symbol,
- ) -> Option<(Res, DefId)> {
+ ) -> Vec<(Res, DefId)> {
let tcx = self.cx.tcx;
- prim_ty.impls(tcx).find_map(|impl_| {
- tcx.associated_items(impl_)
- .find_by_name_and_namespace(tcx, Ident::with_dummy_span(item_name), ns, impl_)
+ prim_ty
+ .impls(tcx)
+ .flat_map(|impl_| {
+ filter_assoc_items_by_name_and_namespace(
+ tcx,
+ impl_,
+ Ident::with_dummy_span(item_name),
+ ns,
+ )
.map(|item| (Res::Primitive(prim_ty), item.def_id))
- })
+ })
+ .collect::<Vec<_>>()
}
- fn resolve_self_ty(&self, path_str: &str, ns: Namespace, item_id: ItemId) -> Option<Res> {
+ fn resolve_self_ty(&self, path_str: &str, ns: Namespace, item_id: DefId) -> Option<Res> {
if ns != TypeNS || path_str != "Self" {
return None;
}
let tcx = self.cx.tcx;
- item_id
- .as_def_id()
- .map(|def_id| match tcx.def_kind(def_id) {
- def_kind @ (DefKind::AssocFn
- | DefKind::AssocConst
- | DefKind::AssocTy
- | DefKind::Variant
- | DefKind::Field) => {
- let parent_def_id = tcx.parent(def_id);
- if def_kind == DefKind::Field && tcx.def_kind(parent_def_id) == DefKind::Variant
- {
- tcx.parent(parent_def_id)
- } else {
- parent_def_id
- }
+ let self_id = match tcx.def_kind(item_id) {
+ def_kind @ (DefKind::AssocFn
+ | DefKind::AssocConst
+ | DefKind::AssocTy
+ | DefKind::Variant
+ | DefKind::Field) => {
+ let parent_def_id = tcx.parent(item_id);
+ if def_kind == DefKind::Field && tcx.def_kind(parent_def_id) == DefKind::Variant {
+ tcx.parent(parent_def_id)
+ } else {
+ parent_def_id
}
- _ => def_id,
- })
- .and_then(|self_id| match tcx.def_kind(self_id) {
- DefKind::Impl { .. } => self.def_id_to_res(self_id),
- DefKind::Use => None,
- def_kind => Some(Res::Def(def_kind, self_id)),
- })
+ }
+ _ => item_id,
+ };
+
+ match tcx.def_kind(self_id) {
+ DefKind::Impl { .. } => self.def_id_to_res(self_id),
+ DefKind::Use => None,
+ def_kind => Some(Res::Def(def_kind, self_id)),
+ }
}
/// Convenience wrapper around `doc_link_resolutions`.
@@ -374,7 +383,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
&self,
path_str: &str,
ns: Namespace,
- item_id: ItemId,
+ item_id: DefId,
module_id: DefId,
) -> Option<Res> {
if let res @ Some(..) = self.resolve_self_ty(path_str, ns, item_id) {
@@ -401,16 +410,18 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
&mut self,
path_str: &'path str,
ns: Namespace,
- item_id: ItemId,
+ item_id: DefId,
module_id: DefId,
- ) -> Result<(Res, Option<DefId>), UnresolvedPath<'path>> {
+ ) -> Result<Vec<(Res, Option<DefId>)>, UnresolvedPath<'path>> {
if let Some(res) = self.resolve_path(path_str, ns, item_id, module_id) {
return Ok(match res {
Res::Def(
DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Variant,
def_id,
- ) => (Res::from_def_id(self.cx.tcx, self.cx.tcx.parent(def_id)), Some(def_id)),
- _ => (res, None),
+ ) => {
+ vec![(Res::from_def_id(self.cx.tcx, self.cx.tcx.parent(def_id)), Some(def_id))]
+ }
+ _ => vec![(res, None)],
});
} else if ns == MacroNS {
return Err(UnresolvedPath {
@@ -429,7 +440,6 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
let item_name = Symbol::intern(item_str);
let path_root = split
.next()
- .map(|f| f.to_owned())
// If there's no `::`, it's not an associated item.
// So we can be sure that `rustc_resolve` was accurate when it said it wasn't resolved.
.ok_or_else(|| {
@@ -443,17 +453,24 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
})?;
// FIXME(#83862): this arbitrarily gives precedence to primitives over modules to support
- // links to primitives when `#[doc(primitive)]` is present. It should give an ambiguity
- // error instead and special case *only* modules with `#[doc(primitive)]`, not all
+ // links to primitives when `#[rustc_doc_primitive]` is present. It should give an ambiguity
+ // error instead and special case *only* modules with `#[rustc_doc_primitive]`, not all
// primitives.
- resolve_primitive(&path_root, TypeNS)
+ match resolve_primitive(&path_root, TypeNS)
.or_else(|| self.resolve_path(&path_root, TypeNS, item_id, module_id))
.and_then(|ty_res| {
- self.resolve_associated_item(ty_res, item_name, ns, module_id).map(Ok)
- })
- .unwrap_or_else(|| {
+ let candidates = self
+ .resolve_associated_item(ty_res, item_name, ns, module_id)
+ .into_iter()
+ .map(|(res, def_id)| (res, Some(def_id)))
+ .collect::<Vec<_>>();
+ if !candidates.is_empty() { Some(candidates) } else { None }
+ }) {
+ Some(r) => Ok(r),
+ None => {
if ns == Namespace::ValueNS {
self.variant_field(path_str, item_id, module_id)
+ .map(|(res, def_id)| vec![(res, Some(def_id))])
} else {
Err(UnresolvedPath {
item_id,
@@ -462,8 +479,8 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
unresolved: path_root.into(),
})
}
- })
- .map(|(res, def_id)| (res, Some(def_id)))
+ }
+ }
}
/// Convert a DefId to a Res, where possible.
@@ -545,24 +562,31 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
item_name: Symbol,
ns: Namespace,
module_id: DefId,
- ) -> Option<(Res, DefId)> {
+ ) -> Vec<(Res, DefId)> {
let tcx = self.cx.tcx;
match root_res {
Res::Primitive(prim) => {
- self.resolve_primitive_associated_item(prim, ns, item_name).or_else(|| {
+ let items = self.resolve_primitive_associated_item(prim, ns, item_name);
+ if !items.is_empty() {
+ items
+ // Inherent associated items take precedence over items that come from trait impls.
+ } else {
self.primitive_type_to_ty(prim)
- .and_then(|ty| {
+ .map(|ty| {
resolve_associated_trait_item(ty, module_id, item_name, ns, self.cx)
+ .iter()
+ .map(|item| (root_res, item.def_id))
+ .collect::<Vec<_>>()
})
- .map(|item| (root_res, item.def_id))
- })
+ .unwrap_or(Vec::new())
+ }
}
Res::Def(DefKind::TyAlias, did) => {
// Resolve the link on the type the alias points to.
// FIXME: if the associated item is defined directly on the type alias,
// it will show up on its documentation page, we should link there instead.
- let res = self.def_id_to_res(did)?;
+ let Some(res) = self.def_id_to_res(did) else { return Vec::new() };
self.resolve_associated_item(res, item_name, ns, module_id)
}
Res::Def(
@@ -576,7 +600,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
ty::Adt(adt_def, _) => {
for variant in adt_def.variants() {
if variant.name == item_name {
- return Some((root_res, variant.def_id));
+ return vec![(root_res, variant.def_id)];
}
}
}
@@ -585,43 +609,46 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
}
// Checks if item_name belongs to `impl SomeItem`
- let assoc_item = tcx
+ let mut assoc_items: Vec<_> = tcx
.inherent_impls(did)
.iter()
.flat_map(|&imp| {
- tcx.associated_items(imp).find_by_name_and_namespace(
+ filter_assoc_items_by_name_and_namespace(
tcx,
+ imp,
Ident::with_dummy_span(item_name),
ns,
- imp,
)
})
- .copied()
- // There should only ever be one associated item that matches from any inherent impl
- .next()
+ .map(|item| (root_res, item.def_id))
+ .collect();
+
+ if assoc_items.is_empty() {
// Check if item_name belongs to `impl SomeTrait for SomeItem`
// FIXME(#74563): This gives precedence to `impl SomeItem`:
// Although having both would be ambiguous, use impl version for compatibility's sake.
// To handle that properly resolve() would have to support
// something like [`ambi_fn`](<SomeStruct as SomeTrait>::ambi_fn)
- .or_else(|| {
- resolve_associated_trait_item(
- tcx.type_of(did).subst_identity(),
- module_id,
- item_name,
- ns,
- self.cx,
- )
- });
+ assoc_items = resolve_associated_trait_item(
+ tcx.type_of(did).subst_identity(),
+ module_id,
+ item_name,
+ ns,
+ self.cx,
+ )
+ .into_iter()
+ .map(|item| (root_res, item.def_id))
+ .collect::<Vec<_>>();
+ }
- debug!("got associated item {:?}", assoc_item);
+ debug!("got associated item {:?}", assoc_items);
- if let Some(item) = assoc_item {
- return Some((root_res, item.def_id));
+ if !assoc_items.is_empty() {
+ return assoc_items;
}
if ns != Namespace::ValueNS {
- return None;
+ return Vec::new();
}
debug!("looking for fields named {} for {:?}", item_name, did);
// FIXME: this doesn't really belong in `associated_item` (maybe `variant_field` is better?)
@@ -641,20 +668,27 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// field syntax) and are handled by the compiler's resolver.
let def = match tcx.type_of(did).subst_identity().kind() {
ty::Adt(def, _) if !def.is_enum() => def,
- _ => return None,
+ _ => return Vec::new(),
};
- let field =
- def.non_enum_variant().fields.iter().find(|item| item.name == item_name)?;
- Some((root_res, field.did))
+ def.non_enum_variant()
+ .fields
+ .iter()
+ .filter(|field| field.name == item_name)
+ .map(|field| (root_res, field.did))
+ .collect::<Vec<_>>()
}
- Res::Def(DefKind::Trait, did) => tcx
- .associated_items(did)
- .find_by_name_and_namespace(tcx, Ident::with_dummy_span(item_name), ns, did)
- .map(|item| {
- let res = Res::Def(item.kind.as_def_kind(), item.def_id);
- (res, item.def_id)
- }),
- _ => None,
+ Res::Def(DefKind::Trait, did) => filter_assoc_items_by_name_and_namespace(
+ tcx,
+ did,
+ Ident::with_dummy_span(item_name),
+ ns,
+ )
+ .map(|item| {
+ let res = Res::Def(item.kind.as_def_kind(), item.def_id);
+ (res, item.def_id)
+ })
+ .collect::<Vec<_>>(),
+ _ => Vec::new(),
}
}
}
@@ -674,7 +708,7 @@ fn resolve_associated_trait_item<'a>(
item_name: Symbol,
ns: Namespace,
cx: &mut DocContext<'a>,
-) -> Option<ty::AssocItem> {
+) -> Vec<ty::AssocItem> {
// FIXME: this should also consider blanket impls (`impl<T> X for T`). Unfortunately
// `get_auto_trait_and_blanket_impls` is broken because the caching behavior is wrong. In the
// meantime, just don't look for these blanket impls.
@@ -682,19 +716,26 @@ fn resolve_associated_trait_item<'a>(
// Next consider explicit impls: `impl MyTrait for MyType`
// Give precedence to inherent impls.
let traits = trait_impls_for(cx, ty, module);
+ let tcx = cx.tcx;
debug!("considering traits {:?}", traits);
- let mut candidates = traits.iter().filter_map(|&(impl_, trait_)| {
- cx.tcx
- .associated_items(trait_)
- .find_by_name_and_namespace(cx.tcx, Ident::with_dummy_span(item_name), ns, trait_)
- .map(|trait_assoc| {
- trait_assoc_to_impl_assoc_item(cx.tcx, impl_, trait_assoc.def_id)
+ let candidates = traits
+ .iter()
+ .flat_map(|&(impl_, trait_)| {
+ filter_assoc_items_by_name_and_namespace(
+ cx.tcx,
+ trait_,
+ Ident::with_dummy_span(item_name),
+ ns,
+ )
+ .map(move |trait_assoc| {
+ trait_assoc_to_impl_assoc_item(tcx, impl_, trait_assoc.def_id)
.unwrap_or(*trait_assoc)
})
- });
+ })
+ .collect::<Vec<_>>();
// FIXME(#74563): warn about ambiguity
- debug!("the candidates were {:?}", candidates.clone().collect::<Vec<_>>());
- candidates.next()
+ debug!("the candidates were {:?}", candidates);
+ candidates
}
/// Find the associated item in the impl `impl_id` that corresponds to the
@@ -735,7 +776,7 @@ fn trait_impls_for<'a>(
trace!("considering explicit impl for trait {:?}", trait_);
// Look at each trait implementation to see if it's an impl for `did`
- tcx.find_map_relevant_impl(trait_, ty, |impl_| {
+ tcx.find_map_relevant_impl(trait_, ty, TreatProjections::ForLookup, |impl_| {
let trait_ref = tcx.impl_trait_ref(impl_).expect("this is not an inherent impl");
// Check if these are the same type.
let impl_type = trait_ref.skip_binder().self_ty();
@@ -768,61 +809,21 @@ fn trait_impls_for<'a>(
/// Check for resolve collisions between a trait and its derive.
///
/// These are common and we should just resolve to the trait in that case.
-fn is_derive_trait_collision<T>(ns: &PerNS<Result<(Res, T), ResolutionFailure<'_>>>) -> bool {
- matches!(
- *ns,
- PerNS {
- type_ns: Ok((Res::Def(DefKind::Trait, _), _)),
- macro_ns: Ok((Res::Def(DefKind::Macro(MacroKind::Derive), _), _)),
- ..
- }
- )
+fn is_derive_trait_collision<T>(ns: &PerNS<Result<Vec<(Res, T)>, ResolutionFailure<'_>>>) -> bool {
+ if let (Ok(type_ns), Ok(macro_ns)) = (&ns.type_ns, &ns.macro_ns) {
+ type_ns.iter().any(|(res, _)| matches!(res, Res::Def(DefKind::Trait, _)))
+ && macro_ns
+ .iter()
+ .any(|(res, _)| matches!(res, Res::Def(DefKind::Macro(MacroKind::Derive), _)))
+ } else {
+ false
+ }
}
impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
fn visit_item(&mut self, item: &Item) {
- let parent_node =
- item.item_id.as_def_id().and_then(|did| find_nearest_parent_module(self.cx.tcx, did));
- if parent_node.is_some() {
- trace!("got parent node for {:?} {:?}, id {:?}", item.type_(), item.name, item.item_id);
- }
-
- let inner_docs = item.inner_docs(self.cx.tcx);
-
- if item.is_mod() && inner_docs {
- self.mod_ids.push(item.item_id.expect_def_id());
- }
-
- // We want to resolve in the lexical scope of the documentation.
- // In the presence of re-exports, this is not the same as the module of the item.
- // Rather than merging all documentation into one, resolve it one attribute at a time
- // so we know which module it came from.
- for (parent_module, doc) in prepare_to_doc_link_resolution(&item.attrs.doc_strings) {
- if !may_have_doc_links(&doc) {
- continue;
- }
- debug!("combined_docs={}", doc);
- // NOTE: if there are links that start in one crate and end in another, this will not resolve them.
- // This is a degenerate case and it's not supported by rustdoc.
- let parent_node = parent_module.or(parent_node);
- for md_link in preprocessed_markdown_links(&doc) {
- let link = self.resolve_link(item, &doc, parent_node, &md_link);
- if let Some(link) = link {
- self.cx.cache.intra_doc_links.entry(item.item_id).or_default().push(link);
- }
- }
- }
-
- if item.is_mod() {
- if !inner_docs {
- self.mod_ids.push(item.item_id.expect_def_id());
- }
-
- self.visit_item_recur(item);
- self.mod_ids.pop();
- } else {
- self.visit_item_recur(item)
- }
+ self.resolve_links(item);
+ self.visit_item_recur(item)
}
}
@@ -948,14 +949,50 @@ fn preprocessed_markdown_links(s: &str) -> Vec<PreprocessedMarkdownLink> {
}
impl LinkCollector<'_, '_> {
+ fn resolve_links(&mut self, item: &Item) {
+ if !self.cx.render_options.document_private
+ && let Some(def_id) = item.item_id.as_def_id()
+ && let Some(def_id) = def_id.as_local()
+ && !self.cx.tcx.effective_visibilities(()).is_exported(def_id)
+ && !has_primitive_or_keyword_docs(&item.attrs.other_attrs) {
+ // Skip link resolution for non-exported items.
+ return;
+ }
+
+ // We want to resolve in the lexical scope of the documentation.
+ // In the presence of re-exports, this is not the same as the module of the item.
+ // Rather than merging all documentation into one, resolve it one attribute at a time
+ // so we know which module it came from.
+ for (item_id, doc) in prepare_to_doc_link_resolution(&item.attrs.doc_strings) {
+ if !may_have_doc_links(&doc) {
+ continue;
+ }
+ debug!("combined_docs={}", doc);
+ // NOTE: if there are links that start in one crate and end in another, this will not resolve them.
+ // This is a degenerate case and it's not supported by rustdoc.
+ let item_id = item_id.unwrap_or_else(|| item.item_id.expect_def_id());
+ let module_id = match self.cx.tcx.def_kind(item_id) {
+ DefKind::Mod if item.inner_docs(self.cx.tcx) => item_id,
+ _ => find_nearest_parent_module(self.cx.tcx, item_id).unwrap(),
+ };
+ for md_link in preprocessed_markdown_links(&doc) {
+ let link = self.resolve_link(item, item_id, module_id, &doc, &md_link);
+ if let Some(link) = link {
+ self.cx.cache.intra_doc_links.entry(item.item_id).or_default().insert(link);
+ }
+ }
+ }
+ }
+
/// This is the entry point for resolving an intra-doc link.
///
/// FIXME(jynelson): this is way too many arguments
fn resolve_link(
&mut self,
item: &Item,
+ item_id: DefId,
+ module_id: DefId,
dox: &str,
- parent_node: Option<DefId>,
link: &PreprocessedMarkdownLink,
) -> Option<ItemLink> {
let PreprocessedMarkdownLink(pp_link, ori_link) = link;
@@ -972,25 +1009,9 @@ impl LinkCollector<'_, '_> {
pp_link.as_ref().map_err(|err| err.report(self.cx, diag_info.clone())).ok()?;
let disambiguator = *disambiguator;
- // In order to correctly resolve intra-doc links we need to
- // pick a base AST node to work from. If the documentation for
- // this module came from an inner comment (//!) then we anchor
- // our name resolution *inside* the module. If, on the other
- // hand it was an outer comment (///) then we anchor the name
- // resolution in the parent module on the basis that the names
- // used are more likely to be intended to be parent names. For
- // this, we set base_node to None for inner comments since
- // we've already pushed this node onto the resolution stack but
- // for outer comments we explicitly try and resolve against the
- // parent_node first.
- let inner_docs = item.inner_docs(self.cx.tcx);
- let base_node =
- if item.is_mod() && inner_docs { self.mod_ids.last().copied() } else { parent_node };
- let module_id = base_node.expect("doc link without parent module");
-
let (mut res, fragment) = self.resolve_with_disambiguator_cached(
ResolutionInfo {
- item_id: item.item_id,
+ item_id,
module_id,
dis: disambiguator,
path_str: path_str.clone(),
@@ -1017,15 +1038,15 @@ impl LinkCollector<'_, '_> {
res = prim;
} else {
// `[char]` when a `char` module is in scope
- let candidates = vec![res, prim];
- ambiguity_error(self.cx, diag_info, path_str, candidates);
+ let candidates = &[(res, res.def_id(self.cx.tcx)), (prim, None)];
+ ambiguity_error(self.cx, &diag_info, path_str, candidates);
return None;
}
}
}
match res {
- Res::Primitive(prim) => {
+ Res::Primitive(_) => {
if let Some(UrlFragment::Item(id)) = fragment {
// We're actually resolving an associated item of a primitive, so we need to
// verify the disambiguator (if any) matches the type of the associated item.
@@ -1045,15 +1066,6 @@ impl LinkCollector<'_, '_> {
item,
&diag_info,
)?;
-
- // FIXME: it would be nice to check that the feature gate was enabled in the original crate, not just ignore it altogether.
- // However I'm not sure how to check that across crates.
- if prim == PrimitiveType::RawPointer
- && item.item_id.is_local()
- && !self.cx.tcx.features().intra_doc_pointers
- {
- self.report_rawptr_assoc_feature_gate(dox, ori_link, item);
- }
} else {
match disambiguator {
Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
@@ -1132,7 +1144,7 @@ impl LinkCollector<'_, '_> {
}
}
- // item can be non-local e.g. when using #[doc(primitive = "pointer")]
+ // item can be non-local e.g. when using `#[rustc_doc_primitive = "pointer"]`
if let Some((src_id, dst_id)) = id.as_local().and_then(|dst_id| {
item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
}) {
@@ -1174,10 +1186,9 @@ impl LinkCollector<'_, '_> {
report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, callback);
}
- fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &MarkdownLink, item: &Item) {
- let span =
- super::source_span_for_markdown_range(self.cx.tcx, dox, &ori_link.range, &item.attrs)
- .unwrap_or_else(|| item.attr_span(self.cx.tcx));
+ fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &Range<usize>, item: &Item) {
+ let span = super::source_span_for_markdown_range(self.cx.tcx, dox, ori_link, &item.attrs)
+ .unwrap_or_else(|| item.attr_span(self.cx.tcx));
rustc_session::parse::feature_err(
&self.cx.tcx.sess.parse_sess,
sym::intra_doc_pointers,
@@ -1202,7 +1213,31 @@ impl LinkCollector<'_, '_> {
}
}
- let res = self.resolve_with_disambiguator(&key, diag.clone()).and_then(|(res, def_id)| {
+ let mut candidates = self.resolve_with_disambiguator(&key, diag.clone());
+
+ // FIXME: it would be nice to check that the feature gate was enabled in the original crate, not just ignore it altogether.
+ // However I'm not sure how to check that across crates.
+ if let Some(candidate) = candidates.get(0) &&
+ candidate.0 == Res::Primitive(PrimitiveType::RawPointer) &&
+ key.path_str.contains("::") // We only want to check this if this is an associated item.
+ {
+ if key.item_id.is_local() && !self.cx.tcx.features().intra_doc_pointers {
+ self.report_rawptr_assoc_feature_gate(diag.dox, &diag.link_range, diag.item);
+ return None;
+ } else {
+ candidates = vec![candidates[0]];
+ }
+ }
+
+ // If there are multiple items with the same "kind" (for example, both "associated types")
+ // and after removing duplicated kinds, only one remains, the `ambiguity_error` function
+ // won't emit an error. So at this point, we can just take the first candidate as it was
+ // the first retrieved and use it to generate the link.
+ if candidates.len() > 1 && !ambiguity_error(self.cx, &diag, &key.path_str, &candidates) {
+ candidates = vec![candidates[0]];
+ }
+
+ if let &[(res, def_id)] = candidates.as_slice() {
let fragment = match (&key.extra_fragment, def_id) {
(Some(_), Some(def_id)) => {
report_anchor_conflict(self.cx, diag, def_id);
@@ -1212,13 +1247,15 @@ impl LinkCollector<'_, '_> {
(None, Some(def_id)) => Some(UrlFragment::Item(def_id)),
(None, None) => None,
};
- Some((res, fragment))
- });
+ let r = Some((res, fragment));
+ self.visited_links.insert(key, r.clone());
+ return r;
+ }
- if res.is_some() || cache_errors {
- self.visited_links.insert(key, res.clone());
+ if cache_errors {
+ self.visited_links.insert(key, None);
}
- res
+ None
}
/// After parsing the disambiguator, resolve the main part of the link.
@@ -1227,16 +1264,16 @@ impl LinkCollector<'_, '_> {
&mut self,
key: &ResolutionInfo,
diag: DiagnosticInfo<'_>,
- ) -> Option<(Res, Option<DefId>)> {
+ ) -> Vec<(Res, Option<DefId>)> {
let disambiguator = key.dis;
let path_str = &key.path_str;
let item_id = key.item_id;
- let base_node = key.module_id;
+ let module_id = key.module_id;
match disambiguator.map(Disambiguator::ns) {
Some(expected_ns) => {
- match self.resolve(path_str, expected_ns, item_id, base_node) {
- Ok(res) => Some(res),
+ match self.resolve(path_str, expected_ns, item_id, module_id) {
+ Ok(candidates) => candidates,
Err(err) => {
// We only looked in one namespace. Try to give a better error if possible.
// FIXME: really it should be `resolution_failure` that does this, not `resolve_with_disambiguator`.
@@ -1245,10 +1282,11 @@ impl LinkCollector<'_, '_> {
for other_ns in [TypeNS, ValueNS, MacroNS] {
if other_ns != expected_ns {
if let Ok(res) =
- self.resolve(path_str, other_ns, item_id, base_node)
+ self.resolve(path_str, other_ns, item_id, module_id) &&
+ !res.is_empty()
{
err = ResolutionFailure::WrongNamespace {
- res: full_res(self.cx.tcx, res),
+ res: full_res(self.cx.tcx, res[0]),
expected_ns,
};
break;
@@ -1262,25 +1300,33 @@ impl LinkCollector<'_, '_> {
None => {
// Try everything!
let mut candidate = |ns| {
- self.resolve(path_str, ns, item_id, base_node)
+ self.resolve(path_str, ns, item_id, module_id)
.map_err(ResolutionFailure::NotResolved)
};
let candidates = PerNS {
macro_ns: candidate(MacroNS),
type_ns: candidate(TypeNS),
- value_ns: candidate(ValueNS).and_then(|(res, def_id)| {
- match res {
- // Constructors are picked up in the type namespace.
- Res::Def(DefKind::Ctor(..), _) => {
- Err(ResolutionFailure::WrongNamespace { res, expected_ns: TypeNS })
+ value_ns: candidate(ValueNS).and_then(|v_res| {
+ for (res, _) in v_res.iter() {
+ match res {
+ // Constructors are picked up in the type namespace.
+ Res::Def(DefKind::Ctor(..), _) => {
+ return Err(ResolutionFailure::WrongNamespace {
+ res: *res,
+ expected_ns: TypeNS,
+ });
+ }
+ _ => {}
}
- _ => Ok((res, def_id)),
}
+ Ok(v_res)
}),
};
- let len = candidates.iter().filter(|res| res.is_ok()).count();
+ let len = candidates
+ .iter()
+ .fold(0, |acc, res| if let Ok(res) = res { acc + res.len() } else { acc });
if len == 0 {
return resolution_failure(
@@ -1290,22 +1336,21 @@ impl LinkCollector<'_, '_> {
disambiguator,
candidates.into_iter().filter_map(|res| res.err()).collect(),
);
- }
-
- if len == 1 {
- Some(candidates.into_iter().find_map(|res| res.ok()).unwrap())
- } else if len == 2 && is_derive_trait_collision(&candidates) {
- Some(candidates.type_ns.unwrap())
+ } else if len == 1 {
+ candidates.into_iter().filter_map(|res| res.ok()).flatten().collect::<Vec<_>>()
} else {
- let ignore_macro = is_derive_trait_collision(&candidates);
- // If we're reporting an ambiguity, don't mention the namespaces that failed
- let mut candidates =
- candidates.map(|candidate| candidate.ok().map(|(res, _)| res));
- if ignore_macro {
- candidates.macro_ns = None;
+ let has_derive_trait_collision = is_derive_trait_collision(&candidates);
+ if len == 2 && has_derive_trait_collision {
+ candidates.type_ns.unwrap()
+ } else {
+ // If we're reporting an ambiguity, don't mention the namespaces that failed
+ let mut candidates = candidates.map(|candidate| candidate.ok());
+                    // If there is a collision between a trait and a derive, we ignore the derive.
+ if has_derive_trait_collision {
+ candidates.macro_ns = None;
+ }
+ candidates.into_iter().filter_map(|res| res).flatten().collect::<Vec<_>>()
}
- ambiguity_error(self.cx, diag, path_str, candidates.present_items().collect());
- None
}
}
}
@@ -1593,7 +1638,7 @@ fn resolution_failure(
path_str: &str,
disambiguator: Option<Disambiguator>,
kinds: SmallVec<[ResolutionFailure<'_>; 3]>,
-) -> Option<(Res, Option<DefId>)> {
+) -> Vec<(Res, Option<DefId>)> {
let tcx = collector.cx.tcx;
let mut recovered_res = None;
report_diagnostic(
@@ -1652,11 +1697,13 @@ fn resolution_failure(
};
name = start;
for ns in [TypeNS, ValueNS, MacroNS] {
- if let Ok(res) = collector.resolve(start, ns, item_id, module_id) {
- debug!("found partial_res={:?}", res);
- *partial_res = Some(full_res(collector.cx.tcx, res));
- *unresolved = end.into();
- break 'outer;
+ if let Ok(v_res) = collector.resolve(start, ns, item_id, module_id) {
+ debug!("found partial_res={:?}", v_res);
+ if !v_res.is_empty() {
+ *partial_res = Some(full_res(collector.cx.tcx, v_res[0]));
+ *unresolved = end.into();
+ break 'outer;
+ }
}
}
*unresolved = end.into();
@@ -1804,7 +1851,10 @@ fn resolution_failure(
},
);
- recovered_res
+ match recovered_res {
+ Some(r) => vec![r],
+ None => Vec::new(),
+ }
}
fn report_multiple_anchors(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>) {
@@ -1889,28 +1939,47 @@ fn report_malformed_generics(
}
/// Report an ambiguity error, where there were multiple possible resolutions.
+///
+/// If all `candidates` have the same kind, it's not possible to disambiguate, so in this case
+/// the function won't emit an error and will return `false`. Otherwise, it'll emit the error and
+/// return `true`.
fn ambiguity_error(
cx: &DocContext<'_>,
- diag_info: DiagnosticInfo<'_>,
+ diag_info: &DiagnosticInfo<'_>,
path_str: &str,
- candidates: Vec<Res>,
-) {
- let mut msg = format!("`{}` is ", path_str);
+ candidates: &[(Res, Option<DefId>)],
+) -> bool {
+ let mut descrs = FxHashSet::default();
+ let kinds = candidates
+ .iter()
+ .map(
+ |(res, def_id)| {
+ if let Some(def_id) = def_id { Res::from_def_id(cx.tcx, *def_id) } else { *res }
+ },
+ )
+ .filter(|res| descrs.insert(res.descr()))
+ .collect::<Vec<_>>();
+ if descrs.len() == 1 {
+        // There is no way for users to disambiguate at this point, so it's better to return the first
+ // candidate and not show a warning.
+ return false;
+ }
- match candidates.as_slice() {
- [first_def, second_def] => {
+ let mut msg = format!("`{}` is ", path_str);
+ match kinds.as_slice() {
+ [res1, res2] => {
msg += &format!(
"both {} {} and {} {}",
- first_def.article(),
- first_def.descr(),
- second_def.article(),
- second_def.descr(),
+ res1.article(),
+ res1.descr(),
+ res2.article(),
+ res2.descr()
);
}
_ => {
- let mut candidates = candidates.iter().peekable();
- while let Some(res) = candidates.next() {
- if candidates.peek().is_some() {
+ let mut kinds = kinds.iter().peekable();
+ while let Some(res) = kinds.next() {
+ if kinds.peek().is_some() {
msg += &format!("{} {}, ", res.article(), res.descr());
} else {
msg += &format!("and {} {}", res.article(), res.descr());
@@ -1919,17 +1988,18 @@ fn ambiguity_error(
}
}
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, &diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, |diag, sp| {
if let Some(sp) = sp {
diag.span_label(sp, "ambiguous link");
} else {
diag.note("ambiguous link");
}
- for res in candidates {
+ for res in kinds {
suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
}
});
+ true
}
/// In case of an ambiguity or mismatched disambiguator, suggest the correct
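For orientation, the ambiguity handling above concerns doc links whose path matches items in more than one namespace. A minimal, hypothetical crate that triggers it (names invented for illustration; this is not code from the patch), together with the disambiguated links that silence the warning:

    //! `example` names both a module (type namespace) and a function (value
    //! namespace), so a bare intra-doc link like `[example]` is ambiguous.

    /// Disambiguated links resolve cleanly: the module is
    /// [`example`](mod@example) and the function is [`example()`].
    pub struct Docs;

    pub mod example {}

    pub fn example() {}

With the change above, candidates that all share the same kind no longer produce a warning, since no disambiguator could tell them apart anyway.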
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index 01ed4a60b..8d204ddb7 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -9,7 +9,7 @@ use crate::visit::DocVisitor;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LOCAL_CRATE};
-use rustc_middle::ty::{self, DefIdTree};
+use rustc_middle::ty;
use rustc_span::symbol::sym;
pub(crate) const COLLECT_TRAIT_IMPLS: Pass = Pass {
@@ -49,7 +49,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
let _prof_timer = cx.tcx.sess.prof.generic_activity("build_extern_trait_impls");
for &cnum in cx.tcx.crates(()) {
for &impl_def_id in cx.tcx.trait_impls_in_crate(cnum) {
- inline::build_impl(cx, None, impl_def_id, None, &mut new_items_external);
+ inline::build_impl(cx, impl_def_id, None, &mut new_items_external);
}
}
}
@@ -75,7 +75,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
);
parent = cx.tcx.opt_parent(did);
}
- inline::build_impl(cx, None, impl_def_id, Some(&attr_buf), &mut new_items_local);
+ inline::build_impl(cx, impl_def_id, Some((&attr_buf, None)), &mut new_items_local);
attr_buf.clear();
}
}
@@ -84,7 +84,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
for def_id in PrimitiveType::all_impls(cx.tcx) {
// Try to inline primitive impls from other crates.
if !def_id.is_local() {
- inline::build_impl(cx, None, def_id, None, &mut new_items_external);
+ inline::build_impl(cx, def_id, None, &mut new_items_external);
}
}
for (prim, did) in PrimitiveType::primitive_locations(cx.tcx) {
diff --git a/src/librustdoc/passes/propagate_doc_cfg.rs b/src/librustdoc/passes/propagate_doc_cfg.rs
index a4bc48690..8a33e51b3 100644
--- a/src/librustdoc/passes/propagate_doc_cfg.rs
+++ b/src/librustdoc/passes/propagate_doc_cfg.rs
@@ -9,7 +9,6 @@ use crate::fold::DocFolder;
use crate::passes::Pass;
use rustc_hir::def_id::LocalDefId;
-use rustc_middle::ty::DefIdTree;
pub(crate) const PROPAGATE_DOC_CFG: Pass = Pass {
name: "propagate-doc-cfg",
@@ -58,7 +57,8 @@ impl<'a, 'tcx> CfgPropagator<'a, 'tcx> {
next_def_id = parent_def_id;
}
- let (_, cfg) = merge_attrs(self.cx, None, item.attrs.other_attrs.as_slice(), Some(&attrs));
+ let (_, cfg) =
+ merge_attrs(self.cx, item.attrs.other_attrs.as_slice(), Some((&attrs, None)));
item.cfg = cfg;
}
}
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
index 890b3e8d6..a688aa148 100644
--- a/src/librustdoc/passes/strip_hidden.rs
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -121,9 +121,14 @@ impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
// strip things like impl methods but when doing so
// we must not add any items to the `retained` set.
let old = mem::replace(&mut self.update_retained, false);
- let ret = strip_item(self.set_is_in_hidden_item_and_fold(true, i));
+ let ret = self.set_is_in_hidden_item_and_fold(true, i);
self.update_retained = old;
- Some(ret)
+ if ret.is_crate() {
+ // We don't strip the crate, even if it has `#[doc(hidden)]`.
+ Some(ret)
+ } else {
+ Some(strip_item(ret))
+ }
}
_ => {
let ret = self.set_is_in_hidden_item_and_fold(true, i);
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index 157e042e4..f54b70b41 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -8,14 +8,14 @@ use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId, LocalDefIdSet};
use rustc_hir::intravisit::{walk_item, Visitor};
use rustc_hir::{Node, CRATE_HIR_ID};
use rustc_middle::hir::nested_filter;
-use rustc_middle::ty::{DefIdTree, TyCtxt};
+use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
-use std::mem;
+use std::{iter, mem};
-use crate::clean::{cfg::Cfg, AttributesExt, NestedAttributesExt};
+use crate::clean::{cfg::Cfg, reexport_chain, AttributesExt, NestedAttributesExt};
use crate::core;
/// This module is used to store stuff from Rust's AST in a more convenient
@@ -133,7 +133,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// is declared but also a reexport of itself producing two exports of the same
// macro in the same module.
let mut inserted = FxHashSet::default();
- for export in self.cx.tcx.module_reexports(CRATE_DEF_ID).unwrap_or(&[]) {
+ for export in self.cx.tcx.module_children_reexports(CRATE_DEF_ID) {
if let Res::Def(DefKind::Macro(_), def_id) = export.res &&
let Some(local_def_id) = def_id.as_local() &&
self.cx.tcx.has_attr(def_id, sym::macro_export) &&
@@ -223,6 +223,11 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
) -> bool {
debug!("maybe_inline_local res: {:?}", res);
+ if renamed == Some(kw::Underscore) {
+ // We never inline `_` reexports.
+ return false;
+ }
+
if self.cx.output_format.is_json() {
return false;
}
@@ -259,6 +264,22 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
return false;
}
+ if !please_inline &&
+ let Some(item_def_id) = reexport_chain(self.cx.tcx, def_id, res_did).iter()
+ .flat_map(|reexport| reexport.id()).map(|id| id.expect_local())
+ .chain(iter::once(res_did)).nth(1) &&
+ item_def_id != def_id &&
+ self
+ .cx
+ .cache
+ .effective_visibilities
+ .is_directly_public(self.cx.tcx, item_def_id.to_def_id()) &&
+ !inherits_doc_hidden(self.cx.tcx, item_def_id)
+ {
+        // The imported item is public and not `doc(hidden)`, so there is no need to inline it.
+ return false;
+ }
+
if !self.view_item_stack.insert(res_did) {
return false;
}
@@ -329,8 +350,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
self.visit_foreign_item_inner(item, None);
}
}
- // If we're inlining, skip private items or item reexported as "_".
- _ if self.inlining && (!is_pub || renamed == Some(kw::Underscore)) => {}
+ // If we're inlining, skip private items.
+ _ if self.inlining && !is_pub => {}
hir::ItemKind::GlobalAsm(..) => {}
hir::ItemKind::Use(_, hir::UseKind::ListStem) => {}
hir::ItemKind::Use(path, kind) => {
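For readers unfamiliar with the pattern, a short sketch of the `_` re-exports that the new `renamed == Some(kw::Underscore)` check skips (hypothetical code, not from this patch): re-exporting a trait as `_` makes its methods available to glob importers without exposing a nameable item, so there is nothing useful for rustdoc to inline.

    // A prelude-style re-export: the trait's methods come into scope for
    // anyone who glob-imports this module, but no nameable `Write` item is
    // exposed, so rustdoc has nothing to inline for it.
    pub use std::fmt::Write as _;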
diff --git a/src/rustdoc-json-types/Cargo.toml b/src/rustdoc-json-types/Cargo.toml
index d60699efd..d63caa7ad 100644
--- a/src/rustdoc-json-types/Cargo.toml
+++ b/src/rustdoc-json-types/Cargo.toml
@@ -8,6 +8,7 @@ path = "lib.rs"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
+rustc-hash = "1.1.0"
[dev-dependencies]
serde_json = "1.0"
diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs
index 387d5787d..3cf8ceed6 100644
--- a/src/rustdoc-json-types/lib.rs
+++ b/src/rustdoc-json-types/lib.rs
@@ -3,10 +3,9 @@
//! These types are the public API exposed through the `--output-format json` flag. The [`Crate`]
//! struct is the root of the JSON blob and all other items are contained within.
-use std::collections::HashMap;
-use std::path::PathBuf;
-
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
/// rustdoc format-version.
pub const FORMAT_VERSION: u32 = 24;
@@ -24,11 +23,11 @@ pub struct Crate {
pub includes_private: bool,
/// A collection of all items in the local crate as well as some external traits and their
/// items that are referenced locally.
- pub index: HashMap<Id, Item>,
+ pub index: FxHashMap<Id, Item>,
/// Maps IDs to fully qualified paths and other info helpful for generating links.
- pub paths: HashMap<Id, ItemSummary>,
+ pub paths: FxHashMap<Id, ItemSummary>,
/// Maps `crate_id` of items to a crate name and html_root_url if it exists.
- pub external_crates: HashMap<u32, ExternalCrate>,
+ pub external_crates: FxHashMap<u32, ExternalCrate>,
/// A single version number to be used in the future when making backwards incompatible changes
/// to the JSON output.
pub format_version: u32,
@@ -54,8 +53,8 @@ pub struct ItemSummary {
///
/// Note that items can appear in multiple paths, and the one chosen is implementation
/// defined. Currently, this is the full path to where the item was defined. Eg
- /// [`String`] is currently `["alloc", "string", "String"]` and [`HashMap`] is
- /// `["std", "collections", "hash", "map", "HashMap"]`, but this is subject to change.
+ /// [`String`] is currently `["alloc", "string", "String"]` and [`HashMap`][`std::collections::HashMap`]
+ /// is `["std", "collections", "hash", "map", "HashMap"]`, but this is subject to change.
pub path: Vec<String>,
/// Whether this item is a struct, trait, macro, etc.
pub kind: ItemKind,
@@ -80,7 +79,7 @@ pub struct Item {
/// Some("") if there is some documentation but it is empty (EG `#[doc = ""]`).
pub docs: Option<String>,
/// This mapping resolves [intra-doc links](https://github.com/rust-lang/rfcs/blob/master/text/1946-intra-rustdoc-links.md) from the docstring to their IDs
- pub links: HashMap<String, Id>,
+ pub links: FxHashMap<String, Id>,
/// Stringified versions of the attributes on this item (e.g. `"#[inline]"`)
pub attrs: Vec<String>,
pub deprecation: Option<Deprecation>,
@@ -551,7 +550,7 @@ pub enum Type {
DynTrait(DynTrait),
/// Parameterized types
Generic(String),
- /// Built in numberic (i*, u*, f*) types, bool, and char
+ /// Built in numeric (i*, u*, f*) types, bool, and char
Primitive(String),
/// `extern "ABI" fn`
FunctionPointer(Box<FunctionPointer>),
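For consumers of `rustdoc-json-types`, `FxHashMap<K, V>` is a type alias for `std::collections::HashMap` with the faster (non-DoS-resistant) FxHasher, so the maps above keep the familiar `HashMap` API and continue to work with serde; the main visible difference is construction via `Default` rather than `HashMap::new`. A minimal sketch, assuming only the `rustc-hash = "1.1.0"` dependency added above (the key and value types here are placeholders, not the real `Id`/`Item` types):

use rustc_hash::FxHashMap;

fn main() {
    // FxHashMap is built through `Default` (there is no `FxHashMap::new`).
    let mut index: FxHashMap<String, u32> = FxHashMap::default();
    index.insert("0:1:2".to_string(), 42);

    // Lookups, iteration, etc. are the plain std HashMap methods.
    assert_eq!(index.get("0:1:2"), Some(&42));
}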
diff --git a/src/stage0.json b/src/stage0.json
index 03cafd870..0cb95bae7 100644
--- a/src/stage0.json
+++ b/src/stage0.json
@@ -17,298 +17,298 @@
"tool is executed."
],
"compiler": {
- "date": "2023-03-28",
- "version": "1.68.2"
+ "date": "2023-04-20",
+ "version": "1.69.0"
},
"rustfmt": null,
"checksums_sha256": {
- "dist/2023-03-28/cargo-1.68.2-aarch64-apple-darwin.tar.gz": "7317f1a2823a78f531f433d55cf76baf8701d16268bded12e0501fc07e74c6f4",
- "dist/2023-03-28/cargo-1.68.2-aarch64-apple-darwin.tar.xz": "00812cc56dfb8cb06ab997737279e6a3a46a8fca3ac7baf4f9a42b3f6e1d3a3f",
- "dist/2023-03-28/cargo-1.68.2-aarch64-pc-windows-msvc.tar.gz": "c56dbd0fdfe9be73d076a6404d09d8480b4b624e277b87b4fd25d7da9382b979",
- "dist/2023-03-28/cargo-1.68.2-aarch64-pc-windows-msvc.tar.xz": "860a4275795629fd9d9096a5aabab0f8f0a6c69ed61c23528f79a79f427ec756",
- "dist/2023-03-28/cargo-1.68.2-aarch64-unknown-linux-gnu.tar.gz": "09119c8df515f3358dbbb23514a80deb5d9891a5fcd4323667dbc84f32a160da",
- "dist/2023-03-28/cargo-1.68.2-aarch64-unknown-linux-gnu.tar.xz": "a1b1f8ffdd56747453cb9531f70474ddbe603533e7b06647a810e32fc7c3a8ba",
- "dist/2023-03-28/cargo-1.68.2-aarch64-unknown-linux-musl.tar.gz": "d9db2875c301386a64c57339d0fe3560454fb487bec36e2c27a865ff5c9aa46c",
- "dist/2023-03-28/cargo-1.68.2-aarch64-unknown-linux-musl.tar.xz": "8211b2472df3bfbf0a1eaeaec5dc51c5d8b657e192d35eaa4c6432f73d2f1818",
- "dist/2023-03-28/cargo-1.68.2-arm-unknown-linux-gnueabi.tar.gz": "62e72f243e5b8e8d77bebe7e627ecaca6829c722fc26ce69a52e0d780f240dd8",
- "dist/2023-03-28/cargo-1.68.2-arm-unknown-linux-gnueabi.tar.xz": "383cc67e1bb3159d48d2e054e03a3bf85719cd5f63af366f5c4893734018008e",
- "dist/2023-03-28/cargo-1.68.2-arm-unknown-linux-gnueabihf.tar.gz": "b9573233b7a8897e178c1cedf0b23af9641f2d24d2507ba2f59c22d25430b724",
- "dist/2023-03-28/cargo-1.68.2-arm-unknown-linux-gnueabihf.tar.xz": "38453a4c37bff2d7e579a60fcdccad7b0a1f1bd5decc64a811b6d10c009c52e6",
- "dist/2023-03-28/cargo-1.68.2-armv7-unknown-linux-gnueabihf.tar.gz": "6dc62202328089b0ab9b7f743799ef8e0bbb81fb36e8a89509b1c100cf64211a",
- "dist/2023-03-28/cargo-1.68.2-armv7-unknown-linux-gnueabihf.tar.xz": "14eb6b290c5613760e9a6194fba06eb4c4703766984b4b5793a474e811a64d9c",
- "dist/2023-03-28/cargo-1.68.2-i686-pc-windows-gnu.tar.gz": "05ca31c43ba3cd9df4fb1c3d5c85694205fe13fded040d886be2a31a02f860d0",
- "dist/2023-03-28/cargo-1.68.2-i686-pc-windows-gnu.tar.xz": "6ff3a6ff4d34d7d0d886802c656e8003cb8fc410be9165e3937ccb9c4149346a",
- "dist/2023-03-28/cargo-1.68.2-i686-pc-windows-msvc.tar.gz": "52cc6639802e20fb097dee52917a45b3ae2df330c0ed29e3880bb48f33bee04b",
- "dist/2023-03-28/cargo-1.68.2-i686-pc-windows-msvc.tar.xz": "604d47ee65a3b977d778a4d4a727241e80532d52a7a47a590007efafbd45461d",
- "dist/2023-03-28/cargo-1.68.2-i686-unknown-linux-gnu.tar.gz": "f6dbeaaeaf4a755ef45ef006f517ba376f0e81f5c108fe8ec58034de0867e0ca",
- "dist/2023-03-28/cargo-1.68.2-i686-unknown-linux-gnu.tar.xz": "aeee38c0cd35b531d9f00028f720358256e9f3b80b176467c6afc033a16c92aa",
- "dist/2023-03-28/cargo-1.68.2-mips-unknown-linux-gnu.tar.gz": "dca2229faf1175a6c8505449d5218ac710ae05da524acabf20dd563157711031",
- "dist/2023-03-28/cargo-1.68.2-mips-unknown-linux-gnu.tar.xz": "8d24614f31ba67fe10a642e9f49d64066b4ce6789a80a101ae10afcc2a194c97",
- "dist/2023-03-28/cargo-1.68.2-mips64-unknown-linux-gnuabi64.tar.gz": "a10c27917c4739cdda446248ef1758d401cb76643a3e99777b5b0b38fbcf1360",
- "dist/2023-03-28/cargo-1.68.2-mips64-unknown-linux-gnuabi64.tar.xz": "271b899caf65466fd4560649af3e7ec61601be34d943936e546622ae8b4f39a2",
- "dist/2023-03-28/cargo-1.68.2-mips64el-unknown-linux-gnuabi64.tar.gz": "220109f11c4cf656553087b9f2f3bd08ad2714c8bd602586f56602e670d486c7",
- "dist/2023-03-28/cargo-1.68.2-mips64el-unknown-linux-gnuabi64.tar.xz": "0809ff7f2ef910ac0b584ad1f184a0898e5e701b3c0e45b158b129efa50493dd",
- "dist/2023-03-28/cargo-1.68.2-mipsel-unknown-linux-gnu.tar.gz": "c3f0e5a71736cc07d3527432eea945c3fa795ad4669b269615dad0ea5a20be1f",
- "dist/2023-03-28/cargo-1.68.2-mipsel-unknown-linux-gnu.tar.xz": "8664ff821dfd8bb252c61c2b7473e355bd38cd8a6bc15a9ecc98af46f6a2b282",
- "dist/2023-03-28/cargo-1.68.2-powerpc-unknown-linux-gnu.tar.gz": "5a9c71ce2f9930264e19cf7fe7241c68b2b20eafb9aa2461d51e0c769bd902c8",
- "dist/2023-03-28/cargo-1.68.2-powerpc-unknown-linux-gnu.tar.xz": "13ad9e74376fc01d9d770447c141a18dd986dd60bbd4c6177ed80df4a28fc142",
- "dist/2023-03-28/cargo-1.68.2-powerpc64-unknown-linux-gnu.tar.gz": "8fea8801334b48d64136f1eb32b90b84b44dccac7222b1d4147c171ef5609431",
- "dist/2023-03-28/cargo-1.68.2-powerpc64-unknown-linux-gnu.tar.xz": "ad4eded41be32bb19a5f6224daacb9589947956f4be9f884f7ea06323b448088",
- "dist/2023-03-28/cargo-1.68.2-powerpc64le-unknown-linux-gnu.tar.gz": "fd66f5fdbad088bbeb022dfef4bef8ff3744c70f1fed0a0455260b332a674cd6",
- "dist/2023-03-28/cargo-1.68.2-powerpc64le-unknown-linux-gnu.tar.xz": "1eb1c330d281a9478d514724c5089225b3f66880f4a69e9e02b9d389f000adbb",
- "dist/2023-03-28/cargo-1.68.2-riscv64gc-unknown-linux-gnu.tar.gz": "29e2b12210e32c7ef1dad89d2a231ce61497bd7a2462b8a7122cf7dff9f072ec",
- "dist/2023-03-28/cargo-1.68.2-riscv64gc-unknown-linux-gnu.tar.xz": "d5a154406bc6ebc6d5cdd3198aa2b5167ceef130454c56bcf03e37fa6b290882",
- "dist/2023-03-28/cargo-1.68.2-s390x-unknown-linux-gnu.tar.gz": "65c166fbb7fdb6a9d1d99e4ab77e08c8c2a8e38ab3319f7c3ce9a5caeb596d9d",
- "dist/2023-03-28/cargo-1.68.2-s390x-unknown-linux-gnu.tar.xz": "26353406aba935e135e0527673bfd3805de88ff9a63187135e73b5788f385112",
- "dist/2023-03-28/cargo-1.68.2-x86_64-apple-darwin.tar.gz": "c580e7dbf6bde9bf4246380ac1591682981dc7cbdb7b82a95eac8322d866e4bd",
- "dist/2023-03-28/cargo-1.68.2-x86_64-apple-darwin.tar.xz": "f44907243f9f42b9505c692ff4ff0cbbcc9590b146db9491edbd950a443a5e24",
- "dist/2023-03-28/cargo-1.68.2-x86_64-pc-windows-gnu.tar.gz": "f1b563a41a08312ca38c7b9e91d2b3a3461ce6d0b5a88593ffe42f4cfa610b60",
- "dist/2023-03-28/cargo-1.68.2-x86_64-pc-windows-gnu.tar.xz": "9158f8ec0793bd5f993c99fcc656b860507a394a4fb4df17c5350096e045dadd",
- "dist/2023-03-28/cargo-1.68.2-x86_64-pc-windows-msvc.tar.gz": "374b25dfb7ec5ca31e0da96be1e854e7024b9b52580e457065f844197130b2e4",
- "dist/2023-03-28/cargo-1.68.2-x86_64-pc-windows-msvc.tar.xz": "5c071ff9295e6309da4c71214a5b47f16033138bb74b783ad94acf45ba83c884",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-freebsd.tar.gz": "4cc87ec3298db3f5464ab989c50b0641bfa5b5499996a2ad34da92260f8db17f",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-freebsd.tar.xz": "b0a113d7f2992e6aa8827fd095c3bb9507b27b27a83d7aa2c98931f0a4dca9e6",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-illumos.tar.gz": "d1d29e55aa9b8ed2bb9764eff921af59254fd56e81083b136bf27de9bc1e4f4a",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-illumos.tar.xz": "5a656c5932964bb7d243a8a03455c20df1dee9aafec0382a711d794324cf2a95",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-linux-gnu.tar.gz": "cf4e6c9d1a61c1898ffa21353fc9eb4c1512fc6beb6cad433851fbed777f1ea6",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-linux-gnu.tar.xz": "b25d6f88b93cb75868ff4bc9ca0103facd4622825cf53df67546cea6cb60da0f",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-linux-musl.tar.gz": "cfc86423b17adcbcdb4eec35ebc17f20ba73d181349ae01b0e17530332d12cb1",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-linux-musl.tar.xz": "3427b766797da0a1065a84b074b93a309763ca5b94575d0f8b70b1f4e50c159e",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-netbsd.tar.gz": "d1c72bccba9b2b534715c3cd9d2f4ae83ceff312d8533c270d168e4b97ba74d0",
- "dist/2023-03-28/cargo-1.68.2-x86_64-unknown-netbsd.tar.xz": "57a45f1dc54c5e39fdf8a61342d774820adc4f6bdc0ecb53d4ff9aa0c448a02a",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-apple-darwin.tar.gz": "db2be7e5d766799796a71e43f141a5082ac30240e276c8c9b56800ab4638af92",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-apple-darwin.tar.xz": "3c18e4f87fc69a6907d9a76db079f3e59a26764b001157c2fc9c4c8497d5448b",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-apple-ios-sim.tar.gz": "123a67f9f8de9c3a4d793a864e05c5d59c0fb630611b97dd36e26805e549e539",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-apple-ios-sim.tar.xz": "e0287fb4e98b375495923aeddbd71111a17b9ef92c597277f24b9de5bcbe0fa6",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-apple-ios.tar.gz": "71d943a0340fbb3b0aec6f3a61c421b3a600235e3111b2d76464f1cd17326300",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-apple-ios.tar.xz": "cab7d6262087d1bb17fc0163923d8f8c786b2ddbd896caf6a5d959b982e13e1c",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-linux-android.tar.gz": "03f42f35c6b1426983e4dc0af017c4d7953d079f650f783512c06a9b5d30805c",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-linux-android.tar.xz": "5ad8e087347ba337198e4464913f1cc6bf5525f81a6388450faec5b20676c8ab",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-pc-windows-msvc.tar.gz": "351f99049c211b6828aaa964a5ef999eeb2f6d470ad5173379a428db082b65f1",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-pc-windows-msvc.tar.xz": "4d5c76138f9d2ece590515e2dda8cd2b606a061bcfd2024bf147348070cdda88",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-fuchsia.tar.gz": "2e43837dc70ec0ddde4a5deeef18b61a33fce8873159c672c3ea3719b2c1d314",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-fuchsia.tar.xz": "b10838f356514d3d85bf9b4cac1971daa30c7a29af532602453115d3f037cc8f",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-linux-gnu.tar.gz": "74c2cca31e34cbc0913fc2445c4853acb20c52dba2d0c3012a007cc5decc3bb1",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-linux-gnu.tar.xz": "cbe60945af743804be6822704f986c812dd1fe1d167602aec5350a8f519f9861",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-linux-musl.tar.gz": "fe2558065f0c2f119ea36be2edd74b862899d01abf463baa0f128839c52e2021",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-linux-musl.tar.xz": "718c073d24f3627750049bb74420f90d1ac29843ca16933e11aa301a806a0b10",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-none-softfloat.tar.gz": "d556518078ff7f7f7c2eb7fbe192e15d794f4fc1e6f43939cb6d46e6aad6c655",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-none-softfloat.tar.xz": "9a3ef9df914cba6a96404a768158a5d068251c97e47e8b77bd1fe163b2e01d70",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-none.tar.gz": "ef7283c7c449d15b8801fd5a8f5f36beca3ed16666ce23ad5d215544a695dacb",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-none.tar.xz": "4f90feac2c0a0c940fbfee2f47ff99e2d9c0249a585e850e51d2c17e1a42d1fc",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-uefi.tar.gz": "7f92979d44a7c494cc67df648f9d55dcfed2a3b1451767810ff86ba64d8af680",
- "dist/2023-03-28/rust-std-1.68.2-aarch64-unknown-uefi.tar.xz": "ee7b6e77b69e27a7e429fe33ed03e08f80e2c653429d3f3ea0eff81a16463566",
- "dist/2023-03-28/rust-std-1.68.2-arm-linux-androideabi.tar.gz": "e6883b69e0d2be0973083e02a3ccac9868ff8d1105f22eb0be944dc981557267",
- "dist/2023-03-28/rust-std-1.68.2-arm-linux-androideabi.tar.xz": "0cb2dbf9cc116848300498eecbdbb9979a3bd6e78a0d970095c7b4753c5f001b",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-gnueabi.tar.gz": "6b80d044fe690084ca6fb0c6dc0d9d353fe8ec5fa09fc0d59a1d7eda322b3bb0",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-gnueabi.tar.xz": "59da96890b0a2881799b21a3f49e51f730d67b8dc09b87602dd599e8f980e247",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-gnueabihf.tar.gz": "5e662e9754fca1ca8e9ed18359b39b8bf0ec06c641f7b61174738347e3774324",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-gnueabihf.tar.xz": "565730a2ba02357251f9805b5efe0423adf5411892e0d01cf441456234f81e70",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-musleabi.tar.gz": "28a6750eb0bd4017ae16228ad10a3b7d913b11ae493c7fb948eb454b880ce78b",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-musleabi.tar.xz": "fd065db4269b5131c2f52638b8f30d0159109850c74c87bdf4c623dc0b32d5c2",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-musleabihf.tar.gz": "a033a94fd8bee6e4fea391b5750066b74a4c0b3a4191157632a55af9b8bb2d44",
- "dist/2023-03-28/rust-std-1.68.2-arm-unknown-linux-musleabihf.tar.xz": "4ea9588934eec3d83bee2789f3a15b48aaea05fc8ec1bc175951ec1d0eb6ed22",
- "dist/2023-03-28/rust-std-1.68.2-armebv7r-none-eabi.tar.gz": "ac809aa7b7e61d028d82a262c2e75cd3466468c15116b68fa011056110927cf9",
- "dist/2023-03-28/rust-std-1.68.2-armebv7r-none-eabi.tar.xz": "f9667eff4ee8c8812f07ef3dd94389062236860f820be37cd08a96d472756ef4",
- "dist/2023-03-28/rust-std-1.68.2-armebv7r-none-eabihf.tar.gz": "69e5787f778a82816662a54ae134a5873d9ad3699024b4f5c490f596336b24b4",
- "dist/2023-03-28/rust-std-1.68.2-armebv7r-none-eabihf.tar.xz": "a14e4588c6af1bca6fb083c248e06999bb8c398d62bf557f93e5076bede8a5b1",
- "dist/2023-03-28/rust-std-1.68.2-armv5te-unknown-linux-gnueabi.tar.gz": "897ec452cd656af3f0b9be53f8a48ad37f908f53ba5f694fcf2d30de3cc39cf1",
- "dist/2023-03-28/rust-std-1.68.2-armv5te-unknown-linux-gnueabi.tar.xz": "a7e19b68c44efe52df159f89f8b37790ca613adaa3f77937eb460747552d1e59",
- "dist/2023-03-28/rust-std-1.68.2-armv5te-unknown-linux-musleabi.tar.gz": "a14fe46481437b5647da8a82348245dedbae5069340ac7b61d6eeec71cf2245c",
- "dist/2023-03-28/rust-std-1.68.2-armv5te-unknown-linux-musleabi.tar.xz": "74188e1a1f4f7418d946636b4280c03dc80933f6eedda75ece82b7034f32c6db",
- "dist/2023-03-28/rust-std-1.68.2-armv7-linux-androideabi.tar.gz": "8256c9cc16a9e7dee7aef32b309dd72f9a6cdbb8017f6672c43a759392f1d5db",
- "dist/2023-03-28/rust-std-1.68.2-armv7-linux-androideabi.tar.xz": "d68c72ee56cb0cebd24626f43db10b373d7bb64ffe3e6ccabd60faa648d06882",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-gnueabi.tar.gz": "0ccb53e521a02f778e2c086e7da19966cbcb4a9934475a41dd1540090d8e89c8",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-gnueabi.tar.xz": "39009be69f01e30714c657a5f55475ce9f9c6a591cae85b5d7386944a8e87a80",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-gnueabihf.tar.gz": "53de9bc49050bb15e0f01ac655c3b08610c59effdee7b5e50bcb9880170a14fe",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-gnueabihf.tar.xz": "2b9b98eaf7d18049e4c8625a4cf3c2d610d905c9478a377c6cc3d48f8b31db32",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-musleabi.tar.gz": "55d8e89d624eb8c34ca13207118d5e84e4144c7f1686bd26972b032c6b83a189",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-musleabi.tar.xz": "b9992ee3b16c7b3b2fcf15339ade26084936d6088f578f8b2fab9b0c5c5b085c",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-musleabihf.tar.gz": "3d3b06c29dda5b5bfcfea69a3cb5037d16119772191472826fbb3ec82018ac90",
- "dist/2023-03-28/rust-std-1.68.2-armv7-unknown-linux-musleabihf.tar.xz": "05bef3f9e66537d9f1271dd6b6bf2261ba1db52672223712a8eb9178d036ae53",
- "dist/2023-03-28/rust-std-1.68.2-armv7a-none-eabi.tar.gz": "a99f963a5b5f7605454334ce26b0f45a19d6fed98f575ac01c4266647c86dca0",
- "dist/2023-03-28/rust-std-1.68.2-armv7a-none-eabi.tar.xz": "3bbc611787495173e95f2d9202c0292aca27e87c11bdd5da707236cb5b3c234a",
- "dist/2023-03-28/rust-std-1.68.2-armv7r-none-eabi.tar.gz": "8c83571fb066c0d3a5cd7b29cffbbc2581207e7d927f83aa9542def079044142",
- "dist/2023-03-28/rust-std-1.68.2-armv7r-none-eabi.tar.xz": "5292ee455628bc018766e2a2d2bf13308132d826f0acbe4785cf173583a284d6",
- "dist/2023-03-28/rust-std-1.68.2-armv7r-none-eabihf.tar.gz": "2801d1f8fc3124a99fdaa23244a7435389f4f140aa53c77396691871eb52a5d7",
- "dist/2023-03-28/rust-std-1.68.2-armv7r-none-eabihf.tar.xz": "e3f905cdfec9a9f57b386698d322f0e637f2be2f1e79bfa2e52b25aa6c06a2d9",
- "dist/2023-03-28/rust-std-1.68.2-asmjs-unknown-emscripten.tar.gz": "8fe55fd760e815b97bbff2dd18cdd81865d601a03f3d84e878d32f48f11ee785",
- "dist/2023-03-28/rust-std-1.68.2-asmjs-unknown-emscripten.tar.xz": "0fec97c490656741a82f85259900c5fdb86d947fa2970c39f8184571cb41f9b2",
- "dist/2023-03-28/rust-std-1.68.2-i586-pc-windows-msvc.tar.gz": "ee398d855c8bc6154f4b70356c40a52af4c89c38ac83da6cca965248b9273137",
- "dist/2023-03-28/rust-std-1.68.2-i586-pc-windows-msvc.tar.xz": "a91a1bb2956ff3b8dee5dedd4662eebb5dd98baa229f87deade34e415c639866",
- "dist/2023-03-28/rust-std-1.68.2-i586-unknown-linux-gnu.tar.gz": "d24eb133b3c12b6cb9d8293dca63af9628db201fc4ad15e94d39bd636e0cafaf",
- "dist/2023-03-28/rust-std-1.68.2-i586-unknown-linux-gnu.tar.xz": "07fc71adf63691ae5e103c57c295e665b6dfc096fe4bedfeb5e1d33c133bb9f4",
- "dist/2023-03-28/rust-std-1.68.2-i586-unknown-linux-musl.tar.gz": "aac0678c9051e94a48f8014e5c0195907d5e26c1ad57b230e90fe86c9b8ef426",
- "dist/2023-03-28/rust-std-1.68.2-i586-unknown-linux-musl.tar.xz": "884c3b6cbe8d38bb15943917df8bc87470566014ec50200ed70094c12abbb6bd",
- "dist/2023-03-28/rust-std-1.68.2-i686-linux-android.tar.gz": "4c0c4ca237385caceeb7d198b6fe9cd79b78040cc9142bc38d553fa61271f62e",
- "dist/2023-03-28/rust-std-1.68.2-i686-linux-android.tar.xz": "f594e57950ae46c27fe61421accae1f0feda4d231ce29fbc07bdc2b016bff52a",
- "dist/2023-03-28/rust-std-1.68.2-i686-pc-windows-gnu.tar.gz": "7a0c8c30db0109a093af231c9290ba8553da01cac32fe297444e8398565a0fd7",
- "dist/2023-03-28/rust-std-1.68.2-i686-pc-windows-gnu.tar.xz": "fdc9f2906131b64f3379da995db978d454ad06cb50acd17925bb65ae7c477639",
- "dist/2023-03-28/rust-std-1.68.2-i686-pc-windows-msvc.tar.gz": "51bb5146439365a78a41fd674df23c9771d074fe0e43bcd020d54a0b1a8ec472",
- "dist/2023-03-28/rust-std-1.68.2-i686-pc-windows-msvc.tar.xz": "070225d9f555bdc55c635491c923ec8246156b7af835c5c0d9866edbb448578e",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-freebsd.tar.gz": "38efed7b4f3c1ee3b3b7c26eedd0f5addafd36471dbf34146f6bb5293d970da8",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-freebsd.tar.xz": "6eec64c8a5ecc5129a21fee147f7d10f628177d07a6c4e0e42e4796ffe88c4f6",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-linux-gnu.tar.gz": "0de164eb5673978149a4c5d73371f56021dfe2a38bc7a2dd17226ce97ce4bf3e",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-linux-gnu.tar.xz": "695d4469c131b9993c939db0ce73a3866ab8e69dd657ec3137f3d594857f7812",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-linux-musl.tar.gz": "16ecbaabc95cd0c1db5e4a8e6c36bc3d74cef9cee3ed3585edcb253bfa336150",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-linux-musl.tar.xz": "216a647fb15ffce5271b82c2c016f01b3d69095ab4635d6a895f9b76b6c1c528",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-uefi.tar.gz": "f9b8ac7553102dee3075b63b74878c479c6509e908bb4e85bbbbb2df3f60d147",
- "dist/2023-03-28/rust-std-1.68.2-i686-unknown-uefi.tar.xz": "3cec2ff84038d6fa3a0f84b27e0624057ba2894ecdaab9b06bf4e0e11c70aa56",
- "dist/2023-03-28/rust-std-1.68.2-mips-unknown-linux-gnu.tar.gz": "da3efcb29495f1965a27c8c67b060459104762acbd8278a427cb1307bf8a431d",
- "dist/2023-03-28/rust-std-1.68.2-mips-unknown-linux-gnu.tar.xz": "4ab007f223cf723f8f7adf26a6014f4f5aa444f88c19daccc5ac81ea30275c66",
- "dist/2023-03-28/rust-std-1.68.2-mips-unknown-linux-musl.tar.gz": "411366b7cfa00cdb1da76dbd761d55f32d8780ef3b12c10edafe58dfec1286b0",
- "dist/2023-03-28/rust-std-1.68.2-mips-unknown-linux-musl.tar.xz": "0c77f06f410091371b7f6f4952d759fcc0365025213f925afe80e04ab4b53540",
- "dist/2023-03-28/rust-std-1.68.2-mips64-unknown-linux-gnuabi64.tar.gz": "747c57f7259ef1a157717ac8b17dcd031460f81d39a4ce9091dd4b5f2e76ce5f",
- "dist/2023-03-28/rust-std-1.68.2-mips64-unknown-linux-gnuabi64.tar.xz": "b1b44819454c920980e66473112003212920136e4904113258566d130686de09",
- "dist/2023-03-28/rust-std-1.68.2-mips64-unknown-linux-muslabi64.tar.gz": "0b509d1b36d4bdfaea974a359b08c93c326d81644066dd09a370f8f82ca724af",
- "dist/2023-03-28/rust-std-1.68.2-mips64-unknown-linux-muslabi64.tar.xz": "af586b37cac123774addc78d8c4568f22d94cec622ca9de80631f33da2700a6e",
- "dist/2023-03-28/rust-std-1.68.2-mips64el-unknown-linux-gnuabi64.tar.gz": "abd8ff476c603322438e3afdf10cdb9e8206f008ba2a86f7416e2079b41c9143",
- "dist/2023-03-28/rust-std-1.68.2-mips64el-unknown-linux-gnuabi64.tar.xz": "24028147dd7733637cbfe58b7f7acc8e8131ebbb39969a6c3c61ce56203b4af3",
- "dist/2023-03-28/rust-std-1.68.2-mips64el-unknown-linux-muslabi64.tar.gz": "0ad700c67e89f4072b1b75c4b31b92c53585d212f323247c382925e042ef19f0",
- "dist/2023-03-28/rust-std-1.68.2-mips64el-unknown-linux-muslabi64.tar.xz": "a6ae7b9b1710100f52d96dab59c6e7c46a4b057b8ea3d25b8ea2f792aee2f006",
- "dist/2023-03-28/rust-std-1.68.2-mipsel-unknown-linux-gnu.tar.gz": "8bbb2ba4b3d42602e2acf24fcfc9dcbb862e89e4cf46b4b24e1d984c5bd8ce45",
- "dist/2023-03-28/rust-std-1.68.2-mipsel-unknown-linux-gnu.tar.xz": "c299d65b3594c309d092f6a0fe15c205ab489ffa50910fb8126e944a2727cbe5",
- "dist/2023-03-28/rust-std-1.68.2-mipsel-unknown-linux-musl.tar.gz": "93c40a6f2c46e35fb2b4c101000a354ef4ecfb949f8dc6c17fc5566dbdc61cab",
- "dist/2023-03-28/rust-std-1.68.2-mipsel-unknown-linux-musl.tar.xz": "a6bbd8949dc8514161de4e367c5255e8b83a3cf78c8426ffacbb6e8077d0e5e0",
- "dist/2023-03-28/rust-std-1.68.2-nvptx64-nvidia-cuda.tar.gz": "74bfa32f412403f5d453be4c4aa1852a13fd76659f3a4f7f9fdee048c259a6f8",
- "dist/2023-03-28/rust-std-1.68.2-nvptx64-nvidia-cuda.tar.xz": "c7ffadab1939d18864511d0f7d575a3c62ec2d2d43b7736c7650b30e5f08d908",
- "dist/2023-03-28/rust-std-1.68.2-powerpc-unknown-linux-gnu.tar.gz": "e41bac22c22ce54b2becf072ffe790421e1c775723996eab4f578c181100e6ab",
- "dist/2023-03-28/rust-std-1.68.2-powerpc-unknown-linux-gnu.tar.xz": "e75bdc8a6e3f00285a37606240862cdad69a986c3e3d47d6af1376613281233a",
- "dist/2023-03-28/rust-std-1.68.2-powerpc64-unknown-linux-gnu.tar.gz": "0a367b83afae71be859a7063b26b83d35b090f84179f83b6a40a109f5ae16238",
- "dist/2023-03-28/rust-std-1.68.2-powerpc64-unknown-linux-gnu.tar.xz": "354322352cd8b661b84f5e97729d5d7adb7df9cb9bfd43cac378271c40214d7f",
- "dist/2023-03-28/rust-std-1.68.2-powerpc64le-unknown-linux-gnu.tar.gz": "5ff3de9d9549369614e90c58bc75d1f966097ff58fda9d668d197cba5f65be6c",
- "dist/2023-03-28/rust-std-1.68.2-powerpc64le-unknown-linux-gnu.tar.xz": "e69c8c1b7b0df839b271a2c12985b1c56565a34f9b357631ed99fa9263f3d326",
- "dist/2023-03-28/rust-std-1.68.2-riscv32i-unknown-none-elf.tar.gz": "0dc08ab79536b3d755a3f79fc75867aad8bde8c79cbaa427d9988576a4c9fe42",
- "dist/2023-03-28/rust-std-1.68.2-riscv32i-unknown-none-elf.tar.xz": "5fd9c02ae9c6e61f8123c725c0e59bfc10c1f1b2022d6937b93983694333ef4a",
- "dist/2023-03-28/rust-std-1.68.2-riscv32imac-unknown-none-elf.tar.gz": "e8151762c4c6cc24911ef37132c7df3541e25b848c46d008f4bf0d56cfcdfd0b",
- "dist/2023-03-28/rust-std-1.68.2-riscv32imac-unknown-none-elf.tar.xz": "ae89f2342ff0095b6464bf21f6acfc8c22f5bfa463f0096944287572cc47ab5f",
- "dist/2023-03-28/rust-std-1.68.2-riscv32imc-unknown-none-elf.tar.gz": "6b9f3465a22e3ccd1ae446ad90363e772dadf52f05c80fdeb5097d14d9ba4442",
- "dist/2023-03-28/rust-std-1.68.2-riscv32imc-unknown-none-elf.tar.xz": "c79b774efb8ff05943dd5a426f9408894698b9a8b88e76396a82b751de6ffd24",
- "dist/2023-03-28/rust-std-1.68.2-riscv64gc-unknown-linux-gnu.tar.gz": "8409cf5ef0772c042fc3ab902e41048e9f15f7362b185403393519efdea7e947",
- "dist/2023-03-28/rust-std-1.68.2-riscv64gc-unknown-linux-gnu.tar.xz": "8388ac7a5f924d5d6aa441ae97c33c1abf11fd9516f2b9853f7edc0c5d6c453b",
- "dist/2023-03-28/rust-std-1.68.2-riscv64gc-unknown-none-elf.tar.gz": "c271cc91e5c4f1912cb786f167f14e40f862de1ba1637252a984cd2fb767e0a2",
- "dist/2023-03-28/rust-std-1.68.2-riscv64gc-unknown-none-elf.tar.xz": "19fb8778a165cc5bd8b7c8d96a5f2af1a11c4d000f6806ef239be4a1adb12b44",
- "dist/2023-03-28/rust-std-1.68.2-riscv64imac-unknown-none-elf.tar.gz": "b3ce497a5de00dc5d7d95527fb15c708683154c28e0d3a80265415b0d7389f21",
- "dist/2023-03-28/rust-std-1.68.2-riscv64imac-unknown-none-elf.tar.xz": "d72e9b88b7841ca55a939b8689c3f0a443e61942f5032e4122fe510066ff26ff",
- "dist/2023-03-28/rust-std-1.68.2-s390x-unknown-linux-gnu.tar.gz": "cf05d65ef0aad4b35054e6b13a062c71f2ef1a30f7e951276b9986abbb4fb251",
- "dist/2023-03-28/rust-std-1.68.2-s390x-unknown-linux-gnu.tar.xz": "7f909658496692a05dd241c15f79a02f80658d353013dace127fdc69dc5026b4",
- "dist/2023-03-28/rust-std-1.68.2-sparc64-unknown-linux-gnu.tar.gz": "68689effcfa42b6450643b8663bbf940ce065bb878c3d4c5cd238a9c74240359",
- "dist/2023-03-28/rust-std-1.68.2-sparc64-unknown-linux-gnu.tar.xz": "787f5cbbcec4e75b9beba3804ea05e36b7cb9b164c291cf7ce8f775d05634d61",
- "dist/2023-03-28/rust-std-1.68.2-sparcv9-sun-solaris.tar.gz": "47decd5464092484ffee454126dbc6e53dbd71b4f4b67166df36c1d80f555b97",
- "dist/2023-03-28/rust-std-1.68.2-sparcv9-sun-solaris.tar.xz": "f69c32e3dba525a5041d8d3282558c0407d675d47756363e2cf4e7a498b7795d",
- "dist/2023-03-28/rust-std-1.68.2-thumbv6m-none-eabi.tar.gz": "ef4a147ceadb9df7f93b526b04c54357ca521073a4b68ec2257d80442761814d",
- "dist/2023-03-28/rust-std-1.68.2-thumbv6m-none-eabi.tar.xz": "42e9f6c17faa25056b9b406687af8a979b4679e3e98e48e241565e6a2203c7c9",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7em-none-eabi.tar.gz": "48cf91b0d4de7d270e6d74a9531299314c227d92c6b259a02fff3ef1811d59cd",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7em-none-eabi.tar.xz": "e7437ea383aa8fd02ec1c280cd66ee27a3f42902741d8a1b70fb2a1737167a71",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7em-none-eabihf.tar.gz": "80fb0f3173b8c7abc4d47372784ff33f2a9f9806c5bf8c522edcc335f6b1dccb",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7em-none-eabihf.tar.xz": "5c9682e6dd3eddfbddf95bd1b3d9f1198699df6075ef72735a789ace7d271ca5",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7m-none-eabi.tar.gz": "5a07c263c50f99a3d394354b5752b7ae8990d5485353e4c2b5847961f7e2fade",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7m-none-eabi.tar.xz": "e73c7842c661bdcc61fd38b0c35f8bae31dc9a7bddea4f01a5ff3ce1e22ff58f",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7neon-linux-androideabi.tar.gz": "b28de19b62385fe083aa4f1a4d146e7400c14c1f62111a4296b76520488de948",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7neon-linux-androideabi.tar.xz": "017ad76fe9fdba994c1ec38785d1670343cdfea9483b9e90b3669944c4f4af3f",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7neon-unknown-linux-gnueabihf.tar.gz": "e39c3e612956620420d186156807ad69c7205004139f8f291eab9ba8fa33fa73",
- "dist/2023-03-28/rust-std-1.68.2-thumbv7neon-unknown-linux-gnueabihf.tar.xz": "de7eea7ef4f288be803ec2d24e384be2bb80020bdcc4763405b947c9d7c0a2a0",
- "dist/2023-03-28/rust-std-1.68.2-thumbv8m.base-none-eabi.tar.gz": "faedf1f0c86ef191f0659b0e6192f79a4f0a26c0b4104d3cc2d7c2792fa20866",
- "dist/2023-03-28/rust-std-1.68.2-thumbv8m.base-none-eabi.tar.xz": "2b40dc3800897657946545646f933bd954d6d177c1170813e86bf5416c4fed84",
- "dist/2023-03-28/rust-std-1.68.2-thumbv8m.main-none-eabi.tar.gz": "db95d00bea5c05ce7fbb80bcb344e245917b4189ce276a217c503edc28cdab17",
- "dist/2023-03-28/rust-std-1.68.2-thumbv8m.main-none-eabi.tar.xz": "c7ce6676bb94a7dd4a50bba8635a34cebc6010bcd5f711c37fc42dff839e634f",
- "dist/2023-03-28/rust-std-1.68.2-thumbv8m.main-none-eabihf.tar.gz": "7f308e66d78fa22d1563e093b77a358185d50938f467f9f849a884896516eb4f",
- "dist/2023-03-28/rust-std-1.68.2-thumbv8m.main-none-eabihf.tar.xz": "03b47913fbe56d545e5566fa145a3bac6cdc4a1d8f17a25f9a52b2290b6745af",
- "dist/2023-03-28/rust-std-1.68.2-wasm32-unknown-emscripten.tar.gz": "51aa09a286d840dbc74f785176ee4c58b9a92637995ed94627fcdbc91369b4f0",
- "dist/2023-03-28/rust-std-1.68.2-wasm32-unknown-emscripten.tar.xz": "17ce08a1aea3c1143d701d77786b566dc85016b69d608938614629760983d3b8",
- "dist/2023-03-28/rust-std-1.68.2-wasm32-unknown-unknown.tar.gz": "db3931196877c945dfba5890a6c7529f500cb0c2d202f3c69f950c7286b811ed",
- "dist/2023-03-28/rust-std-1.68.2-wasm32-unknown-unknown.tar.xz": "aeffdd2f557e60ee7f82000d39dce8bc638c533ccb612ee60eea0dba04883f34",
- "dist/2023-03-28/rust-std-1.68.2-wasm32-wasi.tar.gz": "c341b885d891b534638a4c68b3c93847512c23bd37f05d54708796d6446159ac",
- "dist/2023-03-28/rust-std-1.68.2-wasm32-wasi.tar.xz": "fc5490235965127c4a1233522f2a96b58480848f9c28bc5f4989b269ed9524d5",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-apple-darwin.tar.gz": "5d6a7d62ae67c2f7aae6eabb782a3125cf9fed6bbc2993d59b3714f4f832e797",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-apple-darwin.tar.xz": "c3eb6fa219297b43a3436fbb9e1717e81c90f5a6f243b036c257520601787092",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-apple-ios.tar.gz": "8d7603bd3d678b4dca1ed6fda2ba1cf6312383b65fb1833a6b437b90eac03d4e",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-apple-ios.tar.xz": "e42fc4770f67ac4e7d9541170652eed13102888852d02f2a5f0d8b9d3b23720a",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-fortanix-unknown-sgx.tar.gz": "9d31af766ac437712d1e7fd5fe898a3b08157ae2379dbbeefcc254cddc7a056b",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-fortanix-unknown-sgx.tar.xz": "228e515261c206e622c7b685da134939f2c6ef59b1ddb57141461498ba3e19ec",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-linux-android.tar.gz": "d2ca68a7e98b90a6b4f1647a5ca3900a786c3ada0131c1877dc4c5f39da45455",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-linux-android.tar.xz": "875bae1e5bdd9901b68eeb14ce583cbb7d133435e5cd8bef60eb4521ef04b9d6",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-pc-solaris.tar.gz": "0017285c6aa16f13b71e80d7f36c1faeb7e2ba5cf902673697914a8fe76ca3d9",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-pc-solaris.tar.xz": "4eb37dc1f0b9d35a8b7660dc2d3d2c09b50ed23a4fd0cd29a7faa3f10b215b8c",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-pc-windows-gnu.tar.gz": "4598f3f44f84353dcf64aab9669b7c3982fccc1e7840f3ef1aa90cadc37864a4",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-pc-windows-gnu.tar.xz": "2a8a5117d358ebe7b7fe16e55fc85b4d4859758dec2b648503418006b94010dd",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-pc-windows-msvc.tar.gz": "a16e98dc5a12366b929d1329583065a374d5c0e3952193a0c506f43c1fd84930",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-pc-windows-msvc.tar.xz": "9f6c17fb1409925f27682d4ab2c783219f4912142865b97a0a15e1979c25320b",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-sun-solaris.tar.gz": "1ff12a55b723bbf2f867c6c9e94e6b1a17d31ffc21213aeec51d6413a72dbd16",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-sun-solaris.tar.xz": "65800cef0252de414cc665f77b90e17ba977cb9f4e29ccbfa727b38df34a797a",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-freebsd.tar.gz": "c94334345413a28669b271584b385ed0c0d6c410458103d7242353dd8fb9048d",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-freebsd.tar.xz": "edb8f095cef5b3e1ad04e63cd26e0d8af4732949fc130f08febec026096796bc",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-fuchsia.tar.gz": "bc15bb7f52589d81f577a5c1e2b17f8326ae229630f993796b41ca0d1ec2a2f8",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-fuchsia.tar.xz": "e8e27ccdbf4e3220b3e6730891b43273c5bb57ee1589d82101c2d197f5c157fa",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-illumos.tar.gz": "3c5f604498b267990c635562151e31ba770e36cfff1b9678d5a273c84f7bc433",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-illumos.tar.xz": "0340f081e7b7f4b272f505eaf0c535d620006b1023b4bf424a261ad7e2e3d2dc",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-linux-gnu.tar.gz": "92974d3b9d1343e597cf8f7b2dc516fcb8c97cea948cca0552223b928cd44ae7",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-linux-gnu.tar.xz": "c8a3eaf26b83f1926d86b4db99ca16cbbff8e746e4c63f25f4d75a02a34a3b16",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-linux-gnux32.tar.gz": "631e67da7ec1d25fc602759a7e348cba99101d780043dce01d187180650aa4fb",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-linux-gnux32.tar.xz": "f20b32d437a49d39abaee155b22373d0df912661fe6361551baac4d09b69f9ad",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-linux-musl.tar.gz": "1a6ab58aa4df56048926fa1accd77bba0d4747f5d2d71dfefaa2af9889483b17",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-linux-musl.tar.xz": "6c2a91e60f4be6538fdb0e4ff0344038f282c23808ca018f526ec586f4ba43ab",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-netbsd.tar.gz": "bcafa9884314daadb2a073599a1417ee79ca96a9fcdef0d2ea23afe3a0396bb6",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-netbsd.tar.xz": "31bdab595565233e5d1b9b83c073fb92b46780213c58f14938a4006edd001d49",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-none.tar.gz": "365c2cfbfc4624cb5ce3f426ac679cc8dea078ba69bf3a3112ede357034adb69",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-none.tar.xz": "c2d94086e2965a13db8d26b45eda286ef8ba91bac217acff95ca84462ba0b47a",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-redox.tar.gz": "b63b4f392c473a7681826fdc470e9863edcebe10a38aa5f49aec74affc336928",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-redox.tar.xz": "4387108878b3bfaea8849fcbb9b9444013434b1e05a9940afe5454717ae35eb2",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-uefi.tar.gz": "741e591ef0979caeae96c4d2c31994ff2d74b136a720302d45fae56045ac6c02",
- "dist/2023-03-28/rust-std-1.68.2-x86_64-unknown-uefi.tar.xz": "fb03448de28a52b49aa8dfca8b465695035fed817c7d0e442967b152c05b6286",
- "dist/2023-03-28/rustc-1.68.2-aarch64-apple-darwin.tar.gz": "9cbec5ea622b445e620743cabc723534fa6e4871a53cb4743e6b28ce8e3d5112",
- "dist/2023-03-28/rustc-1.68.2-aarch64-apple-darwin.tar.xz": "ddea7da2f70db2ffaeed476494fca0d069541bae0576d83557685c4178c17dfa",
- "dist/2023-03-28/rustc-1.68.2-aarch64-pc-windows-msvc.tar.gz": "96096f2086ea504c77d3d75e90fd2783a2ade7451dc1562386fb21d056eb6e3b",
- "dist/2023-03-28/rustc-1.68.2-aarch64-pc-windows-msvc.tar.xz": "37a2d5d5b9e5650060234ceb0f6ac61ec0f24e85dbedb224e4db5bc3ac1f21e6",
- "dist/2023-03-28/rustc-1.68.2-aarch64-unknown-linux-gnu.tar.gz": "9fd96b0ec4209f8ddd82b6055b49f13993d83488f6ed82e33c7ad577786a6f42",
- "dist/2023-03-28/rustc-1.68.2-aarch64-unknown-linux-gnu.tar.xz": "d78579af4d4b98b3bf49b8a0b7848960e68254150ac517faeb9caf7b9f44ede2",
- "dist/2023-03-28/rustc-1.68.2-aarch64-unknown-linux-musl.tar.gz": "e43c95a457cbda8d9e4b8a2ba65aaf7b2c05664b8ed501ee1c70706b8329a817",
- "dist/2023-03-28/rustc-1.68.2-aarch64-unknown-linux-musl.tar.xz": "b88070e3cbe399122398a74db62fa5d03edb6a5a583ee79a174b2caa1eb27fc5",
- "dist/2023-03-28/rustc-1.68.2-arm-unknown-linux-gnueabi.tar.gz": "ef3624048ef66b43661ce3bc60cdf8afe27d81b959f4fa43ad1c2e2d25ede652",
- "dist/2023-03-28/rustc-1.68.2-arm-unknown-linux-gnueabi.tar.xz": "151efdd16a6c1f871a4ddd66e22f0f7ec39cae7929da348ce14c6435dc38d89c",
- "dist/2023-03-28/rustc-1.68.2-arm-unknown-linux-gnueabihf.tar.gz": "fc9adb8289645c1e3f3e3d1f9a3bde875396b5863b349207cb2efdcc2132da56",
- "dist/2023-03-28/rustc-1.68.2-arm-unknown-linux-gnueabihf.tar.xz": "8918007df327cf6629e00f5390b34204dc408624426e4a1cb823ab3c764c4c19",
- "dist/2023-03-28/rustc-1.68.2-armv7-unknown-linux-gnueabihf.tar.gz": "c98308c066f1edb14967e1fb9db45620e0d03298be10fc643900a30b7b7a7c15",
- "dist/2023-03-28/rustc-1.68.2-armv7-unknown-linux-gnueabihf.tar.xz": "f4d11a7aaa83aeb52c1fe95393939c049385de5956282f1732031961e2806927",
- "dist/2023-03-28/rustc-1.68.2-i686-pc-windows-gnu.tar.gz": "c8da60ea1853846a6c727f00b1db68c783fbead23fe90a1d2080476baec05b98",
- "dist/2023-03-28/rustc-1.68.2-i686-pc-windows-gnu.tar.xz": "a31c786a421f21dfd26e6b105135c39d3555981fe25c2ef5c14a3242d2a93283",
- "dist/2023-03-28/rustc-1.68.2-i686-pc-windows-msvc.tar.gz": "29beba763a02cdcc205456f2d5ed205e0b2149dc47edf6fefae74c6006fa3838",
- "dist/2023-03-28/rustc-1.68.2-i686-pc-windows-msvc.tar.xz": "e67516fbb3b1d3453f7806986770e26e5aab204fa0d97f3a785de7e62b364ac6",
- "dist/2023-03-28/rustc-1.68.2-i686-unknown-linux-gnu.tar.gz": "0eb86af5a05080984c68ed6e445dcbde98c4f391e6a7ada3ebed53109515175d",
- "dist/2023-03-28/rustc-1.68.2-i686-unknown-linux-gnu.tar.xz": "dc533e7d0f0349d92a98973e20b99d69b09a25b8675f28a38fa26b0160615fd4",
- "dist/2023-03-28/rustc-1.68.2-mips-unknown-linux-gnu.tar.gz": "2fe7deb053138e8daa0a4d5fbede087451662f043421682de7e87cce03fdf86a",
- "dist/2023-03-28/rustc-1.68.2-mips-unknown-linux-gnu.tar.xz": "68c6aac6e0e95588b65d6f937032b57ad2a09b2897990a3a930729763f728e12",
- "dist/2023-03-28/rustc-1.68.2-mips64-unknown-linux-gnuabi64.tar.gz": "89378190223a4d1e2b7c9ec309cac8748b9efb0945151b7e5f4fd8c107d6dc0d",
- "dist/2023-03-28/rustc-1.68.2-mips64-unknown-linux-gnuabi64.tar.xz": "894725021b34afdde5afb89686d794c029317df9195f49e0d9a065e1cb4cc9b8",
- "dist/2023-03-28/rustc-1.68.2-mips64el-unknown-linux-gnuabi64.tar.gz": "b8a23eddb567402157cf07c1fc0f039e25af77f2276690bce9e4f4e1c811014a",
- "dist/2023-03-28/rustc-1.68.2-mips64el-unknown-linux-gnuabi64.tar.xz": "3f45f5b63004b14a193ed57143299c81f62246fc40e4a0df2119494d9ee4fe66",
- "dist/2023-03-28/rustc-1.68.2-mipsel-unknown-linux-gnu.tar.gz": "673a69690f0008b7dcbac7c851c81b3c91c6265180604adbfb867b1c69f96e0b",
- "dist/2023-03-28/rustc-1.68.2-mipsel-unknown-linux-gnu.tar.xz": "de72245d5e6db0e4327ce105a7ce3d5597a3792e71b95578dd7ec62af56946fb",
- "dist/2023-03-28/rustc-1.68.2-powerpc-unknown-linux-gnu.tar.gz": "98e0f7633ca880d9b39e270cbd3ac89818ca69925ec40823d00a7c638c35a629",
- "dist/2023-03-28/rustc-1.68.2-powerpc-unknown-linux-gnu.tar.xz": "92b08b1ad69786f3bcc6fd7bb71411ad162dca478f9636027bf809b9de6eea72",
- "dist/2023-03-28/rustc-1.68.2-powerpc64-unknown-linux-gnu.tar.gz": "054e2f567b5f7791425bbe81eb6011b6217e11d7c60f8706607469a099ae17f7",
- "dist/2023-03-28/rustc-1.68.2-powerpc64-unknown-linux-gnu.tar.xz": "a23e71c7da172ece7ef760db043b010c84dd8a491cd9c00303e2c01ad521dec2",
- "dist/2023-03-28/rustc-1.68.2-powerpc64le-unknown-linux-gnu.tar.gz": "dfc8c2d375dee373b02de888641bf071dc837aa3d7b53752785a65e1c534574c",
- "dist/2023-03-28/rustc-1.68.2-powerpc64le-unknown-linux-gnu.tar.xz": "4d493bec7500dd3159c05ee8f2fef71241de575e1cb7a89bf128f01dc3e537cb",
- "dist/2023-03-28/rustc-1.68.2-riscv64gc-unknown-linux-gnu.tar.gz": "b815c4f031db892cb2ac28c293056ce13a121707c1da71a8577e8c80875270bc",
- "dist/2023-03-28/rustc-1.68.2-riscv64gc-unknown-linux-gnu.tar.xz": "de7ef61f6fa5f7ddddbddc0934beec58a3ce49cda96679dbf06895128f1fc228",
- "dist/2023-03-28/rustc-1.68.2-s390x-unknown-linux-gnu.tar.gz": "28e665d1a5ebd73fc9a3d8677b8ad10f9998c137f1135837d42a4ffd121365cf",
- "dist/2023-03-28/rustc-1.68.2-s390x-unknown-linux-gnu.tar.xz": "fbfb02fff83f4a9d7298c511a3ad4b9e73c4d9d0be558edbf7226af0f586517a",
- "dist/2023-03-28/rustc-1.68.2-x86_64-apple-darwin.tar.gz": "e0ba4545a390303a1447417ec19be2ad26ae33ee1b9a7b2e3e970e8a87e30ba7",
- "dist/2023-03-28/rustc-1.68.2-x86_64-apple-darwin.tar.xz": "37831c92a751f63de2dfcc9f9b57f9d7ce5d61748d0849b209a156b24f994f63",
- "dist/2023-03-28/rustc-1.68.2-x86_64-pc-windows-gnu.tar.gz": "1e86ffe40a94f717d1114637eda2a44e0543cdffffb2ca8274a3f54686e4bb97",
- "dist/2023-03-28/rustc-1.68.2-x86_64-pc-windows-gnu.tar.xz": "e1d4a0a444076d6eccadd6e08f4d204e8b45c3b954203a4bb0663affd4e9c5e0",
- "dist/2023-03-28/rustc-1.68.2-x86_64-pc-windows-msvc.tar.gz": "f64bdaab8920a26b357e8849427d4adaf6f29f98deff45c935c7b66e4a246f8f",
- "dist/2023-03-28/rustc-1.68.2-x86_64-pc-windows-msvc.tar.xz": "69b277735603d0c8f1f476e53b785a6072340ff10bfde03f4df55d89c12d9ce1",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-freebsd.tar.gz": "a3bce6d0bdde3e59a982f1a1c7c68e99b3fcf199e1d45233cdcd250a9da9c0c3",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-freebsd.tar.xz": "9ec0e03f6e8f7b4b52fe2e65cb8bb45edea4ab3677e8df9fe530a1d9a645e824",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-illumos.tar.gz": "6adef91820350b81ab35e8e18005392f9a64213f0489bf35b70f6775376f2bf3",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-illumos.tar.xz": "f47c91dd6abbdeb8a5afe9a339ad172ba5be011cf7be4ff124e4b909a25e77d3",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-linux-gnu.tar.gz": "532b1b20c83a8a849249b792fb7f38210559d6803cc465699641f7c21fcca5ae",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-linux-gnu.tar.xz": "d33d493381dd17a4b491d0e978cdb6700badb5905e831dd5f7fe75ffbf8e0584",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-linux-musl.tar.gz": "ad5d833fd32803cfdafaf4e42ba96d08da94420d6ac05243348a9f6c9256b60c",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-linux-musl.tar.xz": "9308f27bda32a02b1debe5fcc5ee633a234652037687c46f4e57d9dc29f90b3e",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-netbsd.tar.gz": "7f2981c2f84093f740631022c10494dcf293eb15243d254c25b8a1d9f4348e1c",
- "dist/2023-03-28/rustc-1.68.2-x86_64-unknown-netbsd.tar.xz": "19f87fefa64b6d26a11f7fdae020228e91bec71527abf3350663fe165a135aea"
+ "dist/2023-04-20/cargo-1.69.0-aarch64-apple-darwin.tar.gz": "b185ea41a0ad76ac23b08744732c51e4811528291f7193d612a42e3e54ecd535",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-apple-darwin.tar.xz": "48e4f318dfcb3c61a010850a7a3ea11a1e4cacf0cc071bf1cd6ebdf7aaf0ec9e",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-pc-windows-msvc.tar.gz": "9e047a2ac5f0aea3ee0fb7ef59c5e29f1b90f698dbb1498a8c8ee79d9373a850",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-pc-windows-msvc.tar.xz": "82b93bb4491177c262fe3c2ed7b22e82e06727d39e72886159578f0c47ac5c45",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-unknown-linux-gnu.tar.gz": "6ba6e4a9295b03d01b7dac94b7941d71c029343dc3abfd6cc4733a99fc3c7976",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-unknown-linux-gnu.tar.xz": "b0ffb368d4e66a5808f96659cc598030761cb06966ae8d5299596b14fbc09364",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-unknown-linux-musl.tar.gz": "803303b02c7f40e3eb507d4096cc7abd591c32f2921cbfb98d17ab8b6dd7f83a",
+ "dist/2023-04-20/cargo-1.69.0-aarch64-unknown-linux-musl.tar.xz": "8fb122605ae32f84b2eee261d2d00da35dfb34f776886f24b089293ce37e9e8b",
+ "dist/2023-04-20/cargo-1.69.0-arm-unknown-linux-gnueabi.tar.gz": "0d93ea24db4e3fc7739c68661a466fa4fbe8e158c1f10ef06e8d9cee1f7de73f",
+ "dist/2023-04-20/cargo-1.69.0-arm-unknown-linux-gnueabi.tar.xz": "bdae08c6209dc24a4c4dd1aa42ccb39b48b7743faadda99190c9ab4c5ec6425a",
+ "dist/2023-04-20/cargo-1.69.0-arm-unknown-linux-gnueabihf.tar.gz": "dcc8d9dffc209bd665ad45586b271569a261795162426ffbab24336e04f7a3c6",
+ "dist/2023-04-20/cargo-1.69.0-arm-unknown-linux-gnueabihf.tar.xz": "e2076c5386d0ae13be0a8e8d99096bab15e2bf0f749d0410c2d45376ff837aeb",
+ "dist/2023-04-20/cargo-1.69.0-armv7-unknown-linux-gnueabihf.tar.gz": "8cd5ca78c7efebcd735b7264f8a926480f4bed334b9e031d68f75494e669ff60",
+ "dist/2023-04-20/cargo-1.69.0-armv7-unknown-linux-gnueabihf.tar.xz": "f709ae6c7d92d95b9fdf27188b7fc04bbf95089091803ee7eb1c4e78d38fee7d",
+ "dist/2023-04-20/cargo-1.69.0-i686-pc-windows-gnu.tar.gz": "363bea80afae170d9877754e185ad2af705b95099a5961176cac776824f3afda",
+ "dist/2023-04-20/cargo-1.69.0-i686-pc-windows-gnu.tar.xz": "5bb26a69498295d2780eb690ea519d877aac24f93f2789da9f21ac9450b04c3f",
+ "dist/2023-04-20/cargo-1.69.0-i686-pc-windows-msvc.tar.gz": "caae66efe790bf765c0b26943428481a4cf14f69c011bfc7e68a12a6adfcc0d1",
+ "dist/2023-04-20/cargo-1.69.0-i686-pc-windows-msvc.tar.xz": "dcca36e98acd82ee356302f33ecdb451208f404efabd28a76c548e99b0fe3f52",
+ "dist/2023-04-20/cargo-1.69.0-i686-unknown-linux-gnu.tar.gz": "45f966d2965e41e0598fa9dce780766163685935647c8de09610c73f5f85823f",
+ "dist/2023-04-20/cargo-1.69.0-i686-unknown-linux-gnu.tar.xz": "4c9b1b2fb692bb0c81e524df6368723f061c8bb0d21a1f487eb8e5c2bdf323ab",
+ "dist/2023-04-20/cargo-1.69.0-mips-unknown-linux-gnu.tar.gz": "7997f46aeaa844ac83a6f781c6bc7389594f2eac120f44163ca3c4a173fbe4e0",
+ "dist/2023-04-20/cargo-1.69.0-mips-unknown-linux-gnu.tar.xz": "bdaef2f95b0485dc2a5cde74c08bd269174bbbb553226c5b5d2287e52841b061",
+ "dist/2023-04-20/cargo-1.69.0-mips64-unknown-linux-gnuabi64.tar.gz": "d584fcced4891a16baa6013122a3d53e05f42ba7247d75c30f70219c7fc49519",
+ "dist/2023-04-20/cargo-1.69.0-mips64-unknown-linux-gnuabi64.tar.xz": "e952b2dfe477a1768fd88383f65f82bd4be097f2f0582de42745c8fbad052cf5",
+ "dist/2023-04-20/cargo-1.69.0-mips64el-unknown-linux-gnuabi64.tar.gz": "ca74d5cb2705e95763a5018677ac10bbd0a41dec324568c10fc0e03f587fd8cc",
+ "dist/2023-04-20/cargo-1.69.0-mips64el-unknown-linux-gnuabi64.tar.xz": "07d73849ed1b469e4d81c9666dc9e4fcc6b6ece2025371fabb58fae9dd3c1bf9",
+ "dist/2023-04-20/cargo-1.69.0-mipsel-unknown-linux-gnu.tar.gz": "0ba8a052eaf7877c75f3d1a22d102144138a9a3ed10a8c4b0ea724a805759a01",
+ "dist/2023-04-20/cargo-1.69.0-mipsel-unknown-linux-gnu.tar.xz": "df114ac589fb50fcc3027e26c6e201fc530aadef1bcc8f6396c761a457ba7bbb",
+ "dist/2023-04-20/cargo-1.69.0-powerpc-unknown-linux-gnu.tar.gz": "05167bc46932bd8de5eb95d5ed91c8ceddc0a0138cdf7765f981db3d920c784f",
+ "dist/2023-04-20/cargo-1.69.0-powerpc-unknown-linux-gnu.tar.xz": "26c774db5e21ddf66107b677d5c6612d50611186feaa68ff11c34a61e4d5a57e",
+ "dist/2023-04-20/cargo-1.69.0-powerpc64-unknown-linux-gnu.tar.gz": "2bfd3c9b0b384cf9e13180f29aee234a822a10dfcff132541f2da2ce72403932",
+ "dist/2023-04-20/cargo-1.69.0-powerpc64-unknown-linux-gnu.tar.xz": "160692a0fc5fe1b48b617e063c6ce1d4546e108b32c0049dbde95602a30af133",
+ "dist/2023-04-20/cargo-1.69.0-powerpc64le-unknown-linux-gnu.tar.gz": "23e66703392ea5ec4c1c793b51fef83cd244bc57d23ae42be5cf0b0888eb9758",
+ "dist/2023-04-20/cargo-1.69.0-powerpc64le-unknown-linux-gnu.tar.xz": "9a3fe155d763b382ea18753133cb0e2186993e2850568134e2f7e468f2d07197",
+ "dist/2023-04-20/cargo-1.69.0-riscv64gc-unknown-linux-gnu.tar.gz": "816deb02a3805bd0b3c3a91a7fec1a54543a55f3d490992cb0f612d3ecfa3e2f",
+ "dist/2023-04-20/cargo-1.69.0-riscv64gc-unknown-linux-gnu.tar.xz": "9bc29f493c353313b968243fbdf5147c9ca401f7f8205aed63f180b5757161e2",
+ "dist/2023-04-20/cargo-1.69.0-s390x-unknown-linux-gnu.tar.gz": "9b046efbf06aaa70cddc98138483feba77bd954e1b879ab4e7f02cb5c0806348",
+ "dist/2023-04-20/cargo-1.69.0-s390x-unknown-linux-gnu.tar.xz": "812907846a454a182b05eab76658b49eabd8d06d2b5e8df56f29d73971f91b03",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-apple-darwin.tar.gz": "3ed0b5eaaf7e908f196b4882aad757cb2a623ca3c8e8e74471422df5e93ebfb0",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-apple-darwin.tar.xz": "8a01c5f5454708a60d39734e0688cadd83c9709412a02bdcd4984d246f8b299a",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-pc-windows-gnu.tar.gz": "8ab16b569ad8b1bf8c7b151acbe9d77c08e744ec2511502ea8d1a01ece1bf097",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-pc-windows-gnu.tar.xz": "6afc9b5b4192b66382ef428daf340fbed615024696c094d8f783b5782c8cad41",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-pc-windows-msvc.tar.gz": "9fef20492ad1e296f5f458b0b118e33b42a3c956ad20d9f5d683e0235f1d9d77",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-pc-windows-msvc.tar.xz": "992db17f28426d99d1a4f41e9b06df2c03464b3cda7c2121732a0a43cfc59c8d",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-freebsd.tar.gz": "797b9f3844d91323b42ec82fb6c06864b13cd70f0cae42c29a3d205e72ad2782",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-freebsd.tar.xz": "98f3157fe6ebef8d276e13a9f517ad3ccc59c62d3a13ba940c49d223c72f2836",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-illumos.tar.gz": "905e09513b9ea0af77257e5bb97bea77006f3d1323a33e1969d3f0cf2fb1dedd",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-illumos.tar.xz": "12530275246dcc8025b8f5359b7d97c3a51c8922132987e4a2433753d8d3ebca",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-linux-gnu.tar.gz": "7ee899206f592a86687478465970aa6b57772ccbe9a1f1b7695aa1237c2325a6",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-linux-gnu.tar.xz": "336eeabf231a7665c26c127a37b8aefffe28cb087c5c8d4ba0460419f5f8eff2",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-linux-musl.tar.gz": "c17cd0511c1b1b826d78533deeffffa0f7a4fa2d010ec660afa85bf24d977d9c",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-linux-musl.tar.xz": "17f5c6481a881599adb5f665480d9985073155eda9c9fdc1e5663563db51b970",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-netbsd.tar.gz": "9edfe8166acc4cf6e28f17290cea6ae87e8d6c983ef910fb3082c05363ee373e",
+ "dist/2023-04-20/cargo-1.69.0-x86_64-unknown-netbsd.tar.xz": "e808fbe1391879376faa9e0001bef6a370d9ad533b31a0241da8465d58675e4e",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-apple-darwin.tar.gz": "00307d648acc269a0874ba8de4f8eb3bd3b85a0f10e3da59ba1ff8c840e92b34",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-apple-darwin.tar.xz": "fdb1f29341f51e8b119f69e98b657a12fa60f12edfccfa494ae282de0553d4fd",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-apple-ios-sim.tar.gz": "2291ac2949ca832ea6db3795f17e981d7c563fa5b102eaea39811482320374b2",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-apple-ios-sim.tar.xz": "105f15574591f3a297c0b755b8205ca8672aa6238da432ab59cb5422374e042d",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-apple-ios.tar.gz": "ef1a7dc02df8cbb0b22e897253cd22b7de33e7958031604bc9d0a8fb9fd6c9a8",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-apple-ios.tar.xz": "94cf2ba63b38addc81a9c75d3dfda0275bdafe20d3948b3c6b53f8c6f692da4c",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-linux-android.tar.gz": "0faba7a6b0789f19d16ae235d46fac30e89131df29ed382ed70218ccd57b4ddf",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-linux-android.tar.xz": "4ae8f5c8c1845f666115f26ab5bc8341986f8035fb93887b6ba01c090004ad69",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-pc-windows-msvc.tar.gz": "6bc44180bda86171560be7cecc36698a1eab5e8cba676a87d89c24d1f091d7a4",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-pc-windows-msvc.tar.xz": "2254b8272362837809e0b53f018973b3d016e079537230937c299f55fccadf99",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-fuchsia.tar.gz": "74d501377215c640cd30ee7e6cd444f30d90e01eccf7cc881cde276709eb535d",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-fuchsia.tar.xz": "44d2db28e20a490c68444516fa3eda623a9a541ab1c1848b6eaa86817b43346e",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-linux-gnu.tar.gz": "8f42b40c0a0658ee75ce758652c9821fac7db3fbd8d20f7fb2483ec2c57ee0ac",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-linux-gnu.tar.xz": "c3c5346b1e95ea9bd806b0dd9ff9aa618976fb38f4f3a615af4964bb4dd15633",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-linux-musl.tar.gz": "07788bc6a1d17e8f0791c3367734c3c65165ca806ee83d01dad303059690b19d",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-linux-musl.tar.xz": "729a22a51089b9a96ff7abf350ee963649bbb320e43e9ed511a47689fd80e17c",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-none-softfloat.tar.gz": "0ec4c2e2606553837fab6d13af656d3449653a9077a5bb83d52e43ccc51f1607",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-none-softfloat.tar.xz": "6371f7e2308d4021d4dbd2d468bc3ee7f83434765300e6c15bfa4de843ccfaf8",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-none.tar.gz": "793676cf620513684d6027faed70a30c51a265b730aa6f930d05e3c76043b20f",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-none.tar.xz": "4ec6982e74d01790a2cd167c7e84f8a859a3b0232a1f4275d13fc244fc49d096",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-uefi.tar.gz": "7e6e32252fd6d7f7e9f3d1fdfeedd67310d7cce0fe35e3e3f0f9a7df5379eb28",
+ "dist/2023-04-20/rust-std-1.69.0-aarch64-unknown-uefi.tar.xz": "6aecf80e8216efce5d26421904015a41cae65eade2c9b744d343c2a848089b0d",
+ "dist/2023-04-20/rust-std-1.69.0-arm-linux-androideabi.tar.gz": "66e2609bff426ce6dfc1c05639aa2aeebee1a0d0d5082dcacee73dab0d422f3c",
+ "dist/2023-04-20/rust-std-1.69.0-arm-linux-androideabi.tar.xz": "9f821298c15cf7a2c58f21c27d6e9efca9a10e559e2478a03498d211605ff57e",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-gnueabi.tar.gz": "096aa203d690339f3831052b9dac52d91cc5dd63627c6c89688c16d7f70dca4a",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-gnueabi.tar.xz": "3c6f8fc7ccd747c85855944510b13166779f43926e970e40bfe7835252734c7f",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-gnueabihf.tar.gz": "f0630a13adf0f86f5db528291a838645e31ce63e1e052ef5284aecd8ae6cecca",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-gnueabihf.tar.xz": "c2f4a3332dfe1520a3761a9d072b8edcb6b5c0a84b1b24c3a7ac621e86b4e13e",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-musleabi.tar.gz": "7be1e4ee98a1e6adc233bf8ce5499fc5147345ca90c7bb120959ebb2254fb9a7",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-musleabi.tar.xz": "c0837c5f36381aaa8b297d478b935cdcbe9ee3af741f2891b9a75dc0dcb6a036",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-musleabihf.tar.gz": "ad6ae81c5ec8588d62257b45516ce49dcd395d27741a009c5beaf1a522dde8d4",
+ "dist/2023-04-20/rust-std-1.69.0-arm-unknown-linux-musleabihf.tar.xz": "3d924216ba8f1c57f14578a254e3043256cc55211c18263eb2c1134a0021133f",
+ "dist/2023-04-20/rust-std-1.69.0-armebv7r-none-eabi.tar.gz": "8b03c7898ec352726ead95bd8c8f8b55707de7528d58ffff3e835e345eade1af",
+ "dist/2023-04-20/rust-std-1.69.0-armebv7r-none-eabi.tar.xz": "16797d6cd21aa3c4987727a4b89851ff1f9fc11870b40e70f87e42024adf0323",
+ "dist/2023-04-20/rust-std-1.69.0-armebv7r-none-eabihf.tar.gz": "042eddf9c2a41a06cda2064737ca1bbd98deb7a8a806037bc1ef605df48f2013",
+ "dist/2023-04-20/rust-std-1.69.0-armebv7r-none-eabihf.tar.xz": "2e48b2c1e0b2b60d1baf12bb282f6ff7fdc907745d2d819a43080b3f1a516f64",
+ "dist/2023-04-20/rust-std-1.69.0-armv5te-unknown-linux-gnueabi.tar.gz": "2e6b90f114fb81cd8d746895560552d82c09c08bdac97116a29f336019ca3982",
+ "dist/2023-04-20/rust-std-1.69.0-armv5te-unknown-linux-gnueabi.tar.xz": "e90362811e8cb7d8f4d9e850f57b6bf9b94ea9136de6501918f636c24d39ee55",
+ "dist/2023-04-20/rust-std-1.69.0-armv5te-unknown-linux-musleabi.tar.gz": "e51d66540ac036582d8a84af72a5a6da27bc338ea42fcbc5ca7679a8abb4acbc",
+ "dist/2023-04-20/rust-std-1.69.0-armv5te-unknown-linux-musleabi.tar.xz": "a348ed99e575c9e735bd8053b11a42408bf27dc690c4afdf5d25876813b403fe",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-linux-androideabi.tar.gz": "e2dbafb21fc6af9e9b2daa2139f3c4665cc38ad8ff56031f534b1334ceed7b08",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-linux-androideabi.tar.xz": "3e3a8346e665d5f7e2169be345130ba4ab4196c3a6a192a1bf6369b5abd26deb",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-gnueabi.tar.gz": "fecd5d2244534451ed8ea3f901bfaaf6c2d10c77913caa1c3406a763b70b59af",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-gnueabi.tar.xz": "cd9917a868a11489b0eaee928f082dcaf95f843ff52c894f72af1ace3c32408e",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-gnueabihf.tar.gz": "08edc4547495299393a0e18f8eff740d7cf31e00ad2b31671688e5e4438abe16",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-gnueabihf.tar.xz": "d0f5a2046354688e21cc10a8feeb35b2c5c05c8adf5c9cf474dc47eadf417fd5",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-musleabi.tar.gz": "97294b981cb60fd1ad374bc24d8a90c80c62390dbed17f012d6b14c79be8b53a",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-musleabi.tar.xz": "997954b5551eb5386dd66ffe7b9660e02b04e5336c172c0e65985cc5ad5afee6",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-musleabihf.tar.gz": "fadb41835ada212a248f663caa0c727e32a3c8d9471bea25f63a948b760e3124",
+ "dist/2023-04-20/rust-std-1.69.0-armv7-unknown-linux-musleabihf.tar.xz": "ac3594d012a194b9f1adeb8eada22e90aeb1e20c24eed295455caaeff3217dc0",
+ "dist/2023-04-20/rust-std-1.69.0-armv7a-none-eabi.tar.gz": "e4d2220efeb177d8b8611cd5106e013b064184d409cc255b6cae6bae63102375",
+ "dist/2023-04-20/rust-std-1.69.0-armv7a-none-eabi.tar.xz": "387ebdb7831aa33792dbb37122f3e1eba26fc6609dcd39ec55b7d40cb06c1d7b",
+ "dist/2023-04-20/rust-std-1.69.0-armv7r-none-eabi.tar.gz": "9c84550b56e9df136c6901577cf793a736c18cd7bf8e208add18ad3a8bc205d5",
+ "dist/2023-04-20/rust-std-1.69.0-armv7r-none-eabi.tar.xz": "45e2f46b15546611ce1f11fec827ea36cff3513ed0e56fea98aa3a1f3c0f63ad",
+ "dist/2023-04-20/rust-std-1.69.0-armv7r-none-eabihf.tar.gz": "a027bf24f3bdc61541e3f968d72157a23ce4ed1d3034b26b2170d60dad36a336",
+ "dist/2023-04-20/rust-std-1.69.0-armv7r-none-eabihf.tar.xz": "1f042663eb05563a00c62fe94f500f5d4b42dd96d736c916a43473e833a8c175",
+ "dist/2023-04-20/rust-std-1.69.0-asmjs-unknown-emscripten.tar.gz": "b2ac5bdec00dc572128701a4f58ceb7047826f410fca523c33bd92fb523d3206",
+ "dist/2023-04-20/rust-std-1.69.0-asmjs-unknown-emscripten.tar.xz": "c79a0ebcd78c1f4ef02dcdf5353b5d40b214cfe3078e8fd679e5fdcfc0dd953e",
+ "dist/2023-04-20/rust-std-1.69.0-i586-pc-windows-msvc.tar.gz": "613ca46ef08adb9a17b75bc1a005ad60bc0891f82baaa74505e38fb794a1f067",
+ "dist/2023-04-20/rust-std-1.69.0-i586-pc-windows-msvc.tar.xz": "b4fb87926419095bfe644e21800cb610784453d496529a905423493b91df1392",
+ "dist/2023-04-20/rust-std-1.69.0-i586-unknown-linux-gnu.tar.gz": "613270943dd6a3dcfad6a9227089bc3bbb30984f3d6d45781da527823c143559",
+ "dist/2023-04-20/rust-std-1.69.0-i586-unknown-linux-gnu.tar.xz": "a8125d72e06f2d866472a7aca3bd20a247160171d23a75c4207761a05e00ed5b",
+ "dist/2023-04-20/rust-std-1.69.0-i586-unknown-linux-musl.tar.gz": "41c00fc1e8f6fbeed5edefcca626b7b349d950ba5893b2e6fe7653fbd0f2639c",
+ "dist/2023-04-20/rust-std-1.69.0-i586-unknown-linux-musl.tar.xz": "97e5d07965f978a95f2e52040c9255014fe74f1d591f9b41ef66ee29fbdf3709",
+ "dist/2023-04-20/rust-std-1.69.0-i686-linux-android.tar.gz": "3660b69a847330ef336773ae36acf327865c0eca7093e159e84d34741856fed7",
+ "dist/2023-04-20/rust-std-1.69.0-i686-linux-android.tar.xz": "3ef1a76919d426ed7370585c6f3098ebd63e3b3ed5903830ad8124c69aa446ec",
+ "dist/2023-04-20/rust-std-1.69.0-i686-pc-windows-gnu.tar.gz": "418e391397ee55e0310d940d36ae1150aaae6c02a8816a0199b9ccbaf2c8c7c2",
+ "dist/2023-04-20/rust-std-1.69.0-i686-pc-windows-gnu.tar.xz": "baca4756366308de5f58bd6d29ac4812be88b7c18caae97ff6478f6775a86b2c",
+ "dist/2023-04-20/rust-std-1.69.0-i686-pc-windows-msvc.tar.gz": "7305a813acd54621b8d82c372bea9c90e8a874c993c2a67bbb450d7048dbe707",
+ "dist/2023-04-20/rust-std-1.69.0-i686-pc-windows-msvc.tar.xz": "64d48421916f1efca0c9013cba41ac7057360da3306476310aab0c957cf9cb0c",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-freebsd.tar.gz": "feb636176841d919b700bb586f72476d4f5ad70c1dbcca28d4fa54c3a76e4cdc",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-freebsd.tar.xz": "7f3e92d9ce81293f257e32047966b5513a404e8c072cefe45ccd0ee0121b0633",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-linux-gnu.tar.gz": "d54849ab7168e16210107b812871bea7f282a3f50b4b34aa252f04f25f8a8bf9",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-linux-gnu.tar.xz": "bef330af5bfb381a01349186e05402983495a3e2d4d1c35723a8443039d19a2d",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-linux-musl.tar.gz": "9a5a1c439351b1327fc60e04d641fdb54fdecacda95ba1ec4fff202a805d4791",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-linux-musl.tar.xz": "699286334b8a2c10c8676f8345b5589681e334bf6789a174947b6885faa78b85",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-uefi.tar.gz": "3903ee3752a13fd5717aa1816363461f7bb8f72022da29559dfe0ac0168f79bc",
+ "dist/2023-04-20/rust-std-1.69.0-i686-unknown-uefi.tar.xz": "e82cb58dc0c5640a0d7d8ddcea7c6245dbd596b0c9cc3d41157017e1308ff746",
+ "dist/2023-04-20/rust-std-1.69.0-mips-unknown-linux-gnu.tar.gz": "032e5b35b2883a9a5fc29e191c1103be8ea33af90b35d26305da5bdb847dd65d",
+ "dist/2023-04-20/rust-std-1.69.0-mips-unknown-linux-gnu.tar.xz": "89849b93af9d4922554c938bc1c7641498d13c3a8b2b464f3bb7a060eae7a580",
+ "dist/2023-04-20/rust-std-1.69.0-mips-unknown-linux-musl.tar.gz": "968099776e6d19183cfb5e97d5f13daf69b24c1088af46d49378843049caf4af",
+ "dist/2023-04-20/rust-std-1.69.0-mips-unknown-linux-musl.tar.xz": "6c5276f3821b4feb8a6de1b9c44e5b3ca312169f34a00d6d9302680bd2109b57",
+ "dist/2023-04-20/rust-std-1.69.0-mips64-unknown-linux-gnuabi64.tar.gz": "2393e2cecb12fe20e2774fce2b52599b5082f93e7041c4f7816aad1e378d9d1d",
+ "dist/2023-04-20/rust-std-1.69.0-mips64-unknown-linux-gnuabi64.tar.xz": "413527cb7b1b876013cc721aa8f93b78b7021d26f7673a1a9f568e5b51af42d9",
+ "dist/2023-04-20/rust-std-1.69.0-mips64-unknown-linux-muslabi64.tar.gz": "7948c622a68fc4726d9c855f03eab0fab2b448cd7303f17f69c592e931184e58",
+ "dist/2023-04-20/rust-std-1.69.0-mips64-unknown-linux-muslabi64.tar.xz": "0578d0efe7d09ea0edbaeea9b44822632ef4e7e2c210c7df47e8ff7fc141cb0f",
+ "dist/2023-04-20/rust-std-1.69.0-mips64el-unknown-linux-gnuabi64.tar.gz": "72b17108862ff9740ee594f2eff517e08a02380903b4a9ea35aa5296a13aa5bb",
+ "dist/2023-04-20/rust-std-1.69.0-mips64el-unknown-linux-gnuabi64.tar.xz": "738d2b7ef7683cef511252fea994594cfffdef6f6650dd457c9b312ddb17a807",
+ "dist/2023-04-20/rust-std-1.69.0-mips64el-unknown-linux-muslabi64.tar.gz": "6d87304fcbc9e07a290d37317cd3c8e96469b4654e232daf66ae83eb9e6e7093",
+ "dist/2023-04-20/rust-std-1.69.0-mips64el-unknown-linux-muslabi64.tar.xz": "37a7618464f2a0253dd3fb51037a8f42649a26f8240ddab77bd57f688b70a40a",
+ "dist/2023-04-20/rust-std-1.69.0-mipsel-unknown-linux-gnu.tar.gz": "3101831757ce93c10b4c394e1f979176e8d1ca72492247d2a043f4a9fcceb24c",
+ "dist/2023-04-20/rust-std-1.69.0-mipsel-unknown-linux-gnu.tar.xz": "22bb266cf4e6da4d6867144873579b0da47e59c8ec9cb1a329ee8a7f418e8ee3",
+ "dist/2023-04-20/rust-std-1.69.0-mipsel-unknown-linux-musl.tar.gz": "e909ba817e0c5d5e90424bc7dfeb9b4a42c01871101dd5898a3243f9ff9d033f",
+ "dist/2023-04-20/rust-std-1.69.0-mipsel-unknown-linux-musl.tar.xz": "c9bedee259b0cf93cd22fa7c7ece8a1cfdb1882efca2b04e18a16175d36a9519",
+ "dist/2023-04-20/rust-std-1.69.0-nvptx64-nvidia-cuda.tar.gz": "0f61f851ac53581c569f2f7b43f8c088cea338fb8b5cc22be283af9bc04c33d1",
+ "dist/2023-04-20/rust-std-1.69.0-nvptx64-nvidia-cuda.tar.xz": "e7238c3adf53ed42858b5ddd3936b5fca58e27df23f4feb920f6f81e64abfb97",
+ "dist/2023-04-20/rust-std-1.69.0-powerpc-unknown-linux-gnu.tar.gz": "dd5c877591ad9df7a5cb541e23be18d1ca7ccc04850c20118b1eab35625845b0",
+ "dist/2023-04-20/rust-std-1.69.0-powerpc-unknown-linux-gnu.tar.xz": "6303fc2be557467af0f1ff9cc756fd14ae2b9db7f17f42a1d2238ed934351e56",
+ "dist/2023-04-20/rust-std-1.69.0-powerpc64-unknown-linux-gnu.tar.gz": "fad166839757b27d9078803c54ba46aed7688dd43dcbe2a14b5d05c21327c7aa",
+ "dist/2023-04-20/rust-std-1.69.0-powerpc64-unknown-linux-gnu.tar.xz": "6a4b8b5307141c8207ed67d4fbaf9345c42fb3662e389b9dd3d9a4086c1b6efe",
+ "dist/2023-04-20/rust-std-1.69.0-powerpc64le-unknown-linux-gnu.tar.gz": "f214837e0cf46dbe2be6652a518e49d7b24936fecc8f4cff01aa9fc63512ed1f",
+ "dist/2023-04-20/rust-std-1.69.0-powerpc64le-unknown-linux-gnu.tar.xz": "c44bcf91a9d13412a59ba75e7f8bd14c7f60d44cdb7b21916e502c90b4b378ce",
+ "dist/2023-04-20/rust-std-1.69.0-riscv32i-unknown-none-elf.tar.gz": "c0e8f8fe373f856fe543b82df887e9c2f2a36d8fe1bbbb00404c8f38e380a7c0",
+ "dist/2023-04-20/rust-std-1.69.0-riscv32i-unknown-none-elf.tar.xz": "d06a27b43698dfdbf80db53e4cabad33f00aad5617c7374deb3802906fe4b62c",
+ "dist/2023-04-20/rust-std-1.69.0-riscv32imac-unknown-none-elf.tar.gz": "69320d4b3225317367be4ea5a6625ae2cf391cfdb5d3f8bf3989e8f42f0ff110",
+ "dist/2023-04-20/rust-std-1.69.0-riscv32imac-unknown-none-elf.tar.xz": "44a8c64837c311a9412b0a777f58d50f9c83f4d3f569b58ea7fdd5b16d1d25b9",
+ "dist/2023-04-20/rust-std-1.69.0-riscv32imc-unknown-none-elf.tar.gz": "41d2fcce0ab65cda817fee363e7e569550bc011c0d86aa57bd6c80e1bc60184e",
+ "dist/2023-04-20/rust-std-1.69.0-riscv32imc-unknown-none-elf.tar.xz": "547dea9f9d82916ecf33a4a0d9b604262c86b2368707deb39891be4a9fdeafaf",
+ "dist/2023-04-20/rust-std-1.69.0-riscv64gc-unknown-linux-gnu.tar.gz": "b8b03db1ca7286976099fae83b972b320aa69ea50b94c8a1a4001e9a3b85c888",
+ "dist/2023-04-20/rust-std-1.69.0-riscv64gc-unknown-linux-gnu.tar.xz": "8c32a848e2688b2900c3e073da8814ce5649ce6e0362be30d53517d7a9ef21ff",
+ "dist/2023-04-20/rust-std-1.69.0-riscv64gc-unknown-none-elf.tar.gz": "d7258e46ea8744c38f01967c17c0a5a628820db94458286710aa6f68230fc519",
+ "dist/2023-04-20/rust-std-1.69.0-riscv64gc-unknown-none-elf.tar.xz": "e9b8d1a80ca59d188d8654d031e9165920a02fae3baac7dddf4736c6585de7f8",
+ "dist/2023-04-20/rust-std-1.69.0-riscv64imac-unknown-none-elf.tar.gz": "ef8c38f492fe2367757f5c70ca6c73d6a75cccfe7f22d32a41e588e4997c8040",
+ "dist/2023-04-20/rust-std-1.69.0-riscv64imac-unknown-none-elf.tar.xz": "0eca0161d4d19ab8dd5159429760588b1f02626899a667b5c219ca74e2ff72d0",
+ "dist/2023-04-20/rust-std-1.69.0-s390x-unknown-linux-gnu.tar.gz": "82dbdd45414afca9ac3a1c867a6819cf98ea27ddff7311fd5c83fdb7a041d362",
+ "dist/2023-04-20/rust-std-1.69.0-s390x-unknown-linux-gnu.tar.xz": "5c98653199b89bff6f368bed61fca7860f7179364ce9e9474c90af38e2629baf",
+ "dist/2023-04-20/rust-std-1.69.0-sparc64-unknown-linux-gnu.tar.gz": "51a0c52962998c429500fe1f7bb0fef6d911a1e4990080649704ec8bb60574bc",
+ "dist/2023-04-20/rust-std-1.69.0-sparc64-unknown-linux-gnu.tar.xz": "7d50ff8499d8925ea973aa659e1a88190547615b3cfecb79e776ef8de953b755",
+ "dist/2023-04-20/rust-std-1.69.0-sparcv9-sun-solaris.tar.gz": "6c21c66d2e398d0a5e80c706a15cc847722827b961e1f70411c1af27ea6112f6",
+ "dist/2023-04-20/rust-std-1.69.0-sparcv9-sun-solaris.tar.xz": "30c2e8bb50cddcf083025c74b1b08eee146ac018244193fac93856207fc94ae2",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv6m-none-eabi.tar.gz": "47f9e2a194ace9189cb5188792c960beef16217dc13cb567831ad484cd2f93cd",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv6m-none-eabi.tar.xz": "94348fef7014cc2cc56fae5ff4272ae1bc6d25fd0d19d2f673cdaeab31cee2fc",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7em-none-eabi.tar.gz": "cfc1b8ef3055187692cca52b57999c41db006d220a51dec97733c1d1d8ba4ea0",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7em-none-eabi.tar.xz": "f4dccfcc825ca4c0808cdfb8decb178a6fae750dfd059cabffdbd20102ce7ecc",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7em-none-eabihf.tar.gz": "d185f87a0f9276c66c784152639ce4548116b159c9aa51a6024301f35a1bbe91",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7em-none-eabihf.tar.xz": "bbaed6340fa2484607798e2c036ec163bedd30f58e44ca589928dff2884da4bf",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7m-none-eabi.tar.gz": "9a9335ee62a18feb08d5c23f766e831e617db89cacfaa13e2903ab759b313838",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7m-none-eabi.tar.xz": "3ddb333fbac410c22ff1dc45f6472917322b868816639ec788c154747c3e1234",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7neon-linux-androideabi.tar.gz": "4ede5d3089ffa894ef2b27a124f0e2d58ebd4699b603ee2bb4904787824d324c",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7neon-linux-androideabi.tar.xz": "1c4e4d14807940674f8b23c2e85406e1fd59d73a9d51c64fdbb59e1a5b7bbe5b",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7neon-unknown-linux-gnueabihf.tar.gz": "6d9765056f095182a39e67f51fea587f445096307c77a46421c8eaeb8aec4ac4",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv7neon-unknown-linux-gnueabihf.tar.xz": "527fc9fa5b19ecab8cce352070834063b2556b7d3c7c87e55915ca938cd2c2a4",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv8m.base-none-eabi.tar.gz": "980023ef9aa5f133a725cdedd1f3d1301ce7810bb130dc78253874d340c2090b",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv8m.base-none-eabi.tar.xz": "d122ae463680fd87bb72fab28f8705585dbfdd9b9821f36034fcb482ac18b93b",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv8m.main-none-eabi.tar.gz": "fd6513f30b105670bb6e511989db13e5d7503d725c77584d10751bc526343fb1",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv8m.main-none-eabi.tar.xz": "0d01c05009b6315c3087a1d30ba4d8c788d29fa47ca87b633666076fce39cf60",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv8m.main-none-eabihf.tar.gz": "5ac4be3ff3cc30533477d296b0cd3669b0c60abc0141c384667fdc19ca6f04a9",
+ "dist/2023-04-20/rust-std-1.69.0-thumbv8m.main-none-eabihf.tar.xz": "37a96cf44a4623eddbf0e036933dfdc3c3d2094e9e7746c01e0fdf71491aa47a",
+ "dist/2023-04-20/rust-std-1.69.0-wasm32-unknown-emscripten.tar.gz": "5caa0780daa7cba8db7e6ddef564e01c0ba44960879d5f25f0fe8256a07fdb2d",
+ "dist/2023-04-20/rust-std-1.69.0-wasm32-unknown-emscripten.tar.xz": "2f1940260c06d1689a79219cde37f2cb23208f51b403a5e434ab9869c79adc97",
+ "dist/2023-04-20/rust-std-1.69.0-wasm32-unknown-unknown.tar.gz": "895f79222ea5d9cc50a1257a8ac208c1525ccaf8f25dcc138183a6cd61be79d6",
+ "dist/2023-04-20/rust-std-1.69.0-wasm32-unknown-unknown.tar.xz": "4ea9d3b776eb2831f79db0f2d14965343dab6c52103c2f48c3f0903acce85592",
+ "dist/2023-04-20/rust-std-1.69.0-wasm32-wasi.tar.gz": "ff1700395e39d853ce28b867e17108d3c5fe3f276e3c210b08dc180438d6badb",
+ "dist/2023-04-20/rust-std-1.69.0-wasm32-wasi.tar.xz": "72fbbbd50d46c22a0c0162c5535c36b18626b32f17a6760538e4fac02450b3c2",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-apple-darwin.tar.gz": "e44d71250dc5a238da0dc4784dad59d562862653adecd31ea52e0920b85c6a7c",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-apple-darwin.tar.xz": "20161f5c41856762d1ce946737feb833bb7acd2817a4068f4e3044b176e5f73c",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-apple-ios.tar.gz": "882a02c1fedff14cb4f7f09bd588386ab3f63e6b33fbf5ad5587af33886a9b51",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-apple-ios.tar.xz": "22d6d7a072ce523e79cfdce7ad09982b399d8baed0d810a2dceb01c261f2b85e",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-fortanix-unknown-sgx.tar.gz": "8fac6617253d245eb94361d96ec7284577384d169c5d93153696525d89dd91ba",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-fortanix-unknown-sgx.tar.xz": "9e5fe109092b8f172c96789ca058d9a01330ddba17aec1c23ef95d4547ba5210",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-linux-android.tar.gz": "8cb25ade421817865833a61114648e7765b69635c4d8b09d471a3e7e058aa272",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-linux-android.tar.xz": "c7ed56d9c4b3c03b532a409bf84bc8c676bb5b2504a969dc44078f2c40b74991",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-pc-solaris.tar.gz": "bd608aaea7615cb98a70afb8b9320849cdbe6329fb2dd6945539a9dbc67b9142",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-pc-solaris.tar.xz": "0c35e1e2a2abce437f7f15425bc607d7373eb7f11e058823b2450dfc639d7e1d",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-pc-windows-gnu.tar.gz": "09ded4a4c27c16aff9c9911640b1bdf6e1172237ce540ed4dc3e166e9438f0d7",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-pc-windows-gnu.tar.xz": "aa1d30f2f66d0198ea304047262f9142c406618a35acc466c7ad2b2c1469435d",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-pc-windows-msvc.tar.gz": "92716346a9049ae0dd826818dc280935e1af3010ecb779e3f6846efc579ec993",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-pc-windows-msvc.tar.xz": "0d1f0399d4d9dba7e9daf2e957c0e7a6107538810a3265ba4f4e04763b21f001",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-sun-solaris.tar.gz": "67282df0d68c1bcafa4bc91ee2dcd73162dbe6853e76d9e451e35b86cb59206c",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-sun-solaris.tar.xz": "e02525a18d1ba50b4ae01816f33404331ec60a793d2106296839726cd5ac3aed",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-freebsd.tar.gz": "eed4b3f3358a8887b0f6a62e021469878a8990af9b94c2fe87d3c1b0220913bb",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-freebsd.tar.xz": "2cb020438164d1bc40ab6e5909c0ae3a71ee2d549d3821bad1d99d2eb70b2f57",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-fuchsia.tar.gz": "cceaf7bfeb626618cf6a9515baf933bbe424f7b5880fa04d41afaa6cd4bfcc09",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-fuchsia.tar.xz": "03205a4e7b01b91fd689198c301af1456b4efc333021ab3e0e3ec1e0518bf557",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-illumos.tar.gz": "d736e1f9da70759c027614b998973b06f177e41be9a2fc3545b5e77d5db0c9c0",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-illumos.tar.xz": "7b8252e74c50479d43d14217b5a9cf3882f6ecd7b2ff9e7a133f2233065abd66",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-linux-gnu.tar.gz": "b6986b4042af7b17fc8f51127018617b32d45cd555c582efa816ac194d4b53df",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-linux-gnu.tar.xz": "4c95739e6f0f1d4defd937f6d60360b566e051dfb2fa71879d0f9751392f3709",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-linux-gnux32.tar.gz": "560e938f5dfed51bb9b99614cd3dd1a01a035670cb76760ccd96922a7a5bf53f",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-linux-gnux32.tar.xz": "0166c4a3df3e2d86f448daa46076e3201999f1be1cc54031b28e6e00445ead1b",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-linux-musl.tar.gz": "9eb5cd25b488bfae389fe205d447199127cbf87b00974e7f850e7f1df19d4485",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-linux-musl.tar.xz": "29d98c004629ab536bd2461f715f26ddce3b0f0212db85939c7bc66a8d234b2d",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-netbsd.tar.gz": "5b94230d2d7e6cb0c08fc1ac4604a35b5c57c7998bb5df4121bef54c340a28f1",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-netbsd.tar.xz": "9875615c721f6b5fa06f370bf8c1f44a2ec2890472a40a5b26206cac3185d35f",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-none.tar.gz": "067beff146e3426076d2426da98f6d8ff8ccb1aefb5afe96f816a786580304e7",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-none.tar.xz": "8947fd736a2204bacd7d5c6efa9f4210a45e279ed49a72a773dad34fcc7da0d3",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-redox.tar.gz": "cacdece6813823fdbb9967178c08ec8ceadb0ef41dd9913908437e5eedc77fcf",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-redox.tar.xz": "8bf860514dcc4d0f0d7f5c35f48d4aa6c5c9733af13fb9387f19d456c62b6708",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-uefi.tar.gz": "02c61dec4851d522d8aa754681cf37db81f893e5a2c40566975ee99139e9f748",
+ "dist/2023-04-20/rust-std-1.69.0-x86_64-unknown-uefi.tar.xz": "631f840671a475824cd60206eccbd5ed743289b08b39f8d6f06c259a0bcea128",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-apple-darwin.tar.gz": "aaecbc9591591b42f02befedb5c4a04c8faeecfacbaffb5c9ee4ad1f77b0a3ed",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-apple-darwin.tar.xz": "4cabd9d45777ca0d5cf88e32391940f7983083727f136421385fdb60d4d00d6f",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-pc-windows-msvc.tar.gz": "d771891b523628d0eb46f40b4b612cc7772cfc81c90f07d8a716c143a51d3f4d",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-pc-windows-msvc.tar.xz": "71b56ad71e36350a9d43b1cdf607d488228fa53ac5ccf04e64f6661a0998b2c4",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-unknown-linux-gnu.tar.gz": "b240a2d8182adb0aa2978ba8ec4812014b3a93a1aa36f10ca2ef3b1f4d80a47f",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-unknown-linux-gnu.tar.xz": "d14166fa0b2832dd87b3f35c08d0bc829f83aa01a539b45df3d923469ee119be",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-unknown-linux-musl.tar.gz": "192e9ab084f82aa71acf40cab6a39752d8e301c78ce2303f773fee082e146765",
+ "dist/2023-04-20/rustc-1.69.0-aarch64-unknown-linux-musl.tar.xz": "c8d318691ed0acfd7fc70713fc14b3ca34693cac68f1b593ea12e15cd3f175fb",
+ "dist/2023-04-20/rustc-1.69.0-arm-unknown-linux-gnueabi.tar.gz": "6a32cf470f510d0882f9570524a5616db644df6a409c99922d5c74ed8bb8a42d",
+ "dist/2023-04-20/rustc-1.69.0-arm-unknown-linux-gnueabi.tar.xz": "c6f226dd756836c88977591acadb4c41c4eb6e5e58aaa2042cfb9538e24d2732",
+ "dist/2023-04-20/rustc-1.69.0-arm-unknown-linux-gnueabihf.tar.gz": "e5e5fa9ad8e7e5d455f515c8308e870b9916ad836f0ceb36897a16c3bab24924",
+ "dist/2023-04-20/rustc-1.69.0-arm-unknown-linux-gnueabihf.tar.xz": "213183f703239b2fb489837be29e0f9b27681ae25ac9bd2f2e7960f133aa432c",
+ "dist/2023-04-20/rustc-1.69.0-armv7-unknown-linux-gnueabihf.tar.gz": "53dc923efcfc8c6dffe8a635d24668d0146732228deeb6a560e1e3baedfa4fd8",
+ "dist/2023-04-20/rustc-1.69.0-armv7-unknown-linux-gnueabihf.tar.xz": "257e049d561c71f1765bb72f1bfe98aa8407604dc85965ae178e110bc05948be",
+ "dist/2023-04-20/rustc-1.69.0-i686-pc-windows-gnu.tar.gz": "09f6d97a1abe649462ce1ddd0b0a88aab3423c59966b6032847d52b78291813f",
+ "dist/2023-04-20/rustc-1.69.0-i686-pc-windows-gnu.tar.xz": "1c4435d12c94c26ec46371e767f975d666b50b5e746f0b8218ee07bbb1cfebf0",
+ "dist/2023-04-20/rustc-1.69.0-i686-pc-windows-msvc.tar.gz": "3125fe8d3787a714988fe4b42a2b708a58d176c355130df5ae87d8844ca82940",
+ "dist/2023-04-20/rustc-1.69.0-i686-pc-windows-msvc.tar.xz": "48f63fef58e5c71b71156e3eec7de761df330fbbbe857ad2d6c1e5cea1d39d07",
+ "dist/2023-04-20/rustc-1.69.0-i686-unknown-linux-gnu.tar.gz": "aca43ad204a3ced049e389f4187afbe14015acdd803bdf6269b0a6a45ab9ffa8",
+ "dist/2023-04-20/rustc-1.69.0-i686-unknown-linux-gnu.tar.xz": "788ff61555857680d5ea7cf76023f74a7f4820dd1c26abf76536561d4de3cbe6",
+ "dist/2023-04-20/rustc-1.69.0-mips-unknown-linux-gnu.tar.gz": "59905d9f35bce7b60cda2c63ea9190cc25b6b2f57685bc31d76ef84a1dce9106",
+ "dist/2023-04-20/rustc-1.69.0-mips-unknown-linux-gnu.tar.xz": "ef17b364df355f7322c1fdd3b4cc4296d2d5d489b58fdd4b12374a1f2975a455",
+ "dist/2023-04-20/rustc-1.69.0-mips64-unknown-linux-gnuabi64.tar.gz": "b8845364e72c5e4d443964e93a696bec03c4a4a4f678afbde919348c8f36a517",
+ "dist/2023-04-20/rustc-1.69.0-mips64-unknown-linux-gnuabi64.tar.xz": "f9823eb52b271c7ac9a3558e0ce46af097129df9c93cd465969ffd1f8134fb56",
+ "dist/2023-04-20/rustc-1.69.0-mips64el-unknown-linux-gnuabi64.tar.gz": "e96f6b9f06cffc4355699e68e53cb7aa3efaed9f51591432cc5abd51dfa2fa04",
+ "dist/2023-04-20/rustc-1.69.0-mips64el-unknown-linux-gnuabi64.tar.xz": "d2087d9a0dbca57a56873ed93b7b09a113c2a46e4e3510d58bc5b1a84abe2bd9",
+ "dist/2023-04-20/rustc-1.69.0-mipsel-unknown-linux-gnu.tar.gz": "171f6b39356f311d595a87bae356f7f6795dccc7c3f3909b50d23032fa610f4b",
+ "dist/2023-04-20/rustc-1.69.0-mipsel-unknown-linux-gnu.tar.xz": "9df4110f716a309323a776952fcf0dccaae878e7abf0d148c14a2a03c5170850",
+ "dist/2023-04-20/rustc-1.69.0-powerpc-unknown-linux-gnu.tar.gz": "9c392fadcf728fac7bee91434989980b7093a1f210a9815cf90bc76829bb9085",
+ "dist/2023-04-20/rustc-1.69.0-powerpc-unknown-linux-gnu.tar.xz": "716e790b7658206c59dc8019c1b603f230c5ce945229463367fade862cea60b9",
+ "dist/2023-04-20/rustc-1.69.0-powerpc64-unknown-linux-gnu.tar.gz": "31bd957c3c2af76d883f9ba60b71bbb508b8d59c317d703d2554186ab833ce43",
+ "dist/2023-04-20/rustc-1.69.0-powerpc64-unknown-linux-gnu.tar.xz": "313d0944a1fc41c1c5ce0a054b3cebccd6f471ce8e8272e5e68f36b8d08e5bb1",
+ "dist/2023-04-20/rustc-1.69.0-powerpc64le-unknown-linux-gnu.tar.gz": "e72b2b5729fc8f3dc5f6786d81e601fc6b367055c28a49f82fea842d4b27dbeb",
+ "dist/2023-04-20/rustc-1.69.0-powerpc64le-unknown-linux-gnu.tar.xz": "1781b5be30fa4fadf7608cc9cd3a2de93b3671a09adfa98ae4d975f789ba2275",
+ "dist/2023-04-20/rustc-1.69.0-riscv64gc-unknown-linux-gnu.tar.gz": "72e10b38b490c6c1ca970fe215ccb2b8794878e2e8165db3f3efe02c51bcf6df",
+ "dist/2023-04-20/rustc-1.69.0-riscv64gc-unknown-linux-gnu.tar.xz": "03606f1ad3fd196e73e0d255ab6342e4e8fef2edaa1afcc577411aa8a68f0133",
+ "dist/2023-04-20/rustc-1.69.0-s390x-unknown-linux-gnu.tar.gz": "21517868b8991366b07965d7daecd101f8e1ca8fb137f0ab90e4807bbe888263",
+ "dist/2023-04-20/rustc-1.69.0-s390x-unknown-linux-gnu.tar.xz": "3cef208c0761bc83d156a14a5a94821cc9421772f0f376ea54765a6d00842b21",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-apple-darwin.tar.gz": "7b337037b7b7b2ec71cd369009cd94a32019466cdae56b6d6a8cfb74481a3de5",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-apple-darwin.tar.xz": "63136b7163226f41ec5ff14b5344b3b5741db85a412c6b1162a1da457a76cd2d",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-pc-windows-gnu.tar.gz": "8ee73321d9d4d9929aa034af513a8c8ca73fad7ee79a72fb590e728697a5f2f8",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-pc-windows-gnu.tar.xz": "086563a9b8fd7d31677bc3b091b7d015910b44aac74c6ce2cfcd8da42da819f5",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-pc-windows-msvc.tar.gz": "5b4baa74b0a078674ad9ce52725ac61343428b2f7de1dcee1a10041560f5d2d8",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-pc-windows-msvc.tar.xz": "438dd475deccdc78c7580eb0098b2864e6a329ac87bda1559440a7100cbbc47a",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-freebsd.tar.gz": "a8d03185ede0b020a6eaba16a077363c8524768a587b30899dcc497607698f75",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-freebsd.tar.xz": "c608b776adcb0a04edeb43a898e0803c4d4e285294cd21e692017531e51932f9",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-illumos.tar.gz": "f3dce28649299ea5d8ab234a82c52f6da5cc95aafdf622dc76f8aebfc3400c00",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-illumos.tar.xz": "5cf5f7e486dce727e559cb1dbb9518b981ccba4c1b5c488979e19d76964eff3e",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-linux-gnu.tar.gz": "edd616432207bee868bd585fdd7c7229699f8bc3ffdcfc90e5a7404aba0d30f4",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-linux-gnu.tar.xz": "70e97ab5b9600328b977268fc92ca4aa53064e4e97468df35215d4396e509279",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-linux-musl.tar.gz": "9a1fd85b9c69b98eb14a33d54e71d3f8708fd24ac09e4b54a3433c6a421a166d",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-linux-musl.tar.xz": "7d9e64850a79292d2cda6f4464dfd67d2cbd22ac8c7f3e170291396ef8be8041",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-netbsd.tar.gz": "6d9c4d3682627539fdbe1349c5f5c92606d47a222c02d701425c0dec0b7afceb",
+ "dist/2023-04-20/rustc-1.69.0-x86_64-unknown-netbsd.tar.xz": "b7fcfbef784ad51286eae4eed5b70678b7ae9c8e52e56f31fd2ce03beeb445de"
}
}
diff --git a/src/tools/bump-stage0/src/main.rs b/src/tools/bump-stage0/src/main.rs
index f530a4d73..b007f9a22 100644
--- a/src/tools/bump-stage0/src/main.rs
+++ b/src/tools/bump-stage0/src/main.rs
@@ -2,7 +2,6 @@ use anyhow::{Context, Error};
use curl::easy::Easy;
use indexmap::IndexMap;
use std::collections::HashMap;
-use std::convert::TryInto;
const PATH: &str = "src/stage0.json";
const COMPILER_COMPONENTS: &[&str] = &["rustc", "rust-std", "cargo"];
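
Side note on the import removed in the hunk above: dropping `use std::convert::TryInto;` can be a pure cleanup when the crate builds on the 2021 edition, whose prelude already exports `TryFrom`/`TryInto`, so `try_into()` still resolves. A minimal standalone sketch (not part of this patch; assumes edition 2021):

```rust
// Assumes the 2021 edition, where TryFrom/TryInto are in the prelude,
// so no explicit `use std::convert::TryInto;` is required.
fn main() {
    let n: u64 = 300;
    // `try_into` comes from the prelude's TryInto trait.
    let small: u16 = n.try_into().expect("300 fits in a u16");
    assert_eq!(small, 300);
}
```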
diff --git a/src/tools/clippy/.cargo/config.toml b/src/tools/clippy/.cargo/config.toml
index f3dd9275a..4d80d3ce6 100644
--- a/src/tools/clippy/.cargo/config.toml
+++ b/src/tools/clippy/.cargo/config.toml
@@ -11,3 +11,6 @@ target-dir = "target"
[unstable]
binary-dep-depinfo = true
+
+[profile.dev]
+split-debuginfo = "unpacked"
diff --git a/src/tools/clippy/.editorconfig b/src/tools/clippy/.editorconfig
index ec6e107d5..bc7642bf8 100644
--- a/src/tools/clippy/.editorconfig
+++ b/src/tools/clippy/.editorconfig
@@ -11,6 +11,7 @@ trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space
indent_size = 4
+max_line_length = 120
[*.md]
# double whitespace at end of line
diff --git a/src/tools/clippy/.github/workflows/clippy_bors.yml b/src/tools/clippy/.github/workflows/clippy_bors.yml
index 24e677ce8..93198aabd 100644
--- a/src/tools/clippy/.github/workflows/clippy_bors.yml
+++ b/src/tools/clippy/.github/workflows/clippy_bors.yml
@@ -180,6 +180,8 @@ jobs:
# Run
- name: Build Integration Test
+ env:
+ CARGO_PROFILE_DEV_SPLIT_DEBUGINFO: off
run: cargo test --test integration --features integration --no-run
# Upload
diff --git a/src/tools/clippy/.github/workflows/remark.yml b/src/tools/clippy/.github/workflows/remark.yml
index 81ef072bb..116058b7c 100644
--- a/src/tools/clippy/.github/workflows/remark.yml
+++ b/src/tools/clippy/.github/workflows/remark.yml
@@ -29,7 +29,7 @@ jobs:
- name: Install mdbook
run: |
mkdir mdbook
- curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.18/mdbook-v0.4.18-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
+ curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.28/mdbook-v0.4.28-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
echo `pwd`/mdbook >> $GITHUB_PATH
# Run
diff --git a/src/tools/clippy/CHANGELOG.md b/src/tools/clippy/CHANGELOG.md
index 765826ed8..559b560dd 100644
--- a/src/tools/clippy/CHANGELOG.md
+++ b/src/tools/clippy/CHANGELOG.md
@@ -6,11 +6,158 @@ document.
## Unreleased / Beta / In Rust Nightly
-[d822110d...master](https://github.com/rust-lang/rust-clippy/compare/d822110d...master)
+[7f27e2e7...master](https://github.com/rust-lang/rust-clippy/compare/7f27e2e7...master)
+
+## Rust 1.68
+
+Current stable, released 2023-03-09
+
+[d822110d...7f27e2e7](https://github.com/rust-lang/rust-clippy/compare/d822110d...7f27e2e7)
+
+### New Lints
+
+* [`permissions_set_readonly_false`]
+ [#10063](https://github.com/rust-lang/rust-clippy/pull/10063)
+* [`almost_complete_range`]
+ [#10043](https://github.com/rust-lang/rust-clippy/pull/10043)
+* [`size_of_ref`]
+ [#10098](https://github.com/rust-lang/rust-clippy/pull/10098)
+* [`semicolon_outside_block`]
+ [#9826](https://github.com/rust-lang/rust-clippy/pull/9826)
+* [`semicolon_inside_block`]
+ [#9826](https://github.com/rust-lang/rust-clippy/pull/9826)
+* [`transmute_null_to_fn`]
+ [#10099](https://github.com/rust-lang/rust-clippy/pull/10099)
+* [`fn_null_check`]
+ [#10099](https://github.com/rust-lang/rust-clippy/pull/10099)
+
+### Moves and Deprecations
+
+* Moved [`manual_clamp`] to `nursery` (Now allow-by-default)
+ [#10101](https://github.com/rust-lang/rust-clippy/pull/10101)
+* Moved [`mutex_atomic`] to `restriction`
+ [#10115](https://github.com/rust-lang/rust-clippy/pull/10115)
+* Renamed `derive_hash_xor_eq` to [`derived_hash_with_manual_eq`]
+ [#10184](https://github.com/rust-lang/rust-clippy/pull/10184)
+
+### Enhancements
+
+* [`collapsible_str_replace`]: Now takes MSRV into consideration. The minimal version is 1.58
+ [#10047](https://github.com/rust-lang/rust-clippy/pull/10047)
+* [`unused_self`]: No longer lints if the method body contains a `todo!()` call
+ [#10166](https://github.com/rust-lang/rust-clippy/pull/10166)
+* [`derivable_impls`]: Now suggests deriving `Default` for enums with default unit variants
+ [#10161](https://github.com/rust-lang/rust-clippy/pull/10161)
+* [`arithmetic_side_effects`]: Added two new config values
+ `arithmetic-side-effects-allowed-binary` and `arithmetic-side-effects-allowed-unary`
+ to allow operation on user types
+ [#9840](https://github.com/rust-lang/rust-clippy/pull/9840)
+* [`large_const_arrays`], [`large_stack_arrays`]: avoid integer overflow when calculating
+ total array size
+ [#10103](https://github.com/rust-lang/rust-clippy/pull/10103)
+* [`indexing_slicing`]: add new config `suppress-restriction-lint-in-const` to enable
+ restriction lints, even if the suggestion might not be applicable
+ [#9920](https://github.com/rust-lang/rust-clippy/pull/9920)
+* [`needless_borrow`], [`redundant_clone`]: Now track references better and detect more cases
+ [#9701](https://github.com/rust-lang/rust-clippy/pull/9701)
+* [`derived_hash_with_manual_eq`]: Now allows `#[derive(PartialEq)]` with custom `Hash`
+ implementations
+ [#10184](https://github.com/rust-lang/rust-clippy/pull/10184)
+* [`manual_is_ascii_check`]: Now detects ranges with `.contains()` calls
+ [#10053](https://github.com/rust-lang/rust-clippy/pull/10053)
+* [`transmuting_null`]: Now detects `const` pointers to all types
+ [#10099](https://github.com/rust-lang/rust-clippy/pull/10099)
+* [`needless_return`]: Now detects more cases for returns of owned values
+ [#10110](https://github.com/rust-lang/rust-clippy/pull/10110)
+
+### False Positive Fixes
+
+* [`field_reassign_with_default`]: No longer lints cases where values are initialized from
+ closures capturing struct values
+ [#10143](https://github.com/rust-lang/rust-clippy/pull/10143)
+* [`seek_to_start_instead_of_rewind`]: No longer lints if the return value of `seek` is used.
+ [#10096](https://github.com/rust-lang/rust-clippy/pull/10096)
+* [`manual_filter`]: Now ignores `if` expressions where the else branch has side effects or
+ doesn't return `None`
+ [#10091](https://github.com/rust-lang/rust-clippy/pull/10091)
+* [`implicit_clone`]: No longer lints if the type doesn't implement `Clone`
+ [#10022](https://github.com/rust-lang/rust-clippy/pull/10022)
+* [`match_wildcard_for_single_variants`]: No longer lints on wildcards with a guard
+ [#10056](https://github.com/rust-lang/rust-clippy/pull/10056)
+* [`drop_ref`]: No longer lints idiomatic expressions in `match` arms
+ [#10142](https://github.com/rust-lang/rust-clippy/pull/10142)
+* [`arithmetic_side_effects`]: No longer lints on corner cases with negative number literals
+ [#9867](https://github.com/rust-lang/rust-clippy/pull/9867)
+* [`string_lit_as_bytes`]: No longer lints in scrutinees of `match` statements
+ [#10012](https://github.com/rust-lang/rust-clippy/pull/10012)
+* [`manual_assert`]: No longer lints in `else if` statements
+ [#10013](https://github.com/rust-lang/rust-clippy/pull/10013)
+* [`needless_return`]: No longer lints when using `do yeet`
+ [#10109](https://github.com/rust-lang/rust-clippy/pull/10109)
+* All lints: No longer lint in enum discriminant values when the suggestion won't work in a
+ const context
+ [#10008](https://github.com/rust-lang/rust-clippy/pull/10008)
+* [`single_element_loop`]: No longer lints if the loop contains a `break` or `continue`
+ [#10162](https://github.com/rust-lang/rust-clippy/pull/10162)
+* [`uninlined_format_args`]: No longer suggests inlining arguments in `assert!` and
+  `debug_assert!` macros before the 2021 edition
+ [#10055](https://github.com/rust-lang/rust-clippy/pull/10055)
+* [`explicit_counter_loop`]: No longer ignores counter changes after `continue` expressions
+ [#10094](https://github.com/rust-lang/rust-clippy/pull/10094)
+* [`from_over_into`]: No longer lints on opaque types
+ [#9982](https://github.com/rust-lang/rust-clippy/pull/9982)
+* [`expl_impl_clone_on_copy`]: No longer lints on `#[repr(packed)]` structs with generic
+ parameters
+ [#10189](https://github.com/rust-lang/rust-clippy/pull/10189)
+
+### Suggestion Fixes/Improvements
+
+* [`zero_ptr`]: Now suggests `core::` paths for `no_std` crates
+ [#10023](https://github.com/rust-lang/rust-clippy/pull/10023)
+* [`useless_conversion`]: Now suggests removing calls to `into_iter()` on an expression
+ implementing `Iterator`
+ [#10020](https://github.com/rust-lang/rust-clippy/pull/10020)
+* [`box_default`]: The suggestion now uses short paths
+ [#10153](https://github.com/rust-lang/rust-clippy/pull/10153)
+* [`default_trait_access`], [`clone_on_copy`]: The suggestion now uses short paths
+ [#10160](https://github.com/rust-lang/rust-clippy/pull/10160)
+* [`comparison_to_empty`]: The suggestion now removes unused deref operations
+ [#9962](https://github.com/rust-lang/rust-clippy/pull/9962)
+* [`manual_let_else`]: Suggestions for or-patterns now include required brackets.
+ [#9966](https://github.com/rust-lang/rust-clippy/pull/9966)
+* [`match_single_binding`]: suggestion no longer introduces unneeded semicolons
+ [#10060](https://github.com/rust-lang/rust-clippy/pull/10060)
+* [`case_sensitive_file_extension_comparisons`]: Now displays a suggestion with `Path`
+ [#10107](https://github.com/rust-lang/rust-clippy/pull/10107)
+* [`empty_structs_with_brackets`]: The suggestion is no longer machine applicable, to avoid
+ errors when accessing struct fields
+ [#10141](https://github.com/rust-lang/rust-clippy/pull/10141)
+* [`identity_op`]: Removes borrows in the suggestion when needed
+ [#10004](https://github.com/rust-lang/rust-clippy/pull/10004)
+* [`suboptimal_flops`]: The suggestion now includes parentheses when required
+ [#10113](https://github.com/rust-lang/rust-clippy/pull/10113)
+* [`iter_kv_map`]: Now handles `mut` and reference annotations in the suggestion
+ [#10159](https://github.com/rust-lang/rust-clippy/pull/10159)
+* [`redundant_static_lifetimes`]: The suggestion no longer removes `mut` from references
+ [#10006](https://github.com/rust-lang/rust-clippy/pull/10006)
+
+### ICE Fixes
+
+* [`new_ret_no_self`]: Now avoids a stack overflow for `impl Trait` types
+ [#10086](https://github.com/rust-lang/rust-clippy/pull/10086)
+* [`unnecessary_to_owned`]: Now handles compiler-generated notes better
+ [#10027](https://github.com/rust-lang/rust-clippy/pull/10027)
+
+### Others
+
+* `SYSROOT` and `--sysroot` can now be set at the same time
+ [#10149](https://github.com/rust-lang/rust-clippy/pull/10149)
+* Fix error when providing an `array-size-threshold` in `clippy.toml`
+ [#10423](https://github.com/rust-lang/rust-clippy/pull/10423)
## Rust 1.67
-Current stable, released 2023-01-26
+Released 2023-01-26
[4f142aa1...d822110d](https://github.com/rust-lang/rust-clippy/compare/4f142aa1...d822110d)
@@ -41,8 +188,6 @@ Current stable, released 2023-01-26
### Moves and Deprecations
-* Moved [`uninlined_format_args`] to `style` (Now warn-by-default)
- [#9865](https://github.com/rust-lang/rust-clippy/pull/9865)
* Moved [`needless_collect`] to `nursery` (Now allow-by-default)
[#9705](https://github.com/rust-lang/rust-clippy/pull/9705)
* Moved [`or_fun_call`] to `nursery` (Now allow-by-default)
@@ -278,7 +423,7 @@ Released 2022-12-15
[#9490](https://github.com/rust-lang/rust-clippy/pull/9490)
* [`almost_complete_letter_range`]: No longer lints in external macros
[#9467](https://github.com/rust-lang/rust-clippy/pull/9467)
-* [`drop_copy`]: No longer lints on idiomatic cases in match arms
+* [`drop_copy`]: No longer lints on idiomatic cases in match arms
[#9491](https://github.com/rust-lang/rust-clippy/pull/9491)
* [`question_mark`]: No longer lints in const context
[#9487](https://github.com/rust-lang/rust-clippy/pull/9487)
@@ -4237,6 +4382,7 @@ Released 2018-09-13
<!-- begin autogenerated links to lint list -->
[`absurd_extreme_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons
[`alloc_instead_of_core`]: https://rust-lang.github.io/rust-clippy/master/index.html#alloc_instead_of_core
+[`allow_attributes`]: https://rust-lang.github.io/rust-clippy/master/index.html#allow_attributes
[`allow_attributes_without_reason`]: https://rust-lang.github.io/rust-clippy/master/index.html#allow_attributes_without_reason
[`almost_complete_letter_range`]: https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_letter_range
[`almost_complete_range`]: https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_range
@@ -4295,6 +4441,7 @@ Released 2018-09-13
[`chars_last_cmp`]: https://rust-lang.github.io/rust-clippy/master/index.html#chars_last_cmp
[`chars_next_cmp`]: https://rust-lang.github.io/rust-clippy/master/index.html#chars_next_cmp
[`checked_conversions`]: https://rust-lang.github.io/rust-clippy/master/index.html#checked_conversions
+[`clear_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#clear_with_drain
[`clone_double_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#clone_double_ref
[`clone_on_copy`]: https://rust-lang.github.io/rust-clippy/master/index.html#clone_on_copy
[`clone_on_ref_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#clone_on_ref_ptr
@@ -4307,6 +4454,7 @@ Released 2018-09-13
[`collapsible_if`]: https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
[`collapsible_match`]: https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_match
[`collapsible_str_replace`]: https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_str_replace
+[`collection_is_never_read`]: https://rust-lang.github.io/rust-clippy/master/index.html#collection_is_never_read
[`comparison_chain`]: https://rust-lang.github.io/rust-clippy/master/index.html#comparison_chain
[`comparison_to_empty`]: https://rust-lang.github.io/rust-clippy/master/index.html#comparison_to_empty
[`const_static_lifetime`]: https://rust-lang.github.io/rust-clippy/master/index.html#const_static_lifetime
@@ -4485,6 +4633,7 @@ Released 2018-09-13
[`large_const_arrays`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_const_arrays
[`large_digit_groups`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_digit_groups
[`large_enum_variant`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_enum_variant
+[`large_futures`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_futures
[`large_include_file`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_include_file
[`large_stack_arrays`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_stack_arrays
[`large_types_passed_by_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_types_passed_by_value
@@ -4497,6 +4646,8 @@ Released 2018-09-13
[`let_underscore_must_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_underscore_must_use
[`let_underscore_untyped`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_underscore_untyped
[`let_unit_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_unit_value
+[`let_with_type_underscore`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_with_type_underscore
+[`lines_filter_map_ok`]: https://rust-lang.github.io/rust-clippy/master/index.html#lines_filter_map_ok
[`linkedlist`]: https://rust-lang.github.io/rust-clippy/master/index.html#linkedlist
[`logic_bug`]: https://rust-lang.github.io/rust-clippy/master/index.html#logic_bug
[`lossy_float_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#lossy_float_literal
@@ -4514,6 +4665,7 @@ Released 2018-09-13
[`manual_instant_elapsed`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_instant_elapsed
[`manual_is_ascii_check`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check
[`manual_let_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else
+[`manual_main_separator_str`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_main_separator_str
[`manual_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_map
[`manual_memcpy`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_memcpy
[`manual_non_exhaustive`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_non_exhaustive
@@ -4522,6 +4674,7 @@ Released 2018-09-13
[`manual_rem_euclid`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_rem_euclid
[`manual_retain`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_retain
[`manual_saturating_arithmetic`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_saturating_arithmetic
+[`manual_slice_size_calculation`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_slice_size_calculation
[`manual_split_once`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_split_once
[`manual_str_repeat`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_str_repeat
[`manual_string_new`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_string_new
@@ -4560,6 +4713,7 @@ Released 2018-09-13
[`mismatching_type_param_order`]: https://rust-lang.github.io/rust-clippy/master/index.html#mismatching_type_param_order
[`misnamed_getters`]: https://rust-lang.github.io/rust-clippy/master/index.html#misnamed_getters
[`misrefactored_assign_op`]: https://rust-lang.github.io/rust-clippy/master/index.html#misrefactored_assign_op
+[`missing_assert_message`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_assert_message
[`missing_const_for_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_const_for_fn
[`missing_docs_in_private_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_docs_in_private_items
[`missing_enforced_import_renames`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_enforced_import_renames
@@ -4689,6 +4843,7 @@ Released 2018-09-13
[`read_zero_byte_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#read_zero_byte_vec
[`recursive_format_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#recursive_format_impl
[`redundant_allocation`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_allocation
+[`redundant_async_block`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_async_block
[`redundant_clone`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_clone
[`redundant_closure`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure
[`redundant_closure_call`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_call
@@ -4770,6 +4925,7 @@ Released 2018-09-13
[`suspicious_arithmetic_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_arithmetic_impl
[`suspicious_assignment_formatting`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_assignment_formatting
[`suspicious_command_arg_space`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_command_arg_space
+[`suspicious_doc_comments`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_doc_comments
[`suspicious_else_formatting`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_else_formatting
[`suspicious_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_map
[`suspicious_op_assign_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_op_assign_impl
@@ -4782,6 +4938,7 @@ Released 2018-09-13
[`tabs_in_doc_comments`]: https://rust-lang.github.io/rust-clippy/master/index.html#tabs_in_doc_comments
[`temporary_assignment`]: https://rust-lang.github.io/rust-clippy/master/index.html#temporary_assignment
[`temporary_cstring_as_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#temporary_cstring_as_ptr
+[`tests_outside_test_module`]: https://rust-lang.github.io/rust-clippy/master/index.html#tests_outside_test_module
[`to_digit_is_some`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_digit_is_some
[`to_string_in_display`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_string_in_display
[`to_string_in_format_args`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_string_in_format_args
@@ -4823,6 +4980,7 @@ Released 2018-09-13
[`unit_hash`]: https://rust-lang.github.io/rust-clippy/master/index.html#unit_hash
[`unit_return_expecting_ord`]: https://rust-lang.github.io/rust-clippy/master/index.html#unit_return_expecting_ord
[`unknown_clippy_lints`]: https://rust-lang.github.io/rust-clippy/master/index.html#unknown_clippy_lints
+[`unnecessary_box_returns`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_box_returns
[`unnecessary_cast`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_cast
[`unnecessary_filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_filter_map
[`unnecessary_find_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_find_map
@@ -4836,6 +4994,7 @@ Released 2018-09-13
[`unnecessary_safety_doc`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_safety_doc
[`unnecessary_self_imports`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_self_imports
[`unnecessary_sort_by`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_sort_by
+[`unnecessary_struct_initialization`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_struct_initialization
[`unnecessary_to_owned`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_to_owned
[`unnecessary_unwrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_unwrap
[`unnecessary_wraps`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_wraps
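
For readers unfamiliar with one of the lints announced in the 1.68 changelog section above, `permissions_set_readonly_false` targets a `std::fs` pitfall: on Unix, `set_readonly(false)` grants write permission to owner, group, and others rather than merely clearing a read-only flag. A minimal standalone sketch of the flagged pattern (hypothetical file path; not part of this patch):

```rust
use std::fs;

fn make_writable(path: &str) -> std::io::Result<()> {
    let mut perms = fs::metadata(path)?.permissions();
    // `permissions_set_readonly_false` fires here: on Unix this sets the write
    // bit for everyone, which is usually broader than intended.
    perms.set_readonly(false);
    fs::set_permissions(path, perms)
}

fn main() -> std::io::Result<()> {
    make_writable("example.txt")
}
```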
diff --git a/src/tools/clippy/CONTRIBUTING.md b/src/tools/clippy/CONTRIBUTING.md
index 3158080d2..3df132803 100644
--- a/src/tools/clippy/CONTRIBUTING.md
+++ b/src/tools/clippy/CONTRIBUTING.md
@@ -50,7 +50,7 @@ a [developer guide] and is a good place to start your journey.
All issues on Clippy are mentored, if you want help simply ask someone from the
Clippy team directly by mentioning them in the issue or over on [Zulip]. All
currently active team members can be found
-[here](https://github.com/rust-lang/highfive/blob/master/highfive/configs/rust-lang/rust-clippy.json#L3)
+[here](https://github.com/rust-lang/rust-clippy/blob/master/triagebot.toml#L18)
Some issues are easier than others. The [`good-first-issue`] label can be used to find the easy
issues. You can use `@rustbot claim` to assign the issue to yourself.
diff --git a/src/tools/clippy/COPYRIGHT b/src/tools/clippy/COPYRIGHT
index a6be75b5e..82703b18f 100644
--- a/src/tools/clippy/COPYRIGHT
+++ b/src/tools/clippy/COPYRIGHT
@@ -1,3 +1,5 @@
+// REUSE-IgnoreStart
+
Copyright 2014-2022 The Rust Project Developers
Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
@@ -5,3 +7,5 @@ http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
<LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
option. All files in the project carrying such notice may not be
copied, modified, or distributed except according to those terms.
+
+// REUSE-IgnoreEnd
diff --git a/src/tools/clippy/Cargo.toml b/src/tools/clippy/Cargo.toml
index 70d126809..c35dfcbd8 100644
--- a/src/tools/clippy/Cargo.toml
+++ b/src/tools/clippy/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy"
-version = "0.1.69"
+version = "0.1.70"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
diff --git a/src/tools/clippy/README.md b/src/tools/clippy/README.md
index 3e7379ace..85798e0e8 100644
--- a/src/tools/clippy/README.md
+++ b/src/tools/clippy/README.md
@@ -11,7 +11,7 @@ Lints are divided into categories, each with a default [lint level](https://doc.
You can choose how much Clippy is supposed to ~~annoy~~ help you by changing the lint level by category.
| Category | Description | Default level |
-| --------------------- | ----------------------------------------------------------------------------------- | ------------- |
+|-----------------------|-------------------------------------------------------------------------------------|---------------|
| `clippy::all` | all lints that are on by default (correctness, suspicious, style, complexity, perf) | **warn/deny** |
| `clippy::correctness` | code that is outright wrong or useless | **deny** |
| `clippy::suspicious` | code that is most likely wrong or useless | **warn** |
@@ -130,7 +130,7 @@ for example.
You can add Clippy to Travis CI in the same way you use it locally:
-```yml
+```yaml
language: rust
rust:
- stable
@@ -253,7 +253,7 @@ rust-version = "1.30"
The MSRV can also be specified as an attribute, like below.
-```rust
+```rust,ignore
#![feature(custom_inner_attributes)]
#![clippy::msrv = "1.30.0"]
@@ -275,6 +275,8 @@ If you want to contribute to Clippy, you can find more information in [CONTRIBUT
## License
+<!-- REUSE-IgnoreStart -->
+
Copyright 2014-2022 The Rust Project Developers
Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
@@ -282,3 +284,5 @@ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
<LICENSE-MIT or [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)>, at your
option. Files in the project may not be
copied, modified, or distributed except according to those terms.
+
+<!-- REUSE-IgnoreEnd -->
diff --git a/src/tools/clippy/book/src/README.md b/src/tools/clippy/book/src/README.md
index df4a1f270..3b6270962 100644
--- a/src/tools/clippy/book/src/README.md
+++ b/src/tools/clippy/book/src/README.md
@@ -14,7 +14,7 @@ much Clippy is supposed to ~~annoy~~ help you by changing the lint level by
category.
| Category | Description | Default level |
-| --------------------- | ----------------------------------------------------------------------------------- | ------------- |
+|-----------------------|-------------------------------------------------------------------------------------|---------------|
| `clippy::all` | all lints that are on by default (correctness, suspicious, style, complexity, perf) | **warn/deny** |
| `clippy::correctness` | code that is outright wrong or useless | **deny** |
| `clippy::suspicious` | code that is most likely wrong or useless | **warn** |
diff --git a/src/tools/clippy/book/src/SUMMARY.md b/src/tools/clippy/book/src/SUMMARY.md
index 0649f7a63..cbd73376d 100644
--- a/src/tools/clippy/book/src/SUMMARY.md
+++ b/src/tools/clippy/book/src/SUMMARY.md
@@ -13,6 +13,7 @@
- [Development](development/README.md)
- [Basics](development/basics.md)
- [Adding Lints](development/adding_lints.md)
+ - [Type Checking](development/type_checking.md)
- [Common Tools](development/common_tools_writing_lints.md)
- [Infrastructure](development/infrastructure/README.md)
- [Syncing changes between Clippy and rust-lang/rust](development/infrastructure/sync.md)
diff --git a/src/tools/clippy/book/src/configuration.md b/src/tools/clippy/book/src/configuration.md
index 87f4a697a..1304f6a8c 100644
--- a/src/tools/clippy/book/src/configuration.md
+++ b/src/tools/clippy/book/src/configuration.md
@@ -3,7 +3,7 @@
> **Note:** The configuration file is unstable and may be deprecated in the future.
Some lints can be configured in a TOML file named `clippy.toml` or `.clippy.toml`. It contains a
-basic `variable = value` mapping eg.
+basic `variable = value` mapping, e.g.
```toml
avoid-breaking-exported-api = false
@@ -60,7 +60,7 @@ And to warn on `lint_name`, run
cargo clippy -- -W clippy::lint_name
```
-This also works with lint groups. For example you can run Clippy with warnings for all lints enabled:
+This also works with lint groups. For example, you can run Clippy with warnings for all lints enabled:
```terminal
cargo clippy -- -W clippy::pedantic
@@ -84,7 +84,7 @@ msrv = "1.30.0"
The MSRV can also be specified as an attribute, like below.
-```rust
+```rust,ignore
#![feature(custom_inner_attributes)]
#![clippy::msrv = "1.30.0"]
@@ -96,7 +96,28 @@ fn main() {
You can also omit the patch version when specifying the MSRV, so `msrv = 1.30`
is equivalent to `msrv = 1.30.0`.
-Note: `custom_inner_attributes` is an unstable feature so it has to be enabled explicitly.
+Note: `custom_inner_attributes` is an unstable feature, so it has to be enabled explicitly.
Lints that recognize this configuration option can be
found [here](https://rust-lang.github.io/rust-clippy/master/index.html#msrv)
+
+### Disabling evaluation of certain code
+
+> **Note:** This should only be used in cases where other solutions, like `#[allow(clippy::all)]`, are not sufficient.
+
+Very rarely, you may wish to prevent Clippy from evaluating certain sections of code entirely. You can do this with
+[conditional compilation](https://doc.rust-lang.org/reference/conditional-compilation.html) by checking that the
+`cargo-clippy` feature is not set. You may need to provide a stub so that the code compiles:
+
+```rust
+#[cfg(not(feature = "cargo-clippy"))]
+include!(concat!(env!("OUT_DIR"), "/my_big_function-generated.rs"));
+
+#[cfg(feature = "cargo-clippy")]
+fn my_big_function(_input: &str) -> Option<MyStruct> {
+ None
+}
+```
+
+This feature is not actually part of your crate, so specifying `--all-features` to other tools, e.g. `cargo test
+--all-features`, will not disable it.
diff --git a/src/tools/clippy/book/src/development/README.md b/src/tools/clippy/book/src/development/README.md
index 5cf7201cf..616e6d182 100644
--- a/src/tools/clippy/book/src/development/README.md
+++ b/src/tools/clippy/book/src/development/README.md
@@ -5,7 +5,7 @@ making Clippy better by contributing to it. In that case, welcome to the
project!
> _Note:_ If you're just interested in using Clippy, there's nothing to see from
-> this point onward and you should return to one of the earlier chapters.
+> this point onward, and you should return to one of the earlier chapters.
## Getting started
diff --git a/src/tools/clippy/book/src/development/adding_lints.md b/src/tools/clippy/book/src/development/adding_lints.md
index f57dc627d..9dacaaaae 100644
--- a/src/tools/clippy/book/src/development/adding_lints.md
+++ b/src/tools/clippy/book/src/development/adding_lints.md
@@ -18,6 +18,7 @@ because that's clearly a non-descriptive name.
- [Cargo lints](#cargo-lints)
- [Rustfix tests](#rustfix-tests)
- [Testing manually](#testing-manually)
+ - [Running directly](#running-directly)
- [Lint declaration](#lint-declaration)
- [Lint registration](#lint-registration)
- [Lint passes](#lint-passes)
@@ -186,6 +187,15 @@ cargo dev lint input.rs
from the working copy root. With tests in place, let's have a look at
implementing our lint now.
+## Running directly
+
+While it's easier to just use `cargo dev lint`, it might be desirable to get
+`target/release/cargo-clippy` and `target/release/clippy-driver` to work as well in some cases.
+By default, they don't work because Clippy dynamically links rustc. To help them find rustc,
+add the path printed by `rustc --print target-libdir` (run inside this workspace so that the rustc version matches)
+to your library search path.
+On Linux, this can be done by setting the `LD_LIBRARY_PATH` environment variable to that path.
+
## Lint declaration
Let's start by opening the new file created in the `clippy_lints` crate at
@@ -265,7 +275,7 @@ When declaring a new lint by hand and `cargo dev update_lints` is used, the lint
pass may have to be registered manually in the `register_plugins` function in
`clippy_lints/src/lib.rs`:
-```rust
+```rust,ignore
store.register_early_pass(|| Box::new(foo_functions::FooFunctions));
```
@@ -291,7 +301,7 @@ either [`EarlyLintPass`][early_lint_pass] or [`LateLintPass`][late_lint_pass].
In short, the `LateLintPass` has access to type information while the
`EarlyLintPass` doesn't. If you don't need access to type information, use the
-`EarlyLintPass`. The `EarlyLintPass` is also faster. However linting speed
+`EarlyLintPass`. The `EarlyLintPass` is also faster. However, linting speed
hasn't really been a concern with Clippy so far.
Since we don't need type information for checking the function name, we used
@@ -308,7 +318,7 @@ implementation of the lint logic.
Let's start by implementing the `EarlyLintPass` for our `FooFunctions`:
-```rust
+```rust,ignore
impl EarlyLintPass for FooFunctions {
fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, span: Span, _: NodeId) {
// TODO: Emit lint here
@@ -327,10 +337,10 @@ variety of lint emission functions. They can all be found in
[`clippy_utils/src/diagnostics.rs`][diagnostics].
`span_lint_and_help` seems most appropriate in this case. It allows us to
-provide an extra help message and we can't really suggest a better name
+provide an extra help message, and we can't really suggest a better name
automatically. This is how it looks:
-```rust
+```rust,ignore
impl EarlyLintPass for FooFunctions {
fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, span: Span, _: NodeId) {
span_lint_and_help(
@@ -469,7 +479,7 @@ the value from `clippy.toml`. This can be accounted for using the
`extract_msrv_attr!(LintContext)` macro and passing
`LateContext`/`EarlyContext`.
-```rust
+```rust,ignore
impl<'tcx> LateLintPass<'tcx> for ManualStrip {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
...
@@ -483,7 +493,7 @@ the lint's test file, `tests/ui/manual_strip.rs` in this example. It should
have a case for the version below the MSRV and one with the same contents but
for the MSRV version itself.
-```rust
+```rust,ignore
...
#[clippy::msrv = "1.44"]
@@ -514,7 +524,7 @@ define_Conf! {
If you have trouble implementing your lint, there is also the internal `author`
lint to generate Clippy code that detects the offending pattern. It does not
-work for all of the Rust syntax, but can give a good starting point.
+work for all the Rust syntax, but can give a good starting point.
The quickest way to use it, is the [Rust playground:
play.rust-lang.org][author_example]. Put the code you want to lint into the
@@ -607,7 +617,7 @@ output in the `stdout` part.
## PR Checklist
-Before submitting your PR make sure you followed all of the basic requirements:
+Before submitting your PR make sure you followed all the basic requirements:
<!-- Sync this with `.github/PULL_REQUEST_TEMPLATE` -->
@@ -627,7 +637,7 @@ for some users. Adding a configuration is done in the following steps:
1. Adding a new configuration entry to [`clippy_lints::utils::conf`] like this:
- ```rust
+ ```rust,ignore
/// Lint: LINT_NAME.
///
/// <The configuration field doc comment>
@@ -680,7 +690,7 @@ for some users. Adding a configuration is done in the following steps:
configuration value is now cloned or copied into a local value that is then
passed to the impl struct like this:
- ```rust
+ ```rust,ignore
// Default generated registration:
store.register_*_pass(|| box module::StructName);
diff --git a/src/tools/clippy/book/src/development/basics.md b/src/tools/clippy/book/src/development/basics.md
index 6fb53236e..7615dc12f 100644
--- a/src/tools/clippy/book/src/development/basics.md
+++ b/src/tools/clippy/book/src/development/basics.md
@@ -4,8 +4,8 @@ This document explains the basics for hacking on Clippy. Besides others, this
includes how to build and test Clippy. For a more in depth description on the
codebase take a look at [Adding Lints] or [Common Tools].
-[Adding Lints]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/adding_lints.md
-[Common Tools]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/common_tools_writing_lints.md
+[Adding Lints]: adding_lints.md
+[Common Tools]: common_tools_writing_lints.md
- [Basics for hacking on Clippy](#basics-for-hacking-on-clippy)
- [Get the Code](#get-the-code)
@@ -125,7 +125,7 @@ We follow a rustc no merge-commit policy. See
## Common Abbreviations
| Abbreviation | Meaning |
-| ------------ | -------------------------------------- |
+|--------------|----------------------------------------|
| UB | Undefined Behavior |
| FP | False Positive |
| FN | False Negative |
diff --git a/src/tools/clippy/book/src/development/common_tools_writing_lints.md b/src/tools/clippy/book/src/development/common_tools_writing_lints.md
index f5aa06e4b..09171d86a 100644
--- a/src/tools/clippy/book/src/development/common_tools_writing_lints.md
+++ b/src/tools/clippy/book/src/development/common_tools_writing_lints.md
@@ -3,7 +3,7 @@
You may need following tooltips to catch up with common operations.
- [Common tools for writing lints](#common-tools-for-writing-lints)
- - [Retrieving the type of an expression](#retrieving-the-type-of-an-expression)
+ - [Retrieving the type of expression](#retrieving-the-type-of-expression)
- [Checking if an expr is calling a specific method](#checking-if-an-expr-is-calling-a-specific-method)
- [Checking for a specific type](#checking-for-a-specific-type)
- [Checking if a type implements a specific trait](#checking-if-a-type-implements-a-specific-trait)
@@ -16,7 +16,7 @@ Useful Rustc dev guide links:
- [Type checking](https://rustc-dev-guide.rust-lang.org/type-checking.html)
- [Ty module](https://rustc-dev-guide.rust-lang.org/ty.html)
-## Retrieving the type of an expression
+## Retrieving the type of expression
Sometimes you may want to retrieve the type `Ty` of an expression `Expr`, for
example to answer following questions:
@@ -45,7 +45,7 @@ impl LateLintPass<'_> for MyStructLint {
}
```
-Similarly in [`TypeckResults`][TypeckResults] methods, you have the
+Similarly, in [`TypeckResults`][TypeckResults] methods, you have the
[`pat_ty()`][pat_ty] method to retrieve a type from a pattern.
Two noticeable items here:
@@ -192,7 +192,7 @@ functions to deal with macros:
- `span.from_expansion()`: detects if a span is from macro expansion or
desugaring. Checking this is a common first step in a lint.
- ```rust
+ ```rust,ignore
if expr.span.from_expansion() {
// just forget it
return;
@@ -203,11 +203,11 @@ functions to deal with macros:
if so, which macro call expanded it. It is sometimes useful to check if the
context of two spans are equal.
- ```rust
+ ```rust,ignore
// expands to `1 + 0`, but don't lint
1 + mac!()
```
- ```rust
+ ```rust,ignore
if left.span.ctxt() != right.span.ctxt() {
// the coder most likely cannot modify this expression
return;
@@ -246,7 +246,7 @@ functions to deal with macros:
`macro_rules!` with `a == $b`, `$b` is expanded to some expression with a
different context from `a`.
- ```rust
+ ```rust,ignore
macro_rules! m {
($a:expr, $b:expr) => {
if $a.is_some() {
diff --git a/src/tools/clippy/book/src/development/infrastructure/book.md b/src/tools/clippy/book/src/development/infrastructure/book.md
index dbd624ecd..de5de4beb 100644
--- a/src/tools/clippy/book/src/development/infrastructure/book.md
+++ b/src/tools/clippy/book/src/development/infrastructure/book.md
@@ -13,7 +13,7 @@ guide to Clippy that you're reading right now. The Clippy book is formatted with
While not strictly necessary since the book source is simply Markdown text
files, having mdBook locally will allow you to build, test and serve the book
locally to view changes before you commit them to the repository. You likely
-already have `cargo` installed, so the easiest option is to simply:
+already have `cargo` installed, so the easiest option is to:
```shell
cargo install mdbook
@@ -26,7 +26,7 @@ instructions for other options.
The book's
[src](https://github.com/rust-lang/rust-clippy/tree/master/book/src)
-directory contains all of the markdown files used to generate the book. If you
+directory contains all the markdown files used to generate the book. If you
want to see your changes in real time, you can use the mdBook `serve` command to
run a web server locally that will automatically update changes as they are
made. From the top level of your `rust-clippy` directory:
diff --git a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
index d1ac7237b..df9b1bbe1 100644
--- a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
+++ b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
@@ -101,7 +101,7 @@ Look for the [`beta-accepted`] label and make sure to also include the PRs with
that label in the changelog. If you can, remove the `beta-accepted` labels
**after** the changelog PR was merged.
-> _Note:_ Some of those PRs might even got backported to the previous `beta`.
+> _Note:_ Some of those PRs might even get backported to the previous `beta`.
> Those have to be included in the changelog of the _previous_ release.
### 4. Update `clippy::version` attributes
diff --git a/src/tools/clippy/book/src/development/infrastructure/release.md b/src/tools/clippy/book/src/development/infrastructure/release.md
index 057228180..98fabf8e8 100644
--- a/src/tools/clippy/book/src/development/infrastructure/release.md
+++ b/src/tools/clippy/book/src/development/infrastructure/release.md
@@ -44,7 +44,7 @@ $ git push origin backport_remerge # This can be pushed to your fork
```
After this, open a PR to the master branch. In this PR, the commit hash of the
-`HEAD` of the `beta` branch must exists. In addition to that, no files should be
+`HEAD` of the `beta` branch must exist. In addition to that, no files should be
changed by this PR.
## Update the `beta` branch
diff --git a/src/tools/clippy/book/src/development/infrastructure/sync.md b/src/tools/clippy/book/src/development/infrastructure/sync.md
index 02cfc11b5..e1fe92f95 100644
--- a/src/tools/clippy/book/src/development/infrastructure/sync.md
+++ b/src/tools/clippy/book/src/development/infrastructure/sync.md
@@ -19,8 +19,7 @@ to beta. For reference, the first sync following this cadence was performed the
2020-08-27.
This process is described in detail in the following sections. For general
-information about `subtree`s in the Rust repository see [Rust's
-`CONTRIBUTING.md`][subtree].
+information about `subtree`s in the Rust repository see [the rustc-dev-guide][subtree].
## Patching git-subtree to work with big repos
@@ -47,7 +46,7 @@ sudo chown --reference=/usr/lib/git-core/git-subtree~ /usr/lib/git-core/git-subt
> _Note:_ If you are a Debian user, `dash` is the shell used by default for
> scripts instead of `sh`. This shell has a hardcoded recursion limit set to
-> 1000. In order to make this process work, you need to force the script to run
+> 1,000. In order to make this process work, you need to force the script to run
> `bash` instead. You can do this by editing the first line of the `git-subtree`
> script and changing `sh` to `bash`.
@@ -71,10 +70,10 @@ $ git remote add clippy-local /path/to/rust-clippy
## Performing the sync from [`rust-lang/rust`] to Clippy
-Here is a TL;DR version of the sync process (all of the following commands have
+Here is a TL;DR version of the sync process (all the following commands have
to be run inside the `rust` directory):
-1. Clone the [`rust-lang/rust`] repository or make sure it is up to date.
+1. Clone the [`rust-lang/rust`] repository or make sure it is up-to-date.
2. Checkout the commit from the latest available nightly. You can get it using
`rustup check`.
3. Sync the changes to the rust-copy of Clippy to your Clippy fork:
@@ -107,7 +106,7 @@ to be run inside the `rust` directory):
## Performing the sync from Clippy to [`rust-lang/rust`]
-All of the following commands have to be run inside the `rust` directory.
+All the following commands have to be run inside the `rust` directory.
1. Make sure you have checked out the latest `master` of `rust-lang/rust`.
2. Sync the `rust-lang/rust-clippy` master to the rust-copy of Clippy:
@@ -118,5 +117,5 @@ All of the following commands have to be run inside the `rust` directory.
3. Open a PR to [`rust-lang/rust`]
[gitgitgadget-pr]: https://github.com/gitgitgadget/git/pull/493
-[subtree]: https://rustc-dev-guide.rust-lang.org/contributing.html#external-dependencies-subtree
+[subtree]: https://rustc-dev-guide.rust-lang.org/external-repos.html#external-dependencies-subtree
[`rust-lang/rust`]: https://github.com/rust-lang/rust
diff --git a/src/tools/clippy/book/src/development/proposals/README.md b/src/tools/clippy/book/src/development/proposals/README.md
index 78fe34ebf..059c22ce1 100644
--- a/src/tools/clippy/book/src/development/proposals/README.md
+++ b/src/tools/clippy/book/src/development/proposals/README.md
@@ -6,6 +6,6 @@ or around Clippy in the long run.
Besides adding more and more lints and improve the lints that Clippy already
has, Clippy is also interested in making the experience of its users, developers
and maintainers better over time. Projects that address bigger picture things
-like this usually take more time and it is useful to have a proposal for those
+like this usually take more time, and it is useful to have a proposal for those
first. This is the place where such proposals are collected, so that we can
refer to them when working on them.
diff --git a/src/tools/clippy/book/src/development/proposals/roadmap-2021.md b/src/tools/clippy/book/src/development/proposals/roadmap-2021.md
index fe8b080f5..4406616bb 100644
--- a/src/tools/clippy/book/src/development/proposals/roadmap-2021.md
+++ b/src/tools/clippy/book/src/development/proposals/roadmap-2021.md
@@ -52,8 +52,8 @@ In the following, plans to improve the usability are covered.
#### No Output After `cargo check`
-Currently when `cargo clippy` is run after `cargo check`, it does not produce
-any output. This is especially problematic since `rust-analyzer` is on the rise
+Currently, when `cargo clippy` is run after `cargo check`, it does not produce
+any output. This is especially problematic since `rust-analyzer` is on the rise,
and it uses `cargo check` for checking code. A fix is already implemented, but
it still has to be pushed over the finish line. This also includes the
stabilization of the `cargo clippy --fix` command or the support of multi-span
@@ -221,7 +221,7 @@ regarding the user facing issues.
Rust's roadmap process was established by [RFC 1728] in 2016. Since then every
year a roadmap was published, that defined the bigger plans for the coming
-years. This years roadmap can be found [here][Rust Roadmap 2021].
+years. This year's roadmap can be found [here][Rust Roadmap 2021].
[RFC 1728]: https://rust-lang.github.io/rfcs/1728-north-star.html
diff --git a/src/tools/clippy/book/src/development/proposals/syntax-tree-patterns.md b/src/tools/clippy/book/src/development/proposals/syntax-tree-patterns.md
index c5587c4bf..36d722609 100644
--- a/src/tools/clippy/book/src/development/proposals/syntax-tree-patterns.md
+++ b/src/tools/clippy/book/src/development/proposals/syntax-tree-patterns.md
@@ -16,7 +16,7 @@ lints. For non-trivial lints, it often requires nested pattern matching of AST /
HIR nodes. For example, testing that an expression is a boolean literal requires
the following checks:
-```rust
+```rust,ignore
if let ast::ExprKind::Lit(lit) = &expr.node {
if let ast::LitKind::Bool(_) = &lit.node {
...
@@ -28,7 +28,7 @@ Writing this kind of matching code quickly becomes a complex task and the
resulting code is often hard to comprehend. The code below shows a simplified
version of the pattern matching required by the `collapsible_if` lint:
-```rust
+```rust,ignore
// simplified version of the collapsible_if lint
if let ast::ExprKind::If(check, then, None) = &expr.node {
if then.stmts.len() == 1 {
@@ -68,13 +68,13 @@ The second part of the motivation is clippy's dependence on unstable
compiler-internal data structures. Clippy lints are currently written against
the compiler's AST / HIR which means that even small changes in these data
structures might break a lot of lints. The second goal of this RFC is to **make
-lints independant of the compiler's AST / HIR data structures**.
+lints independent of the compiler's AST / HIR data structures**.
# Approach
A lot of complexity in writing lints currently seems to come from having to
manually implement the matching logic (see code samples above). It's an
-imparative style that describes *how* to match a syntax tree node instead of
+imperative style that describes *how* to match a syntax tree node instead of
specifying *what* should be matched against declaratively. In other areas, it's
common to use declarative patterns to describe desired information and let the
implementation do the actual matching. A well-known example of this approach are
@@ -111,7 +111,7 @@ expressions that are boolean literals with value `false`.
The pattern can then be used to implement lints in the following way:
-```rust
+```rust,ignore
...
impl EarlyLintPass for MyAwesomeLint {
@@ -270,7 +270,7 @@ pattern!{
// matches if expressions that **may or may not** have an else block
// Attn: `If(_, _, _)` matches only ifs that **have** an else block
//
- // | if with else block | if witout else block
+ // | if with else block | if without else block
// If(_, _, _) | match | no match
// If(_, _, _?) | match | match
// If(_, _, ()) | no match | match
@@ -346,7 +346,7 @@ pattern!{
one could get references to the nodes that matched the subpatterns in the
following way:
-```rust
+```rust,ignore
...
fn check_expr(expr: &syntax::ast::Expr) {
if let Some(result) = my_pattern(expr) {
@@ -372,7 +372,7 @@ matches arrays that consist of any number of literal expressions. Because those
expressions are named `foo`, the result struct contains a `foo` attribute which
is a vector of expressions:
-```rust
+```rust,ignore
...
if let Some(result) = my_pattern_seq(expr) {
result.foo // type: Vec<&syntax::ast::Expr>
@@ -394,7 +394,7 @@ In the pattern above, the `bar` name is only defined if the pattern matches a
boolean literal. If it matches an integer literal, the name isn't set. To
account for this, the result struct's `bar` attribute is an option type:
-```rust
+```rust,ignore
...
if let Some(result) = my_pattern_alt(expr) {
result.bar // type: Option<&bool>
@@ -404,7 +404,7 @@ if let Some(result) = my_pattern_alt(expr) {
It's also possible to use a name in multiple alternation branches if they have
compatible types:
-```rust
+```rust,ignore
pattern!{
// matches if expression is a boolean or integer literal
my_pattern_mult: Expr =
@@ -519,7 +519,7 @@ The `Alt`, `Seq` and `Opt` structs look like these:
> Note: The current implementation can be found
> [here](https://github.com/fkohlgrueber/pattern-matching/blob/dfb3bc9fbab69cec7c91e72564a63ebaa2ede638/pattern-match/src/matchers.rs#L35-L60).
-```rust
+```rust,ignore
pub enum Alt<T> {
Any,
Elmt(Box<T>),
@@ -568,7 +568,7 @@ another example, `Array( Lit(_)* )` is a valid pattern because the parameter of
## The IsMatch Trait
-The pattern syntax and the *PatternTree* are independant of specific syntax tree
+The pattern syntax and the *PatternTree* are independent of specific syntax tree
implementations (rust ast / hir, syn, ...). When looking at the different
pattern examples in the previous sections, it can be seen that the patterns
don't contain any information specific to a certain syntax tree implementation.
@@ -580,7 +580,7 @@ implementations is the `IsMatch` trait. It defines how to match *PatternTree*
nodes against specific syntax tree nodes. A simplified implementation of the
`IsMatch` trait is shown below:
-```rust
+```rust,ignore
pub trait IsMatch<O> {
fn is_match(&self, other: &'o O) -> bool;
}
@@ -619,7 +619,7 @@ approach (matching against the coarse pattern first and checking for additional
properties later) might be slower than the current practice of checking for
structure and additional properties in one pass. For example, the following lint
-```rust
+```rust,ignore
pattern!{
pat_if_without_else: Expr =
If(
@@ -644,7 +644,7 @@ first matches against the pattern and then checks that the `then` block doesn't
start with a comment. Using clippy's current approach, it's possible to check
for these conditions earlier:
-```rust
+```rust,ignore
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
if_chain! {
if let ast::ExprKind::If(ref check, ref then, None) = expr.node;
@@ -708,7 +708,7 @@ is similar to actual Rust syntax (probably like the `quote!` macro). For
example, a pattern that matches `if` expressions that have `false` in their
condition could look like this:
-```rust
+```rust,ignore
if false {
#[*]
}
@@ -717,7 +717,7 @@ if false {
#### Problems
Extending Rust syntax (which is quite complex by itself) with additional syntax
-needed for specifying patterns (alternations, sequences, repetisions, named
+needed for specifying patterns (alternations, sequences, repetitions, named
submatches, ...) might become difficult to read and really hard to parse
properly.
@@ -742,7 +742,7 @@ affects the structure of the resulting AST. `1 + 0 + 0` is parsed as `(1 + 0) +
Another example of a problem would be named submatches. Take a look at this
pattern:
-```rust
+```rust,ignore
fn test() {
1 #foo
}
@@ -858,11 +858,11 @@ would be evaluated as soon as the `Block(_)#then` was matched.
Another idea in this area would be to introduce a syntax for backreferences.
They could be used to require that multiple parts of a pattern should match the
same value. For example, the `assign_op_pattern` lint that searches for `a = a
-op b` and recommends changing it to `a op= b` requires that both occurrances of
+op b` and recommends changing it to `a op= b` requires that both occurrences of
`a` are the same. Using `=#...` as syntax for backreferences, the lint could be
implemented like this:
-```rust
+```rust,ignore
pattern!{
assign_op_pattern: Expr =
Assign(_#target, Binary(_, =#target, _)
@@ -882,7 +882,7 @@ least two return statements" could be a practical addition.
For patterns like "a literal that is not a boolean literal" one currently needs
to list all alternatives except the boolean case. Introducing a negation
operator that allows to write `Lit(!Bool(_))` might be a good idea. This pattern
-would be eqivalent to `Lit( Char(_) | Int(_) )` (given that currently only three
+would be equivalent to `Lit( Char(_) | Int(_) )` (given that currently only three
literal types are implemented).
#### Functional composition
diff --git a/src/tools/clippy/book/src/development/type_checking.md b/src/tools/clippy/book/src/development/type_checking.md
new file mode 100644
index 000000000..5ce434b99
--- /dev/null
+++ b/src/tools/clippy/book/src/development/type_checking.md
@@ -0,0 +1,144 @@
+# Type Checking
+
+When we work on a new lint or improve an existing lint, we might want
+to retrieve the type `Ty` of an expression `Expr` for a variety of
+reasons. This can be achieved by utilizing the [`LateContext`][LateContext]
+that is available for [`LateLintPass`][LateLintPass].
+
+## `LateContext` and `TypeckResults`
+
+The lint context [`LateContext`][LateContext] and [`TypeckResults`][TypeckResults]
+(returned by `LateContext::typeck_results`) are the two most useful data structures
+in `LateLintPass`. They allow us to jump to type definitions and other compilation
+stages such as HIR.
+
+> Note: `LateContext::typeck_results`'s return value is [`TypeckResults`][TypeckResults],
+> which is created during the type checking step. It includes useful information such as the
+> types of expressions, ways to resolve methods, and so on.
+
+`TypeckResults` contains useful methods such as [`expr_ty`][expr_ty],
+which gives us access to the underlying structure [`Ty`][Ty] of a given expression.
+
+```rust
+pub fn expr_ty(&self, expr: &Expr<'_>) -> Ty<'tcx>
+```
+
+As a side note, besides `expr_ty`, [`TypeckResults`][TypeckResults] contains a
+[`pat_ty()`][pat_ty] method that is useful for retrieving a type from a pattern.
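+
+As a rough illustration (`MyStructLint` is just a placeholder name, and the `is_char` check stands
+in for whatever logic a real lint would need), `pat_ty()` can be called from a `check_local`
+callback to inspect the type bound by a `let` pattern:
+
+```rust,ignore
+impl LateLintPass<'_> for MyStructLint {
+    fn check_local(&mut self, cx: &LateContext<'_>, local: &Local<'_>) {
+        // Get the type of the pattern in this `let` binding
+        let pat_ty = cx.typeck_results().pat_ty(local.pat);
+
+        // From here on, `pat_ty` can be inspected like any other `Ty`
+        if pat_ty.is_char() {
+            // ...
+        }
+    }
+}
+```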
+
+## `Ty`
+
+The `Ty` struct contains the type information of an expression.
+Let's take a look at `rustc_middle`'s [`Ty`][Ty] struct to examine it:
+
+```rust
+pub struct Ty<'tcx>(Interned<'tcx, WithStableHash<TyS<'tcx>>>);
+```
+
+At first glance, this struct looks quite esoteric. But on closer inspection,
+we will see that it contains many useful methods for type checking.
+
+For instance, [`is_char`][is_char] checks if the given `Ty` struct corresponds
+to the primitive character type.
+
+### `is_*` Usage
+
+In some scenarios, all we need to do is check if the `Ty` of an expression
+is a specific type, such as the `char` type, so we could write the following:
+
+```rust
+impl LateLintPass<'_> for MyStructLint {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ // Get type of `expr`
+ let ty = cx.typeck_results().expr_ty(expr);
+
+ // Check if the `Ty` of this expression is of character type
+ if ty.is_char() {
+ println!("Our expression is a char!");
+ }
+ }
+}
+```
+
+Furthermore, if we examine the [source code][is_char_source] for `is_char`,
+we find something very interesting:
+
+```rust
+#[inline]
+pub fn is_char(self) -> bool {
+ matches!(self.kind(), Char)
+}
+```
+
+Indeed, we just discovered `Ty`'s [`kind` method][kind], which provides us
+with the [`TyKind`][TyKind] of a `Ty`.
+
+## `TyKind`
+
+`TyKind` defines the kinds of types in Rust's type system.
+Peeking into [`TyKind` documentation][TyKind], we will see that it is an
+enum of 27 variants, including items such as `Bool`, `Int`, `Ref`, etc.
+
+### `kind` Usage
+
+The `TyKind` of a `Ty` can be obtained by calling the [`Ty.kind` method][kind].
+We often use this method to perform pattern matching in Clippy.
+
+For instance, if we want to check for a `struct`, we could examine whether
+`ty.kind()` corresponds to an [`Adt`][Adt] (algebraic data type) and whether its
+[`AdtDef`][AdtDef] is a struct:
+
+```rust
+impl LateLintPass<'_> for MyStructLint {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ // Get type of `expr`
+ let ty = cx.typeck_results().expr_ty(expr);
+ // Match its kind to enter the type
+        match ty.kind() {
+ ty::Adt(adt_def, _) if adt_def.is_struct() => println!("Our `expr` is a struct!"),
+ _ => ()
+ }
+ }
+}
+```
+
+## `hir::Ty` and `ty::Ty`
+
+We've been talking about [`ty::Ty`][middle_ty] this whole time without addressing [`hir::Ty`][hir_ty], but the latter
+is also important to understand.
+
+`hir::Ty` represents *what* the user wrote, while `ty::Ty` understands the meaning of it (because it has more
+information).
+
+**Example: `fn foo(x: u32) -> u32 { x }`**
+
+Here the HIR sees the types without "thinking" about them: it knows that the function takes a `u32` and returns
+a `u32`. At the `ty::Ty` level, however, the compiler understands that they're the same type and knows their lifetimes in depth.
+
+You can use the [`hir_ty_to_ty`][hir_ty_to_ty] function to convert from a `hir::Ty` to a `ty::Ty`.
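+
+As a minimal sketch (assuming the usual Clippy lint boilerplate; `MyStructLint` and the `is_char`
+check are placeholders), the conversion could be done inside a `check_ty` callback like this:
+
+```rust,ignore
+use rustc_hir_analysis::hir_ty_to_ty;
+
+impl LateLintPass<'_> for MyStructLint {
+    fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) {
+        // Lower the written `hir::Ty` to its semantic `ty::Ty` counterpart
+        let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+
+        if ty.is_char() {
+            // ...
+        }
+    }
+}
+```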
+
+## Useful Links
+
+Below are some useful links to further explore the concepts covered
+in this chapter:
+
+- [Stages of compilation](https://rustc-dev-guide.rust-lang.org/compiler-src.html#the-main-stages-of-compilation)
+- [Diagnostic items](https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-items.html)
+- [Type checking](https://rustc-dev-guide.rust-lang.org/type-checking.html)
+- [Ty module](https://rustc-dev-guide.rust-lang.org/ty.html)
+
+[Adt]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/enum.TyKind.html#variant.Adt
+[AdtDef]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/adt/struct.AdtDef.html
+[expr_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckResults.html#method.expr_ty
+[is_char]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.Ty.html#method.is_char
+[is_char_source]: https://doc.rust-lang.org/nightly/nightly-rustc/src/rustc_middle/ty/sty.rs.html#1831-1834
+[kind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.Ty.html#method.kind
+[LateContext]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LateContext.html
+[LateLintPass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.LateLintPass.html
+[pat_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TypeckResults.html#method.pat_ty
+[Ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.Ty.html
+[TyKind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/enum.TyKind.html
+[TypeckResults]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckResults.html
+[middle_ty]: https://doc.rust-lang.org/beta/nightly-rustc/rustc_middle/ty/struct.Ty.html
+[hir_ty]: https://doc.rust-lang.org/beta/nightly-rustc/rustc_hir/struct.Ty.html
+[hir_ty_to_ty]: https://doc.rust-lang.org/beta/nightly-rustc/rustc_hir_analysis/fn.hir_ty_to_ty.html
diff --git a/src/tools/clippy/book/src/installation.md b/src/tools/clippy/book/src/installation.md
index cce888b17..d54fff9de 100644
--- a/src/tools/clippy/book/src/installation.md
+++ b/src/tools/clippy/book/src/installation.md
@@ -17,8 +17,8 @@ $ rustup component add clippy [--toolchain=<name>]
## From Source
-Take a look at the [Basics] chapter in the Clippy developer guide to find step
-by step instructions on how to build and install Clippy from source.
+Take a look at the [Basics] chapter in the Clippy developer guide to find step-by-step
+instructions on how to build and install Clippy from source.
[Basics]: development/basics.md#install-from-source
[Usage]: usage.md
diff --git a/src/tools/clippy/book/src/lint_configuration.md b/src/tools/clippy/book/src/lint_configuration.md
index 995dd2f04..78e1a55cf 100644
--- a/src/tools/clippy/book/src/lint_configuration.md
+++ b/src/tools/clippy/book/src/lint_configuration.md
@@ -54,6 +54,7 @@ Please use that command to update the file and do not edit it by hand.
| [allow-mixed-uninlined-format-args](#allow-mixed-uninlined-format-args) | `true` |
| [suppress-restriction-lint-in-const](#suppress-restriction-lint-in-const) | `false` |
| [missing-docs-in-crate-items](#missing-docs-in-crate-items) | `false` |
+| [future-size-threshold](#future-size-threshold) | `16384` |
### arithmetic-side-effects-allowed
Suppress checking of the passed type names in all types of operations.
@@ -130,6 +131,7 @@ Suppress lints whenever the suggested change would cause breakage for other crat
* [option_option](https://rust-lang.github.io/rust-clippy/master/index.html#option_option)
* [linkedlist](https://rust-lang.github.io/rust-clippy/master/index.html#linkedlist)
* [rc_mutex](https://rust-lang.github.io/rust-clippy/master/index.html#rc_mutex)
+* [unnecessary_box_returns](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_box_returns)
### msrv
@@ -193,7 +195,7 @@ The maximum cognitive complexity a function can have
### disallowed-names
The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value
`".."` can be used as part of the list to indicate, that the configured values should be appended to the
-default configuration of Clippy. By default any configuration will replace the default value.
+default configuration of Clippy. By default, any configuration will replace the default value.
**Default Value:** `["foo", "baz", "quux"]` (`Vec<String>`)
@@ -203,7 +205,7 @@ default configuration of Clippy. By default any configuration will replace the d
### doc-valid-idents
The list of words this lint should not consider as identifiers needing ticks. The value
`".."` can be used as part of the list to indicate, that the configured values should be appended to the
-default configuration of Clippy. By default any configuraction will replace the default value. For example:
+default configuration of Clippy. By default, any configuration will replace the default value. For example:
* `doc-valid-idents = ["ClipPy"]` would replace the default list with `["ClipPy"]`.
* `doc-valid-idents = ["ClipPy", ".."]` would append `ClipPy` to the default list.
@@ -413,7 +415,7 @@ For internal testing only, ignores the current `publish` settings in the Cargo m
Enforce the named macros always use the braces specified.
A `MacroMatcher` can be added like so `{ name = "macro_name", brace = "(" }`. If the macro
-is could be used with a full path two `MacroMatcher`s have to be added one with the full path
+could be used with a full path, two `MacroMatcher`s have to be added: one with the full path
`crate_name::macro_name` and one with just the macro name.
**Default Value:** `[]` (`Vec<crate::nonstandard_macro_braces::MacroMatcher>`)
@@ -447,7 +449,7 @@ Whether to apply the raw pointer heuristic to determine if a type is `Send`.
### max-suggested-slice-pattern-length
When Clippy suggests using a slice pattern, this is the maximum number of elements allowed in
-the slice pattern that is suggested. If more elements would be necessary, the lint is suppressed.
+the slice pattern that is suggested. If more elements are necessary, the lint is suppressed.
For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements.
**Default Value:** `3` (`u64`)
@@ -519,6 +521,7 @@ for the generic parameters for determining interior mutability
**Default Value:** `["bytes::Bytes"]` (`Vec<String>`)
* [mutable_key_type](https://rust-lang.github.io/rust-clippy/master/index.html#mutable_key_type)
+* [ifs_same_cond](https://rust-lang.github.io/rust-clippy/master/index.html#ifs_same_cond)
### allow-mixed-uninlined-format-args
@@ -550,4 +553,12 @@ crate. For example, `pub(crate)` items.
* [missing_docs_in_private_items](https://rust-lang.github.io/rust-clippy/master/index.html#missing_docs_in_private_items)
+### future-size-threshold
+The maximum byte size a `Future` can have before it triggers the `clippy::large_futures` lint
+
+**Default Value:** `16384` (`u64`)
+
+* [large_futures](https://rust-lang.github.io/rust-clippy/master/index.html#large_futures)
+
+
diff --git a/src/tools/clippy/book/src/lints.md b/src/tools/clippy/book/src/lints.md
index 35e30960b..442dc6391 100644
--- a/src/tools/clippy/book/src/lints.md
+++ b/src/tools/clippy/book/src/lints.md
@@ -17,7 +17,7 @@ The different lint groups were defined in the [Clippy 1.0 RFC].
The `clippy::correctness` group is the only lint group in Clippy which lints are
deny-by-default and abort the compilation when triggered. This is for good
reason: If you see a `correctness` lint, it means that your code is outright
-wrong or useless and you should try to fix it.
+wrong or useless, and you should try to fix it.
Lints in this category are carefully picked and should be free of false
positives. So just `#[allow]`ing those lints is not recommended.
@@ -41,7 +41,7 @@ simplify your code. It mostly focuses on code that can be written in a shorter
and more readable way, while preserving the semantics.
If you should see a complexity lint, it usually means that you can remove or
-replace some code and it is recommended to do so. However, if you need the more
+replace some code, and it is recommended to do so. However, if you need the more
complex code for some expressiveness reason, it is recommended to allow
complexity lints on a case-by-case basis.
@@ -50,9 +50,9 @@ complexity lints on a case-by-case basis.
The `clippy::perf` group gives you suggestions on how you can increase the
performance of your code. Those lints are mostly about code that the compiler
can't trivially optimize, but has to be written in a slightly different way to
-make the optimizer's job easier.
+make the optimizer job easier.
-Perf lints are usually easy to apply and it is recommended to do so.
+Perf lints are usually easy to apply, and it is recommended to do so.
## Style
@@ -91,7 +91,7 @@ and your use case.
Lints from this group will restrict you in some way. If you enable a restriction
lint for your crate it is recommended to also fix code that this lint triggers
-on. However, those lints are really strict by design and you might want to
+on. However, those lints are really strict by design, and you might want to
`#[allow]` them in some special cases, with a comment justifying that.
## Cargo
diff --git a/src/tools/clippy/book/src/usage.md b/src/tools/clippy/book/src/usage.md
index 61a90445d..32084a919 100644
--- a/src/tools/clippy/book/src/usage.md
+++ b/src/tools/clippy/book/src/usage.md
@@ -19,7 +19,7 @@ cargo clippy
### Lint configuration
The above command will run the default set of lints, which are included in the
-lint group `clippy::all`. You might want to use even more lints or you might not
+lint group `clippy::all`. You might want to use even more lints, or you may not
agree with every Clippy lint, and for that there are ways to configure lint
levels.
@@ -98,7 +98,7 @@ other of Clippy's lint groups.
You can configure lint levels in source code the same way you can configure
`rustc` lints:
-```rust
+```rust,ignore
#![allow(clippy::style)]
#[warn(clippy::double_neg)]
diff --git a/src/tools/clippy/clippy_dev/src/lib.rs b/src/tools/clippy/clippy_dev/src/lib.rs
index e70488165..3a8b070d7 100644
--- a/src/tools/clippy/clippy_dev/src/lib.rs
+++ b/src/tools/clippy/clippy_dev/src/lib.rs
@@ -1,5 +1,5 @@
+#![feature(lazy_cell)]
#![feature(let_chains)]
-#![feature(once_cell)]
#![feature(rustc_private)]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
// warn on lints, that are included in `rust-lang/rust`s bootstrap
diff --git a/src/tools/clippy/clippy_dev/src/new_lint.rs b/src/tools/clippy/clippy_dev/src/new_lint.rs
index 420214d92..13a277034 100644
--- a/src/tools/clippy/clippy_dev/src/new_lint.rs
+++ b/src/tools/clippy/clippy_dev/src/new_lint.rs
@@ -369,9 +369,7 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R
}}
todo!();
}}
- "#,
- context_import = context_import,
- name_upper = name_upper,
+ "#
);
} else {
let _: fmt::Result = writedoc!(
@@ -385,9 +383,7 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R
pub(super) fn check(cx: &{context_import}) {{
todo!();
}}
- "#,
- context_import = context_import,
- name_upper = name_upper,
+ "#
);
}
diff --git a/src/tools/clippy/clippy_dev/src/update_lints.rs b/src/tools/clippy/clippy_dev/src/update_lints.rs
index 779e4d0e1..95222a9ac 100644
--- a/src/tools/clippy/clippy_dev/src/update_lints.rs
+++ b/src/tools/clippy/clippy_dev/src/update_lints.rs
@@ -537,17 +537,13 @@ fn declare_deprecated(name: &str, path: &Path, reason: &str) -> io::Result<()> {
/// Nothing. This lint has been deprecated.
///
/// ### Deprecation reason
- /// {}
- #[clippy::version = \"{}\"]
- pub {},
- \"{}\"
+ /// {deprecation_reason}
+ #[clippy::version = \"{version}\"]
+ pub {name},
+ \"{reason}\"
}}
- ",
- deprecation_reason,
- version,
- name,
- reason,
+ "
)
}
diff --git a/src/tools/clippy/clippy_lints/Cargo.toml b/src/tools/clippy/clippy_lints/Cargo.toml
index 796f1ff16..18e8bf772 100644
--- a/src/tools/clippy/clippy_lints/Cargo.toml
+++ b/src/tools/clippy/clippy_lints/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy_lints"
-version = "0.1.69"
+version = "0.1.70"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
@@ -9,6 +9,7 @@ keywords = ["clippy", "lint", "plugin"]
edition = "2021"
[dependencies]
+arrayvec = { version = "0.7", default-features = false }
cargo_metadata = "0.15.3"
clippy_utils = { path = "../clippy_utils" }
declare_clippy_lint = { path = "../declare_clippy_lint" }
diff --git a/src/tools/clippy/clippy_lints/src/allow_attributes.rs b/src/tools/clippy/clippy_lints/src/allow_attributes.rs
new file mode 100644
index 000000000..15d46e954
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/allow_attributes.rs
@@ -0,0 +1,71 @@
+use ast::AttrStyle;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast as ast;
+use rustc_errors::Applicability;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+    /// Detects uses of the `#[allow]` attribute and suggests replacing it with
+    /// the `#[expect]` attribute (see [RFC 2383](https://rust-lang.github.io/rfcs/2383-lint-reasons.html)).
+    ///
+    /// The `#[expect]` attribute is still unstable and requires the `lint_reasons`
+    /// feature on nightly. It can be enabled by adding `#![feature(lint_reasons)]` to
+    /// the crate root.
+    ///
+    /// This lint only warns on outer attributes (`#[allow]`), as inner attributes
+    /// (`#![allow]`) are usually used to enable or disable lints on a global scale.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// `#[expect]` attributes suppress the lint emission, but emit a warning, if
+ /// the expectation is unfulfilled. This can be useful to be notified when the
+ /// lint is no longer triggered.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// #[allow(unused_mut)]
+ /// fn foo() -> usize {
+ /// let mut a = Vec::new();
+ /// a.len()
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// #![feature(lint_reasons)]
+ /// #[expect(unused_mut)]
+ /// fn foo() -> usize {
+ /// let mut a = Vec::new();
+ /// a.len()
+ /// }
+ /// ```
+ #[clippy::version = "1.69.0"]
+ pub ALLOW_ATTRIBUTES,
+ restriction,
+ "`#[allow]` will not trigger if a warning isn't found. `#[expect]` triggers if there are no warnings."
+}
+
+declare_lint_pass!(AllowAttribute => [ALLOW_ATTRIBUTES]);
+
+impl LateLintPass<'_> for AllowAttribute {
+ // Separate each crate's features.
+ fn check_attribute(&mut self, cx: &LateContext<'_>, attr: &ast::Attribute) {
+ if_chain! {
+ if cx.tcx.features().lint_reasons;
+ if let AttrStyle::Outer = attr.style;
+ if let Some(ident) = attr.ident();
+ if ident.name == rustc_span::symbol::sym::allow;
+ then {
+ span_lint_and_sugg(
+ cx,
+ ALLOW_ATTRIBUTES,
+ ident.span,
+ "#[allow] attribute found",
+ "replace it with",
+ "expect".into(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/almost_complete_range.rs b/src/tools/clippy/clippy_lints/src/almost_complete_range.rs
index 42e14b5cd..32d80f42e 100644
--- a/src/tools/clippy/clippy_lints/src/almost_complete_range.rs
+++ b/src/tools/clippy/clippy_lints/src/almost_complete_range.rs
@@ -24,7 +24,7 @@ declare_clippy_lint! {
/// ```rust
/// let _ = 'a'..='z';
/// ```
- #[clippy::version = "1.63.0"]
+ #[clippy::version = "1.68.0"]
pub ALMOST_COMPLETE_RANGE,
suspicious,
"almost complete range"
diff --git a/src/tools/clippy/clippy_lints/src/booleans.rs b/src/tools/clippy/clippy_lints/src/booleans.rs
index e8106beec..455f0df7c 100644
--- a/src/tools/clippy/clippy_lints/src/booleans.rs
+++ b/src/tools/clippy/clippy_lints/src/booleans.rs
@@ -7,7 +7,7 @@ use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, UnOp};
-use rustc_lint::{LateContext, LateLintPass};
+use rustc_lint::{LateContext, LateLintPass, Level};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::def_id::LocalDefId;
use rustc_span::source_map::Span;
@@ -430,23 +430,25 @@ impl<'a, 'tcx> NonminimalBoolVisitor<'a, 'tcx> {
}
}
let nonminimal_bool_lint = |suggestions: Vec<_>| {
- span_lint_hir_and_then(
- self.cx,
- NONMINIMAL_BOOL,
- e.hir_id,
- e.span,
- "this boolean expression can be simplified",
- |diag| {
- diag.span_suggestions(
- e.span,
- "try",
- suggestions.into_iter(),
- // nonminimal_bool can produce minimal but
- // not human readable expressions (#3141)
- Applicability::Unspecified,
- );
- },
- );
+ if self.cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, e.hir_id).0 != Level::Allow {
+ span_lint_hir_and_then(
+ self.cx,
+ NONMINIMAL_BOOL,
+ e.hir_id,
+ e.span,
+ "this boolean expression can be simplified",
+ |diag| {
+ diag.span_suggestions(
+ e.span,
+ "try",
+ suggestions.into_iter(),
+ // nonminimal_bool can produce minimal but
+ // not human readable expressions (#3141)
+ Applicability::Unspecified,
+ );
+ },
+ );
+ }
};
if improvements.is_empty() {
let mut visitor = NotSimplificationVisitor { cx: self.cx };
@@ -495,18 +497,20 @@ struct NotSimplificationVisitor<'a, 'tcx> {
impl<'a, 'tcx> Visitor<'tcx> for NotSimplificationVisitor<'a, 'tcx> {
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
- if let ExprKind::Unary(UnOp::Not, inner) = &expr.kind {
- if let Some(suggestion) = simplify_not(self.cx, inner) {
- span_lint_and_sugg(
- self.cx,
- NONMINIMAL_BOOL,
- expr.span,
- "this boolean expression can be simplified",
- "try",
- suggestion,
- Applicability::MachineApplicable,
- );
- }
+ if let ExprKind::Unary(UnOp::Not, inner) = &expr.kind &&
+ !inner.span.from_expansion() &&
+ let Some(suggestion) = simplify_not(self.cx, inner)
+ && self.cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, expr.hir_id).0 != Level::Allow
+ {
+ span_lint_and_sugg(
+ self.cx,
+ NONMINIMAL_BOOL,
+ expr.span,
+ "this boolean expression can be simplified",
+ "try",
+ suggestion,
+ Applicability::MachineApplicable,
+ );
}
walk_expr(self, expr);
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs
index 823970e35..95c2ecbf7 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs
@@ -2,8 +2,9 @@ use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
use clippy_utils::expr_or_init;
use clippy_utils::source::snippet;
+use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{get_discriminant_value, is_isize_or_usize};
-use rustc_errors::{Applicability, SuggestionStyle};
+use rustc_errors::{Applicability, Diagnostic, SuggestionStyle};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
@@ -163,19 +164,34 @@ pub(super) fn check(
_ => return,
};
- let name_of_cast_from = snippet(cx, cast_expr.span, "..");
- let cast_to_snip = snippet(cx, cast_to_span, "..");
- let suggestion = format!("{cast_to_snip}::try_from({name_of_cast_from})");
-
span_lint_and_then(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg, |diag| {
diag.help("if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...");
- diag.span_suggestion_with_style(
- expr.span,
- "... or use `try_from` and handle the error accordingly",
- suggestion,
- Applicability::Unspecified,
- // always show the suggestion in a separate line
- SuggestionStyle::ShowAlways,
- );
+ if !cast_from.is_floating_point() {
+ offer_suggestion(cx, expr, cast_expr, cast_to_span, diag);
+ }
});
}
+
+fn offer_suggestion(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ cast_expr: &Expr<'_>,
+ cast_to_span: Span,
+ diag: &mut Diagnostic,
+) {
+ let cast_to_snip = snippet(cx, cast_to_span, "..");
+ let suggestion = if cast_to_snip == "_" {
+ format!("{}.try_into()", Sugg::hir(cx, cast_expr, "..").maybe_par())
+ } else {
+ format!("{cast_to_snip}::try_from({})", Sugg::hir(cx, cast_expr, ".."))
+ };
+
+ diag.span_suggestion_with_style(
+ expr.span,
+ "... or use `try_from` and handle the error accordingly",
+ suggestion,
+ Applicability::Unspecified,
+ // always show the suggestion in a separate line
+ SuggestionStyle::ShowAlways,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
index 627b795d6..1233c632a 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::{match_def_path, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
@@ -34,6 +34,8 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>,
if let ExprKind::Path(ref qpath) = fun.kind;
if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
if let Some(rpk) = raw_parts_kind(cx, fun_def_id);
+ let ctxt = expr.span.ctxt();
+ if cast_expr.span.ctxt() == ctxt;
then {
let func = match rpk {
RawPartsKind::Immutable => "from_raw_parts",
@@ -41,8 +43,8 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>,
};
let span = expr.span;
let mut applicability = Applicability::MachineApplicable;
- let ptr = snippet_with_applicability(cx, ptr_arg.span, "ptr", &mut applicability);
- let len = snippet_with_applicability(cx, len_arg.span, "len", &mut applicability);
+ let ptr = snippet_with_context(cx, ptr_arg.span, ctxt, "ptr", &mut applicability).0;
+ let len = snippet_with_context(cx, len_arg.span, ctxt, "len", &mut applicability).0;
span_lint_and_sugg(
cx,
CAST_SLICE_FROM_RAW_PARTS,
diff --git a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
index e8531157e..a8926b29a 100644
--- a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
+++ b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
@@ -143,7 +143,7 @@ impl<'tcx> LateLintPass<'tcx> for CognitiveComplexity {
span: Span,
def_id: LocalDefId,
) {
- if !cx.tcx.has_attr(def_id.to_def_id(), sym::test) {
+ if !cx.tcx.has_attr(def_id, sym::test) {
let expr = if is_async_fn(kind) {
match get_async_fn_body(cx.tcx, body) {
Some(b) => b,
diff --git a/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs b/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs
new file mode 100644
index 000000000..5e2eb5789
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/collection_is_never_read.rs
@@ -0,0 +1,141 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
+use clippy_utils::visitors::for_each_expr_with_closures;
+use clippy_utils::{get_enclosing_block, get_parent_node, path_to_local_id};
+use core::ops::ControlFlow;
+use rustc_hir::{Block, ExprKind, HirId, LangItem, Local, Node, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+use rustc_span::Symbol;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for collections that are never queried.
+ ///
+ /// ### Why is this bad?
+ /// Putting effort into constructing a collection but then never querying it might indicate that
+ /// the author forgot to do whatever they intended to do with the collection. Example: Clone
+ /// a vector, sort it for iteration, but then mistakenly iterate the original vector
+ /// instead.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let samples = vec![3, 1, 2];
+ /// let mut sorted_samples = samples.clone();
+ /// sorted_samples.sort();
+ /// for sample in &samples { // Oops, meant to use `sorted_samples`.
+ /// println!("{sample}");
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let samples = vec![3, 1, 2];
+ /// let mut sorted_samples = samples.clone();
+ /// sorted_samples.sort();
+ /// for sample in &sorted_samples {
+ /// println!("{sample}");
+ /// }
+ /// ```
+ #[clippy::version = "1.69.0"]
+ pub COLLECTION_IS_NEVER_READ,
+ nursery,
+ "a collection is never queried"
+}
+declare_lint_pass!(CollectionIsNeverRead => [COLLECTION_IS_NEVER_READ]);
+
+// Add `String` here when it is added to diagnostic items
+static COLLECTIONS: [Symbol; 9] = [
+ sym::BTreeMap,
+ sym::BTreeSet,
+ sym::BinaryHeap,
+ sym::HashMap,
+ sym::HashSet,
+ sym::LinkedList,
+ sym::Option,
+ sym::Vec,
+ sym::VecDeque,
+];
+
+impl<'tcx> LateLintPass<'tcx> for CollectionIsNeverRead {
+ fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
+        // Look for local variables whose type is a container. Search the surrounding block for read access.
+ if match_acceptable_type(cx, local, &COLLECTIONS)
+ && let PatKind::Binding(_, local_id, _, _) = local.pat.kind
+ && let Some(enclosing_block) = get_enclosing_block(cx, local.hir_id)
+ && has_no_read_access(cx, local_id, enclosing_block)
+ {
+ span_lint(cx, COLLECTION_IS_NEVER_READ, local.span, "collection is never read");
+ }
+ }
+}
+
+fn match_acceptable_type(cx: &LateContext<'_>, local: &Local<'_>, collections: &[rustc_span::Symbol]) -> bool {
+ let ty = cx.typeck_results().pat_ty(local.pat);
+ collections.iter().any(|&sym| is_type_diagnostic_item(cx, ty, sym))
+ // String type is a lang item but not a diagnostic item for now so we need a separate check
+ || is_type_lang_item(cx, ty, LangItem::String)
+}
+
+fn has_no_read_access<'tcx>(cx: &LateContext<'tcx>, id: HirId, block: &'tcx Block<'tcx>) -> bool {
+ let mut has_access = false;
+ let mut has_read_access = false;
+
+ // Inspect all expressions and sub-expressions in the block.
+ for_each_expr_with_closures(cx, block, |expr| {
+ // Ignore expressions that are not simply `id`.
+ if !path_to_local_id(expr, id) {
+ return ControlFlow::Continue(());
+ }
+
+ // `id` is being accessed. Investigate if it's a read access.
+ has_access = true;
+
+ // `id` appearing in the left-hand side of an assignment is not a read access:
+ //
+ // id = ...; // Not reading `id`.
+ if let Some(Node::Expr(parent)) = get_parent_node(cx.tcx, expr.hir_id)
+ && let ExprKind::Assign(lhs, ..) = parent.kind
+ && path_to_local_id(lhs, id)
+ {
+ return ControlFlow::Continue(());
+ }
+
+ // Look for method call with receiver `id`. It might be a non-read access:
+ //
+ // id.foo(args)
+ //
+ // Only assuming this for "official" methods defined on the type. For methods defined in extension
+ // traits (identified as local, based on the orphan rule), pessimistically assume that they might
+ // have side effects, so consider them a read.
+ if let Some(Node::Expr(parent)) = get_parent_node(cx.tcx, expr.hir_id)
+ && let ExprKind::MethodCall(_, receiver, _, _) = parent.kind
+ && path_to_local_id(receiver, id)
+ && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(parent.hir_id)
+ && !method_def_id.is_local()
+ {
+ // The method call is a statement, so the return value is not used. That's not a read access:
+ //
+ // id.foo(args);
+ if let Some(Node::Stmt(..)) = get_parent_node(cx.tcx, parent.hir_id) {
+ return ControlFlow::Continue(());
+ }
+
+ // The method call is not a statement, so its return value is used somehow but its type is the
+ // unit type, so this is not a real read access. Examples:
+ //
+ // let y = x.clear();
+ // println!("{:?}", x.clear());
+ if cx.typeck_results().expr_ty(parent).is_unit() {
+ return ControlFlow::Continue(());
+ }
+ }
+
+ // Any other access to `id` is a read access. Stop searching.
+ has_read_access = true;
+ ControlFlow::Break(())
+ });
+
+ // Ignore collections that have no access at all. Other lints should catch them.
+ has_access && !has_read_access
+}
diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs
index f10c35cde..970f50049 100644
--- a/src/tools/clippy/clippy_lints/src/copies.rs
+++ b/src/tools/clippy/clippy_lints/src/copies.rs
@@ -1,18 +1,20 @@
use clippy_utils::diagnostics::{span_lint_and_note, span_lint_and_then};
use clippy_utils::source::{first_line_of_span, indent_of, reindent_multiline, snippet, snippet_opt};
-use clippy_utils::ty::needs_ordered_drop;
+use clippy_utils::ty::{is_interior_mut_ty, needs_ordered_drop};
use clippy_utils::visitors::for_each_expr;
use clippy_utils::{
- capture_local_usage, eq_expr_value, get_enclosing_block, hash_expr, hash_stmt, if_sequence, is_else_clause,
- is_lint_allowed, path_to_local, search_same, ContainsName, HirEqInterExpr, SpanlessEq,
+ capture_local_usage, def_path_def_ids, eq_expr_value, find_binding_init, get_enclosing_block, hash_expr, hash_stmt,
+ if_sequence, is_else_clause, is_lint_allowed, path_to_local, search_same, ContainsName, HirEqInterExpr, SpanlessEq,
};
use core::iter;
use core::ops::ControlFlow;
use rustc_errors::Applicability;
+use rustc_hir::def_id::DefIdSet;
use rustc_hir::intravisit;
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_middle::query::Key;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::hygiene::walk_chain;
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, Span, Symbol};
@@ -159,7 +161,21 @@ declare_clippy_lint! {
"`if` statement with shared code in all blocks"
}
-declare_lint_pass!(CopyAndPaste => [
+pub struct CopyAndPaste {
+ ignore_interior_mutability: Vec<String>,
+ ignored_ty_ids: DefIdSet,
+}
+
+impl CopyAndPaste {
+ pub fn new(ignore_interior_mutability: Vec<String>) -> Self {
+ Self {
+ ignore_interior_mutability,
+ ignored_ty_ids: DefIdSet::new(),
+ }
+ }
+}
+
+impl_lint_pass!(CopyAndPaste => [
IFS_SAME_COND,
SAME_FUNCTIONS_IN_IF_CONDITION,
IF_SAME_THEN_ELSE,
@@ -167,10 +183,18 @@ declare_lint_pass!(CopyAndPaste => [
]);
impl<'tcx> LateLintPass<'tcx> for CopyAndPaste {
+ fn check_crate(&mut self, cx: &LateContext<'tcx>) {
+ for ignored_ty in &self.ignore_interior_mutability {
+ let path: Vec<&str> = ignored_ty.split("::").collect();
+ for id in def_path_def_ids(cx, path.as_slice()) {
+ self.ignored_ty_ids.insert(id);
+ }
+ }
+ }
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if !expr.span.from_expansion() && matches!(expr.kind, ExprKind::If(..)) && !is_else_clause(cx.tcx, expr) {
let (conds, blocks) = if_sequence(expr);
- lint_same_cond(cx, &conds);
+ lint_same_cond(cx, &conds, &self.ignored_ty_ids);
lint_same_fns_in_if_cond(cx, &conds);
let all_same =
!is_lint_allowed(cx, IF_SAME_THEN_ELSE, expr.hir_id) && lint_if_same_then_else(cx, &conds, &blocks);
@@ -547,9 +571,39 @@ fn check_for_warn_of_moved_symbol(cx: &LateContext<'_>, symbols: &[(HirId, Symbo
})
}
+fn method_caller_is_mutable(cx: &LateContext<'_>, caller_expr: &Expr<'_>, ignored_ty_ids: &DefIdSet) -> bool {
+ let caller_ty = cx.typeck_results().expr_ty(caller_expr);
+    // Check if the given type has interior mutability and was not configured to be ignored
+ let is_inner_mut_ty = is_interior_mut_ty(cx, caller_ty)
+ && !matches!(caller_ty.ty_adt_id(), Some(adt_id) if ignored_ty_ids.contains(&adt_id));
+
+ is_inner_mut_ty
+ || caller_ty.is_mutable_ptr()
+        // `find_binding_init` will return the binding iff it's not mutable
+ || path_to_local(caller_expr)
+ .and_then(|hid| find_binding_init(cx, hid))
+ .is_none()
+}
+
/// Implementation of `IFS_SAME_COND`.
-fn lint_same_cond(cx: &LateContext<'_>, conds: &[&Expr<'_>]) {
- for (i, j) in search_same(conds, |e| hash_expr(cx, e), |lhs, rhs| eq_expr_value(cx, lhs, rhs)) {
+fn lint_same_cond(cx: &LateContext<'_>, conds: &[&Expr<'_>], ignored_ty_ids: &DefIdSet) {
+ for (i, j) in search_same(
+ conds,
+ |e| hash_expr(cx, e),
+ |lhs, rhs| {
+            // Ignore eq_expr side effects iff one of the expression kinds is a method call
+            // and the caller is not mutable, including interior-mutable types.
+ if let ExprKind::MethodCall(_, caller, _, _) = lhs.kind {
+ if method_caller_is_mutable(cx, caller, ignored_ty_ids) {
+ false
+ } else {
+ SpanlessEq::new(cx).eq_expr(lhs, rhs)
+ }
+ } else {
+ eq_expr_value(cx, lhs, rhs)
+ }
+ },
+ ) {
span_lint_and_note(
cx,
IFS_SAME_COND,
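Roughly the distinction the reworked `ifs_same_cond` check draws, as a standalone sketch (the type and values are illustrative, and the clippy.toml key follows the usual dashed spelling of the new `ignore_interior_mutability` option):

    use std::cell::Cell;

    fn main() {
        let counter = Cell::new(0);

        // Both conditions are textually identical, but `counter.get()` is a method
        // call on an interior-mutable receiver, so its result can change between
        // evaluations; the check therefore stays silent here unless `Cell` is
        // listed under `ignore-interior-mutability` in clippy.toml.
        if counter.get() == 0 {
            counter.set(1);
        } else if counter.get() == 0 {
            counter.set(2);
        }
    }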
diff --git a/src/tools/clippy/clippy_lints/src/declared_lints.rs b/src/tools/clippy/clippy_lints/src/declared_lints.rs
index cd5dd7a57..f24dab627 100644
--- a/src/tools/clippy/clippy_lints/src/declared_lints.rs
+++ b/src/tools/clippy/clippy_lints/src/declared_lints.rs
@@ -35,6 +35,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::utils::internal_lints::produce_ice::PRODUCE_ICE_INFO,
#[cfg(feature = "internal")]
crate::utils::internal_lints::unnecessary_def_path::UNNECESSARY_DEF_PATH_INFO,
+ crate::allow_attributes::ALLOW_ATTRIBUTES_INFO,
crate::almost_complete_range::ALMOST_COMPLETE_RANGE_INFO,
crate::approx_const::APPROX_CONSTANT_INFO,
crate::as_conversions::AS_CONVERSIONS_INFO,
@@ -92,6 +93,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::cognitive_complexity::COGNITIVE_COMPLEXITY_INFO,
crate::collapsible_if::COLLAPSIBLE_ELSE_IF_INFO,
crate::collapsible_if::COLLAPSIBLE_IF_INFO,
+ crate::collection_is_never_read::COLLECTION_IS_NEVER_READ_INFO,
crate::comparison_chain::COMPARISON_CHAIN_INFO,
crate::copies::BRANCHES_SHARING_CODE_INFO,
crate::copies::IFS_SAME_COND_INFO,
@@ -216,6 +218,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::iter_not_returning_iterator::ITER_NOT_RETURNING_ITERATOR_INFO,
crate::large_const_arrays::LARGE_CONST_ARRAYS_INFO,
crate::large_enum_variant::LARGE_ENUM_VARIANT_INFO,
+ crate::large_futures::LARGE_FUTURES_INFO,
crate::large_include_file::LARGE_INCLUDE_FILE_INFO,
crate::large_stack_arrays::LARGE_STACK_ARRAYS_INFO,
crate::len_zero::COMPARISON_TO_EMPTY_INFO,
@@ -226,8 +229,10 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::let_underscore::LET_UNDERSCORE_LOCK_INFO,
crate::let_underscore::LET_UNDERSCORE_MUST_USE_INFO,
crate::let_underscore::LET_UNDERSCORE_UNTYPED_INFO,
+ crate::let_with_type_underscore::LET_WITH_TYPE_UNDERSCORE_INFO,
crate::lifetimes::EXTRA_UNUSED_LIFETIMES_INFO,
crate::lifetimes::NEEDLESS_LIFETIMES_INFO,
+ crate::lines_filter_map_ok::LINES_FILTER_MAP_OK_INFO,
crate::literal_representation::DECIMAL_LITERAL_REPRESENTATION_INFO,
crate::literal_representation::INCONSISTENT_DIGIT_GROUPING_INFO,
crate::literal_representation::LARGE_DIGIT_GROUPS_INFO,
@@ -260,9 +265,11 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::manual_clamp::MANUAL_CLAMP_INFO,
crate::manual_is_ascii_check::MANUAL_IS_ASCII_CHECK_INFO,
crate::manual_let_else::MANUAL_LET_ELSE_INFO,
+ crate::manual_main_separator_str::MANUAL_MAIN_SEPARATOR_STR_INFO,
crate::manual_non_exhaustive::MANUAL_NON_EXHAUSTIVE_INFO,
crate::manual_rem_euclid::MANUAL_REM_EUCLID_INFO,
crate::manual_retain::MANUAL_RETAIN_INFO,
+ crate::manual_slice_size_calculation::MANUAL_SLICE_SIZE_CALCULATION_INFO,
crate::manual_string_new::MANUAL_STRING_NEW_INFO,
crate::manual_strip::MANUAL_STRIP_INFO,
crate::map_unit_fn::OPTION_MAP_UNIT_FN_INFO,
@@ -303,6 +310,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS_INFO,
crate::methods::CHARS_LAST_CMP_INFO,
crate::methods::CHARS_NEXT_CMP_INFO,
+ crate::methods::CLEAR_WITH_DRAIN_INFO,
crate::methods::CLONED_INSTEAD_OF_COPIED_INFO,
crate::methods::CLONE_DOUBLE_REF_INFO,
crate::methods::CLONE_ON_COPY_INFO,
@@ -416,6 +424,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::misc_early::UNSEPARATED_LITERAL_SUFFIX_INFO,
crate::misc_early::ZERO_PREFIXED_LITERAL_INFO,
crate::mismatching_type_param_order::MISMATCHING_TYPE_PARAM_ORDER_INFO,
+ crate::missing_assert_message::MISSING_ASSERT_MESSAGE_INFO,
crate::missing_const_for_fn::MISSING_CONST_FOR_FN_INFO,
crate::missing_doc::MISSING_DOCS_IN_PRIVATE_ITEMS_INFO,
crate::missing_enforced_import_rename::MISSING_ENFORCED_IMPORT_RENAMES_INFO,
@@ -517,6 +526,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::ranges::REVERSED_EMPTY_RANGES_INFO,
crate::rc_clone_in_vec_init::RC_CLONE_IN_VEC_INIT_INFO,
crate::read_zero_byte_vec::READ_ZERO_BYTE_VEC_INFO,
+ crate::redundant_async_block::REDUNDANT_ASYNC_BLOCK_INFO,
crate::redundant_clone::REDUNDANT_CLONE_INFO,
crate::redundant_closure_call::REDUNDANT_CLOSURE_CALL_INFO,
crate::redundant_else::REDUNDANT_ELSE_INFO,
@@ -559,6 +569,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::strings::STR_TO_STRING_INFO,
crate::strings::TRIM_SPLIT_WHITESPACE_INFO,
crate::strlen_on_c_strings::STRLEN_ON_C_STRINGS_INFO,
+ crate::suspicious_doc_comments::SUSPICIOUS_DOC_COMMENTS_INFO,
crate::suspicious_operation_groupings::SUSPICIOUS_OPERATION_GROUPINGS_INFO,
crate::suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL_INFO,
crate::suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL_INFO,
@@ -568,6 +579,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::swap_ptr_to_ref::SWAP_PTR_TO_REF_INFO,
crate::tabs_in_doc_comments::TABS_IN_DOC_COMMENTS_INFO,
crate::temporary_assignment::TEMPORARY_ASSIGNMENT_INFO,
+ crate::tests_outside_test_module::TESTS_OUTSIDE_TEST_MODULE_INFO,
crate::to_digit_is_some::TO_DIGIT_IS_SOME_INFO,
crate::trailing_empty_array::TRAILING_EMPTY_ARRAY_INFO,
crate::trait_bounds::TRAIT_DUPLICATION_IN_BOUNDS_INFO,
@@ -610,8 +622,10 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::unit_types::UNIT_CMP_INFO,
crate::unnamed_address::FN_ADDRESS_COMPARISONS_INFO,
crate::unnamed_address::VTABLE_ADDRESS_COMPARISONS_INFO,
+ crate::unnecessary_box_returns::UNNECESSARY_BOX_RETURNS_INFO,
crate::unnecessary_owned_empty_strings::UNNECESSARY_OWNED_EMPTY_STRINGS_INFO,
crate::unnecessary_self_imports::UNNECESSARY_SELF_IMPORTS_INFO,
+ crate::unnecessary_struct_initialization::UNNECESSARY_STRUCT_INITIALIZATION_INFO,
crate::unnecessary_wraps::UNNECESSARY_WRAPS_INFO,
crate::unnested_or_patterns::UNNESTED_OR_PATTERNS_INFO,
crate::unsafe_removed_from_name::UNSAFE_REMOVED_FROM_NAME_INFO,
diff --git a/src/tools/clippy/clippy_lints/src/default.rs b/src/tools/clippy/clippy_lints/src/default.rs
index 080d44e63..80c22742b 100644
--- a/src/tools/clippy/clippy_lints/src/default.rs
+++ b/src/tools/clippy/clippy_lints/src/default.rs
@@ -1,5 +1,5 @@
use clippy_utils::diagnostics::{span_lint_and_note, span_lint_and_sugg};
-use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{has_drop, is_copy};
use clippy_utils::{
any_parent_is_automatically_derived, contains_name, get_parent_expr, is_from_proc_macro, match_def_path, paths,
@@ -160,6 +160,8 @@ impl<'tcx> LateLintPass<'tcx> for Default {
}
};
+ let init_ctxt = local.span.ctxt();
+
// find all "later statement"'s where the fields of the binding set as
// Default::default() get reassigned, unless the reassignment refers to the original binding
let mut first_assign = None;
@@ -169,7 +171,7 @@ impl<'tcx> LateLintPass<'tcx> for Default {
// find out if and which field was set by this `consecutive_statement`
if let Some((field_ident, assign_rhs)) = field_reassigned_by_stmt(consecutive_statement, binding_name) {
// interrupt and cancel lint if assign_rhs references the original binding
- if contains_name(binding_name, assign_rhs, cx) {
+ if contains_name(binding_name, assign_rhs, cx) || init_ctxt != consecutive_statement.span.ctxt() {
cancel_lint = true;
break;
}
@@ -204,11 +206,12 @@ impl<'tcx> LateLintPass<'tcx> for Default {
.iter()
.all(|field| assigned_fields.iter().any(|(a, _)| a == &field.name));
+ let mut app = Applicability::Unspecified;
let field_list = assigned_fields
.into_iter()
.map(|(field, rhs)| {
// extract and store the assigned value for help message
- let value_snippet = snippet_with_macro_callsite(cx, rhs.span, "..");
+ let value_snippet = snippet_with_context(cx, rhs.span, init_ctxt, "..", &mut app).0;
format!("{field}: {value_snippet}")
})
.collect::<Vec<String>>()
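For context, a minimal sketch of the binding-then-reassign pattern this pass rewrites (names are illustrative); the new `ctxt` comparison keeps the lint from firing when the consecutive statements come out of a macro expansion:

    #[derive(Default)]
    struct Config {
        threads: usize,
        verbose: bool,
    }

    fn main() {
        // A `Default::default()` binding whose fields are immediately reassigned;
        // the suggestion is to build the value in a single struct expression.
        let mut config = Config::default();
        config.threads = 4;
        config.verbose = true;
        println!("{} {}", config.threads, config.verbose);
    }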
diff --git a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
index 1ad929864..f296b80d2 100644
--- a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
+++ b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
@@ -1,11 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::last_path_segment;
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::{match_def_path, paths};
use rustc_errors::Applicability;
use rustc_hir::{def, Expr, ExprKind, GenericArg, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::SyntaxContext;
declare_clippy_lint! {
/// ### What it does
@@ -38,9 +39,11 @@ impl<'tcx> LateLintPass<'tcx> for DefaultIterEmpty {
&& let QPath::Resolved(None, path) = ty_path
&& let def::Res::Def(_, def_id) = &path.res
&& match_def_path(cx, *def_id, &paths::ITER_EMPTY)
+ && let ctxt = expr.span.ctxt()
+ && ty.span.ctxt() == ctxt
{
let mut applicability = Applicability::MachineApplicable;
- let sugg = make_sugg(cx, ty_path, &mut applicability);
+ let sugg = make_sugg(cx, ty_path, ctxt, &mut applicability);
span_lint_and_sugg(
cx,
DEFAULT_INSTEAD_OF_ITER_EMPTY,
@@ -54,14 +57,19 @@ impl<'tcx> LateLintPass<'tcx> for DefaultIterEmpty {
}
}
-fn make_sugg(cx: &LateContext<'_>, ty_path: &rustc_hir::QPath<'_>, applicability: &mut Applicability) -> String {
+fn make_sugg(
+ cx: &LateContext<'_>,
+ ty_path: &rustc_hir::QPath<'_>,
+ ctxt: SyntaxContext,
+ applicability: &mut Applicability,
+) -> String {
if let Some(last) = last_path_segment(ty_path).args
&& let Some(iter_ty) = last.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
})
{
- format!("std::iter::empty::<{}>()", snippet_with_applicability(cx, iter_ty.span, "..", applicability))
+ format!("std::iter::empty::<{}>()", snippet_with_context(cx, iter_ty.span, ctxt, "..", applicability).0)
} else {
"std::iter::empty()".to_owned()
}
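A small sketch of the rewrite this lint suggests (binding names are illustrative); the new `ctxt` parameter only keeps the snippet lookup inside the right macro context:

    fn main() {
        // `std::iter::Empty::default()` is what the lint flags; the suggested
        // replacement is `std::iter::empty()`, keeping an explicit element type
        // when one is needed for inference.
        let flagged: std::iter::Empty<i32> = std::iter::Empty::default();
        let suggested = std::iter::empty::<i32>();
        assert_eq!(flagged.count(), suggested.count());
    }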
diff --git a/src/tools/clippy/clippy_lints/src/derivable_impls.rs b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
index f95b8ccf0..8f68f90a2 100644
--- a/src/tools/clippy/clippy_lints/src/derivable_impls.rs
+++ b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
@@ -8,7 +8,7 @@ use rustc_hir::{
Body, Expr, ExprKind, GenericArg, Impl, ImplItemKind, Item, ItemKind, Node, PathSegment, QPath, Ty, TyKind,
};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::{AdtDef, DefIdTree};
+use rustc_middle::ty::{Adt, AdtDef, SubstsRef};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
@@ -81,13 +81,18 @@ fn check_struct<'tcx>(
self_ty: &Ty<'_>,
func_expr: &Expr<'_>,
adt_def: AdtDef<'_>,
+ substs: SubstsRef<'_>,
) {
if let TyKind::Path(QPath::Resolved(_, p)) = self_ty.kind {
- if let Some(PathSegment { args: Some(a), .. }) = p.segments.last() {
- for arg in a.args {
- if !matches!(arg, GenericArg::Lifetime(_)) {
- return;
- }
+ if let Some(PathSegment { args, .. }) = p.segments.last() {
+ let args = args.map(|a| a.args).unwrap_or(&[]);
+
+ // substs contains the generic parameters of the type declaration, while args contains the arguments
+            // used at instantiation time. If the two lengths differ, some parameters were not
+            // provided (i.e. the default values were used); in this case we will not risk
+            // suggesting too broad a rewrite, nor will we if any argument is a type or a const.
+ if substs.len() != args.len() || args.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))) {
+ return;
}
}
}
@@ -176,7 +181,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
self_ty,
..
}) = item.kind;
- if !cx.tcx.has_attr(item.owner_id.to_def_id(), sym::automatically_derived);
+ if !cx.tcx.has_attr(item.owner_id, sym::automatically_derived);
if !item.span.from_expansion();
if let Some(def_id) = trait_ref.trait_def_id();
if cx.tcx.is_diagnostic_item(sym::Default, def_id);
@@ -184,7 +189,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
if let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir);
if let ImplItemKind::Fn(_, b) = &impl_item.kind;
if let Body { value: func_expr, .. } = cx.tcx.hir().body(*b);
- if let Some(adt_def) = cx.tcx.type_of(item.owner_id).subst_identity().ty_adt_def();
+ if let &Adt(adt_def, substs) = cx.tcx.type_of(item.owner_id).subst_identity().kind();
if let attrs = cx.tcx.hir().attrs(item.hir_id());
if !attrs.iter().any(|attr| attr.doc_str().is_some());
if let child_attrs = cx.tcx.hir().attrs(impl_item_hir);
@@ -192,7 +197,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
then {
if adt_def.is_struct() {
- check_struct(cx, item, self_ty, func_expr, adt_def);
+ check_struct(cx, item, self_ty, func_expr, adt_def, substs);
} else if adt_def.is_enum() && self.msrv.meets(msrvs::DEFAULT_ENUM_ATTRIBUTE) {
check_enum(cx, item, func_expr, adt_def);
}
diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs
index b8428d66a..f425dd5fb 100644
--- a/src/tools/clippy/clippy_lints/src/derive.rs
+++ b/src/tools/clippy/clippy_lints/src/derive.rs
@@ -24,8 +24,8 @@ use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
- /// Checks for deriving `Hash` but implementing `PartialEq`
- /// explicitly or vice versa.
+ /// Lints against manual `PartialEq` implementations for types with a derived `Hash`
+ /// implementation.
///
/// ### Why is this bad?
/// The implementation of these traits must agree (for
@@ -54,8 +54,8 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
- /// Checks for deriving `Ord` but implementing `PartialOrd`
- /// explicitly or vice versa.
+ /// Lints against manual `PartialOrd` and `Ord` implementations for types with a derived `Ord`
+ /// or `PartialOrd` implementation.
///
/// ### Why is this bad?
/// The implementation of these traits must agree (for
@@ -212,7 +212,7 @@ impl<'tcx> LateLintPass<'tcx> for Derive {
}) = item.kind
{
let ty = cx.tcx.type_of(item.owner_id).subst_identity();
- let is_automatically_derived = cx.tcx.has_attr(item.owner_id.to_def_id(), sym::automatically_derived);
+ let is_automatically_derived = cx.tcx.has_attr(item.owner_id, sym::automatically_derived);
check_hash_peq(cx, item.span, trait_ref, ty, is_automatically_derived);
check_ord_partial_ord(cx, item.span, trait_ref, ty, is_automatically_derived);
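The reworded description matches code like the following sketch (type and field names are illustrative):

    #[derive(Hash)]
    struct Key {
        id: u32,
        label: String,
    }

    // A derived `Hash` with a hand-written `PartialEq`: the two impls must agree
    // (`k1 == k2` has to imply equal hashes), and ignoring `label` here silently
    // breaks that contract.
    impl PartialEq for Key {
        fn eq(&self, other: &Self) -> bool {
            self.id == other.id
        }
    }

    fn main() {
        let a = Key { id: 1, label: "a".into() };
        let b = Key { id: 1, label: "b".into() };
        assert!(a == b); // equal, yet their hashes may differ
    }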
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
index 084190f00..c9fad98e4 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
@@ -32,7 +32,7 @@ declare_clippy_lint! {
/// ### Example
/// ```rust
/// // Assuming that `clippy.toml` contains the following line:
- /// // allowed-locales = ["Latin", "Cyrillic"]
+ /// // allowed-scripts = ["Latin", "Cyrillic"]
/// let counter = 10; // OK, latin is allowed.
/// let счётчик = 10; // OK, cyrillic is allowed.
/// let zähler = 10; // OK, it's still latin.
diff --git a/src/tools/clippy/clippy_lints/src/exit.rs b/src/tools/clippy/clippy_lints/src/exit.rs
index 9c8b0d076..8ba6a9e48 100644
--- a/src/tools/clippy/clippy_lints/src/exit.rs
+++ b/src/tools/clippy/clippy_lints/src/exit.rs
@@ -11,7 +11,7 @@ declare_clippy_lint! {
///
/// ### Why is this bad?
/// Exit terminates the program at the location it is called. For unrecoverable
- /// errors `panics` should be used to provide a stacktrace and potentualy other
+ /// errors `panics` should be used to provide a stacktrace and potentially other
/// information. A normal termination or one with an error code should happen in
/// the main function.
///
diff --git a/src/tools/clippy/clippy_lints/src/explicit_write.rs b/src/tools/clippy/clippy_lints/src/explicit_write.rs
index c0ea6f338..315df6c71 100644
--- a/src/tools/clippy/clippy_lints/src/explicit_write.rs
+++ b/src/tools/clippy/clippy_lints/src/explicit_write.rs
@@ -1,5 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::macros::FormatArgsExpn;
+use clippy_utils::macros::{find_format_args, format_args_inputs_span};
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{is_expn_of, match_function_call, paths};
use if_chain::if_chain;
@@ -8,7 +8,7 @@ use rustc_hir::def::Res;
use rustc_hir::{BindingAnnotation, Block, BlockCheckMode, Expr, ExprKind, Node, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::sym;
+use rustc_span::{sym, ExpnId};
declare_clippy_lint! {
/// ### What it does
@@ -43,23 +43,22 @@ declare_lint_pass!(ExplicitWrite => [EXPLICIT_WRITE]);
impl<'tcx> LateLintPass<'tcx> for ExplicitWrite {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
- // match call to unwrap
- if let ExprKind::MethodCall(unwrap_fun, write_call, [], _) = expr.kind;
- if unwrap_fun.ident.name == sym::unwrap;
+ // match call to unwrap
+ if let ExprKind::MethodCall(unwrap_fun, write_call, [], _) = expr.kind
+ && unwrap_fun.ident.name == sym::unwrap
// match call to write_fmt
- if let ExprKind::MethodCall(write_fun, write_recv, [write_arg], _) = look_in_block(cx, &write_call.kind);
- if write_fun.ident.name == sym!(write_fmt);
+ && let ExprKind::MethodCall(write_fun, write_recv, [write_arg], _) = look_in_block(cx, &write_call.kind)
+ && write_fun.ident.name == sym!(write_fmt)
// match calls to std::io::stdout() / std::io::stderr()
- if let Some(dest_name) = if match_function_call(cx, write_recv, &paths::STDOUT).is_some() {
+ && let Some(dest_name) = if match_function_call(cx, write_recv, &paths::STDOUT).is_some() {
Some("stdout")
} else if match_function_call(cx, write_recv, &paths::STDERR).is_some() {
Some("stderr")
} else {
None
- };
- if let Some(format_args) = FormatArgsExpn::parse(cx, write_arg);
- then {
+ }
+ {
+ find_format_args(cx, write_arg, ExpnId::root(), |format_args| {
let calling_macro =
// ordering is important here, since `writeln!` uses `write!` internally
if is_expn_of(write_call.span, "writeln").is_some() {
@@ -92,7 +91,7 @@ impl<'tcx> LateLintPass<'tcx> for ExplicitWrite {
let mut applicability = Applicability::MachineApplicable;
let inputs_snippet = snippet_with_applicability(
cx,
- format_args.inputs_span(),
+ format_args_inputs_span(format_args),
"..",
&mut applicability,
);
@@ -104,8 +103,8 @@ impl<'tcx> LateLintPass<'tcx> for ExplicitWrite {
"try this",
format!("{prefix}{sugg_mac}!({inputs_snippet})"),
applicability,
- )
- }
+ );
+ });
}
}
}
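The rewrite the pass suggests, in sketch form (the message text is illustrative):

    use std::io::Write;

    fn main() {
        // What `explicit_write` flags: an explicit `writeln!` to stderr plus
        // `unwrap()`...
        writeln!(std::io::stderr(), "error: {}", 42).unwrap();
        // ...when the dedicated macro says the same thing:
        eprintln!("error: {}", 42);
    }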
diff --git a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
index 20565e1d2..eeb4de8b5 100644
--- a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
+++ b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
@@ -1,10 +1,10 @@
-use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then};
use clippy_utils::trait_ref_of_method;
-use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::MultiSpan;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_impl_item, walk_item, walk_param_bound, walk_ty, Visitor};
use rustc_hir::{
- BodyId, ExprKind, GenericBound, GenericParamKind, Generics, ImplItem, ImplItemKind, Item, ItemKind,
+ BodyId, ExprKind, GenericBound, GenericParam, GenericParamKind, Generics, ImplItem, ImplItemKind, Item, ItemKind,
PredicateOrigin, Ty, TyKind, WherePredicate,
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
@@ -53,13 +53,19 @@ impl ExtraUnusedTypeParameters {
}
}
- /// Don't lint external macros or functions with empty bodies. Also, don't lint public items if
- /// the `avoid_breaking_exported_api` config option is set.
- fn check_false_positive(&self, cx: &LateContext<'_>, span: Span, def_id: LocalDefId, body_id: BodyId) -> bool {
+ /// Don't lint external macros or functions with empty bodies. Also, don't lint exported items
+ /// if the `avoid_breaking_exported_api` config option is set.
+ fn is_empty_exported_or_macro(
+ &self,
+ cx: &LateContext<'_>,
+ span: Span,
+ def_id: LocalDefId,
+ body_id: BodyId,
+ ) -> bool {
let body = cx.tcx.hir().body(body_id).value;
let fn_empty = matches!(&body.kind, ExprKind::Block(blk, None) if blk.stmts.is_empty() && blk.expr.is_none());
let is_exported = cx.effective_visibilities.is_exported(def_id);
- in_external_macro(cx.sess(), span) || (self.avoid_breaking_exported_api && is_exported) || fn_empty
+ in_external_macro(cx.sess(), span) || fn_empty || (is_exported && self.avoid_breaking_exported_api)
}
}
@@ -69,85 +75,129 @@ impl_lint_pass!(ExtraUnusedTypeParameters => [EXTRA_UNUSED_TYPE_PARAMETERS]);
/// trait bounds those parameters have.
struct TypeWalker<'cx, 'tcx> {
cx: &'cx LateContext<'tcx>,
- /// Collection of all the function's type parameters.
+ /// Collection of the function's type parameters. Once the function has been walked, this will
+ /// contain only unused type parameters.
ty_params: FxHashMap<DefId, Span>,
- /// Collection of any (inline) trait bounds corresponding to each type parameter.
- bounds: FxHashMap<DefId, Span>,
+ /// Collection of any inline trait bounds corresponding to each type parameter.
+ inline_bounds: FxHashMap<DefId, Span>,
+ /// Collection of any type parameters with trait bounds that appear in a where clause.
+ where_bounds: FxHashSet<DefId>,
/// The entire `Generics` object of the function, useful for querying purposes.
generics: &'tcx Generics<'tcx>,
- /// The value of this will remain `true` if *every* parameter:
- /// 1. Is a type parameter, and
- /// 2. Goes unused in the function.
- /// Otherwise, if any type parameters end up being used, or if any lifetime or const-generic
- /// parameters are present, this will be set to `false`.
- all_params_unused: bool,
}
impl<'cx, 'tcx> TypeWalker<'cx, 'tcx> {
fn new(cx: &'cx LateContext<'tcx>, generics: &'tcx Generics<'tcx>) -> Self {
- let mut all_params_unused = true;
let ty_params = generics
.params
.iter()
- .filter_map(|param| {
- if let GenericParamKind::Type { synthetic, .. } = param.kind {
- (!synthetic).then_some((param.def_id.into(), param.span))
- } else {
- if !param.is_elided_lifetime() {
- all_params_unused = false;
- }
- None
- }
+ .filter_map(|param| match param.kind {
+ GenericParamKind::Type { synthetic, .. } if !synthetic => Some((param.def_id.into(), param.span)),
+ _ => None,
})
.collect();
Self {
cx,
ty_params,
- bounds: FxHashMap::default(),
+ inline_bounds: FxHashMap::default(),
+ where_bounds: FxHashSet::default(),
generics,
- all_params_unused,
}
}
- fn mark_param_used(&mut self, def_id: DefId) {
- if self.ty_params.remove(&def_id).is_some() {
- self.all_params_unused = false;
- }
+ fn get_bound_span(&self, param: &'tcx GenericParam<'tcx>) -> Span {
+ self.inline_bounds
+ .get(&param.def_id.to_def_id())
+ .map_or(param.span, |bound_span| param.span.with_hi(bound_span.hi()))
+ }
+
+ fn emit_help(&self, spans: Vec<Span>, msg: &str, help: &'static str) {
+ span_lint_and_help(self.cx, EXTRA_UNUSED_TYPE_PARAMETERS, spans, msg, None, help);
+ }
+
+ fn emit_sugg(&self, spans: Vec<Span>, msg: &str, help: &'static str) {
+ let suggestions: Vec<(Span, String)> = spans.iter().copied().zip(std::iter::repeat(String::new())).collect();
+ span_lint_and_then(self.cx, EXTRA_UNUSED_TYPE_PARAMETERS, spans, msg, |diag| {
+ diag.multipart_suggestion(help, suggestions, Applicability::MachineApplicable);
+ });
}
fn emit_lint(&self) {
- let (msg, help) = match self.ty_params.len() {
+ let explicit_params = self
+ .generics
+ .params
+ .iter()
+ .filter(|param| !param.is_elided_lifetime() && !param.is_impl_trait())
+ .collect::<Vec<_>>();
+
+ let extra_params = explicit_params
+ .iter()
+ .enumerate()
+ .filter(|(_, param)| self.ty_params.contains_key(&param.def_id.to_def_id()))
+ .collect::<Vec<_>>();
+
+ let (msg, help) = match extra_params.len() {
0 => return,
1 => (
- "type parameter goes unused in function definition",
+ format!(
+ "type parameter `{}` goes unused in function definition",
+ extra_params[0].1.name.ident()
+ ),
"consider removing the parameter",
),
_ => (
- "type parameters go unused in function definition",
+ format!(
+ "type parameters go unused in function definition: {}",
+ extra_params
+ .iter()
+ .map(|(_, param)| param.name.ident().to_string())
+ .collect::<Vec<_>>()
+ .join(", ")
+ ),
"consider removing the parameters",
),
};
- let source_map = self.cx.sess().source_map();
- let span = if self.all_params_unused {
- self.generics.span.into() // Remove the entire list of generics
+ // If any parameters are bounded in where clauses, don't try to form a suggestion.
+ // Otherwise, the leftover where bound would produce code that wouldn't compile.
+ if extra_params
+ .iter()
+ .any(|(_, param)| self.where_bounds.contains(&param.def_id.to_def_id()))
+ {
+ let spans = extra_params
+ .iter()
+ .map(|(_, param)| self.get_bound_span(param))
+ .collect::<Vec<_>>();
+ self.emit_help(spans, &msg, help);
} else {
- MultiSpan::from_spans(
- self.ty_params
+ let spans = if explicit_params.len() == extra_params.len() {
+ vec![self.generics.span] // Remove the entire list of generics
+ } else {
+ let mut end: Option<LocalDefId> = None;
+ extra_params
.iter()
- .map(|(def_id, &span)| {
- // Extend the span past any trait bounds, and include the comma at the end.
- let span_to_extend = self.bounds.get(def_id).copied().map_or(span, Span::shrink_to_hi);
- let comma_range = source_map.span_extend_to_next_char(span_to_extend, '>', false);
- let comma_span = source_map.span_through_char(comma_range, ',');
- span.with_hi(comma_span.hi())
+ .rev()
+ .map(|(idx, param)| {
+ if let Some(next) = explicit_params.get(idx + 1) && end != Some(next.def_id) {
+ // Extend the current span forward, up until the next param in the list.
+ param.span.until(next.span)
+ } else {
+ // Extend the current span back to include the comma following the previous
+ // param. If the span of the next param in the list has already been
+ // extended, we continue the chain. This is why we're iterating in reverse.
+ end = Some(param.def_id);
+
+ // idx will never be 0, else we'd be removing the entire list of generics
+ let prev = explicit_params[idx - 1];
+ let prev_span = self.get_bound_span(prev);
+ self.get_bound_span(param).with_lo(prev_span.hi())
+ }
})
- .collect(),
- )
+ .collect()
+ };
+ self.emit_sugg(spans, &msg, help);
};
-
- span_lint_and_help(self.cx, EXTRA_UNUSED_TYPE_PARAMETERS, span, msg, None, help);
}
}
@@ -162,7 +212,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for TypeWalker<'cx, 'tcx> {
fn visit_ty(&mut self, t: &'tcx Ty<'tcx>) {
if let Some((def_id, _)) = t.peel_refs().as_generic_param() {
- self.mark_param_used(def_id);
+ self.ty_params.remove(&def_id);
} else if let TyKind::OpaqueDef(id, _, _) = t.kind {
// Explicitly walk OpaqueDef. Normally `walk_ty` would do the job, but it calls
// `visit_nested_item`, which checks that `Self::NestedFilter::INTER` is set. We're
@@ -176,9 +226,18 @@ impl<'cx, 'tcx> Visitor<'tcx> for TypeWalker<'cx, 'tcx> {
fn visit_where_predicate(&mut self, predicate: &'tcx WherePredicate<'tcx>) {
if let WherePredicate::BoundPredicate(predicate) = predicate {
- // Collect spans for any bounds on type parameters. We only keep bounds that appear in
- // the list of generics (not in a where-clause).
+ // Collect spans for any bounds on type parameters.
if let Some((def_id, _)) = predicate.bounded_ty.peel_refs().as_generic_param() {
+ match predicate.origin {
+ PredicateOrigin::GenericParam => {
+ self.inline_bounds.insert(def_id, predicate.span);
+ },
+ PredicateOrigin::WhereClause => {
+ self.where_bounds.insert(def_id);
+ },
+ PredicateOrigin::ImplTrait => (),
+ }
+
// If the bound contains non-public traits, err on the safe side and don't lint the
// corresponding parameter.
if !predicate
@@ -187,12 +246,10 @@ impl<'cx, 'tcx> Visitor<'tcx> for TypeWalker<'cx, 'tcx> {
.filter_map(bound_to_trait_def_id)
.all(|id| self.cx.effective_visibilities.is_exported(id))
{
- self.mark_param_used(def_id);
- } else if let PredicateOrigin::GenericParam = predicate.origin {
- self.bounds.insert(def_id, predicate.span);
+ self.ty_params.remove(&def_id);
}
}
- // Only walk the right-hand side of where-bounds
+ // Only walk the right-hand side of where bounds
for bound in predicate.bounds {
walk_param_bound(self, bound);
}
@@ -207,7 +264,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for TypeWalker<'cx, 'tcx> {
impl<'tcx> LateLintPass<'tcx> for ExtraUnusedTypeParameters {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
if let ItemKind::Fn(_, generics, body_id) = item.kind
- && !self.check_false_positive(cx, item.span, item.owner_id.def_id, body_id)
+ && !self.is_empty_exported_or_macro(cx, item.span, item.owner_id.def_id, body_id)
{
let mut walker = TypeWalker::new(cx, generics);
walk_item(&mut walker, item);
@@ -219,7 +276,7 @@ impl<'tcx> LateLintPass<'tcx> for ExtraUnusedTypeParameters {
// Only lint on inherent methods, not trait methods.
if let ImplItemKind::Fn(.., body_id) = item.kind
&& trait_ref_of_method(cx, item.owner_id.def_id).is_none()
- && !self.check_false_positive(cx, item.span, item.owner_id.def_id, body_id)
+ && !self.is_empty_exported_or_macro(cx, item.span, item.owner_id.def_id, body_id)
{
let mut walker = TypeWalker::new(cx, item.generics);
walk_impl_item(&mut walker, item);
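Roughly the shape of function the reworked walker reports, sketched with illustrative names; the new bookkeeping lets it name the unused parameters and emit a machine-applicable removal when no where-clause bound is involved:

    // `T` and `U` never appear in the signature or body, so they can simply be
    // removed; `V` is used and is kept.
    fn length<T, U, V>(items: &[V]) -> usize {
        items.len()
    }

    fn main() {
        let n = length::<(), (), i32>(&[1, 2, 3]);
        assert_eq!(n, 3);
    }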
diff --git a/src/tools/clippy/clippy_lints/src/fn_null_check.rs b/src/tools/clippy/clippy_lints/src/fn_null_check.rs
index 91c8c340c..d8f4a5fe2 100644
--- a/src/tools/clippy/clippy_lints/src/fn_null_check.rs
+++ b/src/tools/clippy/clippy_lints/src/fn_null_check.rs
@@ -25,7 +25,7 @@ declare_clippy_lint! {
///
/// if fn_ptr.is_none() { ... }
/// ```
- #[clippy::version = "1.67.0"]
+ #[clippy::version = "1.68.0"]
pub FN_NULL_CHECK,
correctness,
"`fn()` type assumed to be nullable"
diff --git a/src/tools/clippy/clippy_lints/src/format.rs b/src/tools/clippy/clippy_lints/src/format.rs
index d0fab6949..d34d6e927 100644
--- a/src/tools/clippy/clippy_lints/src/format.rs
+++ b/src/tools/clippy/clippy_lints/src/format.rs
@@ -1,14 +1,13 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::macros::{root_macro_call_first_node, FormatArgsExpn};
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::macros::{find_format_arg_expr, find_format_args, root_macro_call_first_node};
+use clippy_utils::source::{snippet_opt, snippet_with_context};
use clippy_utils::sugg::Sugg;
-use if_chain::if_chain;
+use rustc_ast::{FormatArgsPiece, FormatOptions, FormatTrait};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::symbol::kw;
use rustc_span::{sym, Span};
declare_clippy_lint! {
@@ -44,55 +43,53 @@ declare_lint_pass!(UselessFormat => [USELESS_FORMAT]);
impl<'tcx> LateLintPass<'tcx> for UselessFormat {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let (format_args, call_site) = if_chain! {
- if let Some(macro_call) = root_macro_call_first_node(cx, expr);
- if cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id);
- if let Some(format_args) = FormatArgsExpn::find_nested(cx, expr, macro_call.expn);
- then {
- (format_args, macro_call.span)
- } else {
- return
- }
- };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ if !cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) {
+ return;
+ }
+
+ find_format_args(cx, expr, macro_call.expn, |format_args| {
+ let mut applicability = Applicability::MachineApplicable;
+ let call_site = macro_call.span;
- let mut applicability = Applicability::MachineApplicable;
- if format_args.args.is_empty() {
- match *format_args.format_string.parts {
- [] => span_useless_format_empty(cx, call_site, "String::new()".to_owned(), applicability),
- [_] => {
+ match (format_args.arguments.all_args(), &format_args.template[..]) {
+ ([], []) => span_useless_format_empty(cx, call_site, "String::new()".to_owned(), applicability),
+ ([], [_]) => {
// Simulate macro expansion, converting {{ and }} to { and }.
- let s_expand = format_args.format_string.snippet.replace("{{", "{").replace("}}", "}");
+ let Some(snippet) = snippet_opt(cx, format_args.span) else { return };
+ let s_expand = snippet.replace("{{", "{").replace("}}", "}");
let sugg = format!("{s_expand}.to_string()");
span_useless_format(cx, call_site, sugg, applicability);
},
- [..] => {},
- }
- } else if let [arg] = &*format_args.args {
- let value = arg.param.value;
- if_chain! {
- if format_args.format_string.parts == [kw::Empty];
- if arg.format.is_default();
- if match cx.typeck_results().expr_ty(value).peel_refs().kind() {
- ty::Adt(adt, _) => Some(adt.did()) == cx.tcx.lang_items().string(),
- ty::Str => true,
- _ => false,
- };
- then {
- let is_new_string = match value.kind {
- ExprKind::Binary(..) => true,
- ExprKind::MethodCall(path, ..) => path.ident.name == sym::to_string,
- _ => false,
- };
- let sugg = if is_new_string {
- snippet_with_applicability(cx, value.span, "..", &mut applicability).into_owned()
- } else {
- let sugg = Sugg::hir_with_applicability(cx, value, "<arg>", &mut applicability);
- format!("{}.to_string()", sugg.maybe_par())
- };
- span_useless_format(cx, call_site, sugg, applicability);
- }
+ ([arg], [piece]) => {
+ if let Ok(value) = find_format_arg_expr(expr, arg)
+ && let FormatArgsPiece::Placeholder(placeholder) = piece
+ && placeholder.format_trait == FormatTrait::Display
+ && placeholder.format_options == FormatOptions::default()
+ && match cx.typeck_results().expr_ty(value).peel_refs().kind() {
+ ty::Adt(adt, _) => Some(adt.did()) == cx.tcx.lang_items().string(),
+ ty::Str => true,
+ _ => false,
+ }
+ {
+ let is_new_string = match value.kind {
+ ExprKind::Binary(..) => true,
+ ExprKind::MethodCall(path, ..) => path.ident.name == sym::to_string,
+ _ => false,
+ };
+ let sugg = if is_new_string {
+ snippet_with_context(cx, value.span, call_site.ctxt(), "..", &mut applicability).0.into_owned()
+ } else {
+ let sugg = Sugg::hir_with_context(cx, value, call_site.ctxt(), "<arg>", &mut applicability);
+ format!("{}.to_string()", sugg.maybe_par())
+ };
+ span_useless_format(cx, call_site, sugg, applicability);
+
+ }
+ },
+ _ => {},
}
- };
+ });
}
}
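Sketches of the patterns the rewritten check still covers (an empty `format!("")` additionally becomes `String::new()`); values are illustrative:

    fn main() {
        let name = String::from("world");

        let a = format!("hello");    // no arguments       -> suggest "hello".to_string()
        let b = format!("{}", name); // lone `Display` arg -> suggest name.to_string()

        assert_eq!(a, "hello");
        assert_eq!(b, "world");
    }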
diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs
index c511d85e9..08e45ed7d 100644
--- a/src/tools/clippy/clippy_lints/src/format_args.rs
+++ b/src/tools/clippy/clippy_lints/src/format_args.rs
@@ -1,27 +1,31 @@
+use arrayvec::ArrayVec;
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::is_diag_trait_item;
-use clippy_utils::macros::FormatParamKind::{Implicit, Named, NamedInline, Numbered, Starred};
use clippy_utils::macros::{
- is_assert_macro, is_format_macro, is_panic, root_macro_call, Count, FormatArg, FormatArgsExpn, FormatParam,
- FormatParamUsage,
+ find_format_arg_expr, find_format_args, format_arg_removal_span, format_placeholder_format_span, is_assert_macro,
+ is_format_macro, is_panic, root_macro_call, root_macro_call_first_node, FormatParamUsage,
};
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::{implements_trait, is_type_lang_item};
use if_chain::if_chain;
use itertools::Itertools;
+use rustc_ast::{
+ FormatArgPosition, FormatArgPositionKind, FormatArgsPiece, FormatArgumentKind, FormatCount, FormatOptions,
+ FormatPlaceholder, FormatTrait,
+};
use rustc_errors::{
Applicability,
SuggestionStyle::{CompletelyHidden, ShowCode},
};
-use rustc_hir::{Expr, ExprKind, HirId, LangItem, QPath};
+use rustc_hir::{Expr, ExprKind, LangItem};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::adjustment::{Adjust, Adjustment};
use rustc_middle::ty::Ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::def_id::DefId;
use rustc_span::edition::Edition::Edition2021;
-use rustc_span::{sym, ExpnData, ExpnKind, Span, Symbol};
+use rustc_span::{sym, Span, Symbol};
declare_clippy_lint! {
/// ### What it does
@@ -184,72 +188,79 @@ impl FormatArgs {
impl<'tcx> LateLintPass<'tcx> for FormatArgs {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
- if let Some(format_args) = FormatArgsExpn::parse(cx, expr)
- && let expr_expn_data = expr.span.ctxt().outer_expn_data()
- && let outermost_expn_data = outermost_expn_data(expr_expn_data)
- && let Some(macro_def_id) = outermost_expn_data.macro_def_id
- && is_format_macro(cx, macro_def_id)
- && let ExpnKind::Macro(_, name) = outermost_expn_data.kind
- {
- for arg in &format_args.args {
- check_unused_format_specifier(cx, arg);
- if !arg.format.is_default() {
- continue;
- }
- if is_aliased(&format_args, arg.param.value.hir_id) {
- continue;
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ if !is_format_macro(cx, macro_call.def_id) {
+ return;
+ }
+ let name = cx.tcx.item_name(macro_call.def_id);
+
+ find_format_args(cx, expr, macro_call.expn, |format_args| {
+ for piece in &format_args.template {
+ if let FormatArgsPiece::Placeholder(placeholder) = piece
+ && let Ok(index) = placeholder.argument.index
+ && let Some(arg) = format_args.arguments.all_args().get(index)
+ {
+ let arg_expr = find_format_arg_expr(expr, arg);
+
+ check_unused_format_specifier(cx, placeholder, arg_expr);
+
+ if placeholder.format_trait != FormatTrait::Display
+ || placeholder.format_options != FormatOptions::default()
+ || is_aliased(format_args, index)
+ {
+ continue;
+ }
+
+ if let Ok(arg_hir_expr) = arg_expr {
+ check_format_in_format_args(cx, macro_call.span, name, arg_hir_expr);
+ check_to_string_in_format_args(cx, name, arg_hir_expr);
+ }
}
- check_format_in_format_args(cx, outermost_expn_data.call_site, name, arg.param.value);
- check_to_string_in_format_args(cx, name, arg.param.value);
}
+
if self.msrv.meets(msrvs::FORMAT_ARGS_CAPTURE) {
- check_uninlined_args(cx, &format_args, outermost_expn_data.call_site, macro_def_id, self.ignore_mixed);
+ check_uninlined_args(cx, format_args, macro_call.span, macro_call.def_id, self.ignore_mixed);
}
- }
+ });
}
extract_msrv_attr!(LateContext);
}
-fn check_unused_format_specifier(cx: &LateContext<'_>, arg: &FormatArg<'_>) {
- let param_ty = cx.typeck_results().expr_ty(arg.param.value).peel_refs();
+fn check_unused_format_specifier(
+ cx: &LateContext<'_>,
+ placeholder: &FormatPlaceholder,
+ arg_expr: Result<&Expr<'_>, &rustc_ast::Expr>,
+) {
+ let ty_or_ast_expr = arg_expr.map(|expr| cx.typeck_results().expr_ty(expr).peel_refs());
- if let Count::Implied(Some(mut span)) = arg.format.precision
- && !span.is_empty()
- {
- span_lint_and_then(
- cx,
- UNUSED_FORMAT_SPECS,
- span,
- "empty precision specifier has no effect",
- |diag| {
- if param_ty.is_floating_point() {
- diag.note("a precision specifier is not required to format floats");
- }
+ let is_format_args = match ty_or_ast_expr {
+ Ok(ty) => is_type_lang_item(cx, ty, LangItem::FormatArguments),
+ Err(expr) => matches!(expr.peel_parens_and_refs().kind, rustc_ast::ExprKind::FormatArgs(_)),
+ };
- if arg.format.is_default() {
- // If there's no other specifiers remove the `:` too
- span = arg.format_span();
- }
+ let options = &placeholder.format_options;
- diag.span_suggestion_verbose(span, "remove the `.`", "", Applicability::MachineApplicable);
- },
- );
- }
+ let arg_span = match arg_expr {
+ Ok(expr) => expr.span,
+ Err(expr) => expr.span,
+ };
- if is_type_lang_item(cx, param_ty, LangItem::FormatArguments) && !arg.format.is_default_for_trait() {
+ if let Some(placeholder_span) = placeholder.span
+ && is_format_args
+ && *options != FormatOptions::default()
+ {
span_lint_and_then(
cx,
UNUSED_FORMAT_SPECS,
- arg.span,
+ placeholder_span,
"format specifiers have no effect on `format_args!()`",
|diag| {
- let mut suggest_format = |spec, span| {
+ let mut suggest_format = |spec| {
let message = format!("for the {spec} to apply consider using `format!()`");
- if let Some(mac_call) = root_macro_call(arg.param.value.span)
+ if let Some(mac_call) = root_macro_call(arg_span)
&& cx.tcx.is_diagnostic_item(sym::format_args_macro, mac_call.def_id)
- && arg.span.eq_ctxt(mac_call.span)
{
diag.span_suggestion(
cx.sess().source_map().span_until_char(mac_call.span, '!'),
@@ -257,25 +268,27 @@ fn check_unused_format_specifier(cx: &LateContext<'_>, arg: &FormatArg<'_>) {
"format",
Applicability::MaybeIncorrect,
);
- } else if let Some(span) = span {
- diag.span_help(span, message);
+ } else {
+ diag.help(message);
}
};
- if !arg.format.width.is_implied() {
- suggest_format("width", arg.format.width.span());
+ if options.width.is_some() {
+ suggest_format("width");
}
- if !arg.format.precision.is_implied() {
- suggest_format("precision", arg.format.precision.span());
+ if options.precision.is_some() {
+ suggest_format("precision");
}
- diag.span_suggestion_verbose(
- arg.format_span(),
- "if the current behavior is intentional, remove the format specifiers",
- "",
- Applicability::MaybeIncorrect,
- );
+ if let Some(format_span) = format_placeholder_format_span(placeholder) {
+ diag.span_suggestion_verbose(
+ format_span,
+ "if the current behavior is intentional, remove the format specifiers",
+ "",
+ Applicability::MaybeIncorrect,
+ );
+ }
},
);
}
@@ -283,12 +296,12 @@ fn check_unused_format_specifier(cx: &LateContext<'_>, arg: &FormatArg<'_>) {
fn check_uninlined_args(
cx: &LateContext<'_>,
- args: &FormatArgsExpn<'_>,
+ args: &rustc_ast::FormatArgs,
call_site: Span,
def_id: DefId,
ignore_mixed: bool,
) {
- if args.format_string.span.from_expansion() {
+ if args.span.from_expansion() {
return;
}
if call_site.edition() < Edition2021 && (is_panic(cx, def_id) || is_assert_macro(cx, def_id)) {
@@ -303,7 +316,13 @@ fn check_uninlined_args(
// we cannot remove any other arguments in the format string,
// because the index numbers might be wrong after inlining.
// Example of an un-inlinable format: print!("{}{1}", foo, 2)
- if !args.params().all(|p| check_one_arg(args, &p, &mut fixes, ignore_mixed)) || fixes.is_empty() {
+ for (pos, usage) in format_arg_positions(args) {
+ if !check_one_arg(args, pos, usage, &mut fixes, ignore_mixed) {
+ return;
+ }
+ }
+
+ if fixes.is_empty() {
return;
}
@@ -332,47 +351,40 @@ fn check_uninlined_args(
}
fn check_one_arg(
- args: &FormatArgsExpn<'_>,
- param: &FormatParam<'_>,
+ args: &rustc_ast::FormatArgs,
+ pos: &FormatArgPosition,
+ usage: FormatParamUsage,
fixes: &mut Vec<(Span, String)>,
ignore_mixed: bool,
) -> bool {
- if matches!(param.kind, Implicit | Starred | Named(_) | Numbered)
- && let ExprKind::Path(QPath::Resolved(None, path)) = param.value.kind
- && let [segment] = path.segments
+ let index = pos.index.unwrap();
+ let arg = &args.arguments.all_args()[index];
+
+ if !matches!(arg.kind, FormatArgumentKind::Captured(_))
+ && let rustc_ast::ExprKind::Path(None, path) = &arg.expr.kind
+ && let [segment] = path.segments.as_slice()
&& segment.args.is_none()
- && let Some(arg_span) = args.value_with_prev_comma_span(param.value.hir_id)
+ && let Some(arg_span) = format_arg_removal_span(args, index)
+ && let Some(pos_span) = pos.span
{
- let replacement = match param.usage {
+ let replacement = match usage {
FormatParamUsage::Argument => segment.ident.name.to_string(),
FormatParamUsage::Width => format!("{}$", segment.ident.name),
FormatParamUsage::Precision => format!(".{}$", segment.ident.name),
};
- fixes.push((param.span, replacement));
+ fixes.push((pos_span, replacement));
fixes.push((arg_span, String::new()));
true // successful inlining, continue checking
} else {
// Do not continue inlining (return false) in case
// * if we can't inline a numbered argument, e.g. `print!("{0} ...", foo.bar, ...)`
// * if allow_mixed_uninlined_format_args is false and this arg hasn't been inlined already
- param.kind != Numbered && (!ignore_mixed || matches!(param.kind, NamedInline(_)))
- }
-}
-
-fn outermost_expn_data(expn_data: ExpnData) -> ExpnData {
- if expn_data.call_site.from_expansion() {
- outermost_expn_data(expn_data.call_site.ctxt().outer_expn_data())
- } else {
- expn_data
+ pos.kind != FormatArgPositionKind::Number
+ && (!ignore_mixed || matches!(arg.kind, FormatArgumentKind::Captured(_)))
}
}
-fn check_format_in_format_args(
- cx: &LateContext<'_>,
- call_site: Span,
- name: Symbol,
- arg: &Expr<'_>,
-) {
+fn check_format_in_format_args(cx: &LateContext<'_>, call_site: Span, name: Symbol, arg: &Expr<'_>) {
let expn_data = arg.span.ctxt().outer_expn_data();
if expn_data.call_site.from_expansion() {
return;
@@ -443,9 +455,33 @@ fn check_to_string_in_format_args(cx: &LateContext<'_>, name: Symbol, value: &Ex
}
}
-/// Returns true if `hir_id` is referred to by multiple format params
-fn is_aliased(args: &FormatArgsExpn<'_>, hir_id: HirId) -> bool {
- args.params().filter(|param| param.value.hir_id == hir_id).at_most_one().is_err()
+fn format_arg_positions(
+ format_args: &rustc_ast::FormatArgs,
+) -> impl Iterator<Item = (&FormatArgPosition, FormatParamUsage)> {
+ format_args.template.iter().flat_map(|piece| match piece {
+ FormatArgsPiece::Placeholder(placeholder) => {
+ let mut positions = ArrayVec::<_, 3>::new();
+
+ positions.push((&placeholder.argument, FormatParamUsage::Argument));
+ if let Some(FormatCount::Argument(position)) = &placeholder.format_options.width {
+ positions.push((position, FormatParamUsage::Width));
+ }
+ if let Some(FormatCount::Argument(position)) = &placeholder.format_options.precision {
+ positions.push((position, FormatParamUsage::Precision));
+ }
+
+ positions
+ },
+ FormatArgsPiece::Literal(_) => ArrayVec::new(),
+ })
+}
+
+/// Returns true if the format argument at `index` is referred to by multiple format params
+fn is_aliased(format_args: &rustc_ast::FormatArgs, index: usize) -> bool {
+ format_arg_positions(format_args)
+ .filter(|(position, _)| position.index == Ok(index))
+ .at_most_one()
+ .is_err()
}
fn count_needed_derefs<'tcx, I>(mut ty: Ty<'tcx>, mut iter: I) -> (usize, Ty<'tcx>)
@@ -455,7 +491,11 @@ where
let mut n_total = 0;
let mut n_needed = 0;
loop {
- if let Some(Adjustment { kind: Adjust::Deref(overloaded_deref), target }) = iter.next() {
+ if let Some(Adjustment {
+ kind: Adjust::Deref(overloaded_deref),
+ target,
+ }) = iter.next()
+ {
n_total += 1;
if overloaded_deref.is_some() {
n_needed = n_total;
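A sketch of two patterns this pass now finds by walking the `rustc_ast::FormatArgs` template (values are illustrative):

    fn main() {
        let width = 8;
        let value = 3.14_f64;

        // `uninlined_format_args`: both the value and the named width argument
        // can be inlined into the format string on edition 2021.
        println!("{:width$.2}", value, width = width);

        // `to_string_in_format_args`: `{}` already formats through `Display`,
        // so the `.to_string()` call is redundant.
        println!("{}", value.to_string());
    }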
diff --git a/src/tools/clippy/clippy_lints/src/format_impl.rs b/src/tools/clippy/clippy_lints/src/format_impl.rs
index ed1342a54..e3ddbfb59 100644
--- a/src/tools/clippy/clippy_lints/src/format_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/format_impl.rs
@@ -1,11 +1,13 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
-use clippy_utils::macros::{is_format_macro, root_macro_call_first_node, FormatArg, FormatArgsExpn};
+use clippy_utils::macros::{find_format_arg_expr, find_format_args, is_format_macro, root_macro_call_first_node};
use clippy_utils::{get_parent_as_impl, is_diag_trait_item, path_to_local, peel_ref_operators};
use if_chain::if_chain;
+use rustc_ast::{FormatArgsPiece, FormatTrait};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Impl, ImplItem, ImplItemKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
use rustc_span::{sym, symbol::kw, Symbol};
declare_clippy_lint! {
@@ -89,7 +91,7 @@ declare_clippy_lint! {
}
#[derive(Clone, Copy)]
-struct FormatTrait {
+struct FormatTraitNames {
/// e.g. `sym::Display`
name: Symbol,
/// `f` in `fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {}`
@@ -99,7 +101,7 @@ struct FormatTrait {
#[derive(Default)]
pub struct FormatImpl {
// Whether we are inside Display or Debug trait impl - None for neither
- format_trait_impl: Option<FormatTrait>,
+ format_trait_impl: Option<FormatTraitNames>,
}
impl FormatImpl {
@@ -161,43 +163,57 @@ fn check_to_string_in_display(cx: &LateContext<'_>, expr: &Expr<'_>) {
}
}
-fn check_self_in_format_args<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, impl_trait: FormatTrait) {
+fn check_self_in_format_args<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, impl_trait: FormatTraitNames) {
// Check each arg in format calls - do we ever use Display on self (directly or via deref)?
- if_chain! {
- if let Some(outer_macro) = root_macro_call_first_node(cx, expr);
- if let macro_def_id = outer_macro.def_id;
- if let Some(format_args) = FormatArgsExpn::find_nested(cx, expr, outer_macro.expn);
- if is_format_macro(cx, macro_def_id);
- then {
- for arg in format_args.args {
- if arg.format.r#trait != impl_trait.name {
- continue;
+ if let Some(outer_macro) = root_macro_call_first_node(cx, expr)
+ && let macro_def_id = outer_macro.def_id
+ && is_format_macro(cx, macro_def_id)
+ {
+ find_format_args(cx, expr, outer_macro.expn, |format_args| {
+ for piece in &format_args.template {
+ if let FormatArgsPiece::Placeholder(placeholder) = piece
+ && let trait_name = match placeholder.format_trait {
+ FormatTrait::Display => sym::Display,
+ FormatTrait::Debug => sym::Debug,
+ FormatTrait::LowerExp => sym!(LowerExp),
+ FormatTrait::UpperExp => sym!(UpperExp),
+ FormatTrait::Octal => sym!(Octal),
+ FormatTrait::Pointer => sym::Pointer,
+ FormatTrait::Binary => sym!(Binary),
+ FormatTrait::LowerHex => sym!(LowerHex),
+ FormatTrait::UpperHex => sym!(UpperHex),
+ }
+ && trait_name == impl_trait.name
+ && let Ok(index) = placeholder.argument.index
+ && let Some(arg) = format_args.arguments.all_args().get(index)
+ && let Ok(arg_expr) = find_format_arg_expr(expr, arg)
+ {
+ check_format_arg_self(cx, expr.span, arg_expr, impl_trait);
}
- check_format_arg_self(cx, expr, &arg, impl_trait);
}
- }
+ });
}
}
-fn check_format_arg_self(cx: &LateContext<'_>, expr: &Expr<'_>, arg: &FormatArg<'_>, impl_trait: FormatTrait) {
+fn check_format_arg_self(cx: &LateContext<'_>, span: Span, arg: &Expr<'_>, impl_trait: FormatTraitNames) {
// Handle multiple dereferencing of references e.g. &&self
// Handle dereference of &self -> self that is equivalent (i.e. via *self in fmt() impl)
// Since the argument to fmt is itself a reference: &self
- let reference = peel_ref_operators(cx, arg.param.value);
+ let reference = peel_ref_operators(cx, arg);
let map = cx.tcx.hir();
// Is the reference self?
if path_to_local(reference).map(|x| map.name(x)) == Some(kw::SelfLower) {
- let FormatTrait { name, .. } = impl_trait;
+ let FormatTraitNames { name, .. } = impl_trait;
span_lint(
cx,
RECURSIVE_FORMAT_IMPL,
- expr.span,
+ span,
&format!("using `self` as `{name}` in `impl {name}` will cause infinite recursion"),
);
}
}
-fn check_print_in_format_impl(cx: &LateContext<'_>, expr: &Expr<'_>, impl_trait: FormatTrait) {
+fn check_print_in_format_impl(cx: &LateContext<'_>, expr: &Expr<'_>, impl_trait: FormatTraitNames) {
if_chain! {
if let Some(macro_call) = root_macro_call_first_node(cx, expr);
if let Some(name) = cx.tcx.get_diagnostic_name(macro_call.def_id);
@@ -227,7 +243,7 @@ fn check_print_in_format_impl(cx: &LateContext<'_>, expr: &Expr<'_>, impl_trait:
}
}
-fn is_format_trait_impl(cx: &LateContext<'_>, impl_item: &ImplItem<'_>) -> Option<FormatTrait> {
+fn is_format_trait_impl(cx: &LateContext<'_>, impl_item: &ImplItem<'_>) -> Option<FormatTraitNames> {
if_chain! {
if impl_item.ident.name == sym::fmt;
if let ImplItemKind::Fn(_, body_id) = impl_item.kind;
@@ -241,7 +257,7 @@ fn is_format_trait_impl(cx: &LateContext<'_>, impl_item: &ImplItem<'_>) -> Optio
.and_then(|param| param.pat.simple_ident())
.map(|ident| ident.name);
- Some(FormatTrait {
+ Some(FormatTraitNames {
name,
formatter_name,
})
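The recursion the renamed `FormatTraitNames` plumbing still guards against, sketched with an illustrative type:

    use std::fmt;

    struct Amount(u64);

    impl fmt::Display for Amount {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Formatting `self` with `{}` inside its own `Display` impl re-enters
            // this method forever; this is what `recursive_format_impl` reports.
            write!(f, "{} units", self)
        }
    }

    fn main() {
        // Calling `to_string()` on an `Amount` would overflow the stack, so the
        // value is only constructed here.
        let _units = Amount(3).0;
    }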
diff --git a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
index 2811a73f6..d3d0d91c1 100644
--- a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
@@ -22,7 +22,7 @@ pub(super) fn check_fn<'tcx>(cx: &LateContext<'_>, kind: &'tcx FnKind<'_>, body:
if let Some(gen_span) = generics.span_for_param_suggestion() {
diag.span_suggestion_with_style(
gen_span,
- "add a type paremeter",
+ "add a type parameter",
format!(", {{ /* Generic name */ }}: {}", &param.name.ident().as_str()[5..]),
rustc_errors::Applicability::HasPlaceholders,
rustc_errors::SuggestionStyle::ShowAlways,
@@ -35,7 +35,7 @@ pub(super) fn check_fn<'tcx>(cx: &LateContext<'_>, kind: &'tcx FnKind<'_>, body:
ident.span.ctxt(),
ident.span.parent(),
),
- "add a type paremeter",
+ "add a type parameter",
format!("<{{ /* Generic name */ }}: {}>", &param.name.ident().as_str()[5..]),
rustc_errors::Applicability::HasPlaceholders,
rustc_errors::SuggestionStyle::ShowAlways,
diff --git a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
index 8b53ee68e..e5945939e 100644
--- a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
@@ -97,7 +97,7 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body:
let Some(correct_field) = correct_field else {
// There is no field corresponding to the getter name.
- // FIXME: This can be a false positive if the correct field is reachable trought deeper autodereferences than used_field is
+ // FIXME: This can be a false positive if the correct field is reachable through deeper autodereferences than used_field is
return;
};
diff --git a/src/tools/clippy/clippy_lints/src/functions/mod.rs b/src/tools/clippy/clippy_lints/src/functions/mod.rs
index d2852b4ac..7c5e44bb7 100644
--- a/src/tools/clippy/clippy_lints/src/functions/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/mod.rs
@@ -185,7 +185,7 @@ declare_clippy_lint! {
/// ### Examples
/// ```rust
/// // this could be annotated with `#[must_use]`.
- /// fn id<T>(t: T) -> T { t }
+ /// pub fn id<T>(t: T) -> T { t }
/// ```
#[clippy::version = "1.40.0"]
pub MUST_USE_CANDIDATE,
diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs
index 29bdc46b6..d0ad26282 100644
--- a/src/tools/clippy/clippy_lints/src/functions/must_use.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs
@@ -1,7 +1,9 @@
+use hir::FnSig;
use rustc_ast::ast::Attribute;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefIdSet;
use rustc_hir::{self as hir, def::Res, QPath};
+use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LintContext};
use rustc_middle::{
lint::in_external_macro,
@@ -22,13 +24,13 @@ use super::{DOUBLE_MUST_USE, MUST_USE_CANDIDATE, MUST_USE_UNIT};
pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let attr = cx.tcx.get_attr(item.owner_id.to_def_id(), sym::must_use);
+ let attr = cx.tcx.get_attr(item.owner_id, sym::must_use);
if let hir::ItemKind::Fn(ref sig, _generics, ref body_id) = item.kind {
let is_public = cx.effective_visibilities.is_exported(item.owner_id.def_id);
let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
if let Some(attr) = attr {
- check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr);
- } else if is_public && !is_proc_macro(cx.sess(), attrs) && !attrs.iter().any(|a| a.has_name(sym::no_mangle)) {
+ check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, sig);
+ } else if is_public && !is_proc_macro(attrs) && !attrs.iter().any(|a| a.has_name(sym::no_mangle)) {
check_must_use_candidate(
cx,
sig.decl,
@@ -47,13 +49,10 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Imp
let is_public = cx.effective_visibilities.is_exported(item.owner_id.def_id);
let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let attr = cx.tcx.get_attr(item.owner_id.to_def_id(), sym::must_use);
+ let attr = cx.tcx.get_attr(item.owner_id, sym::must_use);
if let Some(attr) = attr {
- check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr);
- } else if is_public
- && !is_proc_macro(cx.sess(), attrs)
- && trait_ref_of_method(cx, item.owner_id.def_id).is_none()
- {
+ check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, sig);
+ } else if is_public && !is_proc_macro(attrs) && trait_ref_of_method(cx, item.owner_id.def_id).is_none() {
check_must_use_candidate(
cx,
sig.decl,
@@ -73,12 +72,12 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr
let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let attr = cx.tcx.get_attr(item.owner_id.to_def_id(), sym::must_use);
+ let attr = cx.tcx.get_attr(item.owner_id, sym::must_use);
if let Some(attr) = attr {
- check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr);
+ check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, sig);
} else if let hir::TraitFn::Provided(eid) = *eid {
let body = cx.tcx.hir().body(eid);
- if attr.is_none() && is_public && !is_proc_macro(cx.sess(), attrs) {
+ if attr.is_none() && is_public && !is_proc_macro(attrs) {
check_must_use_candidate(
cx,
sig.decl,
@@ -100,6 +99,7 @@ fn check_needless_must_use(
item_span: Span,
fn_header_span: Span,
attr: &Attribute,
+ sig: &FnSig<'_>,
) {
if in_external_macro(cx.sess(), item_span) {
return;
@@ -115,6 +115,15 @@ fn check_needless_must_use(
},
);
} else if attr.value_str().is_none() && is_must_use_ty(cx, return_ty(cx, item_id)) {
+ // Ignore async functions unless the `Future::Output` type is a `must_use` type
+ if sig.header.is_async() {
+ let infcx = cx.tcx.infer_ctxt().build();
+ if let Some(future_ty) = infcx.get_impl_future_output_ty(return_ty(cx, item_id))
+ && !is_must_use_ty(cx, future_ty) {
+ return;
+ }
+ }
+
span_lint_and_help(
cx,
DOUBLE_MUST_USE,
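The `sig` argument threaded through `check_needless_must_use` above is what lets the `double_must_use` check skip `async fn`s whose `Future::Output` is not itself a `must_use` type. A minimal sketch of the two cases, with invented function names:

```rust
// Not reported after this change: the future's `Output` is `u32`, which is
// not a `must_use` type, so the attribute still carries information.
#[must_use]
pub async fn fetch_len() -> u32 {
    42
}

// Still reported: the `Output` is a `Result`, which is already `#[must_use]`,
// so the extra attribute is redundant.
#[must_use]
pub async fn try_fetch() -> Result<u32, ()> {
    Ok(42)
}
```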
diff --git a/src/tools/clippy/clippy_lints/src/future_not_send.rs b/src/tools/clippy/clippy_lints/src/future_not_send.rs
index 9fb73a371..ed0bd58c7 100644
--- a/src/tools/clippy/clippy_lints/src/future_not_send.rs
+++ b/src/tools/clippy/clippy_lints/src/future_not_send.rs
@@ -9,7 +9,7 @@ use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::def_id::LocalDefId;
use rustc_span::{sym, Span};
use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt;
-use rustc_trait_selection::traits::{self, FulfillmentError};
+use rustc_trait_selection::traits::{self, FulfillmentError, ObligationCtxt};
declare_clippy_lint! {
/// ### What it does
@@ -79,8 +79,10 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend {
let send_trait = cx.tcx.get_diagnostic_item(sym::Send).unwrap();
let span = decl.output.span();
let infcx = cx.tcx.infer_ctxt().build();
+ let ocx = ObligationCtxt::new(&infcx);
let cause = traits::ObligationCause::misc(span, fn_def_id);
- let send_errors = traits::fully_solve_bound(&infcx, cause, cx.param_env, ret_ty, send_trait);
+ ocx.register_bound(cause, cx.param_env, ret_ty, send_trait);
+ let send_errors = ocx.select_all_or_error();
if !send_errors.is_empty() {
span_lint_and_then(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
index 9cadaaa49..725bd3d54 100644
--- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
+++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
@@ -1,8 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::eager_or_lazy::switch_to_eager_eval;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::sugg::Sugg;
use clippy_utils::{contains_return, higher, is_else_clause, is_res_lang_ctor, path_res, peel_blocks};
+use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionNone, OptionSome};
use rustc_hir::{Expr, ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
@@ -72,21 +74,20 @@ impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone {
return;
}
+ let ctxt = expr.span.ctxt();
+
if let Some(higher::If { cond, then, r#else: Some(els) }) = higher::If::hir(expr)
&& let ExprKind::Block(then_block, _) = then.kind
&& let Some(then_expr) = then_block.expr
&& let ExprKind::Call(then_call, [then_arg]) = then_expr.kind
+ && then_expr.span.ctxt() == ctxt
&& is_res_lang_ctor(cx, path_res(cx, then_call), OptionSome)
&& is_res_lang_ctor(cx, path_res(cx, peel_blocks(els)), OptionNone)
&& !stmts_contains_early_return(then_block.stmts)
{
- let cond_snip = snippet_with_macro_callsite(cx, cond.span, "[condition]");
- let cond_snip = if matches!(cond.kind, ExprKind::Unary(_, _) | ExprKind::Binary(_, _, _)) {
- format!("({cond_snip})")
- } else {
- cond_snip.into_owned()
- };
- let arg_snip = snippet_with_macro_callsite(cx, then_arg.span, "");
+ let mut app = Applicability::Unspecified;
+ let cond_snip = Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "[condition]", &mut app).maybe_par().to_string();
+ let arg_snip = snippet_with_context(cx, then_arg.span, ctxt, "[body]", &mut app).0;
let mut method_body = if then_block.stmts.is_empty() {
arg_snip.into_owned()
} else {
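For reference, `if_then_some_else_none` rewrites this `if`/`else` shape into `bool::then`; building the condition with `Sugg::hir_with_context(..).maybe_par()` means parentheses are added only when precedence requires them. A rough before/after sketch with made-up values:

```rust
fn before(x: i32) -> Option<i32> {
    // The shape the lint looks for.
    if x > 0 { Some(x * 2) } else { None }
}

fn after(x: i32) -> Option<i32> {
    // The kind of replacement it suggests; the comparison is wrapped in
    // parentheses because the method call requires it.
    (x > 0).then(|| x * 2)
}
```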
diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs
index 6e1934393..57e6caa87 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs
@@ -1,7 +1,7 @@
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::get_parent_expr;
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use if_chain::if_chain;
use rustc_ast::ast::{LitIntType, LitKind};
use rustc_errors::Applicability;
@@ -55,6 +55,9 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingAdd {
if let ExprKind::AssignOp(op1, target, value) = ex.kind;
let ty = cx.typeck_results().expr_ty(target);
if Some(c) == get_int_max(ty);
+ let ctxt = expr.span.ctxt();
+ if ex.span.ctxt() == ctxt;
+ if expr1.span.ctxt() == ctxt;
if clippy_utils::SpanlessEq::new(cx).eq_expr(l, target);
if BinOpKind::Add == op1.node;
if let ExprKind::Lit(ref lit) = value.kind;
@@ -62,8 +65,15 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingAdd {
if block.expr.is_none();
then {
let mut app = Applicability::MachineApplicable;
- let code = snippet_with_applicability(cx, target.span, "_", &mut app);
- let sugg = if let Some(parent) = get_parent_expr(cx, expr) && let ExprKind::If(_cond, _then, Some(else_)) = parent.kind && else_.hir_id == expr.hir_id {format!("{{{code} = {code}.saturating_add(1); }}")} else {format!("{code} = {code}.saturating_add(1);")};
+ let code = snippet_with_context(cx, target.span, ctxt, "_", &mut app).0;
+ let sugg = if let Some(parent) = get_parent_expr(cx, expr)
+ && let ExprKind::If(_cond, _then, Some(else_)) = parent.kind
+ && else_.hir_id == expr.hir_id
+ {
+ format!("{{{code} = {code}.saturating_add(1); }}")
+ } else {
+ format!("{code} = {code}.saturating_add(1);")
+ };
span_lint_and_sugg(cx, IMPLICIT_SATURATING_ADD, expr.span, "manual saturating add detected", "use instead", sugg, app);
}
}
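The suggestion string above shows the rewrite `implicit_saturating_add` performs; the surrounding change only adds macro-context checks and reformats the suggestion builder. A small illustration of the guarded-increment pattern it targets:

```rust
fn bump_before(mut counter: u8) -> u8 {
    // Detected: an increment guarded by a comparison against the type's MAX.
    if counter != u8::MAX {
        counter += 1;
    }
    counter
}

fn bump_after(mut counter: u8) -> u8 {
    // What the suggestion produces instead.
    counter = counter.saturating_add(1);
    counter
}
```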
diff --git a/src/tools/clippy/clippy_lints/src/infinite_iter.rs b/src/tools/clippy/clippy_lints/src/infinite_iter.rs
index d1d2db27c..fe28c526b 100644
--- a/src/tools/clippy/clippy_lints/src/infinite_iter.rs
+++ b/src/tools/clippy/clippy_lints/src/infinite_iter.rs
@@ -167,7 +167,7 @@ fn is_infinite(cx: &LateContext<'_>, expr: &Expr<'_>) -> Finiteness {
Finite
},
ExprKind::Block(block, _) => block.expr.as_ref().map_or(Finite, |e| is_infinite(cx, e)),
- ExprKind::Box(e) | ExprKind::AddrOf(BorrowKind::Ref, _, e) => is_infinite(cx, e),
+ ExprKind::AddrOf(BorrowKind::Ref, _, e) => is_infinite(cx, e),
ExprKind::Call(path, _) => {
if let ExprKind::Path(ref qpath) = path.kind {
cx.qpath_res(qpath, path.hir_id)
diff --git a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
index 668110c7c..34e999158 100644
--- a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
+++ b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::{self, span_lint_and_sugg};
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::source;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty;
use rustc_errors::Applicability;
@@ -161,14 +161,9 @@ fn print_unchecked_duration_subtraction_sugg(
) {
let mut applicability = Applicability::MachineApplicable;
- let left_expr =
- source::snippet_with_applicability(cx, left_expr.span, "std::time::Instant::now()", &mut applicability);
- let right_expr = source::snippet_with_applicability(
- cx,
- right_expr.span,
- "std::time::Duration::from_secs(1)",
- &mut applicability,
- );
+ let ctxt = expr.span.ctxt();
+ let left_expr = snippet_with_context(cx, left_expr.span, ctxt, "<instant>", &mut applicability).0;
+ let right_expr = snippet_with_context(cx, right_expr.span, ctxt, "<duration>", &mut applicability).0;
diagnostics::span_lint_and_sugg(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/items_after_statements.rs b/src/tools/clippy/clippy_lints/src/items_after_statements.rs
index 46d439b44..a7ec57e28 100644
--- a/src/tools/clippy/clippy_lints/src/items_after_statements.rs
+++ b/src/tools/clippy/clippy_lints/src/items_after_statements.rs
@@ -1,8 +1,8 @@
//! lint when items are used after statements
-use clippy_utils::diagnostics::span_lint;
-use rustc_ast::ast::{Block, ItemKind, StmtKind};
-use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use clippy_utils::diagnostics::span_lint_hir;
+use rustc_hir::{Block, ItemKind, StmtKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
@@ -52,33 +52,34 @@ declare_clippy_lint! {
declare_lint_pass!(ItemsAfterStatements => [ITEMS_AFTER_STATEMENTS]);
-impl EarlyLintPass for ItemsAfterStatements {
- fn check_block(&mut self, cx: &EarlyContext<'_>, item: &Block) {
- if in_external_macro(cx.sess(), item.span) {
+impl LateLintPass<'_> for ItemsAfterStatements {
+ fn check_block(&mut self, cx: &LateContext<'_>, block: &Block<'_>) {
+ if in_external_macro(cx.sess(), block.span) {
return;
}
- // skip initial items and trailing semicolons
- let stmts = item
+ // skip initial items
+ let stmts = block
.stmts
.iter()
- .map(|stmt| &stmt.kind)
- .skip_while(|s| matches!(**s, StmtKind::Item(..) | StmtKind::Empty));
+ .skip_while(|stmt| matches!(stmt.kind, StmtKind::Item(..)));
// lint on all further items
for stmt in stmts {
- if let StmtKind::Item(ref it) = *stmt {
- if in_external_macro(cx.sess(), it.span) {
+ if let StmtKind::Item(item_id) = stmt.kind {
+ let item = cx.tcx.hir().item(item_id);
+ if in_external_macro(cx.sess(), item.span) || !item.span.eq_ctxt(block.span) {
return;
}
- if let ItemKind::MacroDef(..) = it.kind {
+ if let ItemKind::Macro(..) = item.kind {
// do not lint `macro_rules`, but continue processing further statements
continue;
}
- span_lint(
+ span_lint_hir(
cx,
ITEMS_AFTER_STATEMENTS,
- it.span,
+ item.hir_id(),
+ item.span,
"adding items after statements is confusing, since items exist from the \
start of the scope",
);
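As a reminder of what the now-late pass reports: an item declared after the first statement of a block, which reads as if it were scoped to that point even though items are visible from the start of the scope. A hedged sketch:

```rust
fn compute() -> u32 {
    0
}

fn example() {
    let x = compute();

    // Reported: `helper` exists from the start of the block, not from here.
    fn helper(v: u32) -> u32 {
        v + 1
    }

    let _ = helper(x);
}
```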
diff --git a/src/tools/clippy/clippy_lints/src/large_futures.rs b/src/tools/clippy/clippy_lints/src/large_futures.rs
new file mode 100644
index 000000000..1b0544813
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/large_futures.rs
@@ -0,0 +1,87 @@
+use clippy_utils::source::snippet;
+use clippy_utils::{diagnostics::span_lint_and_sugg, ty::implements_trait};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_target::abi::Size;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It checks for the size of a `Future` created by `async fn` or `async {}`.
+ ///
+ /// ### Why is this bad?
+ /// Due to the current [suboptimal implementation](https://github.com/rust-lang/rust/issues/69826) of `Generator`,
+ /// a large `Future` may cause stack overflows.
+ ///
+ /// ### Example
+ /// ```rust
+ /// async fn wait(f: impl std::future::Future<Output = ()>) {}
+ ///
+ /// async fn big_fut(arg: [u8; 1024]) {}
+ ///
+ /// pub async fn test() {
+ /// let fut = big_fut([0u8; 1024]);
+ /// wait(fut).await;
+ /// }
+ /// ```
+ ///
+ /// `Box::pin` the big future instead.
+ ///
+ /// ```rust
+ /// async fn wait(f: impl std::future::Future<Output = ()>) {}
+ ///
+ /// async fn big_fut(arg: [u8; 1024]) {}
+ ///
+ /// pub async fn test() {
+ /// let fut = Box::pin(big_fut([0u8; 1024]));
+ /// wait(fut).await;
+ /// }
+ /// ```
+ #[clippy::version = "1.68.0"]
+ pub LARGE_FUTURES,
+ pedantic,
+ "large future may lead to unexpected stack overflows"
+}
+
+#[derive(Copy, Clone)]
+pub struct LargeFuture {
+ future_size_threshold: u64,
+}
+
+impl LargeFuture {
+ pub fn new(future_size_threshold: u64) -> Self {
+ Self { future_size_threshold }
+ }
+}
+
+impl_lint_pass!(LargeFuture => [LARGE_FUTURES]);
+
+impl<'tcx> LateLintPass<'tcx> for LargeFuture {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if matches!(expr.span.ctxt().outer_expn_data().kind, rustc_span::ExpnKind::Macro(..)) {
+ return;
+ }
+ if let ExprKind::Match(expr, _, MatchSource::AwaitDesugar) = expr.kind {
+ if let ExprKind::Call(func, [expr, ..]) = expr.kind
+ && let ExprKind::Path(QPath::LangItem(LangItem::IntoFutureIntoFuture, ..)) = func.kind
+ && let ty = cx.typeck_results().expr_ty(expr)
+ && let Some(future_trait_def_id) = cx.tcx.lang_items().future_trait()
+ && implements_trait(cx, ty, future_trait_def_id, &[])
+ && let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(ty))
+ && let size = layout.layout.size()
+ && size >= Size::from_bytes(self.future_size_threshold)
+ {
+ span_lint_and_sugg(
+ cx,
+ LARGE_FUTURES,
+ expr.span,
+ &format!("large future with a size of {} bytes", size.bytes()),
+ "consider `Box::pin` on it",
+ format!("Box::pin({})", snippet(cx, expr.span, "..")),
+ Applicability::Unspecified,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs
index e13bc4797..0805b4b19 100644
--- a/src/tools/clippy/clippy_lints/src/len_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/len_zero.rs
@@ -1,13 +1,14 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then};
-use clippy_utils::source::snippet_with_applicability;
-use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators};
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators, sugg::Sugg};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefIdSet;
use rustc_hir::{
- def_id::DefId, AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, ImplItem, ImplItemKind, ImplicitSelfKind, Item,
- ItemKind, Mutability, Node, TraitItemRef, TyKind, UnOp,
+ def::Res, def_id::DefId, lang_items::LangItem, AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg,
+ GenericBound, ImplItem, ImplItemKind, ImplicitSelfKind, Item, ItemKind, Mutability, Node, PathSegment, PrimTy,
+ QPath, TraitItemRef, TyKind, TypeBindingKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, AssocKind, FnSig, Ty};
@@ -16,7 +17,6 @@ use rustc_span::{
source_map::{Span, Spanned, Symbol},
symbol::sym,
};
-use std::borrow::Cow;
declare_clippy_lint! {
/// ### What it does
@@ -251,33 +251,98 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, trait_items
}
#[derive(Debug, Clone, Copy)]
-enum LenOutput<'tcx> {
+enum LenOutput {
Integral,
Option(DefId),
- Result(DefId, Ty<'tcx>),
+ Result(DefId),
}
-fn parse_len_output<'tcx>(cx: &LateContext<'_>, sig: FnSig<'tcx>) -> Option<LenOutput<'tcx>> {
+
+fn extract_future_output<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<&'tcx PathSegment<'tcx>> {
+ if let ty::Alias(_, alias_ty) = ty.kind() &&
+ let Some(Node::Item(item)) = cx.tcx.hir().get_if_local(alias_ty.def_id) &&
+ let Item { kind: ItemKind::OpaqueTy(opaque), .. } = item &&
+ opaque.bounds.len() == 1 &&
+ let GenericBound::LangItemTrait(LangItem::Future, _, _, generic_args) = &opaque.bounds[0] &&
+ generic_args.bindings.len() == 1 &&
+ let TypeBindingKind::Equality {
+ term: rustc_hir::Term::Ty(rustc_hir::Ty {kind: TyKind::Path(QPath::Resolved(_, path)), .. }),
+ } = &generic_args.bindings[0].kind &&
+ path.segments.len() == 1 {
+ return Some(&path.segments[0]);
+ }
+
+ None
+}
+
+fn is_first_generic_integral<'tcx>(segment: &'tcx PathSegment<'tcx>) -> bool {
+ if let Some(generic_args) = segment.args {
+ if generic_args.args.is_empty() {
+ return false;
+ }
+ let arg = &generic_args.args[0];
+ if let GenericArg::Type(rustc_hir::Ty {
+ kind: TyKind::Path(QPath::Resolved(_, path)),
+ ..
+ }) = arg
+ {
+ let segments = &path.segments;
+ let segment = &segments[0];
+ let res = &segment.res;
+ if matches!(res, Res::PrimTy(PrimTy::Uint(_))) || matches!(res, Res::PrimTy(PrimTy::Int(_))) {
+ return true;
+ }
+ }
+ }
+
+ false
+}
+
+fn parse_len_output<'tcx>(cx: &LateContext<'tcx>, sig: FnSig<'tcx>) -> Option<LenOutput> {
+ if let Some(segment) = extract_future_output(cx, sig.output()) {
+ let res = segment.res;
+
+ if matches!(res, Res::PrimTy(PrimTy::Uint(_))) || matches!(res, Res::PrimTy(PrimTy::Int(_))) {
+ return Some(LenOutput::Integral);
+ }
+
+ if let Res::Def(_, def_id) = res {
+ if cx.tcx.is_diagnostic_item(sym::Option, def_id) && is_first_generic_integral(segment) {
+ return Some(LenOutput::Option(def_id));
+ } else if cx.tcx.is_diagnostic_item(sym::Result, def_id) && is_first_generic_integral(segment) {
+ return Some(LenOutput::Result(def_id));
+ }
+ }
+
+ return None;
+ }
+
match *sig.output().kind() {
ty::Int(_) | ty::Uint(_) => Some(LenOutput::Integral),
ty::Adt(adt, subs) if cx.tcx.is_diagnostic_item(sym::Option, adt.did()) => {
subs.type_at(0).is_integral().then(|| LenOutput::Option(adt.did()))
},
- ty::Adt(adt, subs) if cx.tcx.is_diagnostic_item(sym::Result, adt.did()) => subs
- .type_at(0)
- .is_integral()
- .then(|| LenOutput::Result(adt.did(), subs.type_at(1))),
+ ty::Adt(adt, subs) if cx.tcx.is_diagnostic_item(sym::Result, adt.did()) => {
+ subs.type_at(0).is_integral().then(|| LenOutput::Result(adt.did()))
+ },
_ => None,
}
}
-impl<'tcx> LenOutput<'tcx> {
- fn matches_is_empty_output(self, ty: Ty<'tcx>) -> bool {
+impl LenOutput {
+ fn matches_is_empty_output<'tcx>(self, cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ if let Some(segment) = extract_future_output(cx, ty) {
+ return match (self, segment.res) {
+ (_, Res::PrimTy(PrimTy::Bool)) => true,
+ (Self::Option(_), Res::Def(_, def_id)) if cx.tcx.is_diagnostic_item(sym::Option, def_id) => true,
+ (Self::Result(_), Res::Def(_, def_id)) if cx.tcx.is_diagnostic_item(sym::Result, def_id) => true,
+ _ => false,
+ };
+ }
+
match (self, ty.kind()) {
(_, &ty::Bool) => true,
(Self::Option(id), &ty::Adt(adt, subs)) if id == adt.did() => subs.type_at(0).is_bool(),
- (Self::Result(id, err_ty), &ty::Adt(adt, subs)) if id == adt.did() => {
- subs.type_at(0).is_bool() && subs.type_at(1) == err_ty
- },
+ (Self::Result(id), &ty::Adt(adt, subs)) if id == adt.did() => subs.type_at(0).is_bool(),
_ => false,
}
}
@@ -301,9 +366,14 @@ impl<'tcx> LenOutput<'tcx> {
}
/// Checks if the given signature matches the expectations for `is_empty`
-fn check_is_empty_sig<'tcx>(sig: FnSig<'tcx>, self_kind: ImplicitSelfKind, len_output: LenOutput<'tcx>) -> bool {
+fn check_is_empty_sig<'tcx>(
+ cx: &LateContext<'tcx>,
+ sig: FnSig<'tcx>,
+ self_kind: ImplicitSelfKind,
+ len_output: LenOutput,
+) -> bool {
match &**sig.inputs_and_output {
- [arg, res] if len_output.matches_is_empty_output(*res) => {
+ [arg, res] if len_output.matches_is_empty_output(cx, *res) => {
matches!(
(arg.kind(), self_kind),
(ty::Ref(_, _, Mutability::Not), ImplicitSelfKind::ImmRef)
@@ -315,11 +385,11 @@ fn check_is_empty_sig<'tcx>(sig: FnSig<'tcx>, self_kind: ImplicitSelfKind, len_o
}
/// Checks if the given type has an `is_empty` method with the appropriate signature.
-fn check_for_is_empty<'tcx>(
- cx: &LateContext<'tcx>,
+fn check_for_is_empty(
+ cx: &LateContext<'_>,
span: Span,
self_kind: ImplicitSelfKind,
- output: LenOutput<'tcx>,
+ output: LenOutput,
impl_ty: DefId,
item_name: Symbol,
item_kind: &str,
@@ -352,6 +422,7 @@ fn check_for_is_empty<'tcx>(
Some(is_empty)
if !(is_empty.fn_has_self_parameter
&& check_is_empty_sig(
+ cx,
cx.tcx.fn_sig(is_empty.def_id).subst_identity().skip_binder(),
self_kind,
output,
@@ -431,7 +502,7 @@ fn check_len(
&format!("using `{op}is_empty` is clearer and more explicit"),
format!(
"{op}{}.is_empty()",
- snippet_with_applicability(cx, receiver.span, "_", &mut applicability)
+ snippet_with_context(cx, receiver.span, span.ctxt(), "_", &mut applicability).0,
),
applicability,
);
@@ -444,13 +515,7 @@ fn check_empty_expr(cx: &LateContext<'_>, span: Span, lit1: &Expr<'_>, lit2: &Ex
let mut applicability = Applicability::MachineApplicable;
let lit1 = peel_ref_operators(cx, lit1);
- let mut lit_str = snippet_with_applicability(cx, lit1.span, "_", &mut applicability);
-
- // Wrap the expression in parentheses if it's a deref expression. Otherwise operator precedence will
- // cause the code to dereference boolean(won't compile).
- if let ExprKind::Unary(UnOp::Deref, _) = lit1.kind {
- lit_str = Cow::from(format!("({lit_str})"));
- }
+ let lit_str = Sugg::hir_with_context(cx, lit1, span.ctxt(), "_", &mut applicability).maybe_par();
span_lint_and_sugg(
cx,
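With `extract_future_output` in place, `parse_len_output` and `matches_is_empty_output` can see through the opaque `impl Future` that an `async fn` desugars to, so an async `len` with a matching async `is_empty` is now handled. A rough sketch of such a pair (the type is invented for illustration):

```rust
struct AsyncBuffer {
    data: Vec<u8>,
}

impl AsyncBuffer {
    // Desugars to `impl Future<Output = usize>`; the new code recognises
    // the integral `Output` as a valid `len` return type.
    pub async fn len(&self) -> usize {
        self.data.len()
    }

    // Accepted as the matching `is_empty`: an `impl Future<Output = bool>`
    // satisfies `matches_is_empty_output` for an integral `len`.
    pub async fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
}
```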
diff --git a/src/tools/clippy/clippy_lints/src/let_underscore.rs b/src/tools/clippy/clippy_lints/src/let_underscore.rs
index 7600777fa..51b5de27d 100644
--- a/src/tools/clippy/clippy_lints/src/let_underscore.rs
+++ b/src/tools/clippy/clippy_lints/src/let_underscore.rs
@@ -124,7 +124,7 @@ declare_clippy_lint! {
/// ```
#[clippy::version = "1.69.0"]
pub LET_UNDERSCORE_UNTYPED,
- pedantic,
+ restriction,
"non-binding `let` without a type annotation"
}
diff --git a/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs b/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs
new file mode 100644
index 000000000..c01e3882d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/let_with_type_underscore.rs
@@ -0,0 +1,45 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{Local, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects when a variable is declared with an explicit type of `_`.
+ /// ### Why is this bad?
+ /// It adds noise, and `: _` provides no clarity or utility.
+ /// ### Example
+ /// ```rust,ignore
+ /// let my_number: _ = 1;
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// let my_number = 1;
+ /// ```
+ #[clippy::version = "1.69.0"]
+ pub LET_WITH_TYPE_UNDERSCORE,
+ complexity,
+ "unneeded underscore type (`_`) in a variable declaration"
+}
+declare_lint_pass!(UnderscoreTyped => [LET_WITH_TYPE_UNDERSCORE]);
+
+impl LateLintPass<'_> for UnderscoreTyped {
+ fn check_local<'tcx>(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
+ if_chain! {
+ if !in_external_macro(cx.tcx.sess, local.span);
+ if let Some(ty) = local.ty; // Ensure that it has a type defined
+ if let TyKind::Infer = &ty.kind; // that type is '_'
+ if local.span.ctxt() == ty.span.ctxt();
+ then {
+ span_lint_and_help(cx,
+ LET_WITH_TYPE_UNDERSCORE,
+ local.span,
+ "variable declared with type underscore",
+ Some(ty.span.with_lo(local.pat.span.hi())),
+ "remove the explicit type `_` declaration"
+ )
+ }
+ };
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs
index c626e0bd9..b0ec14855 100644
--- a/src/tools/clippy/clippy_lints/src/lib.rs
+++ b/src/tools/clippy/clippy_lints/src/lib.rs
@@ -1,13 +1,11 @@
#![feature(array_windows)]
#![feature(binary_heap_into_iter_sorted)]
#![feature(box_patterns)]
-#![feature(drain_filter)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![feature(let_chains)]
#![feature(lint_reasons)]
#![feature(never_type)]
-#![feature(once_cell)]
#![feature(rustc_private)]
#![feature(stmt_expr_attributes)]
#![recursion_limit = "512"]
@@ -67,6 +65,7 @@ mod declared_lints;
mod renamed_lints;
// begin lints modules, do not remove this comment, it’s used in `update_lints`
+mod allow_attributes;
mod almost_complete_range;
mod approx_const;
mod as_conversions;
@@ -87,6 +86,7 @@ mod casts;
mod checked_conversions;
mod cognitive_complexity;
mod collapsible_if;
+mod collection_is_never_read;
mod comparison_chain;
mod copies;
mod copy_iterator;
@@ -161,12 +161,15 @@ mod items_after_statements;
mod iter_not_returning_iterator;
mod large_const_arrays;
mod large_enum_variant;
+mod large_futures;
mod large_include_file;
mod large_stack_arrays;
mod len_zero;
mod let_if_seq;
mod let_underscore;
+mod let_with_type_underscore;
mod lifetimes;
+mod lines_filter_map_ok;
mod literal_representation;
mod loops;
mod macro_use;
@@ -177,9 +180,11 @@ mod manual_bits;
mod manual_clamp;
mod manual_is_ascii_check;
mod manual_let_else;
+mod manual_main_separator_str;
mod manual_non_exhaustive;
mod manual_rem_euclid;
mod manual_retain;
+mod manual_slice_size_calculation;
mod manual_string_new;
mod manual_strip;
mod map_unit_fn;
@@ -192,6 +197,7 @@ mod minmax;
mod misc;
mod misc_early;
mod mismatching_type_param_order;
+mod missing_assert_message;
mod missing_const_for_fn;
mod missing_doc;
mod missing_enforced_import_rename;
@@ -249,6 +255,7 @@ mod question_mark_used;
mod ranges;
mod rc_clone_in_vec_init;
mod read_zero_byte_vec;
+mod redundant_async_block;
mod redundant_clone;
mod redundant_closure_call;
mod redundant_else;
@@ -276,6 +283,7 @@ mod slow_vector_initialization;
mod std_instead_of_core;
mod strings;
mod strlen_on_c_strings;
+mod suspicious_doc_comments;
mod suspicious_operation_groupings;
mod suspicious_trait_impl;
mod suspicious_xor_used_as_pow;
@@ -283,6 +291,7 @@ mod swap;
mod swap_ptr_to_ref;
mod tabs_in_doc_comments;
mod temporary_assignment;
+mod tests_outside_test_module;
mod to_digit_is_some;
mod trailing_empty_array;
mod trait_bounds;
@@ -294,8 +303,10 @@ mod uninit_vec;
mod unit_return_expecting_ord;
mod unit_types;
mod unnamed_address;
+mod unnecessary_box_returns;
mod unnecessary_owned_empty_strings;
mod unnecessary_self_imports;
+mod unnecessary_struct_initialization;
mod unnecessary_wraps;
mod unnested_or_patterns;
mod unsafe_removed_from_name;
@@ -338,13 +349,17 @@ pub fn register_pre_expansion_lints(store: &mut rustc_lint::LintStore, sess: &Se
}
#[doc(hidden)]
-pub fn read_conf(sess: &Session, path: &io::Result<Option<PathBuf>>) -> Conf {
+pub fn read_conf(sess: &Session, path: &io::Result<(Option<PathBuf>, Vec<String>)>) -> Conf {
+ if let Ok((_, warnings)) = path {
+ for warning in warnings {
+ sess.warn(warning);
+ }
+ }
let file_name = match path {
- Ok(Some(path)) => path,
- Ok(None) => return Conf::default(),
+ Ok((Some(path), _)) => path,
+ Ok((None, _)) => return Conf::default(),
Err(error) => {
- sess.struct_err(format!("error finding Clippy's configuration file: {error}"))
- .emit();
+ sess.err(format!("error finding Clippy's configuration file: {error}"));
return Conf::default();
},
};
@@ -533,6 +548,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
.collect(),
))
});
+ store.register_early_pass(|| Box::new(utils::format_args_collector::FormatArgsCollector));
store.register_late_pass(|_| Box::new(utils::dump_hir::DumpHir));
store.register_late_pass(|_| Box::new(utils::author::Author));
let await_holding_invalid_types = conf.await_holding_invalid_types.clone();
@@ -651,7 +667,8 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(empty_enum::EmptyEnum));
store.register_late_pass(|_| Box::new(invalid_upcast_comparisons::InvalidUpcastComparisons));
store.register_late_pass(|_| Box::new(regex::Regex));
- store.register_late_pass(|_| Box::new(copies::CopyAndPaste));
+ let ignore_interior_mutability = conf.ignore_interior_mutability.clone();
+ store.register_late_pass(move |_| Box::new(copies::CopyAndPaste::new(ignore_interior_mutability.clone())));
store.register_late_pass(|_| Box::new(copy_iterator::CopyIterator));
store.register_late_pass(|_| Box::new(format::UselessFormat));
store.register_late_pass(|_| Box::new(swap::Swap));
@@ -738,7 +755,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_early_pass(|| Box::new(unused_unit::UnusedUnit));
store.register_late_pass(|_| Box::new(returns::Return));
store.register_early_pass(|| Box::new(collapsible_if::CollapsibleIf));
- store.register_early_pass(|| Box::new(items_after_statements::ItemsAfterStatements));
+ store.register_late_pass(|_| Box::new(items_after_statements::ItemsAfterStatements));
store.register_early_pass(|| Box::new(precedence::Precedence));
store.register_late_pass(|_| Box::new(needless_parens_on_range_literals::NeedlessParensOnRangeLiterals));
store.register_early_pass(|| Box::new(needless_continue::NeedlessContinue));
@@ -800,6 +817,8 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(move |_| Box::new(dereference::Dereferencing::new(msrv())));
store.register_late_pass(|_| Box::new(option_if_let_else::OptionIfLetElse));
store.register_late_pass(|_| Box::new(future_not_send::FutureNotSend));
+ let future_size_threshold = conf.future_size_threshold;
+ store.register_late_pass(move |_| Box::new(large_futures::LargeFuture::new(future_size_threshold)));
store.register_late_pass(|_| Box::new(if_let_mutex::IfLetMutex));
store.register_late_pass(|_| Box::new(if_not_else::IfNotElse));
store.register_late_pass(|_| Box::new(equatable_if_let::PatternEquality));
@@ -924,6 +943,22 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
))
});
store.register_late_pass(|_| Box::new(no_mangle_with_rust_abi::NoMangleWithRustAbi));
+ store.register_late_pass(|_| Box::new(collection_is_never_read::CollectionIsNeverRead));
+ store.register_late_pass(|_| Box::new(missing_assert_message::MissingAssertMessage));
+ store.register_late_pass(|_| Box::new(redundant_async_block::RedundantAsyncBlock));
+ store.register_late_pass(|_| Box::new(let_with_type_underscore::UnderscoreTyped));
+ store.register_late_pass(|_| Box::new(allow_attributes::AllowAttribute));
+ store.register_late_pass(move |_| Box::new(manual_main_separator_str::ManualMainSeparatorStr::new(msrv())));
+ store.register_late_pass(|_| Box::new(unnecessary_struct_initialization::UnnecessaryStruct));
+ store.register_late_pass(move |_| {
+ Box::new(unnecessary_box_returns::UnnecessaryBoxReturns::new(
+ avoid_breaking_exported_api,
+ ))
+ });
+ store.register_late_pass(|_| Box::new(lines_filter_map_ok::LinesFilterMapOk));
+ store.register_late_pass(|_| Box::new(tests_outside_test_module::TestsOutsideTestModule));
+ store.register_late_pass(|_| Box::new(manual_slice_size_calculation::ManualSliceSizeCalculation));
+ store.register_early_pass(|| Box::new(suspicious_doc_comments::SuspiciousDocComments));
// add lints here, do not remove this comment, it's used in `new_lint`
}
diff --git a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
new file mode 100644
index 000000000..b0f927647
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
@@ -0,0 +1,100 @@
+use clippy_utils::{
+ diagnostics::span_lint_and_then, is_diag_item_method, is_trait_method, match_def_path, path_to_local_id, paths,
+ ty::match_type,
+};
+use rustc_errors::Applicability;
+use rustc_hir::{Body, Closure, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detect uses of `lines.filter_map(Result::ok)` or `lines.flat_map(Result::ok)`
+ /// when `lines` has type `std::io::Lines`.
+ ///
+ /// ### Why is this bad?
+ /// `Lines` instances might produce a never-ending stream of `Err`, in which case
+ /// `filter_map(Result::ok)` will enter an infinite loop while waiting for an
+ /// `Ok` variant. Calling `next()` once is sufficient to enter the infinite loop,
+ /// even in the absence of explicit loops in the user code.
+ ///
+ /// This situation can arise when working with user-provided paths. On some platforms,
+ /// `std::fs::File::open(path)` might return `Ok(fs)` even when `path` is a directory,
+ /// but any later attempt to read from `fs` will return an error.
+ ///
+ /// ### Known problems
+ /// This lint suggests replacing `filter_map()` or `flat_map()` applied to a `Lines`
+ /// instance in all cases. There two cases where the suggestion might not be
+ /// instance in all cases. There are two cases where the suggestion might not be
+ ///
+ /// - If the `Lines` instance can never produce any error, or if an error is produced
+ /// only once just before terminating the iterator, using `map_while()` is not
+ /// necessary but will not do any harm.
+ /// - If the `Lines` instance can produce intermittent errors then recover and produce
+ /// successful results, using `map_while()` would stop at the first error.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::{fs::File, io::{self, BufRead, BufReader}};
+ /// # let _ = || -> io::Result<()> {
+ /// let mut lines = BufReader::new(File::open("some-path")?).lines().filter_map(Result::ok);
+ /// // If "some-path" points to a directory, the next statement never terminates:
+ /// let first_line: Option<String> = lines.next();
+ /// # Ok(()) };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::{fs::File, io::{self, BufRead, BufReader}};
+ /// # let _ = || -> io::Result<()> {
+ /// let mut lines = BufReader::new(File::open("some-path")?).lines().map_while(Result::ok);
+ /// let first_line: Option<String> = lines.next();
+ /// # Ok(()) };
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub LINES_FILTER_MAP_OK,
+ suspicious,
+ "filtering `std::io::Lines` with `filter_map()` or `flat_map()` might cause an infinite loop"
+}
+declare_lint_pass!(LinesFilterMapOk => [LINES_FILTER_MAP_OK]);
+
+impl LateLintPass<'_> for LinesFilterMapOk {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if let ExprKind::MethodCall(fm_method, fm_receiver, [fm_arg], fm_span) = expr.kind &&
+ is_trait_method(cx, expr, sym::Iterator) &&
+ (fm_method.ident.as_str() == "filter_map" || fm_method.ident.as_str() == "flat_map") &&
+ match_type(cx, cx.typeck_results().expr_ty_adjusted(fm_receiver), &paths::STD_IO_LINES)
+ {
+ let lint = match &fm_arg.kind {
+ // Detect `Result::ok`
+ ExprKind::Path(qpath) =>
+ cx.qpath_res(qpath, fm_arg.hir_id).opt_def_id().map(|did|
+ match_def_path(cx, did, &paths::CORE_RESULT_OK_METHOD)).unwrap_or_default(),
+ // Detect `|x| x.ok()`
+ ExprKind::Closure(Closure { body, .. }) =>
+ if let Body { params: [param], value, .. } = cx.tcx.hir().body(*body) &&
+ let ExprKind::MethodCall(method, receiver, [], _) = value.kind &&
+ path_to_local_id(receiver, param.pat.hir_id) &&
+ let Some(method_did) = cx.typeck_results().type_dependent_def_id(value.hir_id)
+ {
+ is_diag_item_method(cx, method_did, sym::Result) && method.ident.as_str() == "ok"
+ } else {
+ false
+ }
+ _ => false,
+ };
+ if lint {
+ span_lint_and_then(cx,
+ LINES_FILTER_MAP_OK,
+ fm_span,
+ &format!("`{}()` will run forever if the iterator repeatedly produces an `Err`", fm_method.ident),
+ |diag| {
+ diag.span_note(
+ fm_receiver.span,
+ "this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error");
+ diag.span_suggestion(fm_span, "replace with", "map_while(Result::ok)", Applicability::MaybeIncorrect);
+ });
+ }
+ }
+ }
+}
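Besides the `Result::ok` path shown in the doc comment, the `ExprKind::Closure` arm above means the equivalent closure spelling is caught as well. A small illustration (the path is a placeholder):

```rust
use std::fs::File;
use std::io::{self, BufRead, BufReader};

fn first_line() -> io::Result<Option<String>> {
    let reader = BufReader::new(File::open("some-path")?);
    // Also reported: `|line| line.ok()` is recognised just like `Result::ok`,
    // and gets the same `map_while(Result::ok)` suggestion.
    let mut lines = reader.lines().filter_map(|line| line.ok());
    Ok(lines.next())
}
```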
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
index 8c27c0940..1e02a30e3 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
@@ -9,7 +9,7 @@ use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{Expr, Pat, PatKind};
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, DefIdTree};
+use rustc_middle::ty;
use rustc_span::source_map::Span;
/// Check for unnecessary `if let` usage in a for loop where only the `Some` or `Ok` variant of the
diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
index b1bc10802..f0a1b1dfe 100644
--- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
@@ -124,8 +124,7 @@ fn stmt_to_expr<'tcx>(stmt: &Stmt<'tcx>) -> Option<(&'tcx Expr<'tcx>, Option<&'t
#[allow(clippy::too_many_lines)]
fn never_loop_expr(expr: &Expr<'_>, ignore_ids: &mut Vec<HirId>, main_loop_id: HirId) -> NeverLoopResult {
match expr.kind {
- ExprKind::Box(e)
- | ExprKind::Unary(_, e)
+ ExprKind::Unary(_, e)
| ExprKind::Cast(e, _)
| ExprKind::Type(e, _)
| ExprKind::Field(e, _)
diff --git a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
index 540656a2c..9d9341559 100644
--- a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
@@ -1,15 +1,17 @@
use super::SAME_ITEM_PUSH;
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::path_to_local;
-use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, Mutability, Node, Pat, PatKind, Stmt, StmtKind};
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
+use rustc_span::SyntaxContext;
use std::iter::Iterator;
/// Detects for loop pushing the same item into a Vec
@@ -20,9 +22,10 @@ pub(super) fn check<'tcx>(
body: &'tcx Expr<'_>,
_: &'tcx Expr<'_>,
) {
- fn emit_lint(cx: &LateContext<'_>, vec: &Expr<'_>, pushed_item: &Expr<'_>) {
- let vec_str = snippet_with_macro_callsite(cx, vec.span, "");
- let item_str = snippet_with_macro_callsite(cx, pushed_item.span, "");
+ fn emit_lint(cx: &LateContext<'_>, vec: &Expr<'_>, pushed_item: &Expr<'_>, ctxt: SyntaxContext) {
+ let mut app = Applicability::Unspecified;
+ let vec_str = snippet_with_context(cx, vec.span, ctxt, "", &mut app).0;
+ let item_str = snippet_with_context(cx, pushed_item.span, ctxt, "", &mut app).0;
span_lint_and_help(
cx,
@@ -43,7 +46,7 @@ pub(super) fn check<'tcx>(
walk_expr(&mut same_item_push_visitor, body);
if_chain! {
if same_item_push_visitor.should_lint();
- if let Some((vec, pushed_item)) = same_item_push_visitor.vec_push;
+ if let Some((vec, pushed_item, ctxt)) = same_item_push_visitor.vec_push;
let vec_ty = cx.typeck_results().expr_ty(vec);
let ty = vec_ty.walk().nth(1).unwrap().expect_ty();
if cx
@@ -69,11 +72,11 @@ pub(super) fn check<'tcx>(
then {
match init.kind {
// immutable bindings that are initialized with literal
- ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item),
+ ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item, ctxt),
// immutable bindings that are initialized with constant
ExprKind::Path(ref path) => {
if let Res::Def(DefKind::Const, ..) = cx.qpath_res(path, init.hir_id) {
- emit_lint(cx, vec, pushed_item);
+ emit_lint(cx, vec, pushed_item, ctxt);
}
}
_ => {},
@@ -82,11 +85,11 @@ pub(super) fn check<'tcx>(
}
},
// constant
- Res::Def(DefKind::Const, ..) => emit_lint(cx, vec, pushed_item),
+ Res::Def(DefKind::Const, ..) => emit_lint(cx, vec, pushed_item, ctxt),
_ => {},
}
},
- ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item),
+ ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item, ctxt),
_ => {},
}
}
@@ -98,7 +101,7 @@ struct SameItemPushVisitor<'a, 'tcx> {
non_deterministic_expr: bool,
multiple_pushes: bool,
// this field holds the last vec push operation visited, which should be the only push seen
- vec_push: Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)>,
+ vec_push: Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>, SyntaxContext)>,
cx: &'a LateContext<'tcx>,
used_locals: FxHashSet<HirId>,
}
@@ -118,7 +121,7 @@ impl<'a, 'tcx> SameItemPushVisitor<'a, 'tcx> {
if_chain! {
if !self.non_deterministic_expr;
if !self.multiple_pushes;
- if let Some((vec, _)) = self.vec_push;
+ if let Some((vec, _, _)) = self.vec_push;
if let Some(hir_id) = path_to_local(vec);
then {
!self.used_locals.contains(&hir_id)
@@ -173,7 +176,10 @@ impl<'a, 'tcx> Visitor<'tcx> for SameItemPushVisitor<'a, 'tcx> {
// Given some statement, determine if that statement is a push on a Vec. If it is, return
// the Vec being pushed into and the item being pushed
-fn get_vec_push<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)> {
+fn get_vec_push<'tcx>(
+ cx: &LateContext<'tcx>,
+ stmt: &'tcx Stmt<'_>,
+) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>, SyntaxContext)> {
if_chain! {
// Extract method being called
if let StmtKind::Semi(semi_stmt) = &stmt.kind;
@@ -184,7 +190,7 @@ fn get_vec_push<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) -> Option<(&
if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr), sym::Vec);
if path.ident.name.as_str() == "push";
then {
- return Some((self_expr, pushed_item))
+ return Some((self_expr, pushed_item, semi_stmt.span.ctxt()))
}
}
None
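For orientation, `same_item_push` fires when a loop pushes the same literal or constant on every iteration; the change above only threads the statement's `SyntaxContext` through so the snippets are rendered macro-aware. A minimal example of the pattern:

```rust
fn build_before() -> Vec<u8> {
    let mut buf = Vec::new();
    for _ in 0..16 {
        // Reported: the pushed item never changes between iterations.
        buf.push(0u8);
    }
    buf
}

fn build_after() -> Vec<u8> {
    // The lint's help points towards an up-front construction like this.
    vec![0u8; 16]
}
```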
diff --git a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
index 3778eb4c7..577bc1d66 100644
--- a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
@@ -1,12 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::match_function_call_with_def_id;
use clippy_utils::source::{position_before_rarrow, snippet_block, snippet_opt};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{
AsyncGeneratorKind, Block, Body, Closure, Expr, ExprKind, FnDecl, FnRetTy, GeneratorKind, GenericArg, GenericBound,
- ItemKind, LifetimeName, Term, TraitRef, Ty, TyKind, TypeBindingKind,
+ ImplItem, Item, ItemKind, LifetimeName, Node, Term, TraitRef, Ty, TyKind, TypeBindingKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
@@ -46,7 +45,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
decl: &'tcx FnDecl<'_>,
body: &'tcx Body<'_>,
span: Span,
- _: LocalDefId,
+ def_id: LocalDefId,
) {
if_chain! {
if let Some(header) = kind.header();
@@ -60,6 +59,8 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
if let ExprKind::Block(block, _) = body.value.kind;
if block.stmts.is_empty();
if let Some(closure_body) = desugared_async_block(cx, block);
+ if let Node::Item(Item {vis_span, ..}) | Node::ImplItem(ImplItem {vis_span, ..}) =
+ cx.tcx.hir().get_by_def_id(def_id);
then {
let header_span = span.with_hi(ret_ty.span.hi());
@@ -70,15 +71,22 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
"this function can be simplified using the `async fn` syntax",
|diag| {
if_chain! {
+ if let Some(vis_snip) = snippet_opt(cx, *vis_span);
if let Some(header_snip) = snippet_opt(cx, header_span);
if let Some(ret_pos) = position_before_rarrow(&header_snip);
if let Some((ret_sugg, ret_snip)) = suggested_ret(cx, output);
then {
+ let header_snip = if vis_snip.is_empty() {
+ format!("async {}", &header_snip[..ret_pos])
+ } else {
+ format!("{} async {}", vis_snip, &header_snip[vis_snip.len() + 1..ret_pos])
+ };
+
let help = format!("make the function `async` and {ret_sugg}");
diag.span_suggestion(
header_span,
help,
- format!("async {}{ret_snip}", &header_snip[..ret_pos]),
+ format!("{header_snip}{ret_snip}"),
Applicability::MachineApplicable
);
@@ -175,16 +183,10 @@ fn captures_all_lifetimes(inputs: &[Ty<'_>], output_lifetimes: &[LifetimeName])
fn desugared_async_block<'tcx>(cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) -> Option<&'tcx Body<'tcx>> {
if_chain! {
if let Some(block_expr) = block.expr;
- if let Some(args) = cx
- .tcx
- .lang_items()
- .identity_future_fn()
- .and_then(|def_id| match_function_call_with_def_id(cx, block_expr, def_id));
- if args.len() == 1;
if let Expr {
kind: ExprKind::Closure(&Closure { body, .. }),
..
- } = args[0];
+ } = block_expr;
let closure_body = cx.tcx.hir().body(body);
if closure_body.generator_kind == Some(GeneratorKind::Async(AsyncGeneratorKind::Block));
then {
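The `vis_span` handling added above exists so the suggestion keeps the visibility in front of the inserted `async`. A hedged before/after sketch:

```rust
use std::future::Future;

// The shape `manual_async_fn` reports: a plain `fn` returning `impl Future`
// whose body is just an `async` block.
pub fn fetch_before() -> impl Future<Output = i32> {
    async { 42 }
}

// With the visibility snippet spliced in, the suggestion now reads
// `pub async fn ...` instead of dropping or misplacing the `pub`.
pub async fn fetch_after() -> i32 {
    42
}
```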
diff --git a/src/tools/clippy/clippy_lints/src/manual_bits.rs b/src/tools/clippy/clippy_lints/src/manual_bits.rs
index 462d73cf0..bc815dc4a 100644
--- a/src/tools/clippy/clippy_lints/src/manual_bits.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_bits.rs
@@ -1,11 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::get_parent_expr;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, GenericArg, QPath};
-use rustc_lint::{LateContext, LateLintPass};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
@@ -55,13 +56,17 @@ impl<'tcx> LateLintPass<'tcx> for ManualBits {
if_chain! {
if let ExprKind::Binary(bin_op, left_expr, right_expr) = expr.kind;
if let BinOpKind::Mul = &bin_op.node;
+ if !in_external_macro(cx.sess(), expr.span);
+ let ctxt = expr.span.ctxt();
+ if left_expr.span.ctxt() == ctxt;
+ if right_expr.span.ctxt() == ctxt;
if let Some((real_ty, resolved_ty, other_expr)) = get_one_size_of_ty(cx, left_expr, right_expr);
if matches!(resolved_ty.kind(), ty::Int(_) | ty::Uint(_));
if let ExprKind::Lit(lit) = &other_expr.kind;
if let LitKind::Int(8, _) = lit.node;
then {
let mut app = Applicability::MachineApplicable;
- let ty_snip = snippet_with_applicability(cx, real_ty.span, "..", &mut app);
+ let ty_snip = snippet_with_context(cx, real_ty.span, ctxt, "..", &mut app).0;
let sugg = create_sugg(cx, expr, format!("{ty_snip}::BITS"));
span_lint_and_sugg(
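`manual_bits` rewrites `size_of::<T>() * 8` into `T::BITS`; the new `ctxt` comparisons simply keep the lint and its snippet out of macro expansions. An illustrative rewrite (the exact suggestion may add a cast depending on the surrounding expression):

```rust
fn bits_before() -> usize {
    // The pattern matched: a `size_of` call multiplied by the literal 8.
    std::mem::size_of::<u32>() * 8
}

fn bits_after() -> u32 {
    // Replacement built from the type snippet.
    u32::BITS
}
```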
diff --git a/src/tools/clippy/clippy_lints/src/manual_clamp.rs b/src/tools/clippy/clippy_lints/src/manual_clamp.rs
index f239736d3..440362b96 100644
--- a/src/tools/clippy/clippy_lints/src/manual_clamp.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_clamp.rs
@@ -6,7 +6,8 @@ use clippy_utils::ty::implements_trait;
use clippy_utils::visitors::is_const_evaluatable;
use clippy_utils::MaybePath;
use clippy_utils::{
- eq_expr_value, is_diag_trait_item, is_trait_method, path_res, path_to_local_id, peel_blocks, peel_blocks_with_stmt,
+ eq_expr_value, in_constant, is_diag_trait_item, is_trait_method, path_res, path_to_local_id, peel_blocks,
+ peel_blocks_with_stmt,
};
use itertools::Itertools;
use rustc_errors::Applicability;
@@ -117,7 +118,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualClamp {
if !self.msrv.meets(msrvs::CLAMP) {
return;
}
- if !expr.span.from_expansion() {
+ if !expr.span.from_expansion() && !in_constant(cx, expr.hir_id) {
let suggestion = is_if_elseif_else_pattern(cx, expr)
.or_else(|| is_max_min_pattern(cx, expr))
.or_else(|| is_call_max_min_pattern(cx, expr))
@@ -130,7 +131,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualClamp {
}
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) {
- if !self.msrv.meets(msrvs::CLAMP) {
+ if !self.msrv.meets(msrvs::CLAMP) || in_constant(cx, block.hir_id) {
return;
}
for suggestion in is_two_if_pattern(cx, block) {
diff --git a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
index 2fd32c009..31264261f 100644
--- a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
@@ -1,5 +1,5 @@
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::{diagnostics::span_lint_and_sugg, higher, in_constant, macros::root_macro_call, source::snippet};
+use clippy_utils::{diagnostics::span_lint_and_sugg, higher, in_constant, macros::root_macro_call, sugg::Sugg};
use rustc_ast::ast::RangeLimits;
use rustc_ast::LitKind::{Byte, Char};
use rustc_errors::Applicability;
@@ -115,15 +115,8 @@ fn check_is_ascii(cx: &LateContext<'_>, span: Span, recv: &Expr<'_>, range: &Cha
CharRange::Otherwise => None,
} {
let default_snip = "..";
- // `snippet_with_applicability` may set applicability to `MaybeIncorrect` for
- // macro span, so we check applicability manually by comparing `recv` is not default.
- let recv = snippet(cx, recv.span, default_snip);
-
- let applicability = if recv == default_snip {
- Applicability::HasPlaceholders
- } else {
- Applicability::MachineApplicable
- };
+ let mut app = Applicability::MachineApplicable;
+ let recv = Sugg::hir_with_context(cx, recv, span.ctxt(), default_snip, &mut app).maybe_par();
span_lint_and_sugg(
cx,
@@ -132,7 +125,7 @@ fn check_is_ascii(cx: &LateContext<'_>, span: Span, recv: &Expr<'_>, range: &Cha
"manual check for common ascii range",
"try",
format!("{recv}.{sugg}()"),
- applicability,
+ app,
);
}
}
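Switching to `Sugg::hir_with_context(..).maybe_par()` makes the receiver snippet macro-aware, so the hand-rolled applicability downgrade is no longer needed. For reference, the kind of rewrite this lint performs:

```rust
fn check_before(c: char) -> bool {
    // Manual ASCII range check the lint detects.
    matches!(c, 'a'..='z')
}

fn check_after(c: char) -> bool {
    // The suggested method call.
    c.is_ascii_lowercase()
}
```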
diff --git a/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs b/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs
new file mode 100644
index 000000000..c292bbe4e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_main_separator_str.rs
@@ -0,0 +1,74 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::msrvs::{self, Msrv};
+use clippy_utils::{is_trait_method, match_def_path, paths, peel_hir_expr_refs};
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Expr, ExprKind, Mutability, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for references on `std::path::MAIN_SEPARATOR.to_string()` used
+ /// to build a `&str`.
+ ///
+ /// ### Why is this bad?
+ /// There exists a `std::path::MAIN_SEPARATOR_STR` which does not require
+ /// an extra memory allocation.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let s: &str = &std::path::MAIN_SEPARATOR.to_string();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let s: &str = std::path::MAIN_SEPARATOR_STR;
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub MANUAL_MAIN_SEPARATOR_STR,
+ complexity,
+ "`&std::path::MAIN_SEPARATOR.to_string()` can be replaced by `std::path::MAIN_SEPARATOR_STR`"
+}
+
+pub struct ManualMainSeparatorStr {
+ msrv: Msrv,
+}
+
+impl ManualMainSeparatorStr {
+ #[must_use]
+ pub fn new(msrv: Msrv) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(ManualMainSeparatorStr => [MANUAL_MAIN_SEPARATOR_STR]);
+
+impl LateLintPass<'_> for ManualMainSeparatorStr {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if self.msrv.meets(msrvs::PATH_MAIN_SEPARATOR_STR) &&
+ let (target, _) = peel_hir_expr_refs(expr) &&
+ is_trait_method(cx, target, sym::ToString) &&
+ let ExprKind::MethodCall(path, receiver, &[], _) = target.kind &&
+ path.ident.name == sym::to_string &&
+ let ExprKind::Path(QPath::Resolved(None, path)) = receiver.kind &&
+ let Res::Def(DefKind::Const, receiver_def_id) = path.res &&
+ match_def_path(cx, receiver_def_id, &paths::PATH_MAIN_SEPARATOR) &&
+ let ty::Ref(_, ty, Mutability::Not) = cx.typeck_results().expr_ty_adjusted(expr).kind() &&
+ ty.is_str()
+ {
+ span_lint_and_sugg(
+ cx,
+ MANUAL_MAIN_SEPARATOR_STR,
+ expr.span,
+ "taking a reference on `std::path::MAIN_SEPARATOR` conversion to `String`",
+ "replace with",
+ "std::path::MAIN_SEPARATOR_STR".to_owned(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
index 9a84068d4..0e22485db 100644
--- a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
@@ -8,7 +8,6 @@ use rustc_errors::Applicability;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::{self as hir, Expr, ExprKind, QPath};
use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
-use rustc_middle::ty::DefIdTree;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::{sym, Span};
diff --git a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
index 38f41d077..aafee9271 100644
--- a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
@@ -1,7 +1,7 @@
use clippy_utils::consts::{constant_full_int, FullInt};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::{in_constant, path_to_local};
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, Node, TyKind};
@@ -60,12 +60,16 @@ impl<'tcx> LateLintPass<'tcx> for ManualRemEuclid {
return;
}
+ // (x % c + c) % c
if let ExprKind::Binary(op1, expr1, right) = expr.kind
&& op1.node == BinOpKind::Rem
+ && let ctxt = expr.span.ctxt()
+ && expr1.span.ctxt() == ctxt
&& let Some(const1) = check_for_unsigned_int_constant(cx, right)
&& let ExprKind::Binary(op2, left, right) = expr1.kind
&& op2.node == BinOpKind::Add
&& let Some((const2, expr2)) = check_for_either_unsigned_int_constant(cx, left, right)
+ && expr2.span.ctxt() == ctxt
&& let ExprKind::Binary(op3, expr3, right) = expr2.kind
&& op3.node == BinOpKind::Rem
&& let Some(const3) = check_for_unsigned_int_constant(cx, right)
@@ -86,7 +90,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualRemEuclid {
};
let mut app = Applicability::MachineApplicable;
- let rem_of = snippet_with_applicability(cx, expr3.span, "_", &mut app);
+ let rem_of = snippet_with_context(cx, expr3.span, ctxt, "_", &mut app).0;
span_lint_and_sugg(
cx,
MANUAL_REM_EUCLID,
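The `// (x % c + c) % c` comment added above names the exact shape this lint folds into `rem_euclid`, and the new `ctxt` checks keep it from firing across macro boundaries. A hedged before/after:

```rust
fn wrap_before(x: i32) -> i32 {
    // The `(x % c + c) % c` idiom for a non-negative remainder.
    ((x % 4) + 4) % 4
}

fn wrap_after(x: i32) -> i32 {
    // Equivalent, and what the lint suggests.
    x.rem_euclid(4)
}
```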
diff --git a/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs b/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
new file mode 100644
index 000000000..92ee79453
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
@@ -0,0 +1,93 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{expr_or_init, in_constant};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// When `a` is `&[T]`, detect `a.len() * size_of::<T>()` and suggest `size_of_val(a)`
+ /// instead.
+ ///
+ /// ### Why is this better?
+ /// * Shorter to write
+ /// * Removes the need for the human and the compiler to worry about overflow in the
+ /// multiplication
+ /// * Potentially faster at runtime, as Rust emits special no-wrapping flags when it
+ /// calculates the byte length
+ /// * Less turbofishing
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let data : &[i32] = &[1, 2, 3];
+ /// let newlen = data.len() * std::mem::size_of::<i32>();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let data : &[i32] = &[1, 2, 3];
+ /// let newlen = std::mem::size_of_val(data);
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub MANUAL_SLICE_SIZE_CALCULATION,
+ complexity,
+ "manual slice size calculation"
+}
+declare_lint_pass!(ManualSliceSizeCalculation => [MANUAL_SLICE_SIZE_CALCULATION]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualSliceSizeCalculation {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ // Does not apply inside const contexts because `size_of_val` is not const in stable.
+ if !in_constant(cx, expr.hir_id)
+ && let ExprKind::Binary(ref op, left, right) = expr.kind
+ && BinOpKind::Mul == op.node
+ && let Some(_receiver) = simplify(cx, left, right)
+ {
+ span_lint_and_help(
+ cx,
+ MANUAL_SLICE_SIZE_CALCULATION,
+ expr.span,
+ "manual slice size calculation",
+ None,
+                "consider using std::mem::size_of_val instead");
+ }
+ }
+}
+
+fn simplify<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr1: &'tcx Expr<'tcx>,
+ expr2: &'tcx Expr<'tcx>,
+) -> Option<&'tcx Expr<'tcx>> {
+ let expr1 = expr_or_init(cx, expr1);
+ let expr2 = expr_or_init(cx, expr2);
+
+ simplify_half(cx, expr1, expr2).or_else(|| simplify_half(cx, expr2, expr1))
+}
+
+fn simplify_half<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr1: &'tcx Expr<'tcx>,
+ expr2: &'tcx Expr<'tcx>,
+) -> Option<&'tcx Expr<'tcx>> {
+ if
+ // expr1 is `[T1].len()`?
+ let ExprKind::MethodCall(method_path, receiver, _, _) = expr1.kind
+ && method_path.ident.name == sym::len
+ && let receiver_ty = cx.typeck_results().expr_ty(receiver)
+ && let ty::Slice(ty1) = receiver_ty.peel_refs().kind()
+ // expr2 is `size_of::<T2>()`?
+ && let ExprKind::Call(func, _) = expr2.kind
+ && let ExprKind::Path(ref func_qpath) = func.kind
+ && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
+ && cx.tcx.is_diagnostic_item(sym::mem_size_of, def_id)
+ && let Some(ty2) = cx.typeck_results().node_substs(func.hir_id).types().next()
+ // T1 == T2?
+ && *ty1 == ty2
+ {
+ Some(receiver)
+ } else {
+ None
+ }
+}
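
A small self-contained illustration (assumed, not taken from the lint's test suite) of what the new `manual_slice_size_calculation` lint flags and what it suggests instead.

```rust
use std::mem::{size_of, size_of_val};

fn main() {
    let data: &[i32] = &[1, 2, 3];

    // Flagged: manual byte-length calculation.
    let manual = data.len() * size_of::<i32>();
    // Suggested: let the standard library do it.
    let suggested = size_of_val(data);

    assert_eq!(manual, suggested); // 12 bytes
}
```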
diff --git a/src/tools/clippy/clippy_lints/src/match_result_ok.rs b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
index a020282d2..6ec978403 100644
--- a/src/tools/clippy/clippy_lints/src/match_result_ok.rs
+++ b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
@@ -1,11 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::higher;
-use clippy_utils::method_chain_args;
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::is_res_lang_ctor;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::is_type_diagnostic_item;
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{Expr, ExprKind, PatKind, QPath};
+use rustc_hir::{Expr, ExprKind, LangItem, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
@@ -58,17 +58,18 @@ impl<'tcx> LateLintPass<'tcx> for MatchResultOk {
};
if_chain! {
- if let ExprKind::MethodCall(ok_path, result_types_0, ..) = let_expr.kind; //check is expr.ok() has type Result<T,E>.ok(, _)
- if let PatKind::TupleStruct(QPath::Resolved(_, x), y, _) = let_pat.kind; //get operation
- if method_chain_args(let_expr, &["ok"]).is_some(); //test to see if using ok() method use std::marker::Sized;
- if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(result_types_0), sym::Result);
- if rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_path(x, false)) == "Some";
-
+            if let ExprKind::MethodCall(ok_path, recv, [], ..) = let_expr.kind; // check that `.ok()` is called on an expression of type `Result<T, E>`
+            if let PatKind::TupleStruct(ref pat_path, [ok_pat], _) = let_pat.kind; // get the `Some(..)` pattern and its single binding
+ if ok_path.ident.as_str() == "ok";
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+ if is_res_lang_ctor(cx, cx.qpath_res(pat_path, let_pat.hir_id), LangItem::OptionSome);
+ let ctxt = expr.span.ctxt();
+ if let_expr.span.ctxt() == ctxt;
+ if let_pat.span.ctxt() == ctxt;
then {
-
let mut applicability = Applicability::MachineApplicable;
- let some_expr_string = snippet_with_applicability(cx, y[0].span, "", &mut applicability);
- let trimmed_ok = snippet_with_applicability(cx, let_expr.span.until(ok_path.ident.span), "", &mut applicability);
+ let some_expr_string = snippet_with_context(cx, ok_pat.span, ctxt, "", &mut applicability).0;
+ let trimmed_ok = snippet_with_context(cx, recv.span, ctxt, "", &mut applicability).0;
let sugg = format!(
"{ifwhile} let Ok({some_expr_string}) = {}",
trimmed_ok.trim().trim_end_matches('.'),
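
For orientation, a hedged sketch of the pattern `match_result_ok` reports; the rewrite above now builds the suggestion from the receiver span (`recv.span`) instead of re-parsing the pretty-printed path.

```rust
fn parse(s: &str) -> Result<i32, std::num::ParseIntError> {
    s.parse()
}

fn main() {
    // Flagged: discarding the error through `.ok()` just to pattern-match.
    if let Some(n) = parse("42").ok() {
        println!("{n}");
    }
    // Suggested:
    if let Ok(n) = parse("42") {
        println!("{n}");
    }
}
```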
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
index 587c926dc..b94501bf0 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
@@ -10,7 +10,6 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::LangItem::{OptionNone, ResultErr};
use rustc_hir::{Arm, Expr, PatKind};
use rustc_lint::LateContext;
-use rustc_middle::ty::DefIdTree;
use rustc_span::sym;
use super::MANUAL_UNWRAP_OR;
@@ -33,14 +32,8 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, scrutinee:
let reindented_or_body =
reindent_multiline(or_body_snippet.into(), true, Some(indent));
- let suggestion = if scrutinee.span.from_expansion() {
- // we don't want parentheses around macro, e.g. `(some_macro!()).unwrap_or(0)`
- sugg::Sugg::hir_with_macro_callsite(cx, scrutinee, "..")
- }
- else {
- sugg::Sugg::hir(cx, scrutinee, "..").maybe_par()
- };
-
+ let mut app = Applicability::MachineApplicable;
+ let suggestion = sugg::Sugg::hir_with_context(cx, scrutinee, expr.span.ctxt(), "..", &mut app).maybe_par();
span_lint_and_sugg(
cx,
MANUAL_UNWRAP_OR, expr.span,
@@ -49,7 +42,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, scrutinee:
format!(
"{suggestion}.unwrap_or({reindented_or_body})",
),
- Applicability::MachineApplicable,
+ app,
);
}
}
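
An illustrative (assumed) example of the `manual_unwrap_or` pattern; with `Sugg::hir_with_context` the suggestion stays valid when the scrutinee comes from a macro.

```rust
fn main() {
    let value: Option<i32> = Some(5);

    // Flagged by MANUAL_UNWRAP_OR:
    let a = match value {
        Some(v) => v,
        None => 0,
    };
    // Suggested:
    let b = value.unwrap_or(0);

    assert_eq!(a, b);
}
```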
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_bool.rs b/src/tools/clippy/clippy_lints/src/matches/match_bool.rs
index 1c216e135..df1e585f1 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_bool.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_bool.rs
@@ -10,9 +10,9 @@ use rustc_middle::ty;
use super::MATCH_BOOL;
-pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
+pub(crate) fn check(cx: &LateContext<'_>, scrutinee: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
// Type of expression is `bool`.
- if *cx.typeck_results().expr_ty(ex).kind() == ty::Bool {
+ if *cx.typeck_results().expr_ty(scrutinee).kind() == ty::Bool {
span_lint_and_then(
cx,
MATCH_BOOL,
@@ -36,24 +36,26 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr:
};
if let Some((true_expr, false_expr)) = exprs {
+ let mut app = Applicability::HasPlaceholders;
+ let ctxt = expr.span.ctxt();
let sugg = match (is_unit_expr(true_expr), is_unit_expr(false_expr)) {
(false, false) => Some(format!(
"if {} {} else {}",
- snippet(cx, ex.span, "b"),
- expr_block(cx, true_expr, None, "..", Some(expr.span)),
- expr_block(cx, false_expr, None, "..", Some(expr.span))
+ snippet(cx, scrutinee.span, "b"),
+ expr_block(cx, true_expr, ctxt, "..", Some(expr.span), &mut app),
+ expr_block(cx, false_expr, ctxt, "..", Some(expr.span), &mut app)
)),
(false, true) => Some(format!(
"if {} {}",
- snippet(cx, ex.span, "b"),
- expr_block(cx, true_expr, None, "..", Some(expr.span))
+ snippet(cx, scrutinee.span, "b"),
+ expr_block(cx, true_expr, ctxt, "..", Some(expr.span), &mut app)
)),
(true, false) => {
- let test = Sugg::hir(cx, ex, "..");
+ let test = Sugg::hir(cx, scrutinee, "..");
Some(format!(
"if {} {}",
!test,
- expr_block(cx, false_expr, None, "..", Some(expr.span))
+ expr_block(cx, false_expr, ctxt, "..", Some(expr.span), &mut app)
))
},
(true, true) => None,
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs b/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs
index 80f964ba1..aba4c85c5 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs
@@ -1,13 +1,14 @@
use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
-use clippy_utils::source::snippet;
+use clippy_utils::source::{snippet, walk_span_to_context};
use clippy_utils::sugg::Sugg;
use core::iter::once;
+use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Pat, PatKind};
use rustc_lint::LateContext;
use super::MATCH_REF_PATS;
-pub(crate) fn check<'a, 'b, I>(cx: &LateContext<'_>, ex: &Expr<'_>, pats: I, expr: &Expr<'_>)
+pub(crate) fn check<'a, 'b, I>(cx: &LateContext<'_>, scrutinee: &Expr<'_>, pats: I, expr: &Expr<'_>)
where
'b: 'a,
I: Clone + Iterator<Item = &'a Pat<'b>>,
@@ -17,13 +18,28 @@ where
}
let (first_sugg, msg, title);
- let span = ex.span.source_callsite();
- if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner) = ex.kind {
- first_sugg = once((span, Sugg::hir_with_macro_callsite(cx, inner, "..").to_string()));
+ let ctxt = expr.span.ctxt();
+ let mut app = Applicability::Unspecified;
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner) = scrutinee.kind {
+ if scrutinee.span.ctxt() != ctxt {
+ return;
+ }
+ first_sugg = once((
+ scrutinee.span,
+ Sugg::hir_with_context(cx, inner, ctxt, "..", &mut app).to_string(),
+ ));
msg = "try";
title = "you don't need to add `&` to both the expression and the patterns";
} else {
- first_sugg = once((span, Sugg::hir_with_macro_callsite(cx, ex, "..").deref().to_string()));
+ let Some(span) = walk_span_to_context(scrutinee.span, ctxt) else {
+ return;
+ };
+ first_sugg = once((
+ span,
+ Sugg::hir_with_context(cx, scrutinee, ctxt, "..", &mut app)
+ .deref()
+ .to_string(),
+ ));
msg = "instead of prefixing all patterns with `&`, you can dereference the expression";
title = "you don't need to add `&` to all patterns";
}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs b/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs
index 065a5c726..89da7a55c 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::macros::HirNode;
-use clippy_utils::source::{indent_of, snippet, snippet_block, snippet_with_applicability};
-use clippy_utils::sugg::Sugg;
+use clippy_utils::source::{indent_of, snippet, snippet_block_with_context, snippet_with_applicability};
use clippy_utils::{get_parent_expr, is_refutable, peel_blocks};
use rustc_errors::Applicability;
-use rustc_hir::{Arm, Expr, ExprKind, Node, PatKind};
+use rustc_hir::{Arm, Expr, ExprKind, Node, PatKind, StmtKind};
use rustc_lint::LateContext;
use rustc_span::Span;
@@ -24,21 +23,30 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
let matched_vars = ex.span;
let bind_names = arms[0].pat.span;
let match_body = peel_blocks(arms[0].body);
- let mut snippet_body = if match_body.span.from_expansion() {
- Sugg::hir_with_macro_callsite(cx, match_body, "..").to_string()
- } else {
- snippet_block(cx, match_body.span, "..", Some(expr.span)).to_string()
- };
+ let mut app = Applicability::MaybeIncorrect;
+ let mut snippet_body = snippet_block_with_context(
+ cx,
+ match_body.span,
+ arms[0].span.ctxt(),
+ "..",
+ Some(expr.span),
+ &mut app,
+ )
+ .0
+ .to_string();
// Do we need to add ';' to suggestion ?
- if let ExprKind::Block(block, _) = match_body.kind {
- // macro + expr_ty(body) == ()
- if block.span.from_expansion() && cx.typeck_results().expr_ty(match_body).is_unit() {
- snippet_body.push(';');
+ if let Node::Stmt(stmt) = cx.tcx.hir().get_parent(expr.hir_id)
+ && let StmtKind::Expr(_) = stmt.kind
+ && match match_body.kind {
+ // We don't need to add a ; to blocks, unless that block is from a macro expansion
+ ExprKind::Block(block, _) => block.span.from_expansion(),
+ _ => true,
}
+ {
+ snippet_body.push(';');
}
- let mut applicability = Applicability::MaybeIncorrect;
match arms[0].pat.kind {
PatKind::Binding(..) | PatKind::Tuple(_, _) | PatKind::Struct(..) => {
let (target_span, sugg) = match opt_parent_assign_span(cx, ex) {
@@ -48,7 +56,7 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
(ex, expr),
(bind_names, matched_vars),
&snippet_body,
- &mut applicability,
+ &mut app,
Some(span),
true,
);
@@ -60,7 +68,7 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
"this assignment could be simplified",
"consider removing the `match` expression",
sugg,
- applicability,
+ app,
);
return;
@@ -69,10 +77,10 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
span,
format!(
"let {} = {};\n{}let {} = {snippet_body};",
- snippet_with_applicability(cx, bind_names, "..", &mut applicability),
- snippet_with_applicability(cx, matched_vars, "..", &mut applicability),
+ snippet_with_applicability(cx, bind_names, "..", &mut app),
+ snippet_with_applicability(cx, matched_vars, "..", &mut app),
" ".repeat(indent_of(cx, expr.span).unwrap_or(0)),
- snippet_with_applicability(cx, pat_span, "..", &mut applicability)
+ snippet_with_applicability(cx, pat_span, "..", &mut app)
),
),
None => {
@@ -81,7 +89,7 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
(ex, expr),
(bind_names, matched_vars),
&snippet_body,
- &mut applicability,
+ &mut app,
None,
true,
);
@@ -96,7 +104,7 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
"this match could be written as a `let` statement",
"consider using a `let` statement",
sugg,
- applicability,
+ app,
);
},
PatKind::Wild => {
@@ -106,7 +114,7 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
(ex, expr),
(bind_names, matched_vars),
&snippet_body,
- &mut applicability,
+ &mut app,
None,
false,
);
@@ -118,7 +126,7 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e
"this match could be replaced by its scrutinee and body",
"consider using the scrutinee and body instead",
sugg,
- applicability,
+ app,
);
} else {
span_lint_and_sugg(
diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs
index 7b15a307f..97ecca450 100644
--- a/src/tools/clippy/clippy_lints/src/matches/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs
@@ -925,7 +925,7 @@ declare_clippy_lint! {
#[clippy::version = "1.66.0"]
pub MANUAL_FILTER,
complexity,
- "reimplentation of `filter`"
+ "reimplementation of `filter`"
}
#[derive(Default)]
diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
index 81bebff34..7b609ff3d 100644
--- a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
@@ -1,6 +1,6 @@
use super::REDUNDANT_PATTERN_MATCHING;
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::source::snippet;
+use clippy_utils::source::{snippet, walk_span_to_context};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, needs_ordered_drop};
use clippy_utils::visitors::any_temporaries_need_ordered_drop;
@@ -12,7 +12,7 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::LangItem::{self, OptionNone, OptionSome, PollPending, PollReady, ResultErr, ResultOk};
use rustc_hir::{Arm, Expr, ExprKind, Node, Pat, PatKind, QPath, UnOp};
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, subst::GenericArgKind, DefIdTree, Ty};
+use rustc_middle::ty::{self, subst::GenericArgKind, Ty};
use rustc_span::{sym, Symbol};
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
@@ -150,22 +150,25 @@ fn find_sugg_for_if_let<'tcx>(
// if/while let ... = ... { ... }
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^
let expr_span = expr.span;
+ let ctxt = expr.span.ctxt();
// if/while let ... = ... { ... }
- // ^^^
- let op_span = result_expr.span.source_callsite();
+ // ^^^
+ let Some(res_span) = walk_span_to_context(result_expr.span.source_callsite(), ctxt) else {
+ return;
+ };
// if/while let ... = ... { ... }
- // ^^^^^^^^^^^^^^^^^^^
- let span = expr_span.until(op_span.shrink_to_hi());
+ // ^^^^^^^^^^^^^^^^^^^^^^
+ let span = expr_span.until(res_span.shrink_to_hi());
- let app = if needs_drop {
+ let mut app = if needs_drop {
Applicability::MaybeIncorrect
} else {
Applicability::MachineApplicable
};
- let sugg = Sugg::hir_with_macro_callsite(cx, result_expr, "_")
+ let sugg = Sugg::hir_with_context(cx, result_expr, ctxt, "_", &mut app)
.maybe_par()
.to_string();
diff --git a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
index b33a24781..04225beeb 100644
--- a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
@@ -321,7 +321,6 @@ impl<'a, 'tcx> Visitor<'tcx> for SigDropHelper<'a, 'tcx> {
self.has_significant_drop = true;
}
}
- ExprKind::Box(..) |
ExprKind::Array(..) |
ExprKind::Call(..) |
ExprKind::Unary(..) |
diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
index 19b49c44d..ad47c1389 100644
--- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
@@ -67,8 +67,10 @@ fn report_single_pattern(
els: Option<&Expr<'_>>,
) {
let lint = if els.is_some() { SINGLE_MATCH_ELSE } else { SINGLE_MATCH };
+ let ctxt = expr.span.ctxt();
+ let mut app = Applicability::HasPlaceholders;
let els_str = els.map_or(String::new(), |els| {
- format!(" else {}", expr_block(cx, els, None, "..", Some(expr.span)))
+ format!(" else {}", expr_block(cx, els, ctxt, "..", Some(expr.span), &mut app))
});
let (pat, pat_ref_count) = peel_hir_pat_refs(arms[0].pat);
@@ -103,7 +105,7 @@ fn report_single_pattern(
// PartialEq for different reference counts may not exist.
"&".repeat(ref_count_diff),
snippet(cx, arms[0].pat.span, ".."),
- expr_block(cx, arms[0].body, None, "..", Some(expr.span)),
+ expr_block(cx, arms[0].body, ctxt, "..", Some(expr.span), &mut app),
);
(msg, sugg)
} else {
@@ -112,21 +114,13 @@ fn report_single_pattern(
"if let {} = {} {}{els_str}",
snippet(cx, arms[0].pat.span, ".."),
snippet(cx, ex.span, ".."),
- expr_block(cx, arms[0].body, None, "..", Some(expr.span)),
+ expr_block(cx, arms[0].body, ctxt, "..", Some(expr.span), &mut app),
);
(msg, sugg)
}
};
- span_lint_and_sugg(
- cx,
- lint,
- expr.span,
- msg,
- "try this",
- sugg,
- Applicability::HasPlaceholders,
- );
+ span_lint_and_sugg(cx, lint, expr.span, msg, "try this", sugg, app);
}
fn check_opt_like<'a>(
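
A minimal sketch (not from the patch) of what `single_match` reports; the applicability is now threaded through `expr_block` instead of being hard-coded in the final `span_lint_and_sugg` call.

```rust
fn main() {
    let value: Option<i32> = Some(5);

    // Flagged by SINGLE_MATCH: only one interesting arm.
    match value {
        Some(v) => println!("{v}"),
        _ => {},
    }
    // Suggested:
    if let Some(v) = value {
        println!("{v}");
    }
}
```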
diff --git a/src/tools/clippy/clippy_lints/src/mem_replace.rs b/src/tools/clippy/clippy_lints/src/mem_replace.rs
index 35024ec12..8a921d4af 100644
--- a/src/tools/clippy/clippy_lints/src/mem_replace.rs
+++ b/src/tools/clippy/clippy_lints/src/mem_replace.rs
@@ -1,12 +1,13 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::{snippet, snippet_with_applicability};
+use clippy_utils::sugg::Sugg;
use clippy_utils::ty::is_non_aggregate_primitive_type;
-use clippy_utils::{is_default_equivalent, is_res_lang_ctor, path_res};
+use clippy_utils::{is_default_equivalent, is_res_lang_ctor, path_res, peel_ref_operators};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::OptionNone;
-use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, QPath};
+use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
@@ -101,40 +102,26 @@ declare_clippy_lint! {
impl_lint_pass!(MemReplace =>
[MEM_REPLACE_OPTION_WITH_NONE, MEM_REPLACE_WITH_UNINIT, MEM_REPLACE_WITH_DEFAULT]);
-fn check_replace_option_with_none(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
- // Check that second argument is `Option::None`
- if is_res_lang_ctor(cx, path_res(cx, src), OptionNone) {
- // Since this is a late pass (already type-checked),
- // and we already know that the second argument is an
- // `Option`, we do not need to check the first
- // argument's type. All that's left is to get
- // replacee's path.
- let replaced_path = match dest.kind {
- ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, replaced) => {
- if let ExprKind::Path(QPath::Resolved(None, replaced_path)) = replaced.kind {
- replaced_path
- } else {
- return;
- }
- },
- ExprKind::Path(QPath::Resolved(None, replaced_path)) => replaced_path,
- _ => return,
- };
-
- let mut applicability = Applicability::MachineApplicable;
- span_lint_and_sugg(
- cx,
- MEM_REPLACE_OPTION_WITH_NONE,
- expr_span,
- "replacing an `Option` with `None`",
- "consider `Option::take()` instead",
- format!(
- "{}.take()",
- snippet_with_applicability(cx, replaced_path.span, "", &mut applicability)
- ),
- applicability,
- );
- }
+fn check_replace_option_with_none(cx: &LateContext<'_>, dest: &Expr<'_>, expr_span: Span) {
+ // Since this is a late pass (already type-checked),
+ // and we already know that the second argument is an
+ // `Option`, we do not need to check the first
+ // argument's type. All that's left is to get
+ // the replacee's expr after peeling off the `&mut`
+ let sugg_expr = peel_ref_operators(cx, dest);
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ MEM_REPLACE_OPTION_WITH_NONE,
+ expr_span,
+ "replacing an `Option` with `None`",
+ "consider `Option::take()` instead",
+ format!(
+ "{}.take()",
+ Sugg::hir_with_context(cx, sugg_expr, expr_span.ctxt(), "", &mut applicability).maybe_par()
+ ),
+ applicability,
+ );
}
fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
@@ -200,10 +187,6 @@ fn check_replace_with_default(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<
if is_non_aggregate_primitive_type(expr_type) {
return;
}
- // disable lint for Option since it is covered in another lint
- if is_res_lang_ctor(cx, path_res(cx, src), OptionNone) {
- return;
- }
if is_default_equivalent(cx, src) && !in_external_macro(cx.tcx.sess, expr_span) {
span_lint_and_then(
cx,
@@ -246,11 +229,13 @@ impl<'tcx> LateLintPass<'tcx> for MemReplace {
if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
if cx.tcx.is_diagnostic_item(sym::mem_replace, def_id);
then {
- check_replace_option_with_none(cx, src, dest, expr.span);
- check_replace_with_uninit(cx, src, dest, expr.span);
- if self.msrv.meets(msrvs::MEM_TAKE) {
+ // Check that second argument is `Option::None`
+ if is_res_lang_ctor(cx, path_res(cx, src), OptionNone) {
+ check_replace_option_with_none(cx, dest, expr.span);
+ } else if self.msrv.meets(msrvs::MEM_TAKE) {
check_replace_with_default(cx, src, dest, expr.span);
}
+ check_replace_with_uninit(cx, src, dest, expr.span);
}
}
}
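
For context, a standalone sketch (assumed) of the `MEM_REPLACE_OPTION_WITH_NONE` case; building the suggestion from `Sugg::hir_with_context` plus `peel_ref_operators` lets it handle arbitrary place expressions, not just plain paths.

```rust
use std::mem;

fn main() {
    let mut slot: Option<String> = Some("hello".to_owned());

    // Flagged: replacing an `Option` with `None` by hand.
    let taken = mem::replace(&mut slot, None);
    assert_eq!(taken.as_deref(), Some("hello"));
    assert!(slot.is_none());

    // Suggested equivalent:
    let mut slot = Some("world".to_owned());
    let taken = slot.take();
    assert_eq!(taken.as_deref(), Some("world"));
}
```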
diff --git a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
index 4720a6e68..008533488 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
@@ -1,6 +1,6 @@
use super::{contains_return, BIND_INSTEAD_OF_MAP};
use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_sugg, span_lint_and_then};
-use clippy_utils::source::{snippet, snippet_with_macro_callsite};
+use clippy_utils::source::{snippet, snippet_with_context};
use clippy_utils::{peel_blocks, visitors::find_all_ret_expressions};
use if_chain::if_chain;
use rustc_errors::Applicability;
@@ -8,7 +8,6 @@ use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::{LangItem, QPath};
use rustc_lint::LateContext;
-use rustc_middle::ty::DefIdTree;
use rustc_span::Span;
pub(crate) struct OptionAndThenSome;
@@ -77,11 +76,8 @@ pub(crate) trait BindInsteadOfMap {
if !contains_return(inner_expr);
if let Some(msg) = Self::lint_msg(cx);
then {
- let some_inner_snip = if inner_expr.span.from_expansion() {
- snippet_with_macro_callsite(cx, inner_expr.span, "_")
- } else {
- snippet(cx, inner_expr.span, "_")
- };
+ let mut app = Applicability::MachineApplicable;
+ let some_inner_snip = snippet_with_context(cx, inner_expr.span, closure_expr.span.ctxt(), "_", &mut app).0;
let closure_args_snip = snippet(cx, closure_args_span, "..");
let option_snip = snippet(cx, recv.span, "..");
@@ -93,7 +89,7 @@ pub(crate) trait BindInsteadOfMap {
&msg,
"try this",
note,
- Applicability::MachineApplicable,
+ app,
);
true
} else {
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
index 56b7fbb9d..079df2226 100644
--- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
@@ -6,7 +6,7 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_lint::Lint;
-use rustc_middle::ty::{self, DefIdTree};
+use rustc_middle::ty;
/// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints.
pub(super) fn check(
diff --git a/src/tools/clippy/clippy_lints/src/methods/clear_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/clear_with_drain.rs
new file mode 100644
index 000000000..67ad58d5a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/clear_with_drain.rs
@@ -0,0 +1,53 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_range_full;
+use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::{Expr, ExprKind, LangItem, QPath};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+use rustc_span::Span;
+
+use super::CLEAR_WITH_DRAIN;
+
+// Add `String` here when it is added to diagnostic items
+const ACCEPTABLE_TYPES_WITH_ARG: [rustc_span::Symbol; 2] = [sym::Vec, sym::VecDeque];
+
+const ACCEPTABLE_TYPES_WITHOUT_ARG: [rustc_span::Symbol; 3] = [sym::BinaryHeap, sym::HashMap, sym::HashSet];
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span: Span, arg: Option<&Expr<'_>>) {
+ if let Some(arg) = arg {
+ if match_acceptable_type(cx, recv, &ACCEPTABLE_TYPES_WITH_ARG)
+ && let ExprKind::Path(QPath::Resolved(None, container_path)) = recv.kind
+ && is_range_full(cx, arg, Some(container_path))
+ {
+ suggest(cx, expr, recv, span);
+ }
+ } else if match_acceptable_type(cx, recv, &ACCEPTABLE_TYPES_WITHOUT_ARG) {
+ suggest(cx, expr, recv, span);
+ }
+}
+
+fn match_acceptable_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>, types: &[rustc_span::Symbol]) -> bool {
+ let expr_ty = cx.typeck_results().expr_ty(expr).peel_refs();
+ types.iter().any(|&ty| is_type_diagnostic_item(cx, expr_ty, ty))
+ // String type is a lang item but not a diagnostic item for now so we need a separate check
+ || is_type_lang_item(cx, expr_ty, LangItem::String)
+}
+
+fn suggest(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span: Span) {
+ if let Some(adt) = cx.typeck_results().expr_ty(recv).ty_adt_def()
+ // Use `opt_item_name` while `String` is not a diagnostic item
+ && let Some(ty_name) = cx.tcx.opt_item_name(adt.did())
+ {
+ span_lint_and_sugg(
+ cx,
+ CLEAR_WITH_DRAIN,
+ span.with_hi(expr.span.hi()),
+ &format!("`drain` used to clear a `{ty_name}`"),
+ "try",
+ "clear()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+}
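
A small usage sketch (assumed) for the new `clear_with_drain` check: a full-range `drain` whose iterator is discarded is reported, with `clear()` as the suggested replacement.

```rust
fn main() {
    let mut v = vec![1, 2, 3];
    // Flagged: the returned iterator is dropped immediately.
    v.drain(..);

    let mut w = vec![1, 2, 3];
    // Suggested:
    w.clear();

    assert!(v.is_empty() && w.is_empty());
}
```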
diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
index 355f53532..5e8ad0861 100644
--- a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::paths;
-use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{is_type_diagnostic_item, match_type};
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -33,7 +33,9 @@ pub(super) fn check(
return;
};
- let snippet = snippet_with_macro_callsite(cx, receiver.span, "..");
+ // Sometimes unnecessary ::<_> after Rc/Arc/Weak
+ let mut app = Applicability::Unspecified;
+ let snippet = snippet_with_context(cx, receiver.span, expr.span.ctxt(), "..", &mut app).0;
span_lint_and_sugg(
cx,
@@ -42,7 +44,7 @@ pub(super) fn check(
"using `.clone()` on a ref-counted pointer",
"try this",
format!("{caller_type}::<{}>::clone(&{snippet})", subst.type_at(0)),
- Applicability::Unspecified, // Sometimes unnecessary ::<_> after Rc/Arc/Weak
+ app,
);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
index a22285058..92d21bb89 100644
--- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
@@ -1,5 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::macros::{root_macro_call_first_node, FormatArgsExpn};
+use clippy_utils::macros::{find_format_args, format_args_inputs_span, root_macro_call_first_node};
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
use rustc_errors::Applicability;
@@ -136,18 +136,19 @@ pub(super) fn check<'tcx>(
if !cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) {
return;
}
- let Some(format_args) = FormatArgsExpn::find_nested(cx, arg_root, macro_call.expn) else { return };
- let span = format_args.inputs_span();
- let sugg = snippet_with_applicability(cx, span, "..", &mut applicability);
- span_lint_and_sugg(
- cx,
- EXPECT_FUN_CALL,
- span_replace_word,
- &format!("use of `{name}` followed by a function call"),
- "try this",
- format!("unwrap_or_else({closure_args} panic!({sugg}))"),
- applicability,
- );
+ find_format_args(cx, arg_root, macro_call.expn, |format_args| {
+ let span = format_args_inputs_span(format_args);
+ let sugg = snippet_with_applicability(cx, span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ EXPECT_FUN_CALL,
+ span_replace_word,
+ &format!("use of `{name}` followed by a function call"),
+ "try this",
+ format!("unwrap_or_else({closure_args} panic!({sugg}))"),
+ applicability,
+ );
+ });
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
index 3da230e12..f6772c5c6 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
@@ -1,7 +1,5 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::higher::Range;
-use clippy_utils::is_integer_const;
-use rustc_ast::ast::RangeLimits;
+use clippy_utils::is_range_full;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::LateContext;
@@ -15,8 +13,8 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span
&& let Some(adt) = cx.typeck_results().expr_ty(recv).ty_adt_def()
&& let Some(ty_name) = cx.tcx.get_diagnostic_name(adt.did())
&& matches!(ty_name, sym::Vec | sym::VecDeque)
- && let Some(range) = Range::hir(arg)
- && is_full_range(cx, recv, range)
+ && let ExprKind::Path(QPath::Resolved(None, container_path)) = recv.kind
+ && is_range_full(cx, arg, Some(container_path))
{
span_lint_and_sugg(
cx,
@@ -29,19 +27,3 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span
);
};
}
-
-fn is_full_range(cx: &LateContext<'_>, container: &Expr<'_>, range: Range<'_>) -> bool {
- range.start.map_or(true, |e| is_integer_const(cx, e, 0))
- && range.end.map_or(true, |e| {
- if range.limits == RangeLimits::HalfOpen
- && let ExprKind::Path(QPath::Resolved(None, container_path)) = container.kind
- && let ExprKind::MethodCall(name, self_arg, [], _) = e.kind
- && name.ident.name == sym::len
- && let ExprKind::Path(QPath::Resolved(None, path)) = self_arg.kind
- {
- container_path.res == path.res
- } else {
- false
- }
- })
-}
diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs
index 702df4b28..64bf55ba2 100644
--- a/src/tools/clippy/clippy_lints/src/methods/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs
@@ -9,6 +9,7 @@ mod chars_last_cmp;
mod chars_last_cmp_with_unwrap;
mod chars_next_cmp;
mod chars_next_cmp_with_unwrap;
+mod clear_with_drain;
mod clone_on_copy;
mod clone_on_ref_ptr;
mod cloned_instead_of_copied;
@@ -110,7 +111,7 @@ use clippy_utils::ty::{contains_ty_adt_constructor_opaque, implements_trait, is_
use clippy_utils::{contains_return, is_bool, is_trait_method, iter_input_pats, return_ty};
use if_chain::if_chain;
use rustc_hir as hir;
-use rustc_hir::{Expr, ExprKind, TraitItem, TraitItemKind};
+use rustc_hir::{Expr, ExprKind, Node, Stmt, StmtKind, TraitItem, TraitItemKind};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
@@ -340,8 +341,9 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
- /// Checks for methods with certain name prefixes and which
- /// doesn't match how self is taken. The actual rules are:
+ /// Checks for methods with certain name prefixes or suffixes, and which
+ /// do not adhere to standard conventions regarding how `self` is taken.
+ /// The actual rules are:
///
/// |Prefix |Postfix |`self` taken | `self` type |
/// |-------|------------|-------------------------------|--------------|
@@ -3189,6 +3191,31 @@ declare_clippy_lint! {
"single command line argument that looks like it should be multiple arguments"
}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.drain(..)` for the sole purpose of clearing a container.
+ ///
+ /// ### Why is this bad?
+ /// This creates an unnecessary iterator that is dropped immediately.
+ ///
+ /// Calling `.clear()` also makes the intent clearer.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut v = vec![1, 2, 3];
+ /// v.drain(..);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let mut v = vec![1, 2, 3];
+ /// v.clear();
+ /// ```
+ #[clippy::version = "1.69.0"]
+ pub CLEAR_WITH_DRAIN,
+ nursery,
+ "calling `drain` in order to `clear` a container"
+}
+
pub struct Methods {
avoid_breaking_exported_api: bool,
msrv: Msrv,
@@ -3317,6 +3344,7 @@ impl_lint_pass!(Methods => [
SEEK_TO_START_INSTEAD_OF_REWIND,
NEEDLESS_COLLECT,
SUSPICIOUS_COMMAND_ARG_SPACE,
+ CLEAR_WITH_DRAIN,
]);
/// Extracts a method call name, args, and `Span` of the method name.
@@ -3561,8 +3589,15 @@ impl Methods {
Some(("bytes", recv2, [], _, _)) => bytes_count_to_len::check(cx, expr, recv, recv2),
_ => {},
},
- ("drain", [arg]) => {
- iter_with_drain::check(cx, expr, recv, span, arg);
+ ("drain", ..) => {
+ if let Node::Stmt(Stmt { hir_id: _, kind, .. }) = cx.tcx.hir().get_parent(expr.hir_id)
+ && matches!(kind, StmtKind::Semi(_))
+ && args.len() <= 1
+ {
+ clear_with_drain::check(cx, expr, recv, span, args.first());
+ } else if let [arg] = args {
+ iter_with_drain::check(cx, expr, recv, span, arg);
+ }
},
("ends_with", [arg]) => {
if let ExprKind::MethodCall(.., span) = expr.kind {
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
index 3a23ecc50..41ceef19e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
@@ -6,7 +6,6 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::LangItem::{OptionNone, OptionSome};
use rustc_lint::LateContext;
-use rustc_middle::ty::DefIdTree;
use rustc_span::symbol::sym;
use super::OPTION_MAP_OR_NONE;
diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
index 4460f38fc..7ce28ea93 100644
--- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::eager_or_lazy::switch_to_lazy_eval;
-use clippy_utils::source::{snippet, snippet_with_macro_callsite};
+use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
use clippy_utils::{contains_return, is_trait_item, last_path_segment};
use if_chain::if_chain;
@@ -9,7 +9,6 @@ use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::source_map::Span;
use rustc_span::symbol::{kw, sym, Symbol};
-use std::borrow::Cow;
use super::OR_FUN_CALL;
@@ -111,37 +110,24 @@ pub(super) fn check<'tcx>(
if poss.contains(&name);
then {
+ let ctxt = span.ctxt();
+ let mut app = Applicability::HasPlaceholders;
let sugg = {
let (snippet_span, use_lambda) = match (fn_has_arguments, fun_span) {
(false, Some(fun_span)) => (fun_span, false),
_ => (arg.span, true),
};
- let format_span = |span: Span| {
- let not_macro_argument_snippet = snippet_with_macro_callsite(cx, span, "..");
- let snip = if not_macro_argument_snippet == "vec![]" {
- let macro_expanded_snipped = snippet(cx, snippet_span, "..");
- match macro_expanded_snipped.strip_prefix("$crate::vec::") {
- Some(stripped) => Cow::Owned(stripped.to_owned()),
- None => macro_expanded_snipped,
- }
- } else {
- not_macro_argument_snippet
- };
-
- snip.to_string()
- };
-
- let snip = format_span(snippet_span);
+ let snip = snippet_with_context(cx, snippet_span, ctxt, "..", &mut app).0;
let snip = if use_lambda {
let l_arg = if fn_has_arguments { "_" } else { "" };
format!("|{l_arg}| {snip}")
} else {
- snip
+ snip.into_owned()
};
if let Some(f) = second_arg {
- let f = format_span(f.span);
+ let f = snippet_with_context(cx, f.span, ctxt, "..", &mut app).0;
format!("{snip}, {f}")
} else {
snip
@@ -155,7 +141,7 @@ pub(super) fn check<'tcx>(
&format!("use of `{name}` followed by a function call"),
"try this",
format!("{name}_{suffix}({sugg})"),
- Applicability::HasPlaceholders,
+ app,
);
}
}
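
An illustrative sketch (the function name is made up) of the `or_fun_call` pattern the reworked snippet logic covers; the ad-hoc special-casing of `vec![]` via pretty-printed macro output is replaced by `snippet_with_context`.

```rust
fn expensive_default() -> i32 {
    42
}

fn main() {
    let opt: Option<i32> = None;

    // Flagged: `expensive_default()` is evaluated even when `opt` is `Some`.
    let a = opt.unwrap_or(expensive_default());
    // Suggested lazy form:
    let b = opt.unwrap_or_else(|| expensive_default());

    assert_eq!(a, b);
}
```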
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
index 5201da52b..67618f703 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
@@ -33,10 +33,6 @@ struct SortByKeyDetection {
/// contains a and the other replaces it with b)
fn mirrored_exprs(a_expr: &Expr<'_>, a_ident: &Ident, b_expr: &Expr<'_>, b_ident: &Ident) -> bool {
match (&a_expr.kind, &b_expr.kind) {
- // Two boxes with mirrored contents
- (ExprKind::Box(left_expr), ExprKind::Box(right_expr)) => {
- mirrored_exprs(left_expr, a_ident, right_expr, b_ident)
- },
// Two arrays with mirrored contents
(ExprKind::Array(left_exprs), ExprKind::Array(right_exprs)) => {
iter::zip(*left_exprs, *right_exprs).all(|(left, right)| mirrored_exprs(left, a_ident, right, b_ident))
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
index df26b36b7..4c4c003ca 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
@@ -369,10 +369,10 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty<
Node::Item(item) => {
if let ItemKind::Fn(_, _, body_id) = &item.kind
&& let output_ty = return_ty(cx, item.owner_id)
- && Inherited::build(cx.tcx, item.owner_id.def_id).enter(|inherited| {
- let fn_ctxt = FnCtxt::new(inherited, cx.param_env, item.owner_id.def_id);
- fn_ctxt.can_coerce(ty, output_ty)
- }) {
+ && let inherited = Inherited::new(cx.tcx, item.owner_id.def_id)
+ && let fn_ctxt = FnCtxt::new(&inherited, cx.param_env, item.owner_id.def_id)
+ && fn_ctxt.can_coerce(ty, output_ty)
+ {
if has_lifetime(output_ty) && has_lifetime(ty) {
return false;
}
diff --git a/src/tools/clippy/clippy_lints/src/misc.rs b/src/tools/clippy/clippy_lints/src/misc.rs
index 0705029a6..3752b9a94 100644
--- a/src/tools/clippy/clippy_lints/src/misc.rs
+++ b/src/tools/clippy/clippy_lints/src/misc.rs
@@ -1,5 +1,5 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_hir_and_then};
-use clippy_utils::source::{snippet, snippet_opt};
+use clippy_utils::source::{snippet, snippet_opt, snippet_with_context};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
@@ -181,20 +181,17 @@ impl<'tcx> LateLintPass<'tcx> for LintPass {
if let PatKind::Binding(BindingAnnotation(ByRef::Yes, mutabl), .., name, None) = local.pat.kind;
if let Some(init) = local.init;
then {
- // use the macro callsite when the init span (but not the whole local span)
- // comes from an expansion like `vec![1, 2, 3]` in `let ref _ = vec![1, 2, 3];`
- let sugg_init = if init.span.from_expansion() && !local.span.from_expansion() {
- Sugg::hir_with_macro_callsite(cx, init, "..")
- } else {
- Sugg::hir(cx, init, "..")
- };
+ let ctxt = local.span.ctxt();
+ let mut app = Applicability::MachineApplicable;
+ let sugg_init = Sugg::hir_with_context(cx, init, ctxt, "..", &mut app);
let (mutopt, initref) = if mutabl == Mutability::Mut {
("mut ", sugg_init.mut_addr())
} else {
("", sugg_init.addr())
};
let tyopt = if let Some(ty) = local.ty {
- format!(": &{mutopt}{ty}", ty=snippet(cx, ty.span, ".."))
+ let ty_snip = snippet_with_context(cx, ty.span, ctxt, "_", &mut app).0;
+ format!(": &{mutopt}{ty_snip}")
} else {
String::new()
};
@@ -212,7 +209,7 @@ impl<'tcx> LateLintPass<'tcx> for LintPass {
"let {name}{tyopt} = {initref};",
name=snippet(cx, name.span, ".."),
),
- Applicability::MachineApplicable,
+ app,
);
}
);
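
For reference, a hedged sketch of the `ref` binding pattern whose suggestion is rewritten above; `Sugg::hir_with_context` replaces the manual macro-callsite handling for initializers like `vec![…]`.

```rust
fn main() {
    let v = vec![1, 2, 3];

    // Flagged: a `ref` binding on a whole `let`.
    let ref a = v;
    // Suggested:
    let b = &v;

    assert_eq!(a, b);
}
```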
diff --git a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
new file mode 100644
index 000000000..2214a568d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
@@ -0,0 +1,82 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::macros::{find_assert_args, find_assert_eq_args, root_macro_call_first_node, PanicExpn};
+use clippy_utils::{is_in_cfg_test, is_in_test_function};
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks assertions without a custom panic message.
+ ///
+ /// ### Why is this bad?
+ /// Without a good custom message, it'd be hard to understand what went wrong when the assertion fails.
+    /// A good custom message should explain why the failure of the assertion is problematic,
+    /// not what failed, since the assertion itself already conveys that.
+ ///
+ /// ### Known problems
+ /// This lint cannot check the quality of the custom panic messages.
+ /// Hence, you can suppress this lint simply by adding placeholder messages
+ /// like "assertion failed". However, we recommend coming up with good messages
+ /// that provide useful information instead of placeholder messages that
+ /// don't provide any extra information.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct Service { ready: bool }
+ /// fn call(service: Service) {
+ /// assert!(service.ready);
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # struct Service { ready: bool }
+ /// fn call(service: Service) {
+ /// assert!(service.ready, "`service.poll_ready()` must be called first to ensure that service is ready to receive requests");
+ /// }
+ /// ```
+ #[clippy::version = "1.69.0"]
+ pub MISSING_ASSERT_MESSAGE,
+ restriction,
+ "checks assertions without a custom panic message"
+}
+
+declare_lint_pass!(MissingAssertMessage => [MISSING_ASSERT_MESSAGE]);
+
+impl<'tcx> LateLintPass<'tcx> for MissingAssertMessage {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let single_argument = match cx.tcx.get_diagnostic_name(macro_call.def_id) {
+ Some(sym::assert_macro | sym::debug_assert_macro) => true,
+ Some(
+ sym::assert_eq_macro | sym::assert_ne_macro | sym::debug_assert_eq_macro | sym::debug_assert_ne_macro,
+ ) => false,
+ _ => return,
+ };
+
+ // This lint would be very noisy in tests, so just ignore if we're in test context
+ if is_in_test_function(cx.tcx, expr.hir_id) || is_in_cfg_test(cx.tcx, expr.hir_id) {
+ return;
+ }
+
+ let panic_expn = if single_argument {
+ let Some((_, panic_expn)) = find_assert_args(cx, expr, macro_call.expn) else { return };
+ panic_expn
+ } else {
+ let Some((_, _, panic_expn)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return };
+ panic_expn
+ };
+
+ if let PanicExpn::Empty = panic_expn {
+ span_lint_and_help(
+ cx,
+ MISSING_ASSERT_MESSAGE,
+ macro_call.span,
+ "assert without any message",
+ None,
+ "consider describing why the failing assert is problematic",
+ );
+ }
+ }
+}
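
A short example (assumed) of what the new `missing_assert_message` restriction lint rejects and accepts outside of test code.

```rust
fn store(len: usize, capacity: usize) {
    // Flagged: no custom panic message.
    assert!(len <= capacity);
    // Accepted: the message says why the failure matters.
    assert!(len <= capacity, "writing past `capacity` would corrupt the arena");
}

fn main() {
    store(3, 8);
}
```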
diff --git a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
index 87bd007a2..f1831a304 100644
--- a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
@@ -41,6 +41,7 @@ declare_clippy_lint! {
/// can't be const as it calls a non-const function. Making `a` const and running Clippy again,
/// will suggest to make `b` const, too.
///
+ /// If you are marking a public function with `const`, removing it again will break API compatibility.
/// ### Example
/// ```rust
/// # struct Foo {
diff --git a/src/tools/clippy/clippy_lints/src/missing_doc.rs b/src/tools/clippy/clippy_lints/src/missing_doc.rs
index 9659ca8ce..f2773cad4 100644
--- a/src/tools/clippy/clippy_lints/src/missing_doc.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_doc.rs
@@ -8,12 +8,12 @@
use clippy_utils::attrs::is_doc_hidden;
use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_from_proc_macro;
-use hir::def_id::LocalDefId;
use if_chain::if_chain;
use rustc_ast::ast::{self, MetaItem, MetaItemKind};
use rustc_hir as hir;
+use rustc_hir::def_id::LocalDefId;
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::ty::{DefIdTree, Visibility};
+use rustc_middle::ty::Visibility;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::def_id::CRATE_DEF_ID;
use rustc_span::source_map::Span;
@@ -21,8 +21,7 @@ use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
- /// Warns if there is missing doc for any documentable item
- /// (public or private).
+ /// Warns if there is missing doc for any private documentable item
///
/// ### Why is this bad?
/// Doc is good. *rustc* has a `MISSING_DOCS`
@@ -32,7 +31,7 @@ declare_clippy_lint! {
#[clippy::version = "pre 1.29.0"]
pub MISSING_DOCS_IN_PRIVATE_ITEMS,
restriction,
- "detects missing documentation for public and private members"
+ "detects missing documentation for private members"
}
pub struct MissingDoc {
@@ -107,11 +106,14 @@ impl MissingDoc {
if vis == Visibility::Public || vis != Visibility::Restricted(CRATE_DEF_ID.into()) {
return;
}
+ } else if def_id != CRATE_DEF_ID && cx.effective_visibilities.is_exported(def_id) {
+ return;
}
let has_doc = attrs
.iter()
.any(|a| a.doc_str().is_some() || Self::has_include(a.meta()));
+
if !has_doc {
span_lint(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
index 63c575fca..5418616de 100644
--- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
+++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
@@ -11,6 +11,7 @@ use rustc_ast::Mutability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::Span;
@@ -120,33 +121,15 @@ fn collect_unsafe_exprs<'tcx>(
unsafe_ops.push(("raw pointer dereference occurs here", expr.span));
},
- ExprKind::Call(path_expr, _) => match path_expr.kind {
- ExprKind::Path(QPath::Resolved(
- _,
- hir::Path {
- res: Res::Def(kind, def_id),
- ..
- },
- )) if kind.is_fn_like() => {
- let sig = cx.tcx.fn_sig(*def_id);
- if sig.0.unsafety() == Unsafety::Unsafe {
- unsafe_ops.push(("unsafe function call occurs here", expr.span));
- }
- },
-
- ExprKind::Path(QPath::TypeRelative(..)) => {
- if let Some(sig) = cx
- .typeck_results()
- .type_dependent_def_id(path_expr.hir_id)
- .map(|def_id| cx.tcx.fn_sig(def_id))
- {
- if sig.0.unsafety() == Unsafety::Unsafe {
- unsafe_ops.push(("unsafe function call occurs here", expr.span));
- }
- }
- },
-
- _ => {},
+ ExprKind::Call(path_expr, _) => {
+ let sig = match *cx.typeck_results().expr_ty(path_expr).kind() {
+ ty::FnDef(id, _) => cx.tcx.fn_sig(id).skip_binder(),
+ ty::FnPtr(sig) => sig,
+ _ => return Continue(Descend::Yes),
+ };
+ if sig.unsafety() == Unsafety::Unsafe {
+ unsafe_ops.push(("unsafe function call occurs here", expr.span));
+ }
},
ExprKind::MethodCall(..) => {
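
A hedged sketch of the behavioural effect of resolving the callee through `typeck_results`: calls through unsafe function pointers are now counted alongside direct calls to unsafe fns.

```rust
unsafe fn read_first(p: *const u8) -> u8 {
    *p
}

fn main() {
    let data = [1u8, 2, 3];
    let fptr: unsafe fn(*const u8) -> u8 = read_first;

    // Two unsafe operations in one block: a direct unsafe call and a call
    // through an unsafe fn pointer; the reworked collector sees both.
    let sum = unsafe { read_first(data.as_ptr()) + fptr(data.as_ptr()) };
    assert_eq!(sum, 2);
}
```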
diff --git a/src/tools/clippy/clippy_lints/src/mut_key.rs b/src/tools/clippy/clippy_lints/src/mut_key.rs
index 8aa814b74..309f67521 100644
--- a/src/tools/clippy/clippy_lints/src/mut_key.rs
+++ b/src/tools/clippy/clippy_lints/src/mut_key.rs
@@ -1,10 +1,11 @@
use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_interior_mut_ty;
use clippy_utils::{def_path_def_ids, trait_ref_of_method};
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::TypeVisitableExt;
-use rustc_middle::ty::{Adt, Array, Ref, Slice, Tuple, Ty};
+use rustc_middle::query::Key;
+use rustc_middle::ty::{Adt, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::def_id::LocalDefId;
use rustc_span::source_map::Span;
@@ -153,53 +154,18 @@ impl MutableKeyType {
let is_keyed_type = [sym::HashMap, sym::BTreeMap, sym::HashSet, sym::BTreeSet]
.iter()
.any(|diag_item| cx.tcx.is_diagnostic_item(*diag_item, def.did()));
- if is_keyed_type && self.is_interior_mutable_type(cx, substs.type_at(0)) {
- span_lint(cx, MUTABLE_KEY_TYPE, span, "mutable key type");
+ if !is_keyed_type {
+ return;
}
- }
- }
- /// Determines if a type contains interior mutability which would affect its implementation of
- /// [`Hash`] or [`Ord`].
- fn is_interior_mutable_type<'tcx>(&self, cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
- match *ty.kind() {
- Ref(_, inner_ty, mutbl) => mutbl == hir::Mutability::Mut || self.is_interior_mutable_type(cx, inner_ty),
- Slice(inner_ty) => self.is_interior_mutable_type(cx, inner_ty),
- Array(inner_ty, size) => {
- size.try_eval_target_usize(cx.tcx, cx.param_env)
- .map_or(true, |u| u != 0)
- && self.is_interior_mutable_type(cx, inner_ty)
- },
- Tuple(fields) => fields.iter().any(|ty| self.is_interior_mutable_type(cx, ty)),
- Adt(def, substs) => {
- // Special case for collections in `std` who's impl of `Hash` or `Ord` delegates to
- // that of their type parameters. Note: we don't include `HashSet` and `HashMap`
- // because they have no impl for `Hash` or `Ord`.
- let def_id = def.did();
- let is_std_collection = [
- sym::Option,
- sym::Result,
- sym::LinkedList,
- sym::Vec,
- sym::VecDeque,
- sym::BTreeMap,
- sym::BTreeSet,
- sym::Rc,
- sym::Arc,
- ]
- .iter()
- .any(|diag_item| cx.tcx.is_diagnostic_item(*diag_item, def_id));
- let is_box = Some(def_id) == cx.tcx.lang_items().owned_box();
- if is_std_collection || is_box || self.ignore_mut_def_ids.contains(&def_id) {
- // The type is mutable if any of its type parameters are
- substs.types().any(|ty| self.is_interior_mutable_type(cx, ty))
- } else {
- !ty.has_escaping_bound_vars()
- && cx.tcx.layout_of(cx.param_env.and(ty)).is_ok()
- && !ty.is_freeze(cx.tcx, cx.param_env)
- }
- },
- _ => false,
+ let subst_ty = substs.type_at(0);
+ // Determines if a type contains interior mutability which would affect its implementation of
+ // [`Hash`] or [`Ord`].
+ if is_interior_mut_ty(cx, subst_ty)
+ && !matches!(subst_ty.ty_adt_id(), Some(adt_id) if self.ignore_mut_def_ids.contains(&adt_id))
+ {
+ span_lint(cx, MUTABLE_KEY_TYPE, span, "mutable key type");
+ }
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_bool.rs b/src/tools/clippy/clippy_lints/src/needless_bool.rs
index a4eec95b3..c87059bf6 100644
--- a/src/tools/clippy/clippy_lints/src/needless_bool.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_bool.rs
@@ -340,18 +340,11 @@ fn suggest_bool_comparison<'a, 'tcx>(
cx: &LateContext<'tcx>,
e: &'tcx Expr<'_>,
expr: &Expr<'_>,
- mut applicability: Applicability,
+ mut app: Applicability,
message: &str,
conv_hint: impl FnOnce(Sugg<'a>) -> Sugg<'a>,
) {
- let hint = if expr.span.from_expansion() {
- if applicability != Applicability::Unspecified {
- applicability = Applicability::MaybeIncorrect;
- }
- Sugg::hir_with_macro_callsite(cx, expr, "..")
- } else {
- Sugg::hir_with_applicability(cx, expr, "..", &mut applicability)
- };
+ let hint = Sugg::hir_with_context(cx, expr, e.span.ctxt(), "..", &mut app);
span_lint_and_sugg(
cx,
BOOL_COMPARISON,
@@ -359,7 +352,7 @@ fn suggest_bool_comparison<'a, 'tcx>(
message,
"try simplifying it as shown",
conv_hint(hint).to_string(),
- applicability,
+ app,
);
}
diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
index 1ab81aee7..0bb1775aa 100644
--- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
@@ -122,11 +122,11 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
let sized_trait = need!(cx.tcx.lang_items().sized_trait());
- let preds = traits::elaborate_predicates(cx.tcx, cx.param_env.caller_bounds().iter())
+ let preds = traits::elaborate(cx.tcx, cx.param_env.caller_bounds().iter())
.filter(|p| !p.is_global())
- .filter_map(|obligation| {
+ .filter_map(|pred| {
// Note that we do not want to deal with qualified predicates here.
- match obligation.predicate.kind().no_bound_vars() {
+ match pred.kind().no_bound_vars() {
Some(ty::PredicateKind::Clause(ty::Clause::Trait(pred))) if pred.def_id() != sized_trait => {
Some(pred)
},
diff --git a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
index 97c8cfbd3..e2a7ba02a 100644
--- a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
@@ -6,7 +6,6 @@ use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{AsyncGeneratorKind, Block, Body, Expr, ExprKind, GeneratorKind, LangItem, MatchSource, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::DefIdTree;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/neg_multiply.rs b/src/tools/clippy/clippy_lints/src/neg_multiply.rs
index fb9a4abd0..ed3e2c6e7 100644
--- a/src/tools/clippy/clippy_lints/src/neg_multiply.rs
+++ b/src/tools/clippy/clippy_lints/src/neg_multiply.rs
@@ -1,6 +1,6 @@
use clippy_utils::consts::{self, Constant};
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::has_enclosing_paren;
use if_chain::if_chain;
use rustc_ast::util::parser::PREC_PREFIX;
@@ -60,8 +60,8 @@ fn check_mul(cx: &LateContext<'_>, span: Span, lit: &Expr<'_>, exp: &Expr<'_>) {
then {
let mut applicability = Applicability::MachineApplicable;
- let snip = snippet_with_applicability(cx, exp.span, "..", &mut applicability);
- let suggestion = if exp.precedence().order() < PREC_PREFIX && !has_enclosing_paren(&snip) {
+ let (snip, from_macro) = snippet_with_context(cx, exp.span, span.ctxt(), "..", &mut applicability);
+ let suggestion = if !from_macro && exp.precedence().order() < PREC_PREFIX && !has_enclosing_paren(&snip) {
format!("-({snip})")
} else {
format!("-{snip}")
diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs
index 79c1ae486..e3712190e 100644
--- a/src/tools/clippy/clippy_lints/src/no_effect.rs
+++ b/src/tools/clippy/clippy_lints/src/no_effect.rs
@@ -127,8 +127,7 @@ fn has_no_effect(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
| ExprKind::Type(inner, _)
| ExprKind::Unary(_, inner)
| ExprKind::Field(inner, _)
- | ExprKind::AddrOf(_, _, inner)
- | ExprKind::Box(inner) => has_no_effect(cx, inner),
+ | ExprKind::AddrOf(_, _, inner) => has_no_effect(cx, inner),
ExprKind::Struct(_, fields, ref base) => {
!has_drop(cx, cx.typeck_results().expr_ty(expr))
&& fields.iter().all(|field| has_no_effect(cx, field.expr))
@@ -234,8 +233,7 @@ fn reduce_expression<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<Vec
| ExprKind::Type(inner, _)
| ExprKind::Unary(_, inner)
| ExprKind::Field(inner, _)
- | ExprKind::AddrOf(_, _, inner)
- | ExprKind::Box(inner) => reduce_expression(cx, inner).or_else(|| Some(vec![inner])),
+ | ExprKind::AddrOf(_, _, inner) => reduce_expression(cx, inner).or_else(|| Some(vec![inner])),
ExprKind::Struct(_, fields, ref base) => {
if has_drop(cx, cx.typeck_results().expr_ty(expr)) {
None
diff --git a/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs b/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs
index bc64ccb29..8fd9ae351 100644
--- a/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs
+++ b/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs
@@ -1,9 +1,10 @@
-use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_with_applicability;
use rustc_errors::Applicability;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{BytePos, Pos};
use rustc_target::spec::abi::Abi;
declare_clippy_lint! {
@@ -38,25 +39,28 @@ impl<'tcx> LateLintPass<'tcx> for NoMangleWithRustAbi {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
if let ItemKind::Fn(fn_sig, _, _) = &item.kind {
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let mut applicability = Applicability::MachineApplicable;
- let snippet = snippet_with_applicability(cx, fn_sig.span, "..", &mut applicability);
+ let mut app = Applicability::MaybeIncorrect;
+ let snippet = snippet_with_applicability(cx, fn_sig.span, "..", &mut app);
for attr in attrs {
if let Some(ident) = attr.ident()
&& ident.name == rustc_span::sym::no_mangle
&& fn_sig.header.abi == Abi::Rust
- && !snippet.contains("extern") {
+ && let Some((fn_attrs, _)) = snippet.split_once("fn")
+ && !fn_attrs.contains("extern")
+ {
+ let sugg_span = fn_sig.span
+ .with_lo(fn_sig.span.lo() + BytePos::from_usize(fn_attrs.len()))
+ .shrink_to_lo();
- let suggestion = snippet.split_once("fn")
- .map_or(String::new(), |(first, second)| format!(r#"{first}extern "C" fn{second}"#));
-
- span_lint_and_sugg(
+ span_lint_and_then(
cx,
NO_MANGLE_WITH_RUST_ABI,
fn_sig.span,
- "attribute #[no_mangle] set on a Rust ABI function",
- "try",
- suggestion,
- applicability
+ "`#[no_mangle]` set on a function with the default (`Rust`) ABI",
+ |diag| {
+ diag.span_suggestion(sugg_span, "set an ABI", "extern \"C\" ", app)
+ .span_suggestion(sugg_span, "or explicitly set the default", "extern \"Rust\" ", app);
+ },
);
}
}
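As a rough illustration of the reworked diagnostic (function names here are invented), the lint now reports the problem and offers two alternative suggestions instead of a single machine-applicable rewrite:

```rust
// A #[no_mangle] function left on the default (Rust) ABI is what the lint
// reports; the two variants below correspond to its suggestions.
#[no_mangle]
pub fn mangled_entry(x: i32) -> i32 {
    x + 1
}

// Suggestion 1: set an explicit, stable ABI.
#[no_mangle]
pub extern "C" fn c_abi_entry(x: i32) -> i32 {
    x + 1
}

// Suggestion 2: keep the Rust ABI, but state it explicitly.
#[no_mangle]
pub extern "Rust" fn rust_abi_entry(x: i32) -> i32 {
    x + 1
}
```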
diff --git a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
index 2ecb04874..e1de494eb 100644
--- a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
+++ b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
@@ -53,6 +53,7 @@ impl<'tcx> LateLintPass<'tcx> for NonOctalUnixPermissions {
|| is_type_diagnostic_item(cx, obj_ty, sym::DirBuilder)))
|| (path.ident.name == sym!(set_mode) && match_type(cx, obj_ty, &paths::PERMISSIONS));
if let ExprKind::Lit(_) = param.kind;
+ if param.span.ctxt() == expr.span.ctxt();
then {
let Some(snip) = snippet_opt(cx, param.span) else {
@@ -71,6 +72,7 @@ impl<'tcx> LateLintPass<'tcx> for NonOctalUnixPermissions {
if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id();
if match_def_path(cx, def_id, &paths::PERMISSIONS_FROM_MODE);
if let ExprKind::Lit(_) = param.kind;
+ if param.span.ctxt() == expr.span.ctxt();
if let Some(snip) = snippet_opt(cx, param.span);
if !snip.starts_with("0o");
then {
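For reference, a hedged sketch of the call shape this lint checks (file name and mode are placeholders); the added syntax-context comparisons keep the lint from rewriting a literal that was produced by a macro expansion:

```rust
// Unix-only illustration: a decimal literal where octal permission bits
// were almost certainly intended.
#[cfg(unix)]
fn make_world_readable() -> std::io::Result<()> {
    use std::fs::{File, Permissions};
    use std::os::unix::fs::PermissionsExt;

    let file = File::create("example.txt")?;
    // Likely flagged: `644` is decimal; the intended mode is `0o644`.
    file.set_permissions(Permissions::from_mode(644))?;
    Ok(())
}
```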
diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
index 87a8a2ed1..e57137356 100644
--- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
@@ -143,6 +143,10 @@ impl ArithmeticSideEffects {
return;
}
let has_valid_op = if Self::is_integral(lhs_ty) && Self::is_integral(rhs_ty) {
+ if let hir::BinOpKind::Shl | hir::BinOpKind::Shr = op.node {
+ // At least for integers, shifts are already handled by CTFE
+ return;
+ }
let (actual_lhs, lhs_ref_counter) = peel_hir_expr_refs(lhs);
let (actual_rhs, rhs_ref_counter) = peel_hir_expr_refs(rhs);
match (
@@ -150,11 +154,22 @@ impl ArithmeticSideEffects {
Self::literal_integer(cx, actual_rhs),
) {
(None, None) => false,
- (None, Some(n)) | (Some(n), None) => match (&op.node, n) {
- (hir::BinOpKind::Div | hir::BinOpKind::Rem, 0) => false,
+ (None, Some(n)) => match (&op.node, n) {
+ // Division and modulo are always valid when the divisor is a non-zero integer
+ (hir::BinOpKind::Div | hir::BinOpKind::Rem, local_n) if local_n != 0 => true,
+ // Adding or subtracting zeros is always a no-op
+ (hir::BinOpKind::Add | hir::BinOpKind::Sub, 0)
+ // Multiplication by 1 or 0 will never overflow
+ | (hir::BinOpKind::Mul, 0 | 1)
+ => true,
+ _ => false,
+ },
+ (Some(n), None) => match (&op.node, n) {
+ // Adding or subtracting zeros is always a no-op
(hir::BinOpKind::Add | hir::BinOpKind::Sub, 0)
- | (hir::BinOpKind::Div | hir::BinOpKind::Rem, _)
- | (hir::BinOpKind::Mul, 0 | 1) => true,
+ // Multiplication by 1 or 0 will never overflow
+ | (hir::BinOpKind::Mul, 0 | 1)
+ => true,
_ => false,
},
(Some(_), Some(_)) => {
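The net effect of the reordered arms is easier to see with concrete operands; the following is only a sketch, assuming the lint is enabled and `a`, `b` are ordinary integers:

```rust
// Assumes #![warn(clippy::arithmetic_side_effects)] at the crate root.
fn classify(a: u32, b: u32) -> u32 {
    let _ = a << b; // shifts are now skipped by this check entirely
    let _ = a / 2;  // non-zero literal divisor on the right: accepted
    let _ = a % 3;  // likewise for the remainder operator
    let _ = a + 0;  // adding or subtracting zero is a no-op: accepted
    let _ = 1 * a;  // multiplying by one or zero: accepted on either side
    a + b           // two non-literal operands: still reported
}
```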
diff --git a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
index c5ea09590..bbbcda069 100644
--- a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
+++ b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
@@ -12,6 +12,7 @@ use rustc_hir::{
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::SyntaxContext;
declare_clippy_lint! {
/// ### What it does
@@ -95,10 +96,10 @@ struct OptionOccurrence {
none_expr: String,
}
-fn format_option_in_sugg(cx: &LateContext<'_>, cond_expr: &Expr<'_>, as_ref: bool, as_mut: bool) -> String {
+fn format_option_in_sugg(cond_sugg: Sugg<'_>, as_ref: bool, as_mut: bool) -> String {
format!(
"{}{}",
- Sugg::hir_with_macro_callsite(cx, cond_expr, "..").maybe_par(),
+ cond_sugg.maybe_par(),
if as_mut {
".as_mut()"
} else if as_ref {
@@ -111,6 +112,7 @@ fn format_option_in_sugg(cx: &LateContext<'_>, cond_expr: &Expr<'_>, as_ref: boo
fn try_get_option_occurrence<'tcx>(
cx: &LateContext<'tcx>,
+ ctxt: SyntaxContext,
pat: &Pat<'tcx>,
expr: &Expr<'_>,
if_then: &'tcx Expr<'_>,
@@ -160,11 +162,23 @@ fn try_get_option_occurrence<'tcx>(
}
}
+ let mut app = Applicability::Unspecified;
return Some(OptionOccurrence {
- option: format_option_in_sugg(cx, cond_expr, as_ref, as_mut),
+ option: format_option_in_sugg(
+ Sugg::hir_with_context(cx, cond_expr, ctxt, "..", &mut app),
+ as_ref,
+ as_mut,
+ ),
method_sugg: method_sugg.to_string(),
- some_expr: format!("|{capture_mut}{capture_name}| {}", Sugg::hir_with_macro_callsite(cx, some_body, "..")),
- none_expr: format!("{}{}", if method_sugg == "map_or" { "" } else { "|| " }, Sugg::hir_with_macro_callsite(cx, none_body, "..")),
+ some_expr: format!(
+ "|{capture_mut}{capture_name}| {}",
+ Sugg::hir_with_context(cx, some_body, ctxt, "..", &mut app),
+ ),
+ none_expr: format!(
+ "{}{}",
+ if method_sugg == "map_or" { "" } else { "|| " },
+ Sugg::hir_with_context(cx, none_body, ctxt, "..", &mut app),
+ ),
});
}
}
@@ -194,7 +208,7 @@ fn detect_option_if_let_else<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) ->
}) = higher::IfLet::hir(cx, expr)
{
if !is_else_clause(cx.tcx, expr) {
- return try_get_option_occurrence(cx, let_pat, let_expr, if_then, if_else);
+ return try_get_option_occurrence(cx, expr.span.ctxt(), let_pat, let_expr, if_then, if_else);
}
}
None
@@ -203,7 +217,7 @@ fn detect_option_if_let_else<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) ->
fn detect_option_match<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) -> Option<OptionOccurrence> {
if let ExprKind::Match(ex, arms, MatchSource::Normal) = expr.kind {
if let Some((let_pat, if_then, if_else)) = try_convert_match(cx, arms) {
- return try_get_option_occurrence(cx, let_pat, ex, if_then, if_else);
+ return try_get_option_occurrence(cx, expr.span.ctxt(), let_pat, ex, if_then, if_else);
}
}
None
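For reference, a small sketch of the rewrite this lint proposes (names are invented). With the switch to `Sugg::hir_with_context`, each piece of the suggestion is now rendered relative to the surrounding expression's syntax context rather than the macro call site:

```rust
// The if-let form the lint reports, next to the `map_or_else` form it
// would likely suggest.
fn describe(opt: Option<u32>) -> String {
    let verbose = if let Some(n) = opt {
        format!("got {n}")
    } else {
        String::from("nothing")
    };

    let suggested = opt.map_or_else(|| String::from("nothing"), |n| format!("got {n}"));

    assert_eq!(verbose, suggested);
    suggested
}
```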
diff --git a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
index 5aa3c6f2f..a8c4823fe 100644
--- a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
@@ -36,7 +36,7 @@ impl<'tcx> LateLintPass<'tcx> for PartialEqNeImpl {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
if_chain! {
if let ItemKind::Impl(Impl { of_trait: Some(ref trait_ref), items: impl_items, .. }) = item.kind;
- if !cx.tcx.has_attr(item.owner_id.to_def_id(), sym::automatically_derived);
+ if !cx.tcx.has_attr(item.owner_id, sym::automatically_derived);
if let Some(eq_trait) = cx.tcx.lang_items().eq_trait();
if trait_ref.path.res.def_id() == eq_trait;
then {
diff --git a/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs b/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs
index e7095ec19..664d44d65 100644
--- a/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs
+++ b/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs
@@ -21,7 +21,7 @@ declare_clippy_lint! {
/// let mut permissions = metadata.permissions();
/// permissions.set_readonly(false);
/// ```
- #[clippy::version = "1.66.0"]
+ #[clippy::version = "1.68.0"]
pub PERMISSIONS_SET_READONLY_FALSE,
suspicious,
"Checks for calls to `std::fs::Permissions.set_readonly` with argument `false`"
diff --git a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
new file mode 100644
index 000000000..a0f831764
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
@@ -0,0 +1,108 @@
+use std::ops::ControlFlow;
+
+use clippy_utils::{
+ diagnostics::span_lint_and_sugg,
+ peel_blocks,
+ source::{snippet, walk_span_to_context},
+ visitors::for_each_expr,
+};
+use rustc_errors::Applicability;
+use rustc_hir::{AsyncGeneratorKind, Closure, Expr, ExprKind, GeneratorKind, MatchSource};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::{lint::in_external_macro, ty::UpvarCapture};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `async` block that only returns `await` on a future.
+ ///
+ /// ### Why is this bad?
+ /// It is simpler and more efficient to use the future directly.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let f = async {
+ /// 1 + 2
+ /// };
+ /// let fut = async {
+ /// f.await
+ /// };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let f = async {
+ /// 1 + 2
+ /// };
+ /// let fut = f;
+ /// ```
+ #[clippy::version = "1.69.0"]
+ pub REDUNDANT_ASYNC_BLOCK,
+ complexity,
+ "`async { future.await }` can be replaced by `future`"
+}
+declare_lint_pass!(RedundantAsyncBlock => [REDUNDANT_ASYNC_BLOCK]);
+
+impl<'tcx> LateLintPass<'tcx> for RedundantAsyncBlock {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let span = expr.span;
+ if !in_external_macro(cx.tcx.sess, span) &&
+ let Some(body_expr) = desugar_async_block(cx, expr) &&
+ let Some(expr) = desugar_await(peel_blocks(body_expr)) &&
+ // The await prefix must not come from a macro as its content could change in the future.
+ expr.span.ctxt() == body_expr.span.ctxt() &&
+ // An async block does not have immediate side-effects from a `.await` point-of-view.
+ (!expr.can_have_side_effects() || desugar_async_block(cx, expr).is_some()) &&
+ let Some(shortened_span) = walk_span_to_context(expr.span, span.ctxt())
+ {
+ span_lint_and_sugg(
+ cx,
+ REDUNDANT_ASYNC_BLOCK,
+ span,
+ "this async expression only awaits a single future",
+ "you can reduce it to",
+ snippet(cx, shortened_span, "..").into_owned(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
+/// If `expr` is a desugared `async` block, return the original expression if it does not capture
+/// any variable by ref.
+fn desugar_async_block<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if let ExprKind::Closure(Closure { body, def_id, .. }) = expr.kind &&
+ let body = cx.tcx.hir().body(*body) &&
+ matches!(body.generator_kind, Some(GeneratorKind::Async(AsyncGeneratorKind::Block)))
+ {
+ cx
+ .typeck_results()
+ .closure_min_captures
+ .get(def_id)
+ .map_or(true, |m| {
+ m.values().all(|places| {
+ places
+ .iter()
+ .all(|place| matches!(place.info.capture_kind, UpvarCapture::ByValue))
+ })
+ })
+ .then_some(body.value)
+ } else {
+ None
+ }
+}
+
+/// If `expr` is a desugared `.await`, return the original expression if it does not come from a
+/// macro expansion.
+fn desugar_await<'tcx>(expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if let ExprKind::Match(match_value, _, MatchSource::AwaitDesugar) = expr.kind &&
+ let ExprKind::Call(_, [into_future_arg]) = match_value.kind &&
+ let ctxt = expr.span.ctxt() &&
+ for_each_expr(into_future_arg, |e|
+ walk_span_to_context(e.span, ctxt)
+ .map_or(ControlFlow::Break(()), |_| ControlFlow::Continue(()))).is_none()
+ {
+ Some(into_future_arg)
+ } else {
+ None
+ }
+}
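A minimal, self-contained illustration of what the new lint reports (function names are placeholders); the side-effect and syntax-context checks above are there so that only a plain `future.await` wrapper gets flagged:

```rust
async fn produce() -> u32 {
    1 + 2
}

async fn consume() -> u32 {
    let fut = produce();
    // Reported: the async block only awaits `fut`, so `fut` can be used directly.
    let wrapped = async { fut.await };
    wrapped.await
}
```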
diff --git a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
index 44bf824aa..038dfe8e4 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::snippet;
-use rustc_ast::ast::{Item, ItemKind, Ty, TyKind};
+use rustc_ast::ast::{ConstItem, Item, ItemKind, StaticItem, Ty, TyKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
@@ -100,13 +100,13 @@ impl EarlyLintPass for RedundantStaticLifetimes {
}
if !item.span.from_expansion() {
- if let ItemKind::Const(_, ref var_type, _) = item.kind {
+ if let ItemKind::Const(box ConstItem { ty: ref var_type, .. }) = item.kind {
Self::visit_type(var_type, cx, "constants have by default a `'static` lifetime");
// Don't check associated consts because `'static` cannot be elided on those (issue
// #2438)
}
- if let ItemKind::Static(ref var_type, _, _) = item.kind {
+ if let ItemKind::Static(box StaticItem { ty: ref var_type, .. }) = item.kind {
Self::visit_type(var_type, cx, "statics have by default a `'static` lifetime");
}
}
diff --git a/src/tools/clippy/clippy_lints/src/ref_option_ref.rs b/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
index 448a32b77..c984a8286 100644
--- a/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
@@ -3,7 +3,7 @@ use clippy_utils::last_path_segment;
use clippy_utils::source::snippet;
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{GenericArg, Mutability, Ty, TyKind};
+use rustc_hir::{GenericArg, GenericArgsParentheses, Mutability, Ty, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::sym;
@@ -47,7 +47,7 @@ impl<'tcx> LateLintPass<'tcx> for RefOptionRef {
if cx.tcx.is_diagnostic_item(sym::Option, def_id);
if let Some(params) = last_path_segment(qpath).args ;
- if !params.parenthesized;
+ if params.parenthesized == GenericArgsParentheses::No;
if let Some(inner_ty) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(inner_ty) => Some(inner_ty),
_ => None,
diff --git a/src/tools/clippy/clippy_lints/src/returns.rs b/src/tools/clippy/clippy_lints/src/returns.rs
index f0d7dd23a..df126d761 100644
--- a/src/tools/clippy/clippy_lints/src/returns.rs
+++ b/src/tools/clippy/clippy_lints/src/returns.rs
@@ -9,7 +9,7 @@ use rustc_hir::intravisit::FnKind;
use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, LangItem, MatchSource, PatKind, QPath, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_middle::ty::subst::GenericArgKind;
+use rustc_middle::ty::{self, subst::GenericArgKind, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::def_id::LocalDefId;
use rustc_span::source_map::Span;
@@ -175,7 +175,7 @@ impl<'tcx> LateLintPass<'tcx> for Return {
} else {
RetReplacement::Empty
};
- check_final_expr(cx, body.value, vec![], replacement);
+ check_final_expr(cx, body.value, vec![], replacement, None);
},
FnKind::ItemFn(..) | FnKind::Method(..) => {
check_block_return(cx, &body.value.kind, sp, vec![]);
@@ -188,11 +188,11 @@ impl<'tcx> LateLintPass<'tcx> for Return {
fn check_block_return<'tcx>(cx: &LateContext<'tcx>, expr_kind: &ExprKind<'tcx>, sp: Span, mut semi_spans: Vec<Span>) {
if let ExprKind::Block(block, _) = expr_kind {
if let Some(block_expr) = block.expr {
- check_final_expr(cx, block_expr, semi_spans, RetReplacement::Empty);
+ check_final_expr(cx, block_expr, semi_spans, RetReplacement::Empty, None);
} else if let Some(stmt) = block.stmts.iter().last() {
match stmt.kind {
StmtKind::Expr(expr) => {
- check_final_expr(cx, expr, semi_spans, RetReplacement::Empty);
+ check_final_expr(cx, expr, semi_spans, RetReplacement::Empty, None);
},
StmtKind::Semi(semi_expr) => {
// Remove ending semicolons and any whitespace ' ' in between.
@@ -202,7 +202,7 @@ fn check_block_return<'tcx>(cx: &LateContext<'tcx>, expr_kind: &ExprKind<'tcx>,
span_find_starting_semi(cx.sess().source_map(), semi_span.with_hi(sp.hi()));
semi_spans.push(semi_span_to_remove);
}
- check_final_expr(cx, semi_expr, semi_spans, RetReplacement::Empty);
+ check_final_expr(cx, semi_expr, semi_spans, RetReplacement::Empty, None);
},
_ => (),
}
@@ -216,6 +216,7 @@ fn check_final_expr<'tcx>(
semi_spans: Vec<Span>, /* containing all the places where we would need to remove semicolons if finding an
* needless return */
replacement: RetReplacement<'tcx>,
+ match_ty_opt: Option<Ty<'_>>,
) {
let peeled_drop_expr = expr.peel_drop_temps();
match &peeled_drop_expr.kind {
@@ -244,7 +245,22 @@ fn check_final_expr<'tcx>(
RetReplacement::Expr(snippet, applicability)
}
} else {
- replacement
+ match match_ty_opt {
+ Some(match_ty) => {
+ match match_ty.kind() {
+ // If the code reaches this point without a
+ // tuple having been detected earlier, we can
+ // be sure the match evaluates to the unit
+ // type
+ ty::Tuple(_) => RetReplacement::Unit,
+ // We don't want to do anything in this case
+ // because we can't predict what the user
+ // would want here
+ _ => return,
+ }
+ },
+ None => replacement,
+ }
};
if !cx.tcx.hir().attrs(expr.hir_id).is_empty() {
@@ -268,8 +284,9 @@ fn check_final_expr<'tcx>(
// note, if without else is going to be a type checking error anyways
// (except for unit type functions) so we don't match it
ExprKind::Match(_, arms, MatchSource::Normal) => {
+ let match_ty = cx.typeck_results().expr_ty(peeled_drop_expr);
for arm in arms.iter() {
- check_final_expr(cx, arm.body, semi_spans.clone(), RetReplacement::Unit);
+ check_final_expr(cx, arm.body, semi_spans.clone(), RetReplacement::Unit, Some(match_ty));
}
},
// if it's a whole block, check it
@@ -293,6 +310,7 @@ fn emit_return_lint(cx: &LateContext<'_>, ret_span: Span, semi_spans: Vec<Span>,
if ret_span.from_expansion() {
return;
}
+
let applicability = replacement.applicability().unwrap_or(Applicability::MachineApplicable);
let return_replacement = replacement.to_string();
let sugg_help = replacement.sugg_help();
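The new `match_ty_opt` argument matters for a bare `return` that forms a match arm; a hedged example of the unit-typed case, assuming `clippy::needless_return` is active:

```rust
// The match evaluates to `()`, so the bare `return` in the first arm is
// redundant and would likely be suggested as `()` (or removed).
fn log_parity(n: i32) {
    match n % 2 {
        0 => return,
        _ => println!("odd"),
    }
}
```

When the match itself has a non-unit type, the code now bails out instead of guessing a replacement.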
diff --git a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
index 66638eed9..355f907e2 100644
--- a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
+++ b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
@@ -1,7 +1,6 @@
use crate::rustc_lint::LintContext;
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::source::snippet_with_macro_callsite;
-use clippy_utils::sugg;
+use clippy_utils::source::snippet_with_context;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Block, ExprKind};
@@ -44,7 +43,8 @@ impl<'tcx> LateLintPass<'tcx> for SemicolonIfNothingReturned {
if let Some(expr) = block.expr;
let t_expr = cx.typeck_results().expr_ty(expr);
if t_expr.is_unit();
- if let snippet = snippet_with_macro_callsite(cx, expr.span, "}");
+ let mut app = Applicability::MaybeIncorrect;
+ if let snippet = snippet_with_context(cx, expr.span, block.span.ctxt(), "}", &mut app).0;
if !snippet.ends_with('}') && !snippet.ends_with(';');
if cx.sess().source_map().is_multiline(block.span);
then {
@@ -52,17 +52,14 @@ impl<'tcx> LateLintPass<'tcx> for SemicolonIfNothingReturned {
if let ExprKind::DropTemps(..) = &expr.kind {
return;
}
-
- let sugg = sugg::Sugg::hir_with_macro_callsite(cx, expr, "..");
- let suggestion = format!("{sugg};");
span_lint_and_sugg(
cx,
SEMICOLON_IF_NOTHING_RETURNED,
expr.span.source_callsite(),
"consider adding a `;` to the last statement for consistent formatting",
"add a `;` here",
- suggestion,
- Applicability::MaybeIncorrect,
+ format!("{snippet};"),
+ app,
);
}
}
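As a quick sketch of the formatting this lint asks for (the helper name is invented); the suggestion is now built from `snippet_with_context`, so its applicability follows how the snippet was obtained instead of being hard-coded:

```rust
fn tick() {}

fn demo(flag: bool) {
    if flag {
        println!("multi-line block whose last expression is `()`");
        tick() // likely reported: add a `;` here for consistent formatting
    }
}
```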
diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs
index 87f966ced..ae7d19624 100644
--- a/src/tools/clippy/clippy_lints/src/shadow.rs
+++ b/src/tools/clippy/clippy_lints/src/shadow.rs
@@ -213,8 +213,7 @@ fn is_self_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, mut expr: &Expr<'_>, hir_
}
loop {
expr = match expr.kind {
- ExprKind::Box(e)
- | ExprKind::AddrOf(_, _, e)
+ ExprKind::AddrOf(_, _, e)
| ExprKind::Block(
&Block {
stmts: [],
diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
index c3e99aa00..869358fb1 100644
--- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
+++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
@@ -404,7 +404,6 @@ impl<'cx, 'sdt, 'tcx> Visitor<'tcx> for SigDropFinder<'cx, 'sdt, 'tcx> {
| hir::ExprKind::Assign(..)
| hir::ExprKind::AssignOp(..)
| hir::ExprKind::Binary(..)
- | hir::ExprKind::Box(..)
| hir::ExprKind::Call(..)
| hir::ExprKind::Field(..)
| hir::ExprKind::If(..)
diff --git a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
index d46f6a635..5743dd21c 100644
--- a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
@@ -1,6 +1,7 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
use rustc_ast::node_id::{NodeId, NodeMap};
-use rustc_ast::{ptr::P, Crate, Item, ItemKind, MacroDef, ModKind, UseTreeKind};
+use rustc_ast::visit::{walk_expr, Visitor};
+use rustc_ast::{ptr::P, Crate, Expr, ExprKind, Item, ItemKind, MacroDef, ModKind, Ty, TyKind, UseTreeKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_session::{declare_tool_lint, impl_lint_pass};
@@ -55,7 +56,7 @@ impl EarlyLintPass for SingleComponentPathImports {
return;
}
- self.check_mod(cx, &krate.items);
+ self.check_mod(&krate.items);
}
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
@@ -84,8 +85,43 @@ impl EarlyLintPass for SingleComponentPathImports {
}
}
+#[derive(Default)]
+struct ImportUsageVisitor {
+ // keep track of imports reused with `self` keyword, such as `self::std` in the example below.
+ // Removing the `use std;` would make this a compile error (#10549)
+ // ```
+ // use std;
+ //
+ // fn main() {
+ // let _ = self::std::io::stdout();
+ // }
+ // ```
+ imports_referenced_with_self: Vec<Symbol>,
+}
+
+impl<'tcx> Visitor<'tcx> for ImportUsageVisitor {
+ fn visit_expr(&mut self, expr: &Expr) {
+ if let ExprKind::Path(_, path) = &expr.kind
+ && path.segments.len() > 1
+ && path.segments[0].ident.name == kw::SelfLower
+ {
+ self.imports_referenced_with_self.push(path.segments[1].ident.name);
+ }
+ walk_expr(self, expr);
+ }
+
+ fn visit_ty(&mut self, ty: &Ty) {
+ if let TyKind::Path(_, path) = &ty.kind
+ && path.segments.len() > 1
+ && path.segments[0].ident.name == kw::SelfLower
+ {
+ self.imports_referenced_with_self.push(path.segments[1].ident.name);
+ }
+ }
+}
+
impl SingleComponentPathImports {
- fn check_mod(&mut self, cx: &EarlyContext<'_>, items: &[P<Item>]) {
+ fn check_mod(&mut self, items: &[P<Item>]) {
// keep track of imports reused with `self` keyword, such as `self::crypto_hash` in the example
// below. Removing the `use crypto_hash;` would make this a compile error
// ```
@@ -108,18 +144,16 @@ impl SingleComponentPathImports {
// ```
let mut macros = Vec::new();
+ let mut import_usage_visitor = ImportUsageVisitor::default();
for item in items {
- self.track_uses(
- cx,
- item,
- &mut imports_reused_with_self,
- &mut single_use_usages,
- &mut macros,
- );
+ self.track_uses(item, &mut imports_reused_with_self, &mut single_use_usages, &mut macros);
+ import_usage_visitor.visit_item(item);
}
for usage in single_use_usages {
- if !imports_reused_with_self.contains(&usage.name) {
+ if !imports_reused_with_self.contains(&usage.name)
+ && !import_usage_visitor.imports_referenced_with_self.contains(&usage.name)
+ {
self.found.entry(usage.item_id).or_default().push(usage);
}
}
@@ -127,7 +161,6 @@ impl SingleComponentPathImports {
fn track_uses(
&mut self,
- cx: &EarlyContext<'_>,
item: &Item,
imports_reused_with_self: &mut Vec<Symbol>,
single_use_usages: &mut Vec<SingleUse>,
@@ -139,7 +172,7 @@ impl SingleComponentPathImports {
match &item.kind {
ItemKind::Mod(_, ModKind::Loaded(ref items, ..)) => {
- self.check_mod(cx, items);
+ self.check_mod(items);
},
ItemKind::MacroDef(MacroDef { macro_rules: true, .. }) => {
macros.push(item.ident.name);
diff --git a/src/tools/clippy/clippy_lints/src/size_of_ref.rs b/src/tools/clippy/clippy_lints/src/size_of_ref.rs
index 3fcdb4288..8abec06c6 100644
--- a/src/tools/clippy/clippy_lints/src/size_of_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/size_of_ref.rs
@@ -45,7 +45,7 @@ declare_clippy_lint! {
/// }
/// }
/// ```
- #[clippy::version = "1.67.0"]
+ #[clippy::version = "1.68.0"]
pub SIZE_OF_REF,
suspicious,
"Argument to `std::mem::size_of_val()` is a double-reference, which is almost certainly unintended"
diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
index d6b336bef..a13bc7a51 100644
--- a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
+++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
@@ -2,7 +2,6 @@ use clippy_utils::diagnostics::span_lint_and_help;
use rustc_hir::def_id::DefId;
use rustc_hir::{def::Res, HirId, Path, PathSegment};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::DefIdTree;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, symbol::kw, Span};
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
new file mode 100644
index 000000000..e5746ca99
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
@@ -0,0 +1,94 @@
+use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_then};
+use if_chain::if_chain;
+use rustc_ast::{token::CommentKind, AttrKind, AttrStyle, Attribute, Item};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects the use of outer doc comments (`///`, `/**`) followed by a bang (`!`): `///!`
+ ///
+ /// ### Why is this bad?
+ /// Triple-slash comments (known as "outer doc comments") apply to the item that follows them.
+ /// An outer doc comment followed by a bang (i.e. `///!`) has no specific meaning.
+ ///
+ /// The user most likely meant to write an inner doc comment (`//!`, `/*!`), which
+ /// applies to the parent item (i.e. the item that the comment is contained in,
+ /// usually a module or crate).
+ ///
+ /// ### Known problems
+ /// Inner doc comments can only appear before items, so there are certain cases where the suggestion
+ /// made by this lint is not valid code. For example:
+ /// ```rs
+ /// fn foo() {}
+ /// ///!
+ /// fn bar() {}
+ /// ```
+ /// This lint detects the doc comment and suggests changing it to `//!`, but an inner doc comment
+ /// is not valid at that position.
+ ///
+ /// ### Example
+ /// In this example, the doc comment is attached to the *function*, rather than the *module*.
+ /// ```rust
+ /// pub mod util {
+ /// ///! This module contains utility functions.
+ ///
+ /// pub fn dummy() {}
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// pub mod util {
+ /// //! This module contains utility functions.
+ ///
+ /// pub fn dummy() {}
+ /// }
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub SUSPICIOUS_DOC_COMMENTS,
+ suspicious,
+ "suspicious usage of (outer) doc comments"
+}
+declare_lint_pass!(SuspiciousDocComments => [SUSPICIOUS_DOC_COMMENTS]);
+
+const WARNING: &str = "this is an outer doc comment and does not apply to the parent module or crate";
+const HELP: &str = "use an inner doc comment to document the parent module or crate";
+
+impl EarlyLintPass for SuspiciousDocComments {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ let replacements = collect_doc_comment_replacements(&item.attrs);
+
+ if let Some(((lo_span, _), (hi_span, _))) = replacements.first().zip(replacements.last()) {
+ let span = lo_span.to(*hi_span);
+
+ span_lint_and_then(cx, SUSPICIOUS_DOC_COMMENTS, span, WARNING, |diag| {
+ multispan_sugg_with_applicability(diag, HELP, Applicability::MaybeIncorrect, replacements);
+ });
+ }
+ }
+}
+
+fn collect_doc_comment_replacements(attrs: &[Attribute]) -> Vec<(Span, String)> {
+ attrs
+ .iter()
+ .filter_map(|attr| {
+ if_chain! {
+ if let AttrKind::DocComment(com_kind, sym) = attr.kind;
+ if let AttrStyle::Outer = attr.style;
+ if let Some(com) = sym.as_str().strip_prefix('!');
+ then {
+ let sugg = match com_kind {
+ CommentKind::Line => format!("//!{com}"),
+ CommentKind::Block => format!("/*!{com}*/")
+ };
+ Some((attr.span, sugg))
+ } else {
+ None
+ }
+ }
+ })
+ .collect()
+}
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
index e111c7d22..fab8e9c2e 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
@@ -578,7 +578,7 @@ fn ident_difference_expr_with_base_location(
| (Assign(_, _, _), Assign(_, _, _))
| (TryBlock(_), TryBlock(_))
| (Await(_), Await(_))
- | (Async(_, _, _), Async(_, _, _))
+ | (Async(_, _), Async(_, _))
| (Block(_, _), Block(_, _))
| (Closure(_), Closure(_))
| (Match(_, _), Match(_, _))
@@ -596,8 +596,7 @@ fn ident_difference_expr_with_base_location(
| (MethodCall(_), MethodCall(_))
| (Call(_, _), Call(_, _))
| (ConstBlock(_), ConstBlock(_))
- | (Array(_), Array(_))
- | (Box(_), Box(_)) => {
+ | (Array(_), Array(_)) => {
// keep going
},
_ => {
diff --git a/src/tools/clippy/clippy_lints/src/swap.rs b/src/tools/clippy/clippy_lints/src/swap.rs
index 0f062cecf..f7eef03d1 100644
--- a/src/tools/clippy/clippy_lints/src/swap.rs
+++ b/src/tools/clippy/clippy_lints/src/swap.rs
@@ -1,15 +1,17 @@
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
-use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{can_mut_borrow_both, eq_expr_value, in_constant, std_or_core};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, PatKind, QPath, Stmt, StmtKind};
-use rustc_lint::{LateContext, LateLintPass};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Spanned;
+use rustc_span::SyntaxContext;
use rustc_span::{sym, symbol::Ident, Span};
declare_clippy_lint! {
@@ -80,43 +82,45 @@ impl<'tcx> LateLintPass<'tcx> for Swap {
}
fn generate_swap_warning(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>, span: Span, is_xor_based: bool) {
+ let ctxt = span.ctxt();
let mut applicability = Applicability::MachineApplicable;
if !can_mut_borrow_both(cx, e1, e2) {
- if let ExprKind::Index(lhs1, idx1) = e1.kind {
- if let ExprKind::Index(lhs2, idx2) = e2.kind {
- if eq_expr_value(cx, lhs1, lhs2) {
- let ty = cx.typeck_results().expr_ty(lhs1).peel_refs();
+ if let ExprKind::Index(lhs1, idx1) = e1.kind
+ && let ExprKind::Index(lhs2, idx2) = e2.kind
+ && eq_expr_value(cx, lhs1, lhs2)
+ && e1.span.ctxt() == ctxt
+ && e2.span.ctxt() == ctxt
+ {
+ let ty = cx.typeck_results().expr_ty(lhs1).peel_refs();
- if matches!(ty.kind(), ty::Slice(_))
- || matches!(ty.kind(), ty::Array(_, _))
- || is_type_diagnostic_item(cx, ty, sym::Vec)
- || is_type_diagnostic_item(cx, ty, sym::VecDeque)
- {
- let slice = Sugg::hir_with_applicability(cx, lhs1, "<slice>", &mut applicability);
- span_lint_and_sugg(
- cx,
- MANUAL_SWAP,
- span,
- &format!("this looks like you are swapping elements of `{slice}` manually"),
- "try",
- format!(
- "{}.swap({}, {})",
- slice.maybe_par(),
- snippet_with_applicability(cx, idx1.span, "..", &mut applicability),
- snippet_with_applicability(cx, idx2.span, "..", &mut applicability),
- ),
- applicability,
- );
- }
- }
+ if matches!(ty.kind(), ty::Slice(_))
+ || matches!(ty.kind(), ty::Array(_, _))
+ || is_type_diagnostic_item(cx, ty, sym::Vec)
+ || is_type_diagnostic_item(cx, ty, sym::VecDeque)
+ {
+ let slice = Sugg::hir_with_applicability(cx, lhs1, "<slice>", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ MANUAL_SWAP,
+ span,
+ &format!("this looks like you are swapping elements of `{slice}` manually"),
+ "try",
+ format!(
+ "{}.swap({}, {});",
+ slice.maybe_par(),
+ snippet_with_context(cx, idx1.span, ctxt, "..", &mut applicability).0,
+ snippet_with_context(cx, idx2.span, ctxt, "..", &mut applicability).0,
+ ),
+ applicability,
+ );
}
}
return;
}
- let first = Sugg::hir_with_applicability(cx, e1, "..", &mut applicability);
- let second = Sugg::hir_with_applicability(cx, e2, "..", &mut applicability);
+ let first = Sugg::hir_with_context(cx, e1, ctxt, "..", &mut applicability);
+ let second = Sugg::hir_with_context(cx, e2, ctxt, "..", &mut applicability);
let Some(sugg) = std_or_core(cx) else { return };
span_lint_and_then(
@@ -128,7 +132,7 @@ fn generate_swap_warning(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>, spa
diag.span_suggestion(
span,
"try",
- format!("{sugg}::mem::swap({}, {})", first.mut_addr(), second.mut_addr()),
+ format!("{sugg}::mem::swap({}, {});", first.mut_addr(), second.mut_addr()),
applicability,
);
if !is_xor_based {
@@ -144,19 +148,19 @@ fn check_manual_swap(cx: &LateContext<'_>, block: &Block<'_>) {
return;
}
- for w in block.stmts.windows(3) {
+ for [s1, s2, s3] in block.stmts.array_windows::<3>() {
if_chain! {
// let t = foo();
- if let StmtKind::Local(tmp) = w[0].kind;
+ if let StmtKind::Local(tmp) = s1.kind;
if let Some(tmp_init) = tmp.init;
if let PatKind::Binding(.., ident, None) = tmp.pat.kind;
// foo() = bar();
- if let StmtKind::Semi(first) = w[1].kind;
+ if let StmtKind::Semi(first) = s2.kind;
if let ExprKind::Assign(lhs1, rhs1, _) = first.kind;
// bar() = t;
- if let StmtKind::Semi(second) = w[2].kind;
+ if let StmtKind::Semi(second) = s3.kind;
if let ExprKind::Assign(lhs2, rhs2, _) = second.kind;
if let ExprKind::Path(QPath::Resolved(None, rhs2)) = rhs2.kind;
if rhs2.segments.len() == 1;
@@ -164,8 +168,15 @@ fn check_manual_swap(cx: &LateContext<'_>, block: &Block<'_>) {
if ident.name == rhs2.segments[0].ident.name;
if eq_expr_value(cx, tmp_init, lhs1);
if eq_expr_value(cx, rhs1, lhs2);
+
+ let ctxt = s1.span.ctxt();
+ if s2.span.ctxt() == ctxt;
+ if s3.span.ctxt() == ctxt;
+ if first.span.ctxt() == ctxt;
+ if second.span.ctxt() == ctxt;
+
then {
- let span = w[0].span.to(second.span);
+ let span = s1.span.to(s3.span);
generate_swap_warning(cx, lhs1, lhs2, span, false);
}
}
@@ -178,8 +189,10 @@ fn check_suspicious_swap(cx: &LateContext<'_>, block: &Block<'_>) {
if let Some((lhs0, rhs0)) = parse(first)
&& let Some((lhs1, rhs1)) = parse(second)
&& first.span.eq_ctxt(second.span)
+ && !in_external_macro(cx.sess(), first.span)
&& is_same(cx, lhs0, rhs1)
&& is_same(cx, lhs1, rhs0)
+ && !is_same(cx, lhs1, rhs1) // Ignore a = b; a = a (#10421)
&& let Some(lhs_sugg) = match &lhs0 {
ExprOrIdent::Expr(expr) => Sugg::hir_opt(cx, expr),
ExprOrIdent::Ident(ident) => Some(Sugg::NonParen(ident.as_str().into())),
@@ -246,17 +259,20 @@ fn parse<'a, 'hir>(stmt: &'a Stmt<'hir>) -> Option<(ExprOrIdent<'hir>, &'a Expr<
/// Implementation of the xor case for `MANUAL_SWAP` lint.
fn check_xor_swap(cx: &LateContext<'_>, block: &Block<'_>) {
- for window in block.stmts.windows(3) {
+ for [s1, s2, s3] in block.stmts.array_windows::<3>() {
+ let ctxt = s1.span.ctxt();
if_chain! {
- if let Some((lhs0, rhs0)) = extract_sides_of_xor_assign(&window[0]);
- if let Some((lhs1, rhs1)) = extract_sides_of_xor_assign(&window[1]);
- if let Some((lhs2, rhs2)) = extract_sides_of_xor_assign(&window[2]);
+ if let Some((lhs0, rhs0)) = extract_sides_of_xor_assign(s1, ctxt);
+ if let Some((lhs1, rhs1)) = extract_sides_of_xor_assign(s2, ctxt);
+ if let Some((lhs2, rhs2)) = extract_sides_of_xor_assign(s3, ctxt);
if eq_expr_value(cx, lhs0, rhs1);
if eq_expr_value(cx, lhs2, rhs1);
if eq_expr_value(cx, lhs1, rhs0);
if eq_expr_value(cx, lhs1, rhs2);
+ if s2.span.ctxt() == ctxt;
+ if s3.span.ctxt() == ctxt;
then {
- let span = window[0].span.to(window[2].span);
+ let span = s1.span.to(s3.span);
generate_swap_warning(cx, lhs0, rhs0, span, true);
}
};
@@ -264,9 +280,12 @@ fn check_xor_swap(cx: &LateContext<'_>, block: &Block<'_>) {
}
/// Returns the lhs and rhs of an xor assignment statement.
-fn extract_sides_of_xor_assign<'a, 'hir>(stmt: &'a Stmt<'hir>) -> Option<(&'a Expr<'hir>, &'a Expr<'hir>)> {
- if let StmtKind::Semi(expr) = stmt.kind {
- if let ExprKind::AssignOp(
+fn extract_sides_of_xor_assign<'a, 'hir>(
+ stmt: &'a Stmt<'hir>,
+ ctxt: SyntaxContext,
+) -> Option<(&'a Expr<'hir>, &'a Expr<'hir>)> {
+ if let StmtKind::Semi(expr) = stmt.kind
+ && let ExprKind::AssignOp(
Spanned {
node: BinOpKind::BitXor,
..
@@ -274,9 +293,10 @@ fn extract_sides_of_xor_assign<'a, 'hir>(stmt: &'a Stmt<'hir>) -> Option<(&'a Ex
lhs,
rhs,
) = expr.kind
- {
- return Some((lhs, rhs));
- }
+ && expr.span.ctxt() == ctxt
+ {
+ Some((lhs, rhs))
+ } else {
+ None
}
- None
}
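A compact sketch of the two patterns `manual_swap` rewrites (variable names are arbitrary); note that the suggestions above now end with a semicolon so they can stand in for whole statements:

```rust
fn demo(mut a: i32, mut b: i32, v: &mut [i32; 2]) {
    // Temporary-variable swap; expected suggestion: `std::mem::swap(&mut a, &mut b);`
    let t = a;
    a = b;
    b = t;

    // Index-based swap of two elements; expected suggestion: `v.swap(0, 1);`
    let t = v[0];
    v[0] = v[1];
    v[1] = t;

    let _ = (a, b);
}
```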
diff --git a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
new file mode 100644
index 000000000..0a0a77082
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
@@ -0,0 +1,71 @@
+use clippy_utils::{diagnostics::span_lint_and_note, is_in_cfg_test, is_in_test_function};
+use rustc_hir::{intravisit::FnKind, Body, FnDecl};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{def_id::LocalDefId, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Triggers when a testing function (marked with the `#[test]` attribute) isn't inside a testing module
+ /// (marked with `#[cfg(test)]`).
+ /// ### Why is this bad?
+ /// The idiomatic (and more performant) way of writing tests is inside a testing module (marked with `#[cfg(test)]`);
+ /// having test functions outside of this module is confusing and may lead to them being "hidden".
+ /// ### Example
+ /// ```rust
+ /// #[test]
+ /// fn my_cool_test() {
+ /// // [...]
+ /// }
+ ///
+ /// #[cfg(test)]
+ /// mod tests {
+ /// // [...]
+ /// }
+ ///
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[cfg(test)]
+ /// mod tests {
+ /// #[test]
+ /// fn my_cool_test() {
+ /// // [...]
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub TESTS_OUTSIDE_TEST_MODULE,
+ restriction,
+ "A test function is outside the testing module."
+}
+
+declare_lint_pass!(TestsOutsideTestModule => [TESTS_OUTSIDE_TEST_MODULE]);
+
+impl LateLintPass<'_> for TestsOutsideTestModule {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'_>,
+ kind: FnKind<'_>,
+ _: &FnDecl<'_>,
+ body: &Body<'_>,
+ sp: Span,
+ _: LocalDefId,
+ ) {
+ if_chain! {
+ if !matches!(kind, FnKind::Closure);
+ if is_in_test_function(cx.tcx, body.id().hir_id);
+ if !is_in_cfg_test(cx.tcx, body.id().hir_id);
+ then {
+ span_lint_and_note(
+ cx,
+ TESTS_OUTSIDE_TEST_MODULE,
+ sp,
+ "this function marked with #[test] is outside a #[cfg(test)] module",
+ None,
+ "move it to a testing module marked with #[cfg(test)]",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/mod.rs b/src/tools/clippy/clippy_lints/src/transmute/mod.rs
index c01cbe509..0dc30f7a9 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/mod.rs
@@ -458,7 +458,7 @@ declare_clippy_lint! {
/// ```rust
/// let null_fn: Option<fn()> = None;
/// ```
- #[clippy::version = "1.67.0"]
+ #[clippy::version = "1.68.0"]
pub TRANSMUTE_NULL_TO_FN,
correctness,
"transmute results in a null function pointer, which is undefined behavior"
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
index 8530b4324..85cd74f23 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
@@ -2,8 +2,9 @@ use super::utils::check_cast;
use super::TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::sugg::Sugg;
+use rustc_ast::ExprPrecedence;
use rustc_errors::Applicability;
-use rustc_hir::Expr;
+use rustc_hir::{Expr, Node};
use rustc_lint::LateContext;
use rustc_middle::ty::{cast::CastKind, Ty};
@@ -19,7 +20,7 @@ pub(super) fn check<'tcx>(
) -> bool {
use CastKind::{AddrPtrCast, ArrayPtrCast, FnPtrAddrCast, FnPtrPtrCast, PtrAddrCast, PtrPtrCast};
let mut app = Applicability::MachineApplicable;
- let sugg = match check_cast(cx, e, from_ty, to_ty) {
+ let mut sugg = match check_cast(cx, e, from_ty, to_ty) {
Some(PtrPtrCast | AddrPtrCast | ArrayPtrCast | FnPtrPtrCast | FnPtrAddrCast) => {
Sugg::hir_with_context(cx, arg, e.span.ctxt(), "..", &mut app)
.as_ty(to_ty.to_string())
@@ -39,6 +40,12 @@ pub(super) fn check<'tcx>(
_ => return false,
};
+ if let Node::Expr(parent) = cx.tcx.hir().get_parent(e.hir_id)
+ && parent.precedence().order() > ExprPrecedence::Cast.order()
+ {
+ sugg = format!("({sugg})");
+ }
+
span_lint_and_sugg(
cx,
TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS,
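The precedence check added above exists because an `as` cast binds more loosely than, say, a method call; a hedged, purely illustrative (and unsafe) example:

```rust
// Replacing the transmute without parentheses would not compile, so the
// suggestion becomes `(ptr as *const u8).read()`.
unsafe fn first_byte(ptr: &u8) -> u8 {
    let via_transmute = std::mem::transmute::<&u8, *const u8>(ptr).read();
    let via_cast = (ptr as *const u8).read();
    debug_assert_eq!(via_transmute, via_cast);
    via_cast
}
```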
diff --git a/src/tools/clippy/clippy_lints/src/transmute/utils.rs b/src/tools/clippy/clippy_lints/src/transmute/utils.rs
index cddaf9450..62efd13b8 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/utils.rs
@@ -33,38 +33,37 @@ pub(super) fn check_cast<'tcx>(
let hir_id = e.hir_id;
let local_def_id = hir_id.owner.def_id;
- Inherited::build(cx.tcx, local_def_id).enter(|inherited| {
- let fn_ctxt = FnCtxt::new(inherited, cx.param_env, local_def_id);
+ let inherited = Inherited::new(cx.tcx, local_def_id);
+ let fn_ctxt = FnCtxt::new(&inherited, cx.param_env, local_def_id);
- // If we already have errors, we can't be sure we can pointer cast.
+ // If we already have errors, we can't be sure we can pointer cast.
+ assert!(
+ !fn_ctxt.errors_reported_since_creation(),
+ "Newly created FnCtxt contained errors"
+ );
+
+ if let Ok(check) = cast::CastCheck::new(
+ &fn_ctxt,
+ e,
+ from_ty,
+ to_ty,
+ // We won't show any error to the user, so we don't care what the span is here.
+ DUMMY_SP,
+ DUMMY_SP,
+ hir::Constness::NotConst,
+ ) {
+ let res = check.do_check(&fn_ctxt);
+
+ // do_check's documentation says that it might return Ok and create
+ // errors in the fcx instead of returning Err in some cases. Those cases
+ // should be filtered out before getting here.
assert!(
!fn_ctxt.errors_reported_since_creation(),
- "Newly created FnCtxt contained errors"
+ "`fn_ctxt` contained errors after cast check!"
);
- if let Ok(check) = cast::CastCheck::new(
- &fn_ctxt,
- e,
- from_ty,
- to_ty,
- // We won't show any error to the user, so we don't care what the span is here.
- DUMMY_SP,
- DUMMY_SP,
- hir::Constness::NotConst,
- ) {
- let res = check.do_check(&fn_ctxt);
-
- // do_check's documentation says that it might return Ok and create
- // errors in the fcx instead of returning Err in some cases. Those cases
- // should be filtered out before getting here.
- assert!(
- !fn_ctxt.errors_reported_since_creation(),
- "`fn_ctxt` contained errors after cast check!"
- );
-
- res.ok()
- } else {
- None
- }
- })
+ res.ok()
+ } else {
+ None
+ }
}
diff --git a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
index 65dfe7637..acdf54710 100644
--- a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
+++ b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
@@ -20,7 +20,7 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, lt: &Lifetime, m
if let QPath::Resolved(None, path) = *qpath;
if let [ref bx] = *path.segments;
if let Some(params) = bx.args;
- if !params.parenthesized;
+ if params.parenthesized == hir::GenericArgsParentheses::No;
if let Some(inner) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
diff --git a/src/tools/clippy/clippy_lints/src/types/utils.rs b/src/tools/clippy/clippy_lints/src/types/utils.rs
index 7f43b7841..a30748db8 100644
--- a/src/tools/clippy/clippy_lints/src/types/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/types/utils.rs
@@ -1,6 +1,6 @@
use clippy_utils::last_path_segment;
use if_chain::if_chain;
-use rustc_hir::{GenericArg, QPath, TyKind};
+use rustc_hir::{GenericArg, GenericArgsParentheses, QPath, TyKind};
use rustc_lint::LateContext;
use rustc_span::source_map::Span;
@@ -8,7 +8,7 @@ pub(super) fn match_borrows_parameter(_cx: &LateContext<'_>, qpath: &QPath<'_>)
let last = last_path_segment(qpath);
if_chain! {
if let Some(params) = last.args;
- if !params.parenthesized;
+ if params.parenthesized == GenericArgsParentheses::No;
if let Some(ty) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
index d6167a621..cc7c2b039 100644
--- a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
@@ -1,11 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::get_parent_node;
-use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::source::snippet_with_context;
use clippy_utils::visitors::{for_each_local_assignment, for_each_value_source};
use core::ops::ControlFlow;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
-use rustc_hir::{Expr, ExprKind, HirId, HirIdSet, Local, Node, PatKind, QPath, TyKind};
+use rustc_hir::{Expr, ExprKind, HirId, HirIdSet, Local, MatchSource, Node, PatKind, QPath, TyKind};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
@@ -41,6 +41,10 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, local: &'tcx Local<'_>) {
);
}
} else {
+ if let ExprKind::Match(_, _, MatchSource::AwaitDesugar) = init.kind {
+ return
+ }
+
span_lint_and_then(
cx,
LET_UNIT_VALUE,
@@ -48,12 +52,13 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, local: &'tcx Local<'_>) {
"this let-binding has unit value",
|diag| {
if let Some(expr) = &local.init {
- let snip = snippet_with_macro_callsite(cx, expr.span, "()");
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_context(cx, expr.span, local.span.ctxt(), "()", &mut app).0;
diag.span_suggestion(
local.span,
"omit the `let` binding",
format!("{snip};"),
- Applicability::MachineApplicable, // snippet
+ app,
);
}
},
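The early return added for `MatchSource::AwaitDesugar` covers bindings of awaited unit futures; a small sketch of the code it now leaves alone (function names invented):

```rust
async fn flush_logs() {}

async fn run() {
    // `.await` desugars to a match tagged `AwaitDesugar`; binding its unit
    // result is intentional here and is no longer reported.
    let _flushed = flush_logs().await;
}
```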
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
new file mode 100644
index 000000000..912bcda63
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
@@ -0,0 +1,120 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use rustc_errors::Applicability;
+use rustc_hir::{def_id::LocalDefId, FnDecl, FnRetTy, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Symbol;
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Checks for a return type containing a `Box<T>` where `T` implements `Sized`
+ ///
+ /// ### Why is this bad?
+ ///
+ /// It's better to just return `T` in these cases. The caller may not need
+ /// the value to be boxed, and it's expensive to free the memory once the
+ /// `Box<T>` has been dropped.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo() -> Box<String> {
+ /// Box::new(String::from("Hello, world!"))
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn foo() -> String {
+ /// String::from("Hello, world!")
+ /// }
+ /// ```
+ #[clippy::version = "1.70.0"]
+ pub UNNECESSARY_BOX_RETURNS,
+ pedantic,
+ "Needlessly returning a Box"
+}
+
+pub struct UnnecessaryBoxReturns {
+ avoid_breaking_exported_api: bool,
+}
+
+impl_lint_pass!(UnnecessaryBoxReturns => [UNNECESSARY_BOX_RETURNS]);
+
+impl UnnecessaryBoxReturns {
+ pub fn new(avoid_breaking_exported_api: bool) -> Self {
+ Self {
+ avoid_breaking_exported_api,
+ }
+ }
+
+ fn check_fn_item(&mut self, cx: &LateContext<'_>, decl: &FnDecl<'_>, def_id: LocalDefId, name: Symbol) {
+ // we don't want to tell someone to break an exported function if they ask us not to
+ if self.avoid_breaking_exported_api && cx.effective_visibilities.is_exported(def_id) {
+ return;
+ }
+
+ // functions which contain the word "box" are exempt from this lint
+ if name.as_str().contains("box") {
+ return;
+ }
+
+ let FnRetTy::Return(return_ty_hir) = &decl.output else { return };
+
+ let return_ty = cx
+ .tcx
+ .erase_late_bound_regions(cx.tcx.fn_sig(def_id).skip_binder())
+ .output();
+
+ if !return_ty.is_box() {
+ return;
+ }
+
+ let boxed_ty = return_ty.boxed_ty();
+
+ // it's sometimes useful to return Box<T> if T is unsized, so don't lint those
+ if boxed_ty.is_sized(cx.tcx, cx.param_env) {
+ span_lint_and_then(
+ cx,
+ UNNECESSARY_BOX_RETURNS,
+ return_ty_hir.span,
+ format!("boxed return of the sized type `{boxed_ty}`").as_str(),
+ |diagnostic| {
+ diagnostic.span_suggestion(
+ return_ty_hir.span,
+ "try",
+ boxed_ty.to_string(),
+ // the return value and function callers also need to
+ // be changed, so this can't be MachineApplicable
+ Applicability::Unspecified,
+ );
+ diagnostic.help("changing this also requires a change to the return expressions in this function");
+ },
+ );
+ }
+ }
+}
+
+impl LateLintPass<'_> for UnnecessaryBoxReturns {
+ fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &TraitItem<'_>) {
+ let TraitItemKind::Fn(signature, _) = &item.kind else { return };
+ self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name);
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'_>, item: &rustc_hir::ImplItem<'_>) {
+ // Ignore implementations of traits, because the lint should be on the
+ // trait, not on the implementation of it.
+ let Node::Item(parent) = cx.tcx.hir().get_parent(item.hir_id()) else { return };
+ let ItemKind::Impl(parent) = parent.kind else { return };
+ if parent.of_trait.is_some() {
+ return;
+ }
+
+ let ImplItemKind::Fn(signature, ..) = &item.kind else { return };
+ self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name);
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ let ItemKind::Fn(signature, ..) = &item.kind else { return };
+ self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
new file mode 100644
index 000000000..084b03198
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
@@ -0,0 +1,88 @@
+use clippy_utils::{diagnostics::span_lint_and_sugg, get_parent_expr, path_to_local, source::snippet, ty::is_copy};
+use rustc_hir::{BindingAnnotation, Expr, ExprKind, Node, PatKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for initialization of a `struct` by copying a base without setting
+ /// any field.
+ ///
+ /// ### Why is this bad?
+ /// Readability suffers from unnecessary struct building.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct S { s: String }
+ ///
+ /// let a = S { s: String::from("Hello, world!") };
+ /// let b = S { ..a };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct S { s: String }
+ ///
+ /// let a = S { s: String::from("Hello, world!") };
+ /// let b = a;
+ /// ```
+ ///
+ /// ### Known Problems
+ /// Has false positives when the base is a place expression that cannot be
+ /// moved out of; see [#10547](https://github.com/rust-lang/rust-clippy/issues/10547).
+ #[clippy::version = "1.70.0"]
+ pub UNNECESSARY_STRUCT_INITIALIZATION,
+ nursery,
+ "struct built from a base that can be written mode concisely"
+}
+declare_lint_pass!(UnnecessaryStruct => [UNNECESSARY_STRUCT_INITIALIZATION]);
+
+impl LateLintPass<'_> for UnnecessaryStruct {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if let ExprKind::Struct(_, &[], Some(base)) = expr.kind {
+ if let Some(parent) = get_parent_expr(cx, expr) &&
+ let parent_ty = cx.typeck_results().expr_ty_adjusted(parent) &&
+ parent_ty.is_any_ptr()
+ {
+ if is_copy(cx, cx.typeck_results().expr_ty(expr)) && path_to_local(base).is_some() {
+ // When the type implements `Copy`, a reference to the new struct works on the
+ // copy. Using the original would borrow it.
+ return;
+ }
+
+ if parent_ty.is_mutable_ptr() && !is_mutable(cx, base) {
+ // The original can be used in a mutable reference context only if it is mutable.
+ return;
+ }
+ }
+
+ // TODO: do not propose to replace *XX if XX is not Copy
+ if let ExprKind::Unary(UnOp::Deref, target) = base.kind &&
+ matches!(target.kind, ExprKind::Path(..)) &&
+ !is_copy(cx, cx.typeck_results().expr_ty(expr))
+ {
+ // `*base` cannot be used instead of the struct in the general case if it is not Copy.
+ return;
+ }
+
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_STRUCT_INITIALIZATION,
+ expr.span,
+ "unnecessary struct building",
+ "replace with",
+ snippet(cx, base.span, "..").into_owned(),
+ rustc_errors::Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
+fn is_mutable(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if let Some(hir_id) = path_to_local(expr) &&
+ let Node::Pat(pat) = cx.tcx.hir().get(hir_id)
+ {
+ matches!(pat.kind, PatKind::Binding(BindingAnnotation::MUT, ..))
+ } else {
+ true
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/use_self.rs b/src/tools/clippy/clippy_lints/src/use_self.rs
index e7c540006..5a0298745 100644
--- a/src/tools/clippy/clippy_lints/src/use_self.rs
+++ b/src/tools/clippy/clippy_lints/src/use_self.rs
@@ -10,8 +10,8 @@ use rustc_hir::{
def::{CtorOf, DefKind, Res},
def_id::LocalDefId,
intravisit::{walk_inf, walk_ty, Visitor},
- Expr, ExprKind, FnRetTy, FnSig, GenericArg, HirId, Impl, ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath,
- TyKind,
+ Expr, ExprKind, FnRetTy, FnSig, GenericArg, GenericArgsParentheses, GenericParam, GenericParamKind, HirId, Impl,
+ ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath, Ty, TyKind,
};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
@@ -96,19 +96,27 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
// avoid linting on nested items, we push `StackItem::NoCheck` on the stack to signal, that
// we're in an `impl` or nested item, that we don't want to lint
let stack_item = if_chain! {
- if let ItemKind::Impl(Impl { self_ty, .. }) = item.kind;
+ if let ItemKind::Impl(Impl { self_ty, generics,.. }) = item.kind;
if let TyKind::Path(QPath::Resolved(_, item_path)) = self_ty.kind;
let parameters = &item_path.segments.last().expect(SEGMENTS_MSG).args;
if parameters.as_ref().map_or(true, |params| {
- !params.parenthesized && !params.args.iter().any(|arg| matches!(arg, GenericArg::Lifetime(_)))
+ params.parenthesized == GenericArgsParentheses::No
+ && !params.args.iter().any(|arg| matches!(arg, GenericArg::Lifetime(_)))
});
if !item.span.from_expansion();
if !is_from_proc_macro(cx, item); // expensive, should be last check
then {
+ // Self cannot be used inside const generic parameters
+ let types_to_skip = generics.params.iter().filter_map(|param| {
+ match param {
+ GenericParam { kind: GenericParamKind::Const { ty: Ty { hir_id, ..}, ..}, ..} => Some(*hir_id),
+ _ => None,
+ }
+ }).chain(std::iter::once(self_ty.hir_id)).collect();
StackItem::Check {
impl_id: item.owner_id.def_id,
in_body: 0,
- types_to_skip: std::iter::once(self_ty.hir_id).collect(),
+ types_to_skip,
}
} else {
StackItem::NoCheck
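
For illustration only (this snippet is not part of the patch): the const generic parameter list is the kind of position the new `types_to_skip` entries come from. As the comment in the hunk notes, `Self` cannot be used inside const generic parameters, while the lint remains free to suggest `Self` elsewhere in the impl.

```rust
struct Buf<const N: usize>([u8; N]);

impl<const N: usize> Buf<N> {
    // `use_self` may still suggest `Self([0; N])` / `-> Self` here ...
    fn zeroed() -> Buf<N> {
        Buf([0; N])
    }
    // ... but the `usize` in `<const N: usize>` above is a type position that
    // `Self` could never replace, so its HirId now lands in `types_to_skip`.
}

fn main() {
    let b: Buf<4> = Buf::zeroed();
    println!("{}", b.0.len());
}
```
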
diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
index fede625f7..ddbe6b2c7 100644
--- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs
+++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
@@ -1,5 +1,5 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
-use clippy_utils::source::{snippet, snippet_with_macro_callsite};
+use clippy_utils::source::{snippet, snippet_with_context};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_copy, is_type_diagnostic_item, same_type_and_consts};
use clippy_utils::{get_parent_expr, is_trait_method, match_def_path, path_to_local, paths};
@@ -68,15 +68,16 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
let a = cx.typeck_results().expr_ty(e);
let b = cx.typeck_results().expr_ty(recv);
if same_type_and_consts(a, b) {
- let sugg = snippet_with_macro_callsite(cx, recv.span, "<expr>").to_string();
+ let mut app = Applicability::MachineApplicable;
+ let sugg = snippet_with_context(cx, recv.span, e.span.ctxt(), "<expr>", &mut app).0;
span_lint_and_sugg(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{b}`"),
"consider removing `.into()`",
- sugg,
- Applicability::MachineApplicable, // snippet
+ sugg.into_owned(),
+ app,
);
}
}
@@ -165,7 +166,8 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
if same_type_and_consts(a, b);
then {
- let sugg = Sugg::hir_with_macro_callsite(cx, arg, "<expr>").maybe_par();
+ let mut app = Applicability::MachineApplicable;
+ let sugg = Sugg::hir_with_context(cx, arg, e.span.ctxt(), "<expr>", &mut app).maybe_par();
let sugg_msg =
format!("consider removing `{}()`", snippet(cx, path.span, "From::from"));
span_lint_and_sugg(
@@ -175,7 +177,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
&format!("useless conversion to the same type: `{b}`"),
&sugg_msg,
sugg.to_string(),
- Applicability::MachineApplicable, // snippet
+ app,
);
}
}
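
As a reminder of what the lint fires on (plain user code, not from the patch), and of why the suggestion now has to respect the expansion context and carry its `Applicability` instead of hard-coding `MachineApplicable`:

```rust
fn main() {
    let s = String::from("hello");
    // `useless_conversion`: `.into()` converts a `String` into a `String`.
    let same: String = s.into();
    println!("{same}");

    // When the receiver comes out of a macro, naively copying the callsite
    // snippet can produce a wrong suggestion; `snippet_with_context` takes the
    // expansion context into account and downgrades the applicability if needed.
    macro_rules! text { () => { String::from("from a macro") }; }
    let also_same: String = text!().into();
    println!("{also_same}");
}
```
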
diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs
index c37e5bb67..bc4adf159 100644
--- a/src/tools/clippy/clippy_lints/src/utils/author.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/author.rs
@@ -395,11 +395,6 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> {
}
self.expr(field!(let_expr.init));
},
- ExprKind::Box(inner) => {
- bind!(self, inner);
- kind!("Box({inner})");
- self.expr(inner);
- },
ExprKind::Array(elements) => {
bind!(self, elements);
kind!("Array({elements})");
@@ -588,7 +583,7 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> {
},
}
},
- ExprKind::Err(_) => kind!("Err"),
+ ExprKind::Err(_) => kind!("Err(_)"),
ExprKind::DropTemps(expr) => {
bind!(self, expr);
kind!("DropTemps({expr})");
diff --git a/src/tools/clippy/clippy_lints/src/utils/conf.rs b/src/tools/clippy/clippy_lints/src/utils/conf.rs
index 1c7f3e96d..896a01af3 100644
--- a/src/tools/clippy/clippy_lints/src/utils/conf.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/conf.rs
@@ -249,7 +249,7 @@ define_Conf! {
/// arithmetic-side-effects-allowed-unary = ["SomeType", "AnotherType"]
/// ```
(arithmetic_side_effects_allowed_unary: rustc_data_structures::fx::FxHashSet<String> = <_>::default()),
- /// Lint: ENUM_VARIANT_NAMES, LARGE_TYPES_PASSED_BY_VALUE, TRIVIALLY_COPY_PASS_BY_REF, UNNECESSARY_WRAPS, UNUSED_SELF, UPPER_CASE_ACRONYMS, WRONG_SELF_CONVENTION, BOX_COLLECTION, REDUNDANT_ALLOCATION, RC_BUFFER, VEC_BOX, OPTION_OPTION, LINKEDLIST, RC_MUTEX.
+ /// Lint: ENUM_VARIANT_NAMES, LARGE_TYPES_PASSED_BY_VALUE, TRIVIALLY_COPY_PASS_BY_REF, UNNECESSARY_WRAPS, UNUSED_SELF, UPPER_CASE_ACRONYMS, WRONG_SELF_CONVENTION, BOX_COLLECTION, REDUNDANT_ALLOCATION, RC_BUFFER, VEC_BOX, OPTION_OPTION, LINKEDLIST, RC_MUTEX, UNNECESSARY_BOX_RETURNS.
///
/// Suppress lints whenever the suggested change would cause breakage for other crates.
(avoid_breaking_exported_api: bool = true),
@@ -275,13 +275,13 @@ define_Conf! {
///
/// The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value
/// `".."` can be used as part of the list to indicate, that the configured values should be appended to the
- /// default configuration of Clippy. By default any configuration will replace the default value.
+ /// default configuration of Clippy. By default, any configuration will replace the default value.
(disallowed_names: Vec<String> = super::DEFAULT_DISALLOWED_NAMES.iter().map(ToString::to_string).collect()),
/// Lint: DOC_MARKDOWN.
///
/// The list of words this lint should not consider as identifiers needing ticks. The value
/// `".."` can be used as part of the list to indicate, that the configured values should be appended to the
- /// default configuration of Clippy. By default any configuraction will replace the default value. For example:
+ /// default configuration of Clippy. By default, any configuration will replace the default value. For example:
/// * `doc-valid-idents = ["ClipPy"]` would replace the default list with `["ClipPy"]`.
/// * `doc-valid-idents = ["ClipPy", ".."]` would append `ClipPy` to the default list.
///
@@ -390,7 +390,7 @@ define_Conf! {
/// Enforce the named macros always use the braces specified.
///
/// A `MacroMatcher` can be added like so `{ name = "macro_name", brace = "(" }`. If the macro
- /// is could be used with a full path two `MacroMatcher`s have to be added one with the full path
+ /// could be used with a full path, two `MacroMatcher`s have to be added: one with the full path
/// `crate_name::macro_name` and one with just the macro name.
(standard_macro_braces: Vec<crate::nonstandard_macro_braces::MacroMatcher> = Vec::new()),
/// Lint: MISSING_ENFORCED_IMPORT_RENAMES.
@@ -408,7 +408,7 @@ define_Conf! {
/// Lint: INDEX_REFUTABLE_SLICE.
///
/// When Clippy suggests using a slice pattern, this is the maximum number of elements allowed in
- /// the slice pattern that is suggested. If more elements would be necessary, the lint is suppressed.
+ /// the slice pattern that is suggested. If more elements are necessary, the lint is suppressed.
/// For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements.
(max_suggested_slice_pattern_length: u64 = 3),
/// Lint: AWAIT_HOLDING_INVALID_TYPE.
@@ -437,7 +437,7 @@ define_Conf! {
///
/// The maximum size of the `Err`-variant in a `Result` returned from a function
(large_error_threshold: u64 = 128),
- /// Lint: MUTABLE_KEY_TYPE.
+ /// Lint: MUTABLE_KEY_TYPE, IFS_SAME_COND.
///
/// A list of paths to types that should be treated like `Arc`, i.e. ignored but
/// for the generic parameters for determining interior mutability
@@ -459,6 +459,10 @@ define_Conf! {
/// Whether to **only** check for missing documentation in items visible within the current
/// crate. For example, `pub(crate)` items.
(missing_docs_in_crate_items: bool = false),
+ /// Lint: LARGE_FUTURES.
+ ///
+ /// The maximum byte size a `Future` can have before it triggers the `clippy::large_futures` lint
+ (future_size_threshold: u64 = 16 * 1024),
}
/// Search for the configuration file.
@@ -466,7 +470,7 @@ define_Conf! {
/// # Errors
///
/// Returns any unexpected filesystem error encountered when searching for the config file
-pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> {
+pub fn lookup_conf_file() -> io::Result<(Option<PathBuf>, Vec<String>)> {
/// Possible filename to search for.
const CONFIG_FILE_NAMES: [&str; 2] = [".clippy.toml", "clippy.toml"];
@@ -474,9 +478,11 @@ pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> {
// If neither of those exist, use ".".
let mut current = env::var_os("CLIPPY_CONF_DIR")
.or_else(|| env::var_os("CARGO_MANIFEST_DIR"))
- .map_or_else(|| PathBuf::from("."), PathBuf::from);
+ .map_or_else(|| PathBuf::from("."), PathBuf::from)
+ .canonicalize()?;
let mut found_config: Option<PathBuf> = None;
+ let mut warnings = vec![];
loop {
for config_file_name in &CONFIG_FILE_NAMES {
@@ -487,12 +493,12 @@ pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> {
Ok(md) if md.is_dir() => {},
Ok(_) => {
// warn if we happen to find two config files #8323
- if let Some(ref found_config_) = found_config {
- eprintln!(
- "Using config file `{}`\nWarning: `{}` will be ignored.",
- found_config_.display(),
- config_file.display(),
- );
+ if let Some(ref found_config) = found_config {
+ warnings.push(format!(
+ "using config file `{}`, `{}` will be ignored",
+ found_config.display(),
+ config_file.display()
+ ));
} else {
found_config = Some(config_file);
}
@@ -502,12 +508,12 @@ pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> {
}
if found_config.is_some() {
- return Ok(found_config);
+ return Ok((found_config, warnings));
}
// If the current directory has no parent, we're done searching.
if !current.pop() {
- return Ok(None);
+ return Ok((None, warnings));
}
}
}
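
A hypothetical caller of the new signature (the real call sites live in Clippy's driver and are not shown in this hunk): the duplicate-config warnings are now returned to the caller as strings, to be emitted through the normal diagnostics machinery rather than via `eprintln!` inside the lookup.

```rust
use std::io;
use std::path::PathBuf;

// Shape of the result produced by the reworked `lookup_conf_file`.
fn report_conf(found: io::Result<(Option<PathBuf>, Vec<String>)>) {
    match found {
        Ok((Some(path), warnings)) => {
            for warning in &warnings {
                eprintln!("warning: {warning}");
            }
            println!("using config file `{}`", path.display());
        }
        Ok((None, _)) => println!("no clippy.toml found"),
        Err(err) => eprintln!("error while looking for a config file: {err}"),
    }
}

fn main() {
    report_conf(Ok((Some(PathBuf::from("clippy.toml")), vec![])));
}
```
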
diff --git a/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs b/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs
new file mode 100644
index 000000000..09fcb82c3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs
@@ -0,0 +1,98 @@
+use clippy_utils::macros::collect_ast_format_args;
+use clippy_utils::source::snippet_opt;
+use itertools::Itertools;
+use rustc_ast::{Expr, ExprKind, FormatArgs};
+use rustc_lexer::{tokenize, TokenKind};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::hygiene;
+use std::iter::once;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Collects [`rustc_ast::FormatArgs`] so that future late passes can call
+ /// [`clippy_utils::macros::find_format_args`]
+ pub FORMAT_ARGS_COLLECTOR,
+ internal_warn,
+ "collects `format_args` AST nodes for use in later lints"
+}
+
+declare_lint_pass!(FormatArgsCollector => [FORMAT_ARGS_COLLECTOR]);
+
+impl EarlyLintPass for FormatArgsCollector {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if let ExprKind::FormatArgs(args) = &expr.kind {
+ if has_span_from_proc_macro(cx, args) {
+ return;
+ }
+
+ collect_ast_format_args(expr.span, args);
+ }
+ }
+}
+
+/// Detects if the format string or an argument has its span set by a proc macro to something inside
+/// a macro callsite, e.g.
+///
+/// ```ignore
+/// println!(some_proc_macro!("input {}"), a);
+/// ```
+///
+/// Where `some_proc_macro` expands to
+///
+/// ```ignore
+/// println!("output {}", a);
+/// ```
+///
+/// But with the span of `"output {}"` set to the macro input
+///
+/// ```ignore
+/// println!(some_proc_macro!("input {}"), a);
+/// // ^^^^^^^^^^
+/// ```
+fn has_span_from_proc_macro(cx: &EarlyContext<'_>, args: &FormatArgs) -> bool {
+ let ctxt = args.span.ctxt();
+
+ // `format!("{} {} {c}", "one", "two", c = "three")`
+ // ^^^^^ ^^^^^ ^^^^^^^
+ let argument_span = args
+ .arguments
+ .explicit_args()
+ .iter()
+ .map(|argument| hygiene::walk_chain(argument.expr.span, ctxt));
+
+ // `format!("{} {} {c}", "one", "two", c = "three")`
+ // ^^ ^^ ^^^^^^
+ let between_spans = once(args.span)
+ .chain(argument_span)
+ .tuple_windows()
+ .map(|(start, end)| start.between(end));
+
+ for between_span in between_spans {
+ let mut seen_comma = false;
+
+ let Some(snippet) = snippet_opt(cx, between_span) else { return true };
+ for token in tokenize(&snippet) {
+ match token.kind {
+ TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace => {},
+ TokenKind::Comma if !seen_comma => seen_comma = true,
+ // named arguments, `start_val, name = end_val`
+ // ^^^^^^^^^ between_span
+ TokenKind::Ident | TokenKind::Eq if seen_comma => {},
+ // An unexpected token usually indicates that we crossed a macro boundary
+ //
+ // `println!(some_proc_macro!("input {}"), a)`
+ // ^^^ between_span
+ // `println!("{}", val!(x))`
+ // ^^^^^^^ between_span
+ _ => return true,
+ }
+ }
+
+ if !seen_comma {
+ return true;
+ }
+ }
+
+ false
+}
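
The token check in `has_span_from_proc_macro` boils down to: the text between the format string and each argument (and between consecutive arguments) must be nothing but comments, whitespace, exactly one comma, and optionally `name =`. A rough standalone sketch of that rule, using plain string splitting instead of `rustc_lexer`, so it is only an approximation:

```rust
fn gap_looks_legitimate(gap: &str) -> bool {
    let mut seen_comma = false;
    for token in gap.split_whitespace() {
        match token {
            "," if !seen_comma => seen_comma = true,
            "=" if seen_comma => {}
            ident if seen_comma && ident.chars().all(|c| c.is_alphanumeric() || c == '_') => {}
            _ => return false,
        }
    }
    seen_comma
}

fn main() {
    // `format!("{} {c}", one, c = three)` -> gaps `", "` and `", c = "`
    assert!(gap_looks_legitimate(", "));
    assert!(gap_looks_legitimate(", c = "));
    // `println!(some_proc_macro!("input {}"), a)` -> the gap crosses the macro
    // boundary and contains `")`, which an ordinary call could never produce.
    assert!(!gap_looks_legitimate("\"), "));
    println!("ok");
}
```
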
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
index 688a8b865..f8978e30a 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
@@ -11,7 +11,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::interpret::ConstValue;
-use rustc_middle::ty::{self};
+use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::Symbol;
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
index b1b5164ff..3d0d4a525 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
@@ -26,7 +26,7 @@ use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::Ident;
use rustc_span::{sym, Loc, Span, Symbol};
use serde::{ser::SerializeStruct, Serialize, Serializer};
-use std::collections::BinaryHeap;
+use std::collections::{BTreeSet, BinaryHeap};
use std::fmt;
use std::fmt::Write as _;
use std::fs::{self, OpenOptions};
@@ -264,6 +264,9 @@ struct LintMetadata {
/// This field is only used in the output and will only be
/// mapped shortly before the actual output.
applicability: Option<ApplicabilityInfo>,
+ /// All the past names of lints which have been renamed.
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ former_ids: BTreeSet<String>,
}
impl LintMetadata {
@@ -283,6 +286,7 @@ impl LintMetadata {
version,
docs,
applicability: None,
+ former_ids: BTreeSet::new(),
}
}
}
@@ -901,6 +905,7 @@ fn collect_renames(lints: &mut Vec<LintMetadata>) {
if name == lint_name;
if let Some(past_name) = k.strip_prefix(CLIPPY_LINT_GROUP_PREFIX);
then {
+ lint.former_ids.insert(past_name.to_owned());
writeln!(collected, "* `{past_name}`").unwrap();
names.push(past_name.to_string());
}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
index b59ef4086..14ed1368e 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
@@ -11,7 +11,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind, Local, Mutability, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::interpret::{Allocation, ConstValue, GlobalAlloc};
-use rustc_middle::ty::{self, DefIdTree, Ty};
+use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::Symbol;
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/utils/mod.rs b/src/tools/clippy/clippy_lints/src/utils/mod.rs
index 787e9fd98..dc647af26 100644
--- a/src/tools/clippy/clippy_lints/src/utils/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/mod.rs
@@ -1,5 +1,6 @@
pub mod author;
pub mod conf;
pub mod dump_hir;
+pub mod format_args_collector;
#[cfg(feature = "internal")]
pub mod internal_lints;
diff --git a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
index e4d1ee195..36f910c98 100644
--- a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
@@ -155,19 +155,13 @@ impl LateLintPass<'_> for WildcardImports {
)
};
- let imports_string = if used_imports.len() == 1 {
- used_imports.iter().next().unwrap().to_string()
+ let mut imports = used_imports.items().map(ToString::to_string).into_sorted_stable_ord(false);
+ let imports_string = if imports.len() == 1 {
+ imports.pop().unwrap()
+ } else if braced_glob {
+ imports.join(", ")
} else {
- let mut imports = used_imports
- .iter()
- .map(ToString::to_string)
- .collect::<Vec<_>>();
- imports.sort();
- if braced_glob {
- imports.join(", ")
- } else {
- format!("{{{}}}", imports.join(", "))
- }
+ format!("{{{}}}", imports.join(", "))
};
let sugg = if braced_glob {
diff --git a/src/tools/clippy/clippy_lints/src/write.rs b/src/tools/clippy/clippy_lints/src/write.rs
index df3350388..d7c94b909 100644
--- a/src/tools/clippy/clippy_lints/src/write.rs
+++ b/src/tools/clippy/clippy_lints/src/write.rs
@@ -1,10 +1,11 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
-use clippy_utils::macros::{root_macro_call_first_node, FormatArgsExpn, MacroCall};
+use clippy_utils::macros::{find_format_args, format_arg_removal_span, root_macro_call_first_node, MacroCall};
use clippy_utils::source::{expand_past_previous_comma, snippet_opt};
use clippy_utils::{is_in_cfg_test, is_in_test_function};
-use rustc_ast::LitKind;
+use rustc_ast::token::LitKind;
+use rustc_ast::{FormatArgPosition, FormatArgs, FormatArgsPiece, FormatOptions, FormatPlaceholder, FormatTrait};
use rustc_errors::Applicability;
-use rustc_hir::{Expr, ExprKind, HirIdMap, Impl, Item, ItemKind};
+use rustc_hir::{Expr, Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, BytePos};
@@ -297,34 +298,40 @@ impl<'tcx> LateLintPass<'tcx> for Write {
_ => return,
}
- let Some(format_args) = FormatArgsExpn::find_nested(cx, expr, macro_call.expn) else { return };
-
- // ignore `writeln!(w)` and `write!(v, some_macro!())`
- if format_args.format_string.span.from_expansion() {
- return;
- }
+ find_format_args(cx, expr, macro_call.expn, |format_args| {
+ // ignore `writeln!(w)` and `write!(v, some_macro!())`
+ if format_args.span.from_expansion() {
+ return;
+ }
- match diag_name {
- sym::print_macro | sym::eprint_macro | sym::write_macro => {
- check_newline(cx, &format_args, &macro_call, name);
- },
- sym::println_macro | sym::eprintln_macro | sym::writeln_macro => {
- check_empty_string(cx, &format_args, &macro_call, name);
- },
- _ => {},
- }
+ match diag_name {
+ sym::print_macro | sym::eprint_macro | sym::write_macro => {
+ check_newline(cx, format_args, &macro_call, name);
+ },
+ sym::println_macro | sym::eprintln_macro | sym::writeln_macro => {
+ check_empty_string(cx, format_args, &macro_call, name);
+ },
+ _ => {},
+ }
- check_literal(cx, &format_args, name);
+ check_literal(cx, format_args, name);
- if !self.in_debug_impl {
- for arg in &format_args.args {
- if arg.format.r#trait == sym::Debug {
- span_lint(cx, USE_DEBUG, arg.span, "use of `Debug`-based formatting");
+ if !self.in_debug_impl {
+ for piece in &format_args.template {
+ if let &FormatArgsPiece::Placeholder(FormatPlaceholder {
+ span: Some(span),
+ format_trait: FormatTrait::Debug,
+ ..
+ }) = piece
+ {
+ span_lint(cx, USE_DEBUG, span, "use of `Debug`-based formatting");
+ }
}
}
- }
+ });
}
}
+
fn is_debug_impl(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
if let ItemKind::Impl(Impl { of_trait: Some(trait_ref), .. }) = &item.kind
&& let Some(trait_id) = trait_ref.trait_def_id()
@@ -335,16 +342,18 @@ fn is_debug_impl(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
}
}
-fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, macro_call: &MacroCall, name: &str) {
- let format_string_parts = &format_args.format_string.parts;
- let mut format_string_span = format_args.format_string.span;
-
- let Some(last) = format_string_parts.last() else { return };
+fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &MacroCall, name: &str) {
+ let Some(FormatArgsPiece::Literal(last)) = format_args.template.last() else { return };
let count_vertical_whitespace = || {
- format_string_parts
+ format_args
+ .template
.iter()
- .flat_map(|part| part.as_str().chars())
+ .filter_map(|piece| match piece {
+ FormatArgsPiece::Literal(literal) => Some(literal),
+ FormatArgsPiece::Placeholder(_) => None,
+ })
+ .flat_map(|literal| literal.as_str().chars())
.filter(|ch| matches!(ch, '\r' | '\n'))
.count()
};
@@ -352,10 +361,9 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, macro_c
if last.as_str().ends_with('\n')
// ignore format strings with other internal vertical whitespace
&& count_vertical_whitespace() == 1
-
- // ignore trailing arguments: `print!("Issue\n{}", 1265);`
- && format_string_parts.len() > format_args.args.len()
{
+ let mut format_string_span = format_args.span;
+
let lint = if name == "write" {
format_string_span = expand_past_previous_comma(cx, format_string_span);
@@ -373,7 +381,7 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, macro_c
let name_span = cx.sess().source_map().span_until_char(macro_call.span, '!');
let Some(format_snippet) = snippet_opt(cx, format_string_span) else { return };
- if format_string_parts.len() == 1 && last.as_str() == "\n" {
+ if format_args.template.len() == 1 && last.as_str() == "\n" {
// print!("\n"), write!(f, "\n")
diag.multipart_suggestion(
@@ -398,11 +406,12 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, macro_c
}
}
-fn check_empty_string(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, macro_call: &MacroCall, name: &str) {
- if let [part] = &format_args.format_string.parts[..]
- && let mut span = format_args.format_string.span
- && part.as_str() == "\n"
+fn check_empty_string(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &MacroCall, name: &str) {
+ if let [FormatArgsPiece::Literal(literal)] = &format_args.template[..]
+ && literal.as_str() == "\n"
{
+ let mut span = format_args.span;
+
let lint = if name == "writeln" {
span = expand_past_previous_comma(cx, span);
@@ -428,33 +437,49 @@ fn check_empty_string(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, ma
}
}
-fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, name: &str) {
- let mut counts = HirIdMap::<usize>::default();
- for param in format_args.params() {
- *counts.entry(param.value.hir_id).or_default() += 1;
+fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
+ let arg_index = |argument: &FormatArgPosition| argument.index.unwrap_or_else(|pos| pos);
+
+ let mut counts = vec![0u32; format_args.arguments.all_args().len()];
+ for piece in &format_args.template {
+ if let FormatArgsPiece::Placeholder(placeholder) = piece {
+ counts[arg_index(&placeholder.argument)] += 1;
+ }
}
- for arg in &format_args.args {
- let value = arg.param.value;
-
- if counts[&value.hir_id] == 1
- && arg.format.is_default()
- && let ExprKind::Lit(lit) = &value.kind
- && !value.span.from_expansion()
- && let Some(value_string) = snippet_opt(cx, value.span)
- {
- let (replacement, replace_raw) = match lit.node {
- LitKind::Str(..) => extract_str_literal(&value_string),
- LitKind::Char(ch) => (
- match ch {
- '"' => "\\\"",
- '\'' => "'",
- _ => &value_string[1..value_string.len() - 1],
+ for piece in &format_args.template {
+ if let FormatArgsPiece::Placeholder(FormatPlaceholder {
+ argument,
+ span: Some(placeholder_span),
+ format_trait: FormatTrait::Display,
+ format_options,
+ }) = piece
+ && *format_options == FormatOptions::default()
+ && let index = arg_index(argument)
+ && counts[index] == 1
+ && let Some(arg) = format_args.arguments.by_index(index)
+ && let rustc_ast::ExprKind::Lit(lit) = &arg.expr.kind
+ && !arg.expr.span.from_expansion()
+ && let Some(value_string) = snippet_opt(cx, arg.expr.span)
+ {
+ let (replacement, replace_raw) = match lit.kind {
+ LitKind::Str | LitKind::StrRaw(_) => match extract_str_literal(&value_string) {
+ Some(extracted) => extracted,
+ None => return,
+ },
+ LitKind::Char => (
+ match lit.symbol.as_str() {
+ "\"" => "\\\"",
+ "\\'" => "'",
+ _ => match value_string.strip_prefix('\'').and_then(|s| s.strip_suffix('\'')) {
+ Some(stripped) => stripped,
+ None => return,
+ },
}
.to_string(),
false,
),
- LitKind::Bool(b) => (b.to_string(), false),
+ LitKind::Bool => (lit.symbol.to_string(), false),
_ => continue,
};
@@ -464,7 +489,9 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, name: &
PRINT_LITERAL
};
- let format_string_is_raw = format_args.format_string.style.is_some();
+ let Some(format_string_snippet) = snippet_opt(cx, format_args.span) else { continue };
+ let format_string_is_raw = format_string_snippet.starts_with('r');
+
let replacement = match (format_string_is_raw, replace_raw) {
(false, false) => Some(replacement),
(false, true) => Some(replacement.replace('"', "\\\"").replace('\\', "\\\\")),
@@ -485,23 +512,24 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, name: &
span_lint_and_then(
cx,
lint,
- value.span,
+ arg.expr.span,
"literal with an empty format string",
|diag| {
if let Some(replacement) = replacement
// `format!("{}", "a")`, `format!("{named}", named = "b")
// ~~~~~ ~~~~~~~~~~~~~
- && let Some(value_span) = format_args.value_with_prev_comma_span(value.hir_id)
+ && let Some(removal_span) = format_arg_removal_span(format_args, index)
{
let replacement = replacement.replace('{', "{{").replace('}', "}}");
diag.multipart_suggestion(
"try this",
- vec![(arg.span, replacement), (value_span, String::new())],
+ vec![(*placeholder_span, replacement), (removal_span, String::new())],
Applicability::MachineApplicable,
);
}
},
);
+
}
}
}
@@ -511,13 +539,13 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgsExpn<'_>, name: &
/// `r#"a"#` -> (`a`, true)
///
/// `"b"` -> (`b`, false)
-fn extract_str_literal(literal: &str) -> (String, bool) {
+fn extract_str_literal(literal: &str) -> Option<(String, bool)> {
let (literal, raw) = match literal.strip_prefix('r') {
Some(stripped) => (stripped.trim_matches('#'), true),
None => (literal, false),
};
- (literal[1..literal.len() - 1].to_string(), raw)
+ Some((literal.strip_prefix('"')?.strip_suffix('"')?.to_string(), raw))
}
enum UnescapeErr {
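
To see what the hardened helper does and does not accept, here is a standalone copy of the new `extract_str_literal` with a couple of checks (the function body is taken from the hunk above; the `main` is purely illustrative):

```rust
/// `r#"a"#` -> ("a", true), `"b"` -> ("b", false), anything else -> None.
fn extract_str_literal(literal: &str) -> Option<(String, bool)> {
    let (literal, raw) = match literal.strip_prefix('r') {
        Some(stripped) => (stripped.trim_matches('#'), true),
        None => (literal, false),
    };
    Some((literal.strip_prefix('"')?.strip_suffix('"')?.to_string(), raw))
}

fn main() {
    assert_eq!(extract_str_literal(r##"r#"a"#"##), Some(("a".to_string(), true)));
    assert_eq!(extract_str_literal("\"b\""), Some(("b".to_string(), false)));
    // Previously the helper sliced the string blindly; a non-string literal
    // now simply yields `None` instead of risking a panic.
    assert_eq!(extract_str_literal("42"), None);
    println!("ok");
}
```
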
diff --git a/src/tools/clippy/clippy_utils/Cargo.toml b/src/tools/clippy/clippy_utils/Cargo.toml
index 173469f6c..124ebd164 100644
--- a/src/tools/clippy/clippy_utils/Cargo.toml
+++ b/src/tools/clippy/clippy_utils/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy_utils"
-version = "0.1.69"
+version = "0.1.70"
edition = "2021"
publish = false
diff --git a/src/tools/clippy/clippy_utils/src/ast_utils.rs b/src/tools/clippy/clippy_utils/src/ast_utils.rs
index d82098523..1f15598db 100644
--- a/src/tools/clippy/clippy_utils/src/ast_utils.rs
+++ b/src/tools/clippy/clippy_utils/src/ast_utils.rs
@@ -143,7 +143,7 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
(Paren(l), _) => eq_expr(l, r),
(_, Paren(r)) => eq_expr(l, r),
(Err, Err) => true,
- (Box(l), Box(r)) | (Try(l), Try(r)) | (Await(l), Await(r)) => eq_expr(l, r),
+ (Try(l), Try(r)) | (Await(l), Await(r)) => eq_expr(l, r),
(Array(l), Array(r)) => over(l, r, |l, r| eq_expr(l, r)),
(Tup(l), Tup(r)) => over(l, r, |l, r| eq_expr(l, r)),
(Repeat(le, ls), Repeat(re, rs)) => eq_expr(le, re) && eq_expr(&ls.value, &rs.value),
@@ -209,7 +209,7 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
&& eq_fn_decl(lf, rf)
&& eq_expr(le, re)
},
- (Async(lc, _, lb), Async(rc, _, rb)) => lc == rc && eq_block(lb, rb),
+ (Async(lc, lb), Async(rc, rb)) => lc == rc && eq_block(lb, rb),
(Range(lf, lt, ll), Range(rf, rt, rl)) => ll == rl && eq_expr_opt(lf, rf) && eq_expr_opt(lt, rt),
(AddrOf(lbk, lm, le), AddrOf(rbk, rm, re)) => lbk == rbk && lm == rm && eq_expr(le, re),
(Path(lq, lp), Path(rq, rp)) => both(lq, rq, eq_qself) && eq_path(lp, rp),
@@ -286,8 +286,30 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool {
match (l, r) {
(ExternCrate(l), ExternCrate(r)) => l == r,
(Use(l), Use(r)) => eq_use_tree(l, r),
- (Static(lt, lm, le), Static(rt, rm, re)) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le, re),
- (Const(ld, lt, le), Const(rd, rt, re)) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (
+ Static(box ast::StaticItem {
+ ty: lt,
+ mutability: lm,
+ expr: le,
+ }),
+ Static(box ast::StaticItem {
+ ty: rt,
+ mutability: rm,
+ expr: re,
+ }),
+ ) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (
+ Const(box ast::ConstItem {
+ defaultness: ld,
+ ty: lt,
+ expr: le,
+ }),
+ Const(box ast::ConstItem {
+ defaultness: rd,
+ ty: rt,
+ expr: re,
+ }),
+ ) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
(
Fn(box ast::Fn {
defaultness: ld,
@@ -451,7 +473,18 @@ pub fn eq_foreign_item_kind(l: &ForeignItemKind, r: &ForeignItemKind) -> bool {
pub fn eq_assoc_item_kind(l: &AssocItemKind, r: &AssocItemKind) -> bool {
use AssocItemKind::*;
match (l, r) {
- (Const(ld, lt, le), Const(rd, rt, re)) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (
+ Const(box ast::ConstItem {
+ defaultness: ld,
+ ty: lt,
+ expr: le,
+ }),
+ Const(box ast::ConstItem {
+ defaultness: rd,
+ ty: rt,
+ expr: re,
+ }),
+ ) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
(
Fn(box ast::Fn {
defaultness: ld,
diff --git a/src/tools/clippy/clippy_utils/src/attrs.rs b/src/tools/clippy/clippy_utils/src/attrs.rs
index 7987a233b..b4ad42a50 100644
--- a/src/tools/clippy/clippy_utils/src/attrs.rs
+++ b/src/tools/clippy/clippy_utils/src/attrs.rs
@@ -145,8 +145,8 @@ pub fn get_unique_attr<'a>(
/// Return true if the attributes contain any of `proc_macro`,
/// `proc_macro_derive` or `proc_macro_attribute`, false otherwise
-pub fn is_proc_macro(sess: &Session, attrs: &[ast::Attribute]) -> bool {
- attrs.iter().any(|attr| sess.is_proc_macro_attr(attr))
+pub fn is_proc_macro(attrs: &[ast::Attribute]) -> bool {
+ attrs.iter().any(rustc_ast::Attribute::is_proc_macro_attr)
}
/// Return true if the attributes contain `#[doc(hidden)]`
diff --git a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
index 43f0df145..d3a6929f6 100644
--- a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
+++ b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
@@ -112,7 +112,6 @@ fn qpath_search_pat(path: &QPath<'_>) -> (Pat, Pat) {
/// Get the search patterns to use for the given expression
fn expr_search_pat(tcx: TyCtxt<'_>, e: &Expr<'_>) -> (Pat, Pat) {
match e.kind {
- ExprKind::Box(e) => (Pat::Str("box"), expr_search_pat(tcx, e).1),
ExprKind::ConstBlock(_) => (Pat::Str("const"), Pat::Str("}")),
ExprKind::Tup([]) => (Pat::Str(")"), Pat::Str("(")),
ExprKind::Unary(UnOp::Deref, e) => (Pat::Str("*"), expr_search_pat(tcx, e).1),
diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
index ee2f816f1..28c857170 100644
--- a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
+++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
@@ -199,11 +199,9 @@ fn expr_eagerness<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> EagernessS
},
// Memory allocation, custom operator, loop, or call to an unknown function
- ExprKind::Box(_)
- | ExprKind::Unary(..)
- | ExprKind::Binary(..)
- | ExprKind::Loop(..)
- | ExprKind::Call(..) => self.eagerness = Lazy,
+ ExprKind::Unary(..) | ExprKind::Binary(..) | ExprKind::Loop(..) | ExprKind::Call(..) => {
+ self.eagerness = Lazy;
+ },
ExprKind::ConstBlock(_)
| ExprKind::Array(_)
diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs
index 0603755f8..3ee714782 100644
--- a/src/tools/clippy/clippy_utils/src/hir_utils.rs
+++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs
@@ -249,7 +249,6 @@ impl HirEqInterExpr<'_, '_, '_> {
both(&li.label, &ri.label, |l, r| l.ident.name == r.ident.name)
&& both(le, re, |l, r| self.eq_expr(l, r))
},
- (&ExprKind::Box(l), &ExprKind::Box(r)) => self.eq_expr(l, r),
(&ExprKind::Call(l_fun, l_args), &ExprKind::Call(r_fun, r_args)) => {
self.inner.allow_side_effects && self.eq_expr(l_fun, r_fun) && self.eq_exprs(l_args, r_args)
},
@@ -402,14 +401,9 @@ impl HirEqInterExpr<'_, '_, '_> {
}
fn eq_path_parameters(&mut self, left: &GenericArgs<'_>, right: &GenericArgs<'_>) -> bool {
- if !(left.parenthesized || right.parenthesized) {
+ if left.parenthesized == right.parenthesized {
over(left.args, right.args, |l, r| self.eq_generic_arg(l, r)) // FIXME(flip1995): may not work
&& over(left.bindings, right.bindings, |l, r| self.eq_type_binding(l, r))
- } else if left.parenthesized && right.parenthesized {
- over(left.inputs(), right.inputs(), |l, r| self.eq_ty(l, r))
- && both(&Some(&left.bindings[0].ty()), &Some(&right.bindings[0].ty()), |l, r| {
- self.eq_ty(l, r)
- })
} else {
false
}
@@ -628,7 +622,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_expr(j);
}
},
- ExprKind::Box(e) | ExprKind::DropTemps(e) | ExprKind::Yield(e, _) => {
+ ExprKind::DropTemps(e) | ExprKind::Yield(e, _) => {
self.hash_expr(e);
},
ExprKind::Call(fun, args) => {
diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs
index f02f8ecb4..6b677df46 100644
--- a/src/tools/clippy/clippy_utils/src/lib.rs
+++ b/src/tools/clippy/clippy_utils/src/lib.rs
@@ -3,7 +3,6 @@
#![feature(let_chains)]
#![feature(lint_reasons)]
#![feature(never_type)]
-#![feature(once_cell)]
#![feature(rustc_private)]
#![recursion_limit = "512"]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
@@ -33,7 +32,6 @@ extern crate rustc_lexer;
extern crate rustc_lint;
extern crate rustc_middle;
extern crate rustc_mir_dataflow;
-extern crate rustc_parse_format;
extern crate rustc_session;
extern crate rustc_span;
extern crate rustc_target;
@@ -78,7 +76,7 @@ use std::sync::OnceLock;
use std::sync::{Mutex, MutexGuard};
use if_chain::if_chain;
-use rustc_ast::ast::{self, LitKind};
+use rustc_ast::ast::{self, LitKind, RangeLimits};
use rustc_ast::Attribute;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::unhash::UnhashMap;
@@ -96,6 +94,7 @@ use rustc_hir::{
use rustc_lexer::{tokenize, TokenKind};
use rustc_lint::{LateContext, Level, Lint, LintContext};
use rustc_middle::hir::place::PlaceBase;
+use rustc_middle::mir::ConstantKind;
use rustc_middle::ty as rustc_ty;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc_middle::ty::binding::BindingMode;
@@ -104,7 +103,7 @@ use rustc_middle::ty::fast_reject::SimplifiedType::{
PtrSimplifiedType, SliceSimplifiedType, StrSimplifiedType, UintSimplifiedType,
};
use rustc_middle::ty::{
- layout::IntegerExt, BorrowKind, ClosureKind, DefIdTree, Ty, TyCtxt, TypeAndMut, TypeVisitableExt, UpvarCapture,
+ layout::IntegerExt, BorrowKind, ClosureKind, Ty, TyCtxt, TypeAndMut, TypeVisitableExt, UpvarCapture,
};
use rustc_middle::ty::{FloatTy, IntTy, UintTy};
use rustc_span::hygiene::{ExpnKind, MacroKind};
@@ -114,7 +113,8 @@ use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_span::Span;
use rustc_target::abi::Integer;
-use crate::consts::{constant, Constant};
+use crate::consts::{constant, miri_to_const, Constant};
+use crate::higher::Range;
use crate::ty::{can_partially_move_ty, expr_sig, is_copy, is_recursively_primitive_type, ty_is_fn_once_param};
use crate::visitors::for_each_expr;
@@ -617,7 +617,7 @@ fn item_children_by_name(tcx: TyCtxt<'_>, def_id: DefId, name: Symbol) -> Vec<Re
/// Can return multiple resolutions when there are multiple versions of the same crate, e.g.
/// `memchr::memchr` could return the functions from both memchr 1.0 and memchr 2.0.
///
-/// Also returns multiple results when there are mulitple paths under the same name e.g. `std::vec`
+/// Also returns multiple results when there are multiple paths under the same name e.g. `std::vec`
/// would have both a [`DefKind::Mod`] and [`DefKind::Macro`].
///
/// This function is expensive and should be used sparingly.
@@ -1491,6 +1491,68 @@ pub fn is_else_clause(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
}
}
+/// Checks whether the given `Expr` is a range equivalent to a `RangeFull`.
+/// For the lower bound, this means that:
+/// - either there is none
+/// - or it is the smallest value that can be represented by the range's integer type
+/// For the upper bound, this means that:
+/// - either there is none
+/// - or it is the largest value that can be represented by the range's integer type and is
+/// inclusive
+/// - or it is a call to some container's `len` method and is exclusive, and the range is passed to
+/// a method call on that same container (e.g. `v.drain(..v.len())`)
+/// If the given `Expr` is not some kind of range, the function returns `false`.
+pub fn is_range_full(cx: &LateContext<'_>, expr: &Expr<'_>, container_path: Option<&Path<'_>>) -> bool {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if let Some(Range { start, end, limits }) = Range::hir(expr) {
+ let start_is_none_or_min = start.map_or(true, |start| {
+ if let rustc_ty::Adt(_, subst) = ty.kind()
+ && let bnd_ty = subst.type_at(0)
+ && let Some(min_val) = bnd_ty.numeric_min_val(cx.tcx)
+ && let const_val = cx.tcx.valtree_to_const_val((bnd_ty, min_val.to_valtree()))
+ && let min_const_kind = ConstantKind::from_value(const_val, bnd_ty)
+ && let Some(min_const) = miri_to_const(cx.tcx, min_const_kind)
+ && let Some((start_const, _)) = constant(cx, cx.typeck_results(), start)
+ {
+ start_const == min_const
+ } else {
+ false
+ }
+ });
+ let end_is_none_or_max = end.map_or(true, |end| {
+ match limits {
+ RangeLimits::Closed => {
+ if let rustc_ty::Adt(_, subst) = ty.kind()
+ && let bnd_ty = subst.type_at(0)
+ && let Some(max_val) = bnd_ty.numeric_max_val(cx.tcx)
+ && let const_val = cx.tcx.valtree_to_const_val((bnd_ty, max_val.to_valtree()))
+ && let max_const_kind = ConstantKind::from_value(const_val, bnd_ty)
+ && let Some(max_const) = miri_to_const(cx.tcx, max_const_kind)
+ && let Some((end_const, _)) = constant(cx, cx.typeck_results(), end)
+ {
+ end_const == max_const
+ } else {
+ false
+ }
+ },
+ RangeLimits::HalfOpen => {
+ if let Some(container_path) = container_path
+ && let ExprKind::MethodCall(name, self_arg, [], _) = end.kind
+ && name.ident.name == sym::len
+ && let ExprKind::Path(QPath::Resolved(None, path)) = self_arg.kind
+ {
+ container_path.res == path.res
+ } else {
+ false
+ }
+ },
+ }
+ });
+ return start_is_none_or_min && end_is_none_or_max;
+ }
+ false
+}
+
/// Checks whether the given expression is a constant integer of the given value.
/// unlike `is_integer_literal`, this version does const folding
pub fn is_integer_const(cx: &LateContext<'_>, e: &Expr<'_>, value: u128) -> bool {
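
A few ordinary-Rust examples of ranges that `is_range_full` is meant to recognise, per the doc comment above (the helper itself needs a `LateContext` and is not runnable standalone):

```rust
fn main() {
    let mut v = vec![1, 2, 3];

    // No bounds at all: trivially a full range.
    let drained: Vec<i32> = v.drain(..).collect();
    assert_eq!(drained, [1, 2, 3]);

    // Exclusive upper bound that is `len()` of the same container: also full,
    // which is the `container_path` case handled for `RangeLimits::HalfOpen`.
    v.extend([4, 5, 6]);
    let drained: Vec<i32> = v.drain(0..v.len()).collect();
    assert_eq!(drained, [4, 5, 6]);
    assert!(v.is_empty());

    // By contrast, `1..` or `..v.len() - 1` would not count as full, and a
    // closed range only counts if it ends at the index type's maximum value.
    println!("ok");
}
```
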
@@ -1904,16 +1966,7 @@ pub fn is_async_fn(kind: FnKind<'_>) -> bool {
/// Peels away all the compiler generated code surrounding the body of an async function,
pub fn get_async_fn_body<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'tcx Expr<'tcx>> {
- if let ExprKind::Call(
- _,
- &[
- Expr {
- kind: ExprKind::Closure(&Closure { body, .. }),
- ..
- },
- ],
- ) = body.value.kind
- {
+ if let ExprKind::Closure(&Closure { body, .. }) = body.value.kind {
if let ExprKind::Block(
Block {
stmts: [],
@@ -2114,9 +2167,7 @@ pub fn fn_has_unsatisfiable_preds(cx: &LateContext<'_>, did: DefId) -> bool {
.filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None });
traits::impossible_predicates(
cx.tcx,
- traits::elaborate_predicates(cx.tcx, predicates)
- .map(|o| o.predicate)
- .collect::<Vec<_>>(),
+ traits::elaborate(cx.tcx, predicates).collect::<Vec<_>>(),
)
}
diff --git a/src/tools/clippy/clippy_utils/src/macros.rs b/src/tools/clippy/clippy_utils/src/macros.rs
index be6133d32..62d388a5e 100644
--- a/src/tools/clippy/clippy_utils/src/macros.rs
+++ b/src/tools/clippy/clippy_utils/src/macros.rs
@@ -1,22 +1,18 @@
#![allow(clippy::similar_names)] // `expr` and `expn`
-use crate::source::snippet_opt;
use crate::visitors::{for_each_expr, Descend};
use arrayvec::ArrayVec;
-use itertools::{izip, Either, Itertools};
-use rustc_ast::ast::LitKind;
-use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::{self as hir, Expr, ExprField, ExprKind, HirId, LangItem, Node, QPath, TyKind};
-use rustc_lexer::unescape::unescape_literal;
-use rustc_lexer::{tokenize, unescape, LiteralKind, TokenKind};
+use rustc_ast::{FormatArgs, FormatArgument, FormatPlaceholder};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::{self as hir, Expr, ExprKind, HirId, Node, QPath};
use rustc_lint::LateContext;
-use rustc_parse_format::{self as rpf, Alignment};
use rustc_span::def_id::DefId;
use rustc_span::hygiene::{self, MacroKind, SyntaxContext};
-use rustc_span::{sym, BytePos, ExpnData, ExpnId, ExpnKind, Pos, Span, SpanData, Symbol};
-use std::iter::{once, zip};
+use rustc_span::{sym, BytePos, ExpnData, ExpnId, ExpnKind, Span, Symbol};
+use std::cell::RefCell;
use std::ops::ControlFlow;
+use std::sync::atomic::{AtomicBool, Ordering};
const FORMAT_MACRO_DIAG_ITEMS: &[Symbol] = &[
sym::assert_eq_macro,
@@ -213,6 +209,7 @@ pub fn is_assert_macro(cx: &LateContext<'_>, def_id: DefId) -> bool {
matches!(name, sym::assert_macro | sym::debug_assert_macro)
}
+#[derive(Debug)]
pub enum PanicExpn<'a> {
/// No arguments - `panic!()`
Empty,
@@ -221,15 +218,12 @@ pub enum PanicExpn<'a> {
/// A single argument that implements `Display` - `panic!("{}", object)`
Display(&'a Expr<'a>),
/// Anything else - `panic!("error {}: {}", a, b)`
- Format(FormatArgsExpn<'a>),
+ Format(&'a Expr<'a>),
}
impl<'a> PanicExpn<'a> {
- pub fn parse(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<Self> {
- if !macro_backtrace(expr.span).any(|macro_call| is_panic(cx, macro_call.def_id)) {
- return None;
- }
- let ExprKind::Call(callee, [arg]) = &expr.kind else { return None };
+ pub fn parse(expr: &'a Expr<'a>) -> Option<Self> {
+ let ExprKind::Call(callee, [arg, rest @ ..]) = &expr.kind else { return None };
let ExprKind::Path(QPath::Resolved(_, path)) = &callee.kind else { return None };
let result = match path.segments.last().unwrap().ident.as_str() {
"panic" if arg.span.ctxt() == expr.span.ctxt() => Self::Empty,
@@ -238,7 +232,22 @@ impl<'a> PanicExpn<'a> {
let ExprKind::AddrOf(_, _, e) = &arg.kind else { return None };
Self::Display(e)
},
- "panic_fmt" => Self::Format(FormatArgsExpn::parse(cx, arg)?),
+ "panic_fmt" => Self::Format(arg),
+ // Since Rust 1.52, `assert_{eq,ne}` macros expand to use:
+ // `core::panicking::assert_failed(.., left_val, right_val, None | Some(format_args!(..)));`
+ "assert_failed" => {
+ // It should have 4 arguments in total (we already matched with the first argument,
+ // so we're just checking for 3)
+ if rest.len() != 3 {
+ return None;
+ }
+ // `msg_arg` is either `None` (no custom message) or `Some(format_args!(..))` (custom message)
+ let msg_arg = &rest[2];
+ match msg_arg.kind {
+ ExprKind::Call(_, [fmt_arg]) => Self::Format(fmt_arg),
+ _ => Self::Empty,
+ }
+ },
_ => return None,
};
Some(result)
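
The new `assert_failed` arm corresponds to what `assert_eq!`/`assert_ne!` have expanded to since Rust 1.52. In ordinary user code the two shapes look like this: no custom message gives `None` as the last argument, a custom message gives `Some(format_args!(..))`, which the parser now maps to `PanicExpn::Format`.

```rust
fn main() {
    let (left, right) = (2 + 2, 4);

    // Expands to `core::panicking::assert_failed(.., left_val, right_val, None)`
    // on failure; `PanicExpn::parse` would report `PanicExpn::Empty`.
    assert_eq!(left, right);

    // Expands with `Some(format_args!(...))` as the last argument on failure;
    // `PanicExpn::parse` would report `PanicExpn::Format(..)`.
    assert_eq!(left, right, "{left} should equal {right}");
}
```
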
@@ -251,7 +260,17 @@ pub fn find_assert_args<'a>(
expr: &'a Expr<'a>,
expn: ExpnId,
) -> Option<(&'a Expr<'a>, PanicExpn<'a>)> {
- find_assert_args_inner(cx, expr, expn).map(|([e], p)| (e, p))
+ find_assert_args_inner(cx, expr, expn).map(|([e], mut p)| {
+ // `assert!(..)` expands to `core::panicking::panic("assertion failed: ...")` (which we map to
+ // `PanicExpn::Str(..)`) and `assert!(.., "..")` expands to
+ // `core::panicking::panic_fmt(format_args!(".."))` (which we map to `PanicExpn::Format(..)`).
+ // So even we got `PanicExpn::Str(..)` that means there is no custom message provided
+ if let PanicExpn::Str(_) = p {
+ p = PanicExpn::Empty;
+ }
+
+ (e, p)
+ })
}
/// Finds the arguments of an `assert_eq!` or `debug_assert_eq!` macro call within the macro
@@ -275,13 +294,12 @@ fn find_assert_args_inner<'a, const N: usize>(
Some(inner_name) => find_assert_within_debug_assert(cx, expr, expn, Symbol::intern(inner_name))?,
};
let mut args = ArrayVec::new();
- let mut panic_expn = None;
- let _: Option<!> = for_each_expr(expr, |e| {
+ let panic_expn = for_each_expr(expr, |e| {
if args.is_full() {
- if panic_expn.is_none() && e.span.ctxt() != expr.span.ctxt() {
- panic_expn = PanicExpn::parse(cx, e);
+ match PanicExpn::parse(e) {
+ Some(expn) => ControlFlow::Break(expn),
+ None => ControlFlow::Continue(Descend::Yes),
}
- ControlFlow::Continue(Descend::from(panic_expn.is_none()))
} else if is_assert_arg(cx, e, expn) {
args.push(e);
ControlFlow::Continue(Descend::No)
@@ -339,241 +357,127 @@ fn is_assert_arg(cx: &LateContext<'_>, expr: &Expr<'_>, assert_expn: ExpnId) ->
}
}
-/// The format string doesn't exist in the HIR, so we reassemble it from source code
-#[derive(Debug)]
-pub struct FormatString {
- /// Span of the whole format string literal, including `[r#]"`.
- pub span: Span,
- /// Snippet of the whole format string literal, including `[r#]"`.
- pub snippet: String,
- /// If the string is raw `r"..."`/`r#""#`, how many `#`s does it have on each side.
- pub style: Option<usize>,
- /// The unescaped value of the format string, e.g. `"val – {}"` for the literal
- /// `"val \u{2013} {}"`.
- pub unescaped: String,
- /// The format string split by format args like `{..}`.
- pub parts: Vec<Symbol>,
+thread_local! {
+ /// We preserve the [`FormatArgs`] structs from the early pass for use in the late pass to be
+ /// able to access the many features of a [`LateContext`].
+ ///
+ /// A thread local is used because [`FormatArgs`] is `!Send` and `!Sync`, we are making an
+ /// assumption that the early pass the populates the map and the later late passes will all be
+ /// running on the same thread.
+ static AST_FORMAT_ARGS: RefCell<FxHashMap<Span, FormatArgs>> = {
+ static CALLED: AtomicBool = AtomicBool::new(false);
+ debug_assert!(
+ !CALLED.swap(true, Ordering::SeqCst),
+ "incorrect assumption: `AST_FORMAT_ARGS` should only be accessed by a single thread",
+ );
+
+ RefCell::default()
+ };
}
-impl FormatString {
- fn new(cx: &LateContext<'_>, pieces: &Expr<'_>) -> Option<Self> {
- // format_args!(r"a {} b \", 1);
- //
- // expands to
- //
- // ::core::fmt::Arguments::new_v1(&["a ", " b \\"],
- // &[::core::fmt::ArgumentV1::new_display(&1)]);
- //
- // where `pieces` is the expression `&["a ", " b \\"]`. It has the span of `r"a {} b \"`
- let span = pieces.span;
- let snippet = snippet_opt(cx, span)?;
-
- let (inner, style) = match tokenize(&snippet).next()?.kind {
- TokenKind::Literal { kind, .. } => {
- let style = match kind {
- LiteralKind::Str { .. } => None,
- LiteralKind::RawStr { n_hashes: Some(n), .. } => Some(n.into()),
- _ => return None,
- };
-
- let start = style.map_or(1, |n| 2 + n);
- let end = snippet.len() - style.map_or(1, |n| 1 + n);
-
- (&snippet[start..end], style)
- },
- _ => return None,
- };
+/// Record [`rustc_ast::FormatArgs`] for use in late lint passes; this should only be called by
+/// `FormatArgsCollector`
+pub fn collect_ast_format_args(span: Span, format_args: &FormatArgs) {
+ AST_FORMAT_ARGS.with(|ast_format_args| {
+ ast_format_args.borrow_mut().insert(span, format_args.clone());
+ });
+}
- let mode = if style.is_some() {
- unescape::Mode::RawStr
+/// Calls `callback` with an AST [`FormatArgs`] node if a `format_args` expansion is found as a
+/// descendant of `expn_id`
+pub fn find_format_args(cx: &LateContext<'_>, start: &Expr<'_>, expn_id: ExpnId, callback: impl FnOnce(&FormatArgs)) {
+ let format_args_expr = for_each_expr(start, |expr| {
+ let ctxt = expr.span.ctxt();
+ if ctxt.outer_expn().is_descendant_of(expn_id) {
+ if macro_backtrace(expr.span)
+ .map(|macro_call| cx.tcx.item_name(macro_call.def_id))
+ .any(|name| matches!(name, sym::const_format_args | sym::format_args | sym::format_args_nl))
+ {
+ ControlFlow::Break(expr)
+ } else {
+ ControlFlow::Continue(Descend::Yes)
+ }
} else {
- unescape::Mode::Str
- };
-
- let mut unescaped = String::with_capacity(inner.len());
- // Sometimes the original string comes from a macro which accepts a malformed string, such as in a
- // #[display(""somestring)] attribute (accepted by the `displaythis` crate). Reconstructing the
- // string from the span will not be possible, so we will just return None here.
- let mut unparsable = false;
- unescape_literal(inner, mode, &mut |_, ch| match ch {
- Ok(ch) => unescaped.push(ch),
- Err(e) if !e.is_fatal() => (),
- Err(_) => unparsable = true,
- });
- if unparsable {
- return None;
+ ControlFlow::Continue(Descend::No)
}
+ });
- let mut parts = Vec::new();
- let _: Option<!> = for_each_expr(pieces, |expr| {
- if let ExprKind::Lit(lit) = &expr.kind
- && let LitKind::Str(symbol, _) = lit.node
- {
- parts.push(symbol);
- }
- ControlFlow::Continue(())
+ if let Some(expr) = format_args_expr {
+ AST_FORMAT_ARGS.with(|ast_format_args| {
+ ast_format_args.borrow().get(&expr.span).map(callback);
});
-
- Some(Self {
- span,
- snippet,
- style,
- unescaped,
- parts,
- })
}
}
-struct FormatArgsValues<'tcx> {
- /// Values passed after the format string and implicit captures. `[1, z + 2, x]` for
- /// `format!("{x} {} {}", 1, z + 2)`.
- value_args: Vec<&'tcx Expr<'tcx>>,
- /// Maps an `rt::v1::Argument::position` or an `rt::v1::Count::Param` to its index in
- /// `value_args`
- pos_to_value_index: Vec<usize>,
- /// Used to check if a value is declared inline & to resolve `InnerSpan`s.
- format_string_span: SpanData,
-}
-
-impl<'tcx> FormatArgsValues<'tcx> {
- fn new(args: &'tcx Expr<'tcx>, format_string_span: SpanData) -> Self {
- let mut pos_to_value_index = Vec::new();
- let mut value_args = Vec::new();
- let _: Option<!> = for_each_expr(args, |expr| {
- if expr.span.ctxt() == args.span.ctxt() {
- // ArgumentV1::new_<format_trait>(<val>)
- // ArgumentV1::from_usize(<val>)
- if let ExprKind::Call(callee, [val]) = expr.kind
- && let ExprKind::Path(QPath::TypeRelative(ty, _)) = callee.kind
- && let TyKind::Path(QPath::LangItem(LangItem::FormatArgument, _, _)) = ty.kind
- {
- let val_idx = if val.span.ctxt() == expr.span.ctxt()
- && let ExprKind::Field(_, field) = val.kind
- && let Ok(idx) = field.name.as_str().parse()
- {
- // tuple index
- idx
- } else {
- // assume the value expression is passed directly
- pos_to_value_index.len()
- };
-
- pos_to_value_index.push(val_idx);
- }
- ControlFlow::Continue(Descend::Yes)
- } else {
- // assume that any expr with a differing span is a value
- value_args.push(expr);
- ControlFlow::Continue(Descend::No)
- }
- });
-
- Self {
- value_args,
- pos_to_value_index,
- format_string_span,
+/// Attempt to find the [`rustc_hir::Expr`] that corresponds to the [`FormatArgument`]'s value; if
+/// it cannot be found it will return the [`rustc_ast::Expr`].
+pub fn find_format_arg_expr<'hir, 'ast>(
+ start: &'hir Expr<'hir>,
+ target: &'ast FormatArgument,
+) -> Result<&'hir rustc_hir::Expr<'hir>, &'ast rustc_ast::Expr> {
+ for_each_expr(start, |expr| {
+ if expr.span == target.expr.span {
+ ControlFlow::Break(expr)
+ } else {
+ ControlFlow::Continue(())
}
- }
+ })
+ .ok_or(&target.expr)
}
-/// The positions of a format argument's value, precision and width
+/// Span of the `:` and format specifiers
///
-/// A position is an index into the second argument of `Arguments::new_v1[_formatted]`
-#[derive(Debug, Default, Copy, Clone)]
-struct ParamPosition {
- /// The position stored in `rt::v1::Argument::position`.
- value: usize,
- /// The position stored in `rt::v1::FormatSpec::width` if it is a `Count::Param`.
- width: Option<usize>,
- /// The position stored in `rt::v1::FormatSpec::precision` if it is a `Count::Param`.
- precision: Option<usize>,
+/// ```ignore
+/// format!("{:.}"), format!("{foo:.}")
+/// ^^ ^^
+/// ```
+pub fn format_placeholder_format_span(placeholder: &FormatPlaceholder) -> Option<Span> {
+ let base = placeholder.span?.data();
+
+ // `base.hi` is `{...}|`, subtract 1 byte (the length of '}') so that it points before the closing
+ // brace `{...|}`
+ Some(Span::new(
+ placeholder.argument.span?.hi(),
+ base.hi - BytePos(1),
+ base.ctxt,
+ base.parent,
+ ))
}
-impl<'tcx> Visitor<'tcx> for ParamPosition {
- fn visit_expr_field(&mut self, field: &'tcx ExprField<'tcx>) {
- match field.ident.name {
- sym::position => {
- if let ExprKind::Lit(lit) = &field.expr.kind
- && let LitKind::Int(pos, _) = lit.node
- {
- self.value = pos as usize;
- }
- },
- sym::precision => {
- self.precision = parse_count(field.expr);
- },
- sym::width => {
- self.width = parse_count(field.expr);
- },
- _ => walk_expr(self, field.expr),
- }
+/// Span covering the format string and values
+///
+/// ```ignore
+/// format("{}.{}", 10, 11)
+/// // ^^^^^^^^^^^^^^^
+/// ```
+pub fn format_args_inputs_span(format_args: &FormatArgs) -> Span {
+ match format_args.arguments.explicit_args() {
+ [] => format_args.span,
+ [.., last] => format_args
+ .span
+ .to(hygiene::walk_chain(last.expr.span, format_args.span.ctxt())),
}
}
-fn parse_count(expr: &Expr<'_>) -> Option<usize> {
- // <::core::fmt::rt::v1::Count>::Param(1usize),
- if let ExprKind::Call(ctor, [val]) = expr.kind
- && let ExprKind::Path(QPath::TypeRelative(_, path)) = ctor.kind
- && path.ident.name == sym::Param
- && let ExprKind::Lit(lit) = &val.kind
- && let LitKind::Int(pos, _) = lit.node
- {
- Some(pos as usize)
- } else {
- None
- }
-}
+/// Returns the [`Span`] of the value at `index` extended to the previous comma, e.g. for the value
+/// `10`
+///
+/// ```ignore
+/// format("{}.{}", 10, 11)
+/// // ^^^^
+/// ```
+pub fn format_arg_removal_span(format_args: &FormatArgs, index: usize) -> Option<Span> {
+ let ctxt = format_args.span.ctxt();
-/// Parses the `fmt` arg of `Arguments::new_v1_formatted(pieces, args, fmt, _)`
-fn parse_rt_fmt<'tcx>(fmt_arg: &'tcx Expr<'tcx>) -> Option<impl Iterator<Item = ParamPosition> + 'tcx> {
- if let ExprKind::AddrOf(.., array) = fmt_arg.kind
- && let ExprKind::Array(specs) = array.kind
- {
- Some(specs.iter().map(|spec| {
- if let ExprKind::Call(f, args) = spec.kind
- && let ExprKind::Path(QPath::TypeRelative(ty, f)) = f.kind
- && let TyKind::Path(QPath::LangItem(LangItem::FormatPlaceholder, _, _)) = ty.kind
- && f.ident.name == sym::new
- && let [position, _fill, _align, _flags, precision, width] = args
- && let ExprKind::Lit(position) = &position.kind
- && let LitKind::Int(position, _) = position.node {
- ParamPosition {
- value: position as usize,
- width: parse_count(width),
- precision: parse_count(precision),
- }
- } else {
- ParamPosition::default()
- }
- }))
- } else {
- None
- }
-}
+ let current = hygiene::walk_chain(format_args.arguments.by_index(index)?.expr.span, ctxt);
-/// `Span::from_inner`, but for `rustc_parse_format`'s `InnerSpan`
-fn span_from_inner(base: SpanData, inner: rpf::InnerSpan) -> Span {
- Span::new(
- base.lo + BytePos::from_usize(inner.start),
- base.lo + BytePos::from_usize(inner.end),
- base.ctxt,
- base.parent,
- )
-}
+ let prev = if index == 0 {
+ format_args.span
+ } else {
+ hygiene::walk_chain(format_args.arguments.by_index(index - 1)?.expr.span, ctxt)
+ };
-/// How a format parameter is used in the format string
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub enum FormatParamKind {
- /// An implicit parameter , such as `{}` or `{:?}`.
- Implicit,
- /// A parameter with an explicit number, e.g. `{1}`, `{0:?}`, or `{:.0$}`
- Numbered,
- /// A parameter with an asterisk precision. e.g. `{:.*}`.
- Starred,
- /// A named parameter with a named `value_arg`, such as the `x` in `format!("{x}", x = 1)`.
- Named(Symbol),
- /// An implicit named parameter, such as the `y` in `format!("{y}")`.
- NamedInline(Symbol),
+ Some(current.with_lo(prev.hi()))
}
/// Where a format parameter is being used in the format string
@@ -587,462 +491,6 @@ pub enum FormatParamUsage {
Precision,
}
-/// A `FormatParam` is any place in a `FormatArgument` that refers to a supplied value, e.g.
-///
-/// ```
-/// let precision = 2;
-/// format!("{:.precision$}", 0.1234);
-/// ```
-///
-/// has two `FormatParam`s, a [`FormatParamKind::Implicit`] `.kind` with a `.value` of `0.1234`
-/// and a [`FormatParamKind::NamedInline("precision")`] `.kind` with a `.value` of `2`
-#[derive(Debug, Copy, Clone)]
-pub struct FormatParam<'tcx> {
- /// The expression this parameter refers to.
- pub value: &'tcx Expr<'tcx>,
- /// How this parameter refers to its `value`.
- pub kind: FormatParamKind,
- /// Where this format param is being used - argument/width/precision
- pub usage: FormatParamUsage,
- /// Span of the parameter, may be zero width. Includes the whitespace of implicit parameters.
- ///
- /// ```text
- /// format!("{}, { }, {0}, {name}", ...);
- /// ^ ~~ ~ ~~~~
- /// ```
- pub span: Span,
-}
-
-impl<'tcx> FormatParam<'tcx> {
- fn new(
- mut kind: FormatParamKind,
- usage: FormatParamUsage,
- position: usize,
- inner: rpf::InnerSpan,
- values: &FormatArgsValues<'tcx>,
- ) -> Option<Self> {
- let value_index = *values.pos_to_value_index.get(position)?;
- let value = *values.value_args.get(value_index)?;
- let span = span_from_inner(values.format_string_span, inner);
-
- // if a param is declared inline, e.g. `format!("{x}")`, the generated expr's span points
- // into the format string
- if let FormatParamKind::Named(name) = kind && values.format_string_span.contains(value.span.data()) {
- kind = FormatParamKind::NamedInline(name);
- }
-
- Some(Self {
- value,
- kind,
- usage,
- span,
- })
- }
-}
-
-/// Used by [width](https://doc.rust-lang.org/std/fmt/#width) and
-/// [precision](https://doc.rust-lang.org/std/fmt/#precision) specifiers.
-#[derive(Debug, Copy, Clone)]
-pub enum Count<'tcx> {
- /// Specified with a literal number, stores the value.
- Is(usize, Span),
- /// Specified using `$` and `*` syntaxes. The `*` format is still considered to be
- /// `FormatParamKind::Numbered`.
- Param(FormatParam<'tcx>),
- /// Not specified.
- Implied(Option<Span>),
-}
-
-impl<'tcx> Count<'tcx> {
- fn new(
- usage: FormatParamUsage,
- count: rpf::Count<'_>,
- position: Option<usize>,
- inner: Option<rpf::InnerSpan>,
- values: &FormatArgsValues<'tcx>,
- ) -> Option<Self> {
- let span = inner.map(|inner| span_from_inner(values.format_string_span, inner));
-
- Some(match count {
- rpf::Count::CountIs(val) => Self::Is(val, span?),
- rpf::Count::CountIsName(name, _) => Self::Param(FormatParam::new(
- FormatParamKind::Named(Symbol::intern(name)),
- usage,
- position?,
- inner?,
- values,
- )?),
- rpf::Count::CountIsParam(_) => Self::Param(FormatParam::new(
- FormatParamKind::Numbered,
- usage,
- position?,
- inner?,
- values,
- )?),
- rpf::Count::CountIsStar(_) => Self::Param(FormatParam::new(
- FormatParamKind::Starred,
- usage,
- position?,
- inner?,
- values,
- )?),
- rpf::Count::CountImplied => Self::Implied(span),
- })
- }
-
- pub fn is_implied(self) -> bool {
- matches!(self, Count::Implied(_))
- }
-
- pub fn param(self) -> Option<FormatParam<'tcx>> {
- match self {
- Count::Param(param) => Some(param),
- _ => None,
- }
- }
-
- pub fn span(self) -> Option<Span> {
- match self {
- Count::Is(_, span) => Some(span),
- Count::Param(param) => Some(param.span),
- Count::Implied(span) => span,
- }
- }
-}
-
-/// Specification for the formatting of an argument in the format string. See
-/// <https://doc.rust-lang.org/std/fmt/index.html#formatting-parameters> for the precise meanings.
-#[derive(Debug)]
-pub struct FormatSpec<'tcx> {
- /// Optionally specified character to fill alignment with.
- pub fill: Option<char>,
- /// Optionally specified alignment.
- pub align: Alignment,
- /// Whether all flag options are set to default (no flags specified).
- pub no_flags: bool,
- /// Represents either the maximum width or the integer precision.
- pub precision: Count<'tcx>,
- /// The minimum width, will be padded according to `width`/`align`
- pub width: Count<'tcx>,
- /// The formatting trait used by the argument, e.g. `sym::Display` for `{}`, `sym::Debug` for
- /// `{:?}`.
- pub r#trait: Symbol,
- pub trait_span: Option<Span>,
-}
-
-impl<'tcx> FormatSpec<'tcx> {
- fn new(spec: rpf::FormatSpec<'_>, positions: ParamPosition, values: &FormatArgsValues<'tcx>) -> Option<Self> {
- Some(Self {
- fill: spec.fill,
- align: spec.align,
- no_flags: spec.sign.is_none() && !spec.alternate && !spec.zero_pad && spec.debug_hex.is_none(),
- precision: Count::new(
- FormatParamUsage::Precision,
- spec.precision,
- positions.precision,
- spec.precision_span,
- values,
- )?,
- width: Count::new(
- FormatParamUsage::Width,
- spec.width,
- positions.width,
- spec.width_span,
- values,
- )?,
- r#trait: match spec.ty {
- "" => sym::Display,
- "?" => sym::Debug,
- "o" => sym!(Octal),
- "x" => sym!(LowerHex),
- "X" => sym!(UpperHex),
- "p" => sym::Pointer,
- "b" => sym!(Binary),
- "e" => sym!(LowerExp),
- "E" => sym!(UpperExp),
- _ => return None,
- },
- trait_span: spec
- .ty_span
- .map(|span| span_from_inner(values.format_string_span, span)),
- })
- }
-
- /// Returns true if this format spec is unchanged from the default. e.g. returns true for `{}`,
- /// `{foo}` and `{2}`, but false for `{:?}`, `{foo:5}` and `{3:.5}`
- pub fn is_default(&self) -> bool {
- self.r#trait == sym::Display && self.is_default_for_trait()
- }
-
- /// Has no other formatting specifiers than setting the format trait. returns true for `{}`,
- /// `{foo}`, `{:?}`, but false for `{foo:5}`, `{3:.5?}`
- pub fn is_default_for_trait(&self) -> bool {
- self.width.is_implied() && self.precision.is_implied() && self.align == Alignment::AlignUnknown && self.no_flags
- }
-}
-
-/// A format argument, such as `{}`, `{foo:?}`.
-#[derive(Debug)]
-pub struct FormatArg<'tcx> {
- /// The parameter the argument refers to.
- pub param: FormatParam<'tcx>,
- /// How to format `param`.
- pub format: FormatSpec<'tcx>,
- /// span of the whole argument, `{..}`.
- pub span: Span,
-}
-
-impl<'tcx> FormatArg<'tcx> {
- /// Span of the `:` and format specifiers
- ///
- /// ```ignore
- /// format!("{:.}"), format!("{foo:.}")
- /// ^^ ^^
- /// ```
- pub fn format_span(&self) -> Span {
- let base = self.span.data();
-
- // `base.hi` is `{...}|`, subtract 1 byte (the length of '}') so that it points before the closing
- // brace `{...|}`
- Span::new(self.param.span.hi(), base.hi - BytePos(1), base.ctxt, base.parent)
- }
-}
-
-/// A parsed `format_args!` expansion.
-#[derive(Debug)]
-pub struct FormatArgsExpn<'tcx> {
- /// The format string literal.
- pub format_string: FormatString,
- /// The format arguments, such as `{:?}`.
- pub args: Vec<FormatArg<'tcx>>,
- /// Has an added newline due to `println!()`/`writeln!()`/etc. The last format string part will
- /// include this added newline.
- pub newline: bool,
- /// Spans of the commas between the format string and explicit values, excluding any trailing
- /// comma
- ///
- /// ```ignore
- /// format!("..", 1, 2, 3,)
- /// // ^ ^ ^
- /// ```
- comma_spans: Vec<Span>,
- /// Explicit values passed after the format string, ignoring implicit captures. `[1, z + 2]` for
- /// `format!("{x} {} {y}", 1, z + 2)`.
- explicit_values: Vec<&'tcx Expr<'tcx>>,
-}
-
-impl<'tcx> FormatArgsExpn<'tcx> {
- /// Gets the spans of the commas inbetween the format string and explicit args, not including
- /// any trailing comma
- ///
- /// ```ignore
- /// format!("{} {}", a, b)
- /// // ^ ^
- /// ```
- ///
- /// Ensures that the format string and values aren't coming from a proc macro that sets the
- /// output span to that of its input
- fn comma_spans(cx: &LateContext<'_>, explicit_values: &[&Expr<'_>], fmt_span: Span) -> Option<Vec<Span>> {
- // `format!("{} {} {c}", "one", "two", c = "three")`
- // ^^^^^ ^^^^^ ^^^^^^^
- let value_spans = explicit_values
- .iter()
- .map(|val| hygiene::walk_chain(val.span, fmt_span.ctxt()));
-
- // `format!("{} {} {c}", "one", "two", c = "three")`
- // ^^ ^^ ^^^^^^
- let between_spans = once(fmt_span)
- .chain(value_spans)
- .tuple_windows()
- .map(|(start, end)| start.between(end));
-
- let mut comma_spans = Vec::new();
- for between_span in between_spans {
- let mut offset = 0;
- let mut seen_comma = false;
-
- for token in tokenize(&snippet_opt(cx, between_span)?) {
- match token.kind {
- TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace => {},
- TokenKind::Comma if !seen_comma => {
- seen_comma = true;
-
- let base = between_span.data();
- comma_spans.push(Span::new(
- base.lo + BytePos(offset),
- base.lo + BytePos(offset + 1),
- base.ctxt,
- base.parent,
- ));
- },
- // named arguments, `start_val, name = end_val`
- // ^^^^^^^^^ between_span
- TokenKind::Ident | TokenKind::Eq if seen_comma => {},
- // An unexpected token usually indicates the format string or a value came from a proc macro output
- // that sets the span of its output to an input, e.g. `println!(some_proc_macro!("input"), ..)` that
- // emits a string literal with the span set to that of `"input"`
- _ => return None,
- }
- offset += token.len;
- }
-
- if !seen_comma {
- return None;
- }
- }
-
- Some(comma_spans)
- }
-
- pub fn parse(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<Self> {
- let macro_name = macro_backtrace(expr.span)
- .map(|macro_call| cx.tcx.item_name(macro_call.def_id))
- .find(|&name| matches!(name, sym::const_format_args | sym::format_args | sym::format_args_nl))?;
- let newline = macro_name == sym::format_args_nl;
-
- // ::core::fmt::Arguments::new_v1(pieces, args)
- // ::core::fmt::Arguments::new_v1_formatted(pieces, args, fmt, _unsafe_arg)
- if let ExprKind::Call(callee, [pieces, args, rest @ ..]) = expr.kind
- && let ExprKind::Path(QPath::TypeRelative(ty, seg)) = callee.kind
- && let TyKind::Path(QPath::LangItem(LangItem::FormatArguments, _, _)) = ty.kind
- && matches!(seg.ident.as_str(), "new_v1" | "new_v1_formatted")
- {
- let format_string = FormatString::new(cx, pieces)?;
-
- let mut parser = rpf::Parser::new(
- &format_string.unescaped,
- format_string.style,
- Some(format_string.snippet.clone()),
- // `format_string.unescaped` does not contain the appended newline
- false,
- rpf::ParseMode::Format,
- );
-
- let parsed_args = parser
- .by_ref()
- .filter_map(|piece| match piece {
- rpf::Piece::NextArgument(a) => Some(a),
- rpf::Piece::String(_) => None,
- })
- .collect_vec();
- if !parser.errors.is_empty() {
- return None;
- }
-
- let positions = if let Some(fmt_arg) = rest.first() {
- // If the argument contains format specs, `new_v1_formatted(_, _, fmt, _)`, parse
- // them.
-
- Either::Left(parse_rt_fmt(fmt_arg)?)
- } else {
- // If no format specs are given, the positions are in the given order and there are
- // no `precision`/`width`s to consider.
-
- Either::Right((0..).map(|n| ParamPosition {
- value: n,
- width: None,
- precision: None,
- }))
- };
-
- let values = FormatArgsValues::new(args, format_string.span.data());
-
- let args = izip!(positions, parsed_args, parser.arg_places)
- .map(|(position, parsed_arg, arg_span)| {
- Some(FormatArg {
- param: FormatParam::new(
- match parsed_arg.position {
- rpf::Position::ArgumentImplicitlyIs(_) => FormatParamKind::Implicit,
- rpf::Position::ArgumentIs(_) => FormatParamKind::Numbered,
- // NamedInline is handled by `FormatParam::new()`
- rpf::Position::ArgumentNamed(name) => FormatParamKind::Named(Symbol::intern(name)),
- },
- FormatParamUsage::Argument,
- position.value,
- parsed_arg.position_span,
- &values,
- )?,
- format: FormatSpec::new(parsed_arg.format, position, &values)?,
- span: span_from_inner(values.format_string_span, arg_span),
- })
- })
- .collect::<Option<Vec<_>>>()?;
-
- let mut explicit_values = values.value_args;
- // remove values generated for implicitly captured vars
- let len = explicit_values
- .iter()
- .take_while(|val| !format_string.span.contains(val.span))
- .count();
- explicit_values.truncate(len);
-
- let comma_spans = Self::comma_spans(cx, &explicit_values, format_string.span)?;
-
- Some(Self {
- format_string,
- args,
- newline,
- comma_spans,
- explicit_values,
- })
- } else {
- None
- }
- }
-
- pub fn find_nested(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, expn_id: ExpnId) -> Option<Self> {
- for_each_expr(expr, |e| {
- let e_ctxt = e.span.ctxt();
- if e_ctxt == expr.span.ctxt() {
- ControlFlow::Continue(Descend::Yes)
- } else if e_ctxt.outer_expn().is_descendant_of(expn_id) {
- if let Some(args) = FormatArgsExpn::parse(cx, e) {
- ControlFlow::Break(args)
- } else {
- ControlFlow::Continue(Descend::No)
- }
- } else {
- ControlFlow::Continue(Descend::No)
- }
- })
- }
-
- /// Source callsite span of all inputs
- pub fn inputs_span(&self) -> Span {
- match *self.explicit_values {
- [] => self.format_string.span,
- [.., last] => self
- .format_string
- .span
- .to(hygiene::walk_chain(last.span, self.format_string.span.ctxt())),
- }
- }
-
- /// Get the span of a value expanded to the previous comma, e.g. for the value `10`
- ///
- /// ```ignore
- /// format("{}.{}", 10, 11)
- /// // ^^^^
- /// ```
- pub fn value_with_prev_comma_span(&self, value_id: HirId) -> Option<Span> {
- for (comma_span, value) in zip(&self.comma_spans, &self.explicit_values) {
- if value.hir_id == value_id {
- return Some(comma_span.to(hygiene::walk_chain(value.span, comma_span.ctxt())));
- }
- }
-
- None
- }
-
- /// Iterator of all format params, both values and those referenced by `width`/`precision`s.
- pub fn params(&'tcx self) -> impl Iterator<Item = FormatParam<'tcx>> {
- self.args
- .iter()
- .flat_map(|arg| [Some(arg.param), arg.format.precision.param(), arg.format.width.param()])
- .flatten()
- }
-}
-
/// A node with a `HirId` and a `Span`
pub trait HirNode {
fn hir_id(&self) -> HirId;
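Not part of the diff: a minimal sketch of how a lint might consume the new `format_arg_removal_span` helper shown above when suggesting that an explicit argument be deleted. It assumes the helper and rustc's `FormatArgs` type are in scope; the message and applicability are placeholders, not taken from this change.

```rust
use rustc_ast::FormatArgs;
use rustc_errors::{Applicability, Diagnostic};

// Suggest deleting the explicit value at `index` together with the comma that
// precedes it, e.g. `, 10` in `format!("{}.{}", 10, 11)`.
fn suggest_remove_arg(diag: &mut Diagnostic, format_args: &FormatArgs, index: usize) {
    // `format_arg_removal_span` is the helper added in this diff; it returns
    // `None` when there is no argument at `index`.
    if let Some(span) = format_arg_removal_span(format_args, index) {
        diag.span_suggestion(
            span,
            "remove this argument",
            String::new(),
            Applicability::MachineApplicable,
        );
    }
}
```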
diff --git a/src/tools/clippy/clippy_utils/src/msrvs.rs b/src/tools/clippy/clippy_utils/src/msrvs.rs
index dbf9f3b62..e05de2dc9 100644
--- a/src/tools/clippy/clippy_utils/src/msrvs.rs
+++ b/src/tools/clippy/clippy_utils/src/msrvs.rs
@@ -19,6 +19,7 @@ macro_rules! msrv_aliases {
// names may refer to stabilized feature flags or library items
msrv_aliases! {
+ 1,68,0 { PATH_MAIN_SEPARATOR_STR }
1,65,0 { LET_ELSE }
1,62,0 { BOOL_THEN_SOME, DEFAULT_ENUM_ATTRIBUTE }
1,58,0 { FORMAT_ARGS_CAPTURE, PATTERN_TRAIT_CHAR_ARRAY }
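Not part of the diff: the item behind the new `PATH_MAIN_SEPARATOR_STR` alias, `std::path::MAIN_SEPARATOR_STR`, was stabilized in Rust 1.68, so a lint gated on this alias should only suggest it when the crate's MSRV is at least 1.68. A small standalone illustration:

```rust
// Requires Rust 1.68+: the constant on the second line is the item the new
// MSRV alias gates.
fn main() {
    let old: String = std::path::MAIN_SEPARATOR.to_string(); // pre-1.68 spelling
    let new: &str = std::path::MAIN_SEPARATOR_STR; // stabilized in 1.68
    assert_eq!(old, new);
    println!("separator: {new}");
}
```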
diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs
index 4aae0f728..9be2d0eae 100644
--- a/src/tools/clippy/clippy_utils/src/paths.rs
+++ b/src/tools/clippy/clippy_utils/src/paths.rs
@@ -23,6 +23,7 @@ pub const CLONE_TRAIT_METHOD: [&str; 4] = ["core", "clone", "Clone", "clone"];
pub const CORE_ITER_CLONED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "cloned"];
pub const CORE_ITER_COPIED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "copied"];
pub const CORE_ITER_FILTER: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "filter"];
+pub const CORE_RESULT_OK_METHOD: [&str; 4] = ["core", "result", "Result", "ok"];
pub const CSTRING_AS_C_STR: [&str; 5] = ["alloc", "ffi", "c_str", "CString", "as_c_str"];
pub const DEFAULT_TRAIT_METHOD: [&str; 4] = ["core", "default", "Default", "default"];
pub const DEREF_MUT_TRAIT_METHOD: [&str; 5] = ["core", "ops", "deref", "DerefMut", "deref_mut"];
@@ -67,6 +68,7 @@ pub const PARKING_LOT_MUTEX_GUARD: [&str; 3] = ["lock_api", "mutex", "MutexGuard
pub const PARKING_LOT_RWLOCK_READ_GUARD: [&str; 3] = ["lock_api", "rwlock", "RwLockReadGuard"];
pub const PARKING_LOT_RWLOCK_WRITE_GUARD: [&str; 3] = ["lock_api", "rwlock", "RwLockWriteGuard"];
pub const PATH_BUF_AS_PATH: [&str; 4] = ["std", "path", "PathBuf", "as_path"];
+pub const PATH_MAIN_SEPARATOR: [&str; 3] = ["std", "path", "MAIN_SEPARATOR"];
pub const PATH_TO_PATH_BUF: [&str; 4] = ["std", "path", "Path", "to_path_buf"];
pub const PEEKABLE: [&str; 5] = ["core", "iter", "adapters", "peekable", "Peekable"];
pub const PERMISSIONS: [&str; 3] = ["std", "fs", "Permissions"];
@@ -112,6 +114,7 @@ pub const STDERR: [&str; 4] = ["std", "io", "stdio", "stderr"];
pub const STDOUT: [&str; 4] = ["std", "io", "stdio", "stdout"];
pub const CONVERT_IDENTITY: [&str; 3] = ["core", "convert", "identity"];
pub const STD_FS_CREATE_DIR: [&str; 3] = ["std", "fs", "create_dir"];
+pub const STD_IO_LINES: [&str; 3] = ["std", "io", "Lines"];
pub const STD_IO_SEEK: [&str; 3] = ["std", "io", "Seek"];
pub const STD_IO_SEEK_FROM_CURRENT: [&str; 4] = ["std", "io", "SeekFrom", "Current"];
pub const STD_IO_SEEKFROM_START: [&str; 4] = ["std", "io", "SeekFrom", "Start"];
diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
index 1a35fe050..354b6d71a 100644
--- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
+++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
@@ -37,7 +37,7 @@ pub fn is_min_const_fn<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, msrv: &Msrv)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
| ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
- ty::PredicateKind::AliasEq(..) => panic!("alias eq predicate on function: {predicate:#?}"),
+ ty::PredicateKind::AliasRelate(..) => panic!("alias relate predicate on function: {predicate:#?}"),
ty::PredicateKind::ObjectSafe(_) => panic!("object safe predicate on function: {predicate:#?}"),
ty::PredicateKind::ClosureKind(..) => panic!("closure kind predicate on function: {predicate:#?}"),
ty::PredicateKind::Subtype(_) => panic!("subtype predicate on function: {predicate:#?}"),
@@ -176,6 +176,10 @@ fn check_rvalue<'tcx>(
// FIXME(dyn-star)
unimplemented!()
},
+ Rvalue::Cast(CastKind::Transmute, _, _) => Err((
+ span,
+ "transmute can attempt to turn pointers into integers, so is unstable in const fn".into(),
+ )),
// binops are fine on integers
Rvalue::BinaryOp(_, box (lhs, rhs)) | Rvalue::CheckedBinaryOp(_, box (lhs, rhs)) => {
check_operand(tcx, lhs, span, body)?;
@@ -241,6 +245,7 @@ fn check_statement<'tcx>(
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
+ | StatementKind::PlaceMention(..)
| StatementKind::Coverage(..)
| StatementKind::ConstEvalCounter
| StatementKind::Nop => Ok(()),
@@ -296,17 +301,13 @@ fn check_terminator<'tcx>(
| TerminatorKind::Goto { .. }
| TerminatorKind::Return
| TerminatorKind::Resume
+ | TerminatorKind::Terminate
| TerminatorKind::Unreachable => Ok(()),
TerminatorKind::Drop { place, .. } => check_place(tcx, *place, span, body),
- TerminatorKind::DropAndReplace { place, value, .. } => {
- check_place(tcx, *place, span, body)?;
- check_operand(tcx, value, span, body)
- },
TerminatorKind::SwitchInt { discr, targets: _ } => check_operand(tcx, discr, span, body),
- TerminatorKind::Abort => Err((span, "abort is not stable in const fn".into())),
TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
Err((span, "const fn generators are unstable".into()))
},
@@ -317,7 +318,7 @@ fn check_terminator<'tcx>(
from_hir_call: _,
destination: _,
target: _,
- cleanup: _,
+ unwind: _,
fn_span: _,
} => {
let fn_ty = func.ty(body, tcx);
@@ -360,7 +361,7 @@ fn check_terminator<'tcx>(
expected: _,
msg: _,
target: _,
- cleanup: _,
+ unwind: _,
} => check_operand(tcx, cond, span, body),
TerminatorKind::InlineAsm { .. } => Err((span, "cannot use inline assembly in const fn".into())),
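Not part of the diff: an example of the kind of function the new `Rvalue::Cast(CastKind::Transmute, ..)` arm rejects. Because the transmute below turns a reference into an integer, `is_min_const_fn` now reports the body as unsuitable, so lints built on it (e.g. `missing_const_for_fn`) should not suggest adding `const`. Standalone sketch:

```rust
// A legal non-const function; making it `const fn` would rely on unstable
// pointer-to-integer transmutes, which the check now refuses.
fn addr_of_byte(x: &u8) -> usize {
    unsafe { std::mem::transmute::<&u8, usize>(x) }
}

fn main() {
    let b = 7u8;
    println!("{:#x}", addr_of_byte(&b));
}
```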
diff --git a/src/tools/clippy/clippy_utils/src/source.rs b/src/tools/clippy/clippy_utils/src/source.rs
index cd5dcfdac..62fa37660 100644
--- a/src/tools/clippy/clippy_utils/src/source.rs
+++ b/src/tools/clippy/clippy_utils/src/source.rs
@@ -12,24 +12,21 @@ use rustc_span::{BytePos, Pos, Span, SpanData, SyntaxContext, DUMMY_SP};
use std::borrow::Cow;
/// Like `snippet_block`, but add braces if the expr is not an `ExprKind::Block`.
-/// Also takes an `Option<String>` which can be put inside the braces.
-pub fn expr_block<'a, T: LintContext>(
+pub fn expr_block<T: LintContext>(
cx: &T,
expr: &Expr<'_>,
- option: Option<String>,
- default: &'a str,
+ outer: SyntaxContext,
+ default: &str,
indent_relative_to: Option<Span>,
-) -> Cow<'a, str> {
- let code = snippet_block(cx, expr.span, default, indent_relative_to);
- let string = option.unwrap_or_default();
- if expr.span.from_expansion() {
- Cow::Owned(format!("{{ {} }}", snippet_with_macro_callsite(cx, expr.span, default)))
+ app: &mut Applicability,
+) -> String {
+ let (code, from_macro) = snippet_block_with_context(cx, expr.span, outer, default, indent_relative_to, app);
+ if from_macro {
+ format!("{{ {code} }}")
} else if let ExprKind::Block(_, _) = expr.kind {
- Cow::Owned(format!("{code}{string}"))
- } else if string.is_empty() {
- Cow::Owned(format!("{{ {code} }}"))
+ format!("{code}")
} else {
- Cow::Owned(format!("{{\n{code};\n{string}\n}}"))
+ format!("{{ {code} }}")
}
}
@@ -229,12 +226,6 @@ fn snippet_with_applicability_sess<'a>(
)
}
-/// Same as `snippet`, but should only be used when it's clear that the input span is
-/// not a macro argument.
-pub fn snippet_with_macro_callsite<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
- snippet(cx, span.source_callsite(), default)
-}
-
/// Converts a span to a code snippet. Returns `None` if not available.
pub fn snippet_opt(cx: &impl LintContext, span: Span) -> Option<String> {
snippet_opt_sess(cx.sess(), span)
@@ -303,6 +294,19 @@ pub fn snippet_block_with_applicability<'a>(
reindent_multiline(snip, true, indent)
}
+pub fn snippet_block_with_context<'a>(
+ cx: &impl LintContext,
+ span: Span,
+ outer: SyntaxContext,
+ default: &'a str,
+ indent_relative_to: Option<Span>,
+ app: &mut Applicability,
+) -> (Cow<'a, str>, bool) {
+ let (snip, from_macro) = snippet_with_context(cx, span, outer, default, app);
+ let indent = indent_relative_to.and_then(|s| indent_of(cx, s));
+ (reindent_multiline(snip, true, indent), from_macro)
+}
+
/// Same as `snippet_with_applicability`, but first walks the span up to the given context. This
/// will result in the macro call, rather then the expansion, if the span is from a child context.
/// If the span is not from a child context, it will be used directly instead.
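Not part of the diff: a hedged sketch of calling the reworked `expr_block` from a lint, passing the outer `SyntaxContext` and an `Applicability` instead of the removed `Option<String>`/macro-callsite path. Function and variable names here are placeholders, not code from this change.

```rust
use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;

// Build a braced suggestion body for `body`, indented relative to the scrutinee.
fn braced_body<'tcx>(
    cx: &LateContext<'tcx>,
    scrutinee: &Expr<'tcx>,
    body: &Expr<'tcx>,
) -> (String, Applicability) {
    let mut app = Applicability::MachineApplicable;
    let block = clippy_utils::source::expr_block(
        cx,
        body,
        scrutinee.span.ctxt(), // outer context: falls back to the macro callsite if needed
        "..",                  // default snippet when the source is unavailable
        Some(scrutinee.span),  // indent relative to the scrutinee
        &mut app,              // downgraded if a placeholder snippet had to be used
    );
    (block, app)
}
```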
diff --git a/src/tools/clippy/clippy_utils/src/sugg.rs b/src/tools/clippy/clippy_utils/src/sugg.rs
index 07feadca2..a5a4a921d 100644
--- a/src/tools/clippy/clippy_utils/src/sugg.rs
+++ b/src/tools/clippy/clippy_utils/src/sugg.rs
@@ -1,9 +1,7 @@
//! Contains utility functions to generate suggestions.
#![deny(clippy::missing_docs_in_private_items)]
-use crate::source::{
- snippet, snippet_opt, snippet_with_applicability, snippet_with_context, snippet_with_macro_callsite,
-};
+use crate::source::{snippet, snippet_opt, snippet_with_applicability, snippet_with_context};
use crate::ty::expr_sig;
use crate::{get_parent_expr_for_hir, higher};
use rustc_ast::util::parser::AssocOp;
@@ -89,12 +87,6 @@ impl<'a> Sugg<'a> {
})
}
- /// Same as `hir`, but will use the pre expansion span if the `expr` was in a macro.
- pub fn hir_with_macro_callsite(cx: &LateContext<'_>, expr: &hir::Expr<'_>, default: &'a str) -> Self {
- let get_snippet = |span| snippet_with_macro_callsite(cx, span, default);
- Self::hir_from_snippet(expr, get_snippet)
- }
-
/// Same as `hir`, but first walks the span up to the given context. This will result in the
/// macro call, rather then the expansion, if the span is from a child context. If the span is
/// not from a child context, it will be used directly instead.
@@ -133,7 +125,6 @@ impl<'a> Sugg<'a> {
match expr.kind {
hir::ExprKind::AddrOf(..)
- | hir::ExprKind::Box(..)
| hir::ExprKind::If(..)
| hir::ExprKind::Let(..)
| hir::ExprKind::Closure { .. }
@@ -188,7 +179,6 @@ impl<'a> Sugg<'a> {
match expr.kind {
_ if expr.span.ctxt() != ctxt => Sugg::NonParen(snippet_with_context(cx, expr.span, ctxt, default, app).0),
ast::ExprKind::AddrOf(..)
- | ast::ExprKind::Box(..)
| ast::ExprKind::Closure { .. }
| ast::ExprKind::If(..)
| ast::ExprKind::Let(..)
diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs
index 25654e695..9449f0b55 100644
--- a/src/tools/clippy/clippy_utils/src/ty.rs
+++ b/src/tools/clippy/clippy_utils/src/ty.rs
@@ -16,9 +16,9 @@ use rustc_infer::infer::{
use rustc_lint::LateContext;
use rustc_middle::mir::interpret::{ConstValue, Scalar};
use rustc_middle::ty::{
- self, AdtDef, AliasTy, AssocKind, Binder, BoundRegion, DefIdTree, FnSig, IntTy, List, ParamEnv, Predicate,
- PredicateKind, Region, RegionKind, SubstsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
- TypeVisitor, UintTy, VariantDef, VariantDiscr,
+ self, layout::ValidityRequirement, AdtDef, AliasTy, AssocKind, Binder, BoundRegion, FnSig, IntTy, List, ParamEnv,
+ Predicate, PredicateKind, Region, RegionKind, SubstsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
+ TypeVisitableExt, TypeVisitor, UintTy, VariantDef, VariantDiscr,
};
use rustc_middle::ty::{GenericArg, GenericArgKind};
use rustc_span::symbol::Ident;
@@ -538,11 +538,26 @@ pub fn same_type_and_consts<'tcx>(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
}
/// Checks if a given type looks safe to be uninitialized.
-pub fn is_uninit_value_valid_for_ty(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
+pub fn is_uninit_value_valid_for_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ cx.tcx
+ .check_validity_requirement((ValidityRequirement::Uninit, cx.param_env.and(ty)))
+ .unwrap_or_else(|_| is_uninit_value_valid_for_ty_fallback(cx, ty))
+}
+
+/// A fallback for polymorphic types, which are not supported by `check_validity_requirement`.
+fn is_uninit_value_valid_for_ty_fallback<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
match *ty.kind() {
+ // The array length may be polymorphic, let's try the inner type.
ty::Array(component, _) => is_uninit_value_valid_for_ty(cx, component),
+ // Peek through tuples and try their fallbacks.
ty::Tuple(types) => types.iter().all(|ty| is_uninit_value_valid_for_ty(cx, ty)),
- ty::Adt(adt, _) => cx.tcx.lang_items().maybe_uninit() == Some(adt.did()),
+ // Unions are always fine right now.
+ // This includes MaybeUninit, the main way people use uninitialized memory.
+ // For ADTs, we could look at all fields just like for tuples, but that's potentially
+ // exponential, so let's avoid doing that for now. Code doing that is sketchy enough to
+ // just use an `#[allow()]`.
+ ty::Adt(adt, _) => adt.is_union(),
+ // For the rest, conservatively assume that they cannot be uninit.
_ => false,
}
}
@@ -1121,3 +1136,47 @@ pub fn make_normalized_projection<'tcx>(
}
helper(tcx, param_env, make_projection(tcx, container_id, assoc_ty, substs)?)
}
+
+/// Check if given type has inner mutability such as [`std::cell::Cell`] or [`std::cell::RefCell`]
+/// etc.
+pub fn is_interior_mut_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ match *ty.kind() {
+ ty::Ref(_, inner_ty, mutbl) => mutbl == Mutability::Mut || is_interior_mut_ty(cx, inner_ty),
+ ty::Slice(inner_ty) => is_interior_mut_ty(cx, inner_ty),
+ ty::Array(inner_ty, size) => {
+ size.try_eval_target_usize(cx.tcx, cx.param_env)
+ .map_or(true, |u| u != 0)
+ && is_interior_mut_ty(cx, inner_ty)
+ },
+ ty::Tuple(fields) => fields.iter().any(|ty| is_interior_mut_ty(cx, ty)),
+ ty::Adt(def, substs) => {
+            // Special case for collections in `std` whose impl of `Hash` or `Ord` delegates to

+ // that of their type parameters. Note: we don't include `HashSet` and `HashMap`
+ // because they have no impl for `Hash` or `Ord`.
+ let def_id = def.did();
+ let is_std_collection = [
+ sym::Option,
+ sym::Result,
+ sym::LinkedList,
+ sym::Vec,
+ sym::VecDeque,
+ sym::BTreeMap,
+ sym::BTreeSet,
+ sym::Rc,
+ sym::Arc,
+ ]
+ .iter()
+ .any(|diag_item| cx.tcx.is_diagnostic_item(*diag_item, def_id));
+ let is_box = Some(def_id) == cx.tcx.lang_items().owned_box();
+ if is_std_collection || is_box {
+ // The type is mutable if any of its type parameters are
+ substs.types().any(|ty| is_interior_mut_ty(cx, ty))
+ } else {
+ !ty.has_escaping_bound_vars()
+ && cx.tcx.layout_of(cx.param_env.and(ty)).is_ok()
+ && !ty.is_freeze(cx.tcx, cx.param_env)
+ }
+ },
+ _ => false,
+ }
+}
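Not part of the diff: `is_interior_mut_ty` operates on rustc's `Ty`, but the distinction it draws can be shown with plain Rust values — `Freeze` types like `Vec<i32>` are not interior-mutable, while `Cell` (and any of the listed std collections parameterized by it) is:

```rust
use std::cell::Cell;

fn main() {
    let frozen: Vec<i32> = vec![1, 2, 3]; // not interior-mutable
    let cells: Vec<Cell<i32>> = vec![Cell::new(1), Cell::new(2)]; // interior-mutable via its type parameter
    cells[0].set(42); // mutation without a `mut` binding
    println!("{} {}", frozen[0], cells[0].get());
}
```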
diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs
index d27a20bd4..1dc19bac9 100644
--- a/src/tools/clippy/clippy_utils/src/visitors.rs
+++ b/src/tools/clippy/clippy_utils/src/visitors.rs
@@ -599,10 +599,7 @@ pub fn for_each_unconsumed_temporary<'tcx, B>(
| ExprKind::Let(&Let { init: e, .. }) => {
helper(typeck, false, e, f)?;
},
- ExprKind::Block(&Block { expr: Some(e), .. }, _)
- | ExprKind::Box(e)
- | ExprKind::Cast(e, _)
- | ExprKind::Unary(_, e) => {
+ ExprKind::Block(&Block { expr: Some(e), .. }, _) | ExprKind::Cast(e, _) | ExprKind::Unary(_, e) => {
helper(typeck, true, e, f)?;
},
ExprKind::Call(callee, args) => {
diff --git a/src/tools/clippy/declare_clippy_lint/Cargo.toml b/src/tools/clippy/declare_clippy_lint/Cargo.toml
index 80eee3681..5c9f76dbb 100644
--- a/src/tools/clippy/declare_clippy_lint/Cargo.toml
+++ b/src/tools/clippy/declare_clippy_lint/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "declare_clippy_lint"
-version = "0.1.69"
+version = "0.1.70"
edition = "2021"
publish = false
diff --git a/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md b/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md
index fcd7abbf3..ffb99cde4 100644
--- a/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md
+++ b/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md
@@ -35,7 +35,7 @@ relicensing are archived on GitHub. We also have saved Wayback Machine copies of
The usernames of commenters on these issues can be found in relicense_comments.txt
-There are a couple people in relicense_comments.txt who are not found in contributors.txt:
+There are a few people in relicense_comments.txt who are not found in contributors.txt:
- @EpocSquadron has [made minor text contributions to the
README](https://github.com/rust-lang/rust-clippy/commits?author=EpocSquadron) which have since been overwritten, and
@@ -55,7 +55,7 @@ There are a couple people in relicense_comments.txt who are not found in contrib
we rewrote (see below)
-Two of these contributors had nonminor contributions (#2184, #427) requiring a rewrite, carried out in #3251
+Two of these contributors had non-minor contributions (#2184, #427) requiring a rewrite, carried out in #3251
([archive](http://web.archive.org/web/20181005192411/https://github.com/rust-lang-nursery/rust-clippy/pull/3251),
[screenshot](https://user-images.githubusercontent.com/1617736/46573515-5cb69580-c94b-11e8-86e5-b456452121b2.png))
diff --git a/src/tools/clippy/lintcheck/Cargo.toml b/src/tools/clippy/lintcheck/Cargo.toml
index 653121af5..27d32f390 100644
--- a/src/tools/clippy/lintcheck/Cargo.toml
+++ b/src/tools/clippy/lintcheck/Cargo.toml
@@ -8,12 +8,16 @@ repository = "https://github.com/rust-lang/rust-clippy"
categories = ["development-tools"]
edition = "2021"
publish = false
+default-run = "lintcheck"
[dependencies]
+anyhow = "1.0.69"
cargo_metadata = "0.15.3"
-clap = "4.1.4"
+clap = { version = "4.1.8", features = ["derive", "env"] }
+crates_io_api = "0.8.1"
crossbeam-channel = "0.5.6"
flate2 = "1.0"
+indicatif = "0.17.3"
rayon = "1.5.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.85"
@@ -24,3 +28,11 @@ walkdir = "2.3"
[features]
deny-warnings = []
+
+[[bin]]
+name = "lintcheck"
+path = "src/main.rs"
+
+[[bin]]
+name = "popular-crates"
+path = "src/popular-crates.rs"
diff --git a/src/tools/clippy/lintcheck/README.md b/src/tools/clippy/lintcheck/README.md
index 6142de5e3..faf3ce909 100644
--- a/src/tools/clippy/lintcheck/README.md
+++ b/src/tools/clippy/lintcheck/README.md
@@ -16,7 +16,7 @@ or
cargo lintcheck
```
-By default the logs will be saved into
+By default, the logs will be saved into
`lintcheck-logs/lintcheck_crates_logs.txt`.
You can set a custom sources.toml by adding `--crates-toml custom.toml` or using
@@ -25,6 +25,15 @@ the repo root.
The results will then be saved to `lintcheck-logs/custom_logs.toml`.
+The `custom.toml` file may be built from the most recently downloaded crates on
+<https://crates.io> by using the `popular-crates` binary from the `lintcheck`
+directory. For example, to retrieve the 100 most recently downloaded crates:
+
+```
+cargo run --release --bin popular-crates -- -n 100 custom.toml
+```
+
+
### Configuring the Crate Sources
The sources to check are saved in a `toml` file. There are three types of
diff --git a/src/tools/clippy/lintcheck/src/config.rs b/src/tools/clippy/lintcheck/src/config.rs
index e0244ddce..3f01e9bb0 100644
--- a/src/tools/clippy/lintcheck/src/config.rs
+++ b/src/tools/clippy/lintcheck/src/config.rs
@@ -1,131 +1,79 @@
-use clap::{Arg, ArgAction, ArgMatches, Command};
-use std::env;
-use std::path::PathBuf;
+use clap::Parser;
+use std::{num::NonZeroUsize, path::PathBuf};
-fn get_clap_config() -> ArgMatches {
- Command::new("lintcheck")
- .about("run clippy on a set of crates and check output")
- .args([
- Arg::new("only")
- .action(ArgAction::Set)
- .value_name("CRATE")
- .long("only")
- .help("Only process a single crate of the list"),
- Arg::new("crates-toml")
- .action(ArgAction::Set)
- .value_name("CRATES-SOURCES-TOML-PATH")
- .long("crates-toml")
- .help("Set the path for a crates.toml where lintcheck should read the sources from"),
- Arg::new("threads")
- .action(ArgAction::Set)
- .value_name("N")
- .value_parser(clap::value_parser!(usize))
- .short('j')
- .long("jobs")
- .help("Number of threads to use, 0 automatic choice"),
- Arg::new("fix")
- .long("fix")
- .help("Runs cargo clippy --fix and checks if all suggestions apply"),
- Arg::new("filter")
- .long("filter")
- .action(ArgAction::Append)
- .value_name("clippy_lint_name")
- .help("Apply a filter to only collect specified lints, this also overrides `allow` attributes"),
- Arg::new("markdown")
- .long("markdown")
- .help("Change the reports table to use markdown links"),
- Arg::new("recursive")
- .long("recursive")
- .help("Run clippy on the dependencies of crates specified in crates-toml")
- .conflicts_with("threads")
- .conflicts_with("fix"),
- ])
- .get_matches()
-}
-
-#[derive(Debug, Clone)]
+#[derive(Clone, Debug, Parser)]
pub(crate) struct LintcheckConfig {
- /// max number of jobs to spawn (default 1)
+ /// Number of threads to use (default: all unless --fix or --recursive)
+ #[clap(
+ long = "jobs",
+ short = 'j',
+ value_name = "N",
+ default_value_t = 0,
+ hide_default_value = true
+ )]
pub max_jobs: usize,
- /// we read the sources to check from here
+ /// Set the path for a crates.toml where lintcheck should read the sources from
+ #[clap(
+ long = "crates-toml",
+ value_name = "CRATES-SOURCES-TOML-PATH",
+ default_value = "lintcheck/lintcheck_crates.toml",
+ hide_default_value = true,
+ env = "LINTCHECK_TOML",
+ hide_env = true
+ )]
pub sources_toml_path: PathBuf,
- /// we save the clippy lint results here
- pub lintcheck_results_path: PathBuf,
- /// Check only a specified package
+ /// File to save the clippy lint results here
+ #[clap(skip = "")]
+ pub lintcheck_results_path: PathBuf, // Overridden in new()
+ /// Only process a single crate on the list
+ #[clap(long, value_name = "CRATE")]
pub only: Option<String>,
- /// whether to just run --fix and not collect all the warnings
+ /// Runs cargo clippy --fix and checks if all suggestions apply
+ #[clap(long, conflicts_with("max_jobs"))]
pub fix: bool,
- /// A list of lints that this lintcheck run should focus on
+ /// Apply a filter to only collect specified lints, this also overrides `allow` attributes
+ #[clap(long = "filter", value_name = "clippy_lint_name", use_value_delimiter = true)]
pub lint_filter: Vec<String>,
- /// Indicate if the output should support markdown syntax
+ /// Change the reports table to use markdown links
+ #[clap(long)]
pub markdown: bool,
- /// Run clippy on the dependencies of crates
+ /// Run clippy on the dependencies of crates specified in crates-toml
+ #[clap(long, conflicts_with("max_jobs"))]
pub recursive: bool,
}
impl LintcheckConfig {
pub fn new() -> Self {
- let clap_config = get_clap_config();
-
- // first, check if we got anything passed via the LINTCHECK_TOML env var,
- // if not, ask clap if we got any value for --crates-toml <foo>
- // if not, use the default "lintcheck/lintcheck_crates.toml"
- let sources_toml = env::var("LINTCHECK_TOML").unwrap_or_else(|_| {
- clap_config
- .get_one::<String>("crates-toml")
- .map_or("lintcheck/lintcheck_crates.toml", |s| &**s)
- .into()
- });
-
- let markdown = clap_config.contains_id("markdown");
- let sources_toml_path = PathBuf::from(sources_toml);
+ let mut config = LintcheckConfig::parse();
// for the path where we save the lint results, get the filename without extension (so for
// wasd.toml, use "wasd"...)
- let filename: PathBuf = sources_toml_path.file_stem().unwrap().into();
- let lintcheck_results_path = PathBuf::from(format!(
+ let filename: PathBuf = config.sources_toml_path.file_stem().unwrap().into();
+ config.lintcheck_results_path = PathBuf::from(format!(
"lintcheck-logs/{}_logs.{}",
filename.display(),
- if markdown { "md" } else { "txt" }
+ if config.markdown { "md" } else { "txt" }
));
- // look at the --threads arg, if 0 is passed, ask rayon rayon how many threads it would spawn and
- // use half of that for the physical core count
- // by default use a single thread
- let max_jobs = match clap_config.get_one::<usize>("threads") {
- Some(&0) => {
- // automatic choice
- // Rayon seems to return thread count so half that for core count
- rayon::current_num_threads() / 2
- },
- Some(&threads) => threads,
- // no -j passed, use a single thread
- None => 1,
+        // if `--jobs` is 0 (the default), use all available threads, or a single thread for --fix/--recursive
+ if config.max_jobs == 0 {
+ config.max_jobs = if config.fix || config.recursive {
+ 1
+ } else {
+ std::thread::available_parallelism().map_or(1, NonZeroUsize::get)
+ };
};
- let lint_filter: Vec<String> = clap_config
- .get_many::<String>("filter")
- .map(|iter| {
- iter.map(|lint_name| {
- let mut filter = lint_name.replace('_', "-");
- if !filter.starts_with("clippy::") {
- filter.insert_str(0, "clippy::");
- }
- filter
- })
- .collect()
- })
- .unwrap_or_default();
-
- LintcheckConfig {
- max_jobs,
- sources_toml_path,
- lintcheck_results_path,
- only: clap_config.get_one::<String>("only").map(String::from),
- fix: clap_config.contains_id("fix"),
- lint_filter,
- markdown,
- recursive: clap_config.contains_id("recursive"),
+ for lint_name in &mut config.lint_filter {
+ *lint_name = format!(
+ "clippy::{}",
+ lint_name
+ .strip_prefix("clippy::")
+ .unwrap_or(lint_name)
+ .replace('_', "-")
+ );
}
+
+ config
}
}
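Not part of the diff: the new filter loop in `LintcheckConfig::new` normalizes every `--filter` value to the dashed `clippy::…` form. A standalone reproduction of that normalization:

```rust
// Mirrors the loop body above: strip an optional `clippy::` prefix, replace
// underscores with dashes, then re-add the prefix.
fn normalize(lint_name: &str) -> String {
    format!(
        "clippy::{}",
        lint_name.strip_prefix("clippy::").unwrap_or(lint_name).replace('_', "-")
    )
}

fn main() {
    assert_eq!(normalize("dbg_macro"), "clippy::dbg-macro");
    assert_eq!(normalize("clippy::dbg_macro"), "clippy::dbg-macro");
    println!("filter normalization ok");
}
```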
diff --git a/src/tools/clippy/lintcheck/src/popular-crates.rs b/src/tools/clippy/lintcheck/src/popular-crates.rs
new file mode 100644
index 000000000..fdab984ad
--- /dev/null
+++ b/src/tools/clippy/lintcheck/src/popular-crates.rs
@@ -0,0 +1,65 @@
+#![deny(clippy::pedantic)]
+
+use clap::Parser;
+use crates_io_api::{CratesQueryBuilder, Sort, SyncClient};
+use indicatif::ProgressBar;
+use std::collections::HashSet;
+use std::fs::File;
+use std::io::{BufWriter, Write};
+use std::path::PathBuf;
+use std::time::Duration;
+
+#[derive(Parser)]
+struct Opts {
+ /// Output TOML file name
+ output: PathBuf,
+ /// Number of crate names to download
+ #[clap(short, long, default_value_t = 100)]
+ number: usize,
+ /// Do not output progress
+ #[clap(short, long)]
+ quiet: bool,
+}
+
+fn main() -> anyhow::Result<()> {
+ let opts = Opts::parse();
+ let mut output = BufWriter::new(File::create(opts.output)?);
+ output.write_all(b"[crates]\n")?;
+ let client = SyncClient::new(
+ "clippy/lintcheck (github.com/rust-lang/rust-clippy/)",
+ Duration::from_secs(1),
+ )?;
+ let mut seen_crates = HashSet::new();
+ let pb = if opts.quiet {
+ None
+ } else {
+ Some(ProgressBar::new(opts.number as u64))
+ };
+ let mut query = CratesQueryBuilder::new()
+ .sort(Sort::RecentDownloads)
+ .page_size(100)
+ .build();
+ while seen_crates.len() < opts.number {
+ let retrieved = client.crates(query.clone())?.crates;
+ if retrieved.is_empty() {
+ eprintln!("No more than {} crates available from API", seen_crates.len());
+ break;
+ }
+ for c in retrieved {
+ if seen_crates.insert(c.name.clone()) {
+ output.write_all(
+ format!(
+ "{} = {{ name = '{}', versions = ['{}'] }}\n",
+ c.name, c.name, c.max_version
+ )
+ .as_bytes(),
+ )?;
+ if let Some(pb) = &pb {
+ pb.inc(1);
+ }
+ }
+ }
+ query.set_page(query.page() + 1);
+ }
+ Ok(())
+}
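Not part of the diff: the TOML line `popular-crates` writes for each crate, reproduced from the format string above; the crate name and version used here are placeholders.

```rust
fn main() {
    let (name, max_version) = ("rand", "0.8.5"); // placeholder crate data
    // Same format string as in `popular-crates.rs` above.
    println!("{} = {{ name = '{}', versions = ['{}'] }}", name, name, max_version);
    // prints: rand = { name = 'rand', versions = ['0.8.5'] }
}
```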
diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain
index cfe845ec7..91e8ccea1 100644
--- a/src/tools/clippy/rust-toolchain
+++ b/src/tools/clippy/rust-toolchain
@@ -1,3 +1,3 @@
[toolchain]
-channel = "nightly-2023-02-25"
+channel = "nightly-2023-04-06"
components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"]
diff --git a/src/tools/clippy/rustc_tools_util/README.md b/src/tools/clippy/rustc_tools_util/README.md
index eefc661f9..e197ea048 100644
--- a/src/tools/clippy/rustc_tools_util/README.md
+++ b/src/tools/clippy/rustc_tools_util/README.md
@@ -49,6 +49,8 @@ The changelog for `rustc_tools_util` is available under:
## License
+<!-- REUSE-IgnoreStart -->
+
Copyright 2014-2022 The Rust Project Developers
Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
@@ -56,3 +58,5 @@ http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
<LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
option. All files in the project carrying such notice may not be
copied, modified, or distributed except according to those terms.
+
+<!-- REUSE-IgnoreEnd -->
diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs
index dd183362f..718bc41fb 100644
--- a/src/tools/clippy/src/driver.rs
+++ b/src/tools/clippy/src/driver.rs
@@ -1,6 +1,6 @@
#![feature(rustc_private)]
#![feature(let_chains)]
-#![feature(once_cell)]
+#![feature(lazy_cell)]
#![feature(lint_reasons)]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
// warn on lints, that are included in `rust-lang/rust`s bootstrap
@@ -130,7 +130,7 @@ impl rustc_driver::Callbacks for ClippyCallbacks {
#[allow(rustc::bad_opt_access)]
fn config(&mut self, config: &mut interface::Config) {
let conf_path = clippy_lints::lookup_conf_file();
- let conf_path_string = if let Ok(Some(path)) = &conf_path {
+ let conf_path_string = if let Ok((Some(path), _)) = &conf_path {
path.to_str().map(String::from)
} else {
None
@@ -176,7 +176,7 @@ Common options:
--rustc Pass all args to rustc
-V, --version Print version info and exit
-Other options are the same as `cargo check`.
+For the other options see `cargo check --help`.
To allow or deny a lint from the command line you can use `cargo clippy --`
with:
diff --git a/src/tools/clippy/src/main.rs b/src/tools/clippy/src/main.rs
index 82147eba3..c5e9b96cf 100644
--- a/src/tools/clippy/src/main.rs
+++ b/src/tools/clippy/src/main.rs
@@ -18,7 +18,7 @@ Common options:
-V, --version Print version info and exit
--explain LINT Print the documentation for a given lint
-Other options are the same as `cargo check`.
+For the other options see `cargo check --help`.
To allow or deny a lint from the command line you can use `cargo clippy --`
with:
diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs
index c10ee969c..57890ff31 100644
--- a/src/tools/clippy/tests/compile-test.rs
+++ b/src/tools/clippy/tests/compile-test.rs
@@ -1,5 +1,5 @@
#![feature(test)] // compiletest_rs requires this attribute
-#![feature(once_cell)]
+#![feature(lazy_cell)]
#![feature(is_sorted)]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![warn(rust_2018_idioms, unused_lifetimes)]
diff --git a/src/tools/clippy/tests/dogfood.rs b/src/tools/clippy/tests/dogfood.rs
index 6d0022f7a..68a878e9a 100644
--- a/src/tools/clippy/tests/dogfood.rs
+++ b/src/tools/clippy/tests/dogfood.rs
@@ -3,10 +3,11 @@
//!
//! See [Eating your own dog food](https://en.wikipedia.org/wiki/Eating_your_own_dog_food) for context
-#![feature(once_cell)]
+#![feature(lazy_cell)]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![warn(rust_2018_idioms, unused_lifetimes)]
+use itertools::Itertools;
use std::path::PathBuf;
use std::process::Command;
use test_utils::IS_RUSTC_TEST_SUITE;
@@ -19,8 +20,10 @@ fn dogfood_clippy() {
return;
}
+ let mut failed_packages = Vec::new();
+
// "" is the root package
- for package in &[
+ for package in [
"",
"clippy_dev",
"clippy_lints",
@@ -28,8 +31,16 @@ fn dogfood_clippy() {
"lintcheck",
"rustc_tools_util",
] {
- run_clippy_for_package(package, &["-D", "clippy::all", "-D", "clippy::pedantic"]);
+ if !run_clippy_for_package(package, &["-D", "clippy::all", "-D", "clippy::pedantic"]) {
+ failed_packages.push(if package.is_empty() { "root" } else { package });
+ }
}
+
+ assert!(
+ failed_packages.is_empty(),
+ "Dogfood failed for packages `{}`",
+ failed_packages.iter().format(", "),
+ );
}
#[test]
@@ -71,7 +82,7 @@ fn run_metadata_collection_lint() {
run_clippy_for_package("clippy_lints", &["-A", "unfulfilled_lint_expectations"]);
}
-fn run_clippy_for_package(project: &str, args: &[&str]) {
+fn run_clippy_for_package(project: &str, args: &[&str]) -> bool {
let root_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let mut command = Command::new(&*test_utils::CARGO_CLIPPY_PATH);
@@ -107,5 +118,5 @@ fn run_clippy_for_package(project: &str, args: &[&str]) {
println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
- assert!(output.status.success());
+ output.status.success()
}
diff --git a/src/tools/clippy/tests/lint_message_convention.rs b/src/tools/clippy/tests/lint_message_convention.rs
index abd0d1bc5..8feea800f 100644
--- a/src/tools/clippy/tests/lint_message_convention.rs
+++ b/src/tools/clippy/tests/lint_message_convention.rs
@@ -1,4 +1,4 @@
-#![feature(once_cell)]
+#![feature(lazy_cell)]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![warn(rust_2018_idioms, unused_lifetimes)]
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr
index 98697e001..aa1b3c638 100644
--- a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr
@@ -1,2 +1,4 @@
-Using config file `$SRC_DIR/.clippy.toml`
-Warning: `$SRC_DIR/clippy.toml` will be ignored.
+warning: using config file `$SRC_DIR/.clippy.toml`, `$SRC_DIR/clippy.toml` will be ignored
+
+warning: 1 warning emitted
+
diff --git a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
index 7ed0ef027..b4619e980 100644
--- a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
+++ b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
@@ -9,3 +9,4 @@ note: we would appreciate a bug report: https://github.com/rust-lang/rust-clippy
note: Clippy version: foo
+thread panicked while panicking. aborting.
diff --git a/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr b/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr
index ee9417621..1be0cda12 100644
--- a/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr
+++ b/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr
@@ -11,29 +11,29 @@ LL - println!("val='{}'", local_i32);
LL + println!("val='{local_i32}'");
|
-error: literal with an empty format string
- --> $DIR/uninlined_format_args.rs:10:35
+error: variables can be used directly in the `format!` string
+ --> $DIR/uninlined_format_args.rs:10:5
|
LL | println!("Hello {} is {:.*}", "x", local_i32, local_f64);
- | ^^^
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = note: `-D clippy::print-literal` implied by `-D warnings`
-help: try this
+help: change this to
|
LL - println!("Hello {} is {:.*}", "x", local_i32, local_f64);
-LL + println!("Hello x is {:.*}", local_i32, local_f64);
+LL + println!("Hello {} is {local_f64:.local_i32$}", "x");
|
-error: variables can be used directly in the `format!` string
- --> $DIR/uninlined_format_args.rs:10:5
+error: literal with an empty format string
+ --> $DIR/uninlined_format_args.rs:10:35
|
LL | println!("Hello {} is {:.*}", "x", local_i32, local_f64);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | ^^^
|
-help: change this to
+ = note: `-D clippy::print-literal` implied by `-D warnings`
+help: try this
|
LL - println!("Hello {} is {:.*}", "x", local_i32, local_f64);
-LL + println!("Hello {} is {local_f64:.local_i32$}", "x");
+LL + println!("Hello x is {:.*}", local_i32, local_f64);
|
error: variables can be used directly in the `format!` string
diff --git a/src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.rs b/src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.rs
new file mode 100644
index 000000000..7f623c7a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.rs
@@ -0,0 +1,10 @@
+#![allow(unused)]
+#![warn(clippy::large_const_arrays, clippy::large_stack_arrays)]
+
+const ABOVE: [u8; 11] = [0; 11];
+const BELOW: [u8; 10] = [0; 10];
+
+fn main() {
+ let above = [0u8; 11];
+ let below = [0u8; 10];
+}
diff --git a/src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.stderr b/src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.stderr
new file mode 100644
index 000000000..ac017b209
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/array_size_threshold/array_size_threshold.stderr
@@ -0,0 +1,29 @@
+error: large array defined as const
+ --> $DIR/array_size_threshold.rs:4:1
+ |
+LL | const ABOVE: [u8; 11] = [0; 11];
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+ |
+ = note: `-D clippy::large-const-arrays` implied by `-D warnings`
+
+error: allocating a local array larger than 10 bytes
+ --> $DIR/array_size_threshold.rs:4:25
+ |
+LL | const ABOVE: [u8; 11] = [0; 11];
+ | ^^^^^^^
+ |
+ = help: consider allocating on the heap with `vec![0; 11].into_boxed_slice()`
+ = note: `-D clippy::large-stack-arrays` implied by `-D warnings`
+
+error: allocating a local array larger than 10 bytes
+ --> $DIR/array_size_threshold.rs:8:17
+ |
+LL | let above = [0u8; 11];
+ | ^^^^^^^^^
+ |
+ = help: consider allocating on the heap with `vec![0u8; 11].into_boxed_slice()`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/array_size_threshold/clippy.toml b/src/tools/clippy/tests/ui-toml/array_size_threshold/clippy.toml
new file mode 100644
index 000000000..3f1fe9a12
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/array_size_threshold/clippy.toml
@@ -0,0 +1 @@
+array-size-threshold = 10
diff --git a/src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/clippy.toml b/src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/clippy.toml
new file mode 100644
index 000000000..5f304987a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/clippy.toml
@@ -0,0 +1 @@
+avoid-breaking-exported-api = true
diff --git a/src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/extra_unused_type_parameters.rs b/src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/extra_unused_type_parameters.rs
new file mode 100644
index 000000000..565523245
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/extra_unused_type_parameters/extra_unused_type_parameters.rs
@@ -0,0 +1,9 @@
+pub struct S;
+
+impl S {
+ pub fn exported_fn<T>() {
+ unimplemented!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/ifs_same_cond/clippy.toml b/src/tools/clippy/tests/ui-toml/ifs_same_cond/clippy.toml
new file mode 100644
index 000000000..90a36ecd9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/ifs_same_cond/clippy.toml
@@ -0,0 +1 @@
+ignore-interior-mutability = ["std::cell::Cell"]
diff --git a/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.rs b/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.rs
new file mode 100644
index 000000000..d623ac7e0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.rs
@@ -0,0 +1,18 @@
+#![warn(clippy::ifs_same_cond)]
+#![allow(clippy::if_same_then_else, clippy::comparison_chain)]
+
+fn main() {}
+
+fn issue10272() {
+ use std::cell::Cell;
+
+    // Because the `ignore-interior-mutability` configuration
+    // is set to ignore `std::cell::Cell`, the following `get()` calls
+    // should trigger a warning
+ let x = Cell::new(true);
+ if x.get() {
+ } else if !x.take() {
+ } else if x.get() {
+ } else {
+ }
+}
diff --git a/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr b/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr
new file mode 100644
index 000000000..2841f62bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/ifs_same_cond/ifs_same_cond.stderr
@@ -0,0 +1,15 @@
+error: this `if` has the same condition as a previous `if`
+ --> $DIR/ifs_same_cond.rs:15:15
+ |
+LL | } else if x.get() {
+ | ^^^^^^^
+ |
+note: same as this
+ --> $DIR/ifs_same_cond.rs:13:8
+ |
+LL | if x.get() {
+ | ^^^^^^^
+ = note: `-D clippy::ifs-same-cond` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/large_futures/clippy.toml b/src/tools/clippy/tests/ui-toml/large_futures/clippy.toml
new file mode 100644
index 000000000..61bb17fdf
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_futures/clippy.toml
@@ -0,0 +1 @@
+future-size-threshold = 1024
diff --git a/src/tools/clippy/tests/ui-toml/large_futures/large_futures.rs b/src/tools/clippy/tests/ui-toml/large_futures/large_futures.rs
new file mode 100644
index 000000000..4158df8b5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_futures/large_futures.rs
@@ -0,0 +1,27 @@
+#![warn(clippy::large_futures)]
+
+fn main() {}
+
+pub async fn should_warn() {
+ let x = [0u8; 1024];
+ async {}.await;
+ dbg!(x);
+}
+
+pub async fn should_not_warn() {
+ let x = [0u8; 1020];
+ async {}.await;
+ dbg!(x);
+}
+
+pub async fn bar() {
+ should_warn().await;
+
+ async {
+ let x = [0u8; 1024];
+ dbg!(x);
+ }
+ .await;
+
+ should_not_warn().await;
+}
diff --git a/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr b/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr
new file mode 100644
index 000000000..b92734de2
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_futures/large_futures.stderr
@@ -0,0 +1,10 @@
+error: large future with a size of 1026 bytes
+ --> $DIR/large_futures.rs:18:5
+ |
+LL | should_warn().await;
+ | ^^^^^^^^^^^^^ help: consider `Box::pin` on it: `Box::pin(should_warn())`
+ |
+ = note: `-D clippy::large-futures` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
index 6a246afac..8447c3172 100644
--- a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
+++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
@@ -24,6 +24,7 @@ error: error reading Clippy's configuration file `$DIR/clippy.toml`: unknown fie
enforced-import-renames
enum-variant-name-threshold
enum-variant-size-threshold
+ future-size-threshold
ignore-interior-mutability
large-error-threshold
literal-representation-threshold
diff --git a/src/tools/clippy/tests/ui/allow_attributes.fixed b/src/tools/clippy/tests/ui/allow_attributes.fixed
new file mode 100644
index 000000000..b8dd0619e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/allow_attributes.fixed
@@ -0,0 +1,25 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::allow_attributes)]
+#![feature(lint_reasons)]
+
+fn main() {}
+
+// Using clippy::needless_borrow just as a placeholder, it isn't relevant.
+
+// Should lint
+#[expect(dead_code)]
+struct T1;
+
+struct T2; // Should not lint
+#[deny(clippy::needless_borrow)] // Should not lint
+struct T3;
+#[warn(clippy::needless_borrow)] // Should not lint
+struct T4;
+// `panic = "unwind"` should always be true
+#[cfg_attr(panic = "unwind", expect(dead_code))]
+struct CfgT;
+
+fn ignore_inner_attr() {
+ #![allow(unused)] // Should not lint
+}
diff --git a/src/tools/clippy/tests/ui/allow_attributes.rs b/src/tools/clippy/tests/ui/allow_attributes.rs
new file mode 100644
index 000000000..295f56090
--- /dev/null
+++ b/src/tools/clippy/tests/ui/allow_attributes.rs
@@ -0,0 +1,25 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::allow_attributes)]
+#![feature(lint_reasons)]
+
+fn main() {}
+
+// Using clippy::needless_borrow just as a placeholder, it isn't relevant.
+
+// Should lint
+#[allow(dead_code)]
+struct T1;
+
+struct T2; // Should not lint
+#[deny(clippy::needless_borrow)] // Should not lint
+struct T3;
+#[warn(clippy::needless_borrow)] // Should not lint
+struct T4;
+// `panic = "unwind"` should always be true
+#[cfg_attr(panic = "unwind", allow(dead_code))]
+struct CfgT;
+
+fn ignore_inner_attr() {
+ #![allow(unused)] // Should not lint
+}
diff --git a/src/tools/clippy/tests/ui/allow_attributes.stderr b/src/tools/clippy/tests/ui/allow_attributes.stderr
new file mode 100644
index 000000000..681837e9e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/allow_attributes.stderr
@@ -0,0 +1,16 @@
+error: #[allow] attribute found
+ --> $DIR/allow_attributes.rs:11:3
+ |
+LL | #[allow(dead_code)]
+ | ^^^^^ help: replace it with: `expect`
+ |
+ = note: `-D clippy::allow-attributes` implied by `-D warnings`
+
+error: #[allow] attribute found
+ --> $DIR/allow_attributes.rs:20:30
+ |
+LL | #[cfg_attr(panic = "unwind", allow(dead_code))]
+ | ^^^^^ help: replace it with: `expect`
+
+error: aborting due to 2 previous errors
+
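A minimal sketch (not part of the diff) of what the suggested `expect` replacement looks like; like the test files above, it needs nightly and the unstable `lint_reasons` feature, and the struct name is made up for illustration:

// Sketch only: `#[allow]` silences `dead_code` unconditionally, while
// `#[expect]` also verifies the lint actually fires here and reports
// `unfulfilled_lint_expectations` when it does not.
#![feature(lint_reasons)]

#[expect(dead_code)]
struct Unused; // hypothetical name, used only for this illustration

fn main() {}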
diff --git a/src/tools/clippy/tests/ui/almost_complete_range.fixed b/src/tools/clippy/tests/ui/almost_complete_range.fixed
index 6046addf7..a4bf7fe18 100644
--- a/src/tools/clippy/tests/ui/almost_complete_range.fixed
+++ b/src/tools/clippy/tests/ui/almost_complete_range.fixed
@@ -1,6 +1,6 @@
// run-rustfix
// edition:2018
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![feature(exclusive_range_pattern)]
#![feature(stmt_expr_attributes)]
@@ -9,33 +9,10 @@
#![allow(clippy::needless_parens_on_range_literals)]
#![allow(clippy::double_parens)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! a {
- () => {
- 'a'
- };
-}
-macro_rules! A {
- () => {
- 'A'
- };
-}
-macro_rules! zero {
- () => {
- '0'
- };
-}
-
-macro_rules! b {
- () => {
- let _ = 'a'..='z';
- let _ = 'A'..='Z';
- let _ = '0'..='9';
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
#[rustfmt::skip]
{
@@ -56,9 +33,9 @@ fn main() {
let _ = b'B'..b'Z';
let _ = b'1'..b'9';
- let _ = a!()..='z';
- let _ = A!()..='Z';
- let _ = zero!()..='9';
+ let _ = inline!('a')..='z';
+ let _ = inline!('A')..='Z';
+ let _ = inline!('0')..='9';
let _ = match 0u8 {
b'a'..=b'z' if true => 1,
@@ -80,8 +57,16 @@ fn main() {
_ => 7,
};
- almost_complete_range!();
- b!();
+ external!(
+ let _ = 'a'..'z';
+ let _ = 'A'..'Z';
+ let _ = '0'..'9';
+ );
+ inline!(
+ let _ = 'a'..='z';
+ let _ = 'A'..='Z';
+ let _ = '0'..='9';
+ );
}
#[clippy::msrv = "1.25"]
diff --git a/src/tools/clippy/tests/ui/almost_complete_range.rs b/src/tools/clippy/tests/ui/almost_complete_range.rs
index ae7e07ab8..8237c3a13 100644
--- a/src/tools/clippy/tests/ui/almost_complete_range.rs
+++ b/src/tools/clippy/tests/ui/almost_complete_range.rs
@@ -1,6 +1,6 @@
// run-rustfix
// edition:2018
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![feature(exclusive_range_pattern)]
#![feature(stmt_expr_attributes)]
@@ -9,33 +9,10 @@
#![allow(clippy::needless_parens_on_range_literals)]
#![allow(clippy::double_parens)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! a {
- () => {
- 'a'
- };
-}
-macro_rules! A {
- () => {
- 'A'
- };
-}
-macro_rules! zero {
- () => {
- '0'
- };
-}
-
-macro_rules! b {
- () => {
- let _ = 'a'..'z';
- let _ = 'A'..'Z';
- let _ = '0'..'9';
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
#[rustfmt::skip]
{
@@ -56,9 +33,9 @@ fn main() {
let _ = b'B'..b'Z';
let _ = b'1'..b'9';
- let _ = a!()..'z';
- let _ = A!()..'Z';
- let _ = zero!()..'9';
+ let _ = inline!('a')..'z';
+ let _ = inline!('A')..'Z';
+ let _ = inline!('0')..'9';
let _ = match 0u8 {
b'a'..b'z' if true => 1,
@@ -80,8 +57,16 @@ fn main() {
_ => 7,
};
- almost_complete_range!();
- b!();
+ external!(
+ let _ = 'a'..'z';
+ let _ = 'A'..'Z';
+ let _ = '0'..'9';
+ );
+ inline!(
+ let _ = 'a'..'z';
+ let _ = 'A'..'Z';
+ let _ = '0'..'9';
+ );
}
#[clippy::msrv = "1.25"]
diff --git a/src/tools/clippy/tests/ui/almost_complete_range.stderr b/src/tools/clippy/tests/ui/almost_complete_range.stderr
index a7a532878..34521c13a 100644
--- a/src/tools/clippy/tests/ui/almost_complete_range.stderr
+++ b/src/tools/clippy/tests/ui/almost_complete_range.stderr
@@ -1,5 +1,5 @@
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:42:17
+ --> $DIR/almost_complete_range.rs:19:17
|
LL | let _ = ('a') ..'z';
| ^^^^^^--^^^
@@ -9,7 +9,7 @@ LL | let _ = ('a') ..'z';
= note: `-D clippy::almost-complete-range` implied by `-D warnings`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:43:17
+ --> $DIR/almost_complete_range.rs:20:17
|
LL | let _ = 'A' .. ('Z');
| ^^^^--^^^^^^
@@ -17,7 +17,7 @@ LL | let _ = 'A' .. ('Z');
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:44:17
+ --> $DIR/almost_complete_range.rs:21:17
|
LL | let _ = ((('0'))) .. ('9');
| ^^^^^^^^^^--^^^^^^
@@ -25,7 +25,7 @@ LL | let _ = ((('0'))) .. ('9');
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:51:13
+ --> $DIR/almost_complete_range.rs:28:13
|
LL | let _ = (b'a')..(b'z');
| ^^^^^^--^^^^^^
@@ -33,7 +33,7 @@ LL | let _ = (b'a')..(b'z');
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:52:13
+ --> $DIR/almost_complete_range.rs:29:13
|
LL | let _ = b'A'..b'Z';
| ^^^^--^^^^
@@ -41,7 +41,7 @@ LL | let _ = b'A'..b'Z';
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:53:13
+ --> $DIR/almost_complete_range.rs:30:13
|
LL | let _ = b'0'..b'9';
| ^^^^--^^^^
@@ -49,31 +49,31 @@ LL | let _ = b'0'..b'9';
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:59:13
+ --> $DIR/almost_complete_range.rs:36:13
|
-LL | let _ = a!()..'z';
- | ^^^^--^^^
- | |
- | help: use an inclusive range: `..=`
+LL | let _ = inline!('a')..'z';
+ | ^^^^^^^^^^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:60:13
+ --> $DIR/almost_complete_range.rs:37:13
|
-LL | let _ = A!()..'Z';
- | ^^^^--^^^
- | |
- | help: use an inclusive range: `..=`
+LL | let _ = inline!('A')..'Z';
+ | ^^^^^^^^^^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:61:13
+ --> $DIR/almost_complete_range.rs:38:13
|
-LL | let _ = zero!()..'9';
- | ^^^^^^^--^^^
- | |
- | help: use an inclusive range: `..=`
+LL | let _ = inline!('0')..'9';
+ | ^^^^^^^^^^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:64:9
+ --> $DIR/almost_complete_range.rs:41:9
|
LL | b'a'..b'z' if true => 1,
| ^^^^--^^^^
@@ -81,7 +81,7 @@ LL | b'a'..b'z' if true => 1,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:65:9
+ --> $DIR/almost_complete_range.rs:42:9
|
LL | b'A'..b'Z' if true => 2,
| ^^^^--^^^^
@@ -89,7 +89,7 @@ LL | b'A'..b'Z' if true => 2,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:66:9
+ --> $DIR/almost_complete_range.rs:43:9
|
LL | b'0'..b'9' if true => 3,
| ^^^^--^^^^
@@ -97,7 +97,7 @@ LL | b'0'..b'9' if true => 3,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:74:9
+ --> $DIR/almost_complete_range.rs:51:9
|
LL | 'a'..'z' if true => 1,
| ^^^--^^^
@@ -105,7 +105,7 @@ LL | 'a'..'z' if true => 1,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:75:9
+ --> $DIR/almost_complete_range.rs:52:9
|
LL | 'A'..'Z' if true => 2,
| ^^^--^^^
@@ -113,7 +113,7 @@ LL | 'A'..'Z' if true => 2,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:76:9
+ --> $DIR/almost_complete_range.rs:53:9
|
LL | '0'..'9' if true => 3,
| ^^^--^^^
@@ -121,46 +121,37 @@ LL | '0'..'9' if true => 3,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:33:17
+ --> $DIR/almost_complete_range.rs:66:17
|
LL | let _ = 'a'..'z';
| ^^^--^^^
| |
| help: use an inclusive range: `..=`
-...
-LL | b!();
- | ---- in this macro invocation
|
- = note: this error originates in the macro `b` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:34:17
+ --> $DIR/almost_complete_range.rs:67:17
|
LL | let _ = 'A'..'Z';
| ^^^--^^^
| |
| help: use an inclusive range: `..=`
-...
-LL | b!();
- | ---- in this macro invocation
|
- = note: this error originates in the macro `b` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:35:17
+ --> $DIR/almost_complete_range.rs:68:17
|
LL | let _ = '0'..'9';
| ^^^--^^^
| |
| help: use an inclusive range: `..=`
-...
-LL | b!();
- | ---- in this macro invocation
|
- = note: this error originates in the macro `b` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:90:9
+ --> $DIR/almost_complete_range.rs:75:9
|
LL | 'a'..'z' => 1,
| ^^^--^^^
@@ -168,7 +159,7 @@ LL | 'a'..'z' => 1,
| help: use an inclusive range: `...`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:91:9
+ --> $DIR/almost_complete_range.rs:76:9
|
LL | 'A'..'Z' => 2,
| ^^^--^^^
@@ -176,7 +167,7 @@ LL | 'A'..'Z' => 2,
| help: use an inclusive range: `...`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:92:9
+ --> $DIR/almost_complete_range.rs:77:9
|
LL | '0'..'9' => 3,
| ^^^--^^^
@@ -184,7 +175,7 @@ LL | '0'..'9' => 3,
| help: use an inclusive range: `...`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:99:13
+ --> $DIR/almost_complete_range.rs:84:13
|
LL | let _ = 'a'..'z';
| ^^^--^^^
@@ -192,7 +183,7 @@ LL | let _ = 'a'..'z';
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:100:13
+ --> $DIR/almost_complete_range.rs:85:13
|
LL | let _ = 'A'..'Z';
| ^^^--^^^
@@ -200,7 +191,7 @@ LL | let _ = 'A'..'Z';
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:101:13
+ --> $DIR/almost_complete_range.rs:86:13
|
LL | let _ = '0'..'9';
| ^^^--^^^
@@ -208,7 +199,7 @@ LL | let _ = '0'..'9';
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:103:9
+ --> $DIR/almost_complete_range.rs:88:9
|
LL | 'a'..'z' => 1,
| ^^^--^^^
@@ -216,7 +207,7 @@ LL | 'a'..'z' => 1,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:104:9
+ --> $DIR/almost_complete_range.rs:89:9
|
LL | 'A'..'Z' => 1,
| ^^^--^^^
@@ -224,7 +215,7 @@ LL | 'A'..'Z' => 1,
| help: use an inclusive range: `..=`
error: almost complete ascii range
- --> $DIR/almost_complete_range.rs:105:9
+ --> $DIR/almost_complete_range.rs:90:9
|
LL | '0'..'9' => 3,
| ^^^--^^^
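For reference, a small standalone sketch (not part of the diff) of why the lint prefers `..=` in these spots; the asserts just spell out the sizes of the two ranges:

// Sketch only: `..` excludes the upper bound, `..=` includes it.
fn main() {
    let half_open: Vec<char> = ('a'..'z').collect(); // misses 'z'
    let inclusive: Vec<char> = ('a'..='z').collect(); // full a-z
    assert_eq!(half_open.len(), 25);
    assert_eq!(inclusive.len(), 26);
    assert!(!half_open.contains(&'z'));
    assert!(inclusive.contains(&'z'));
}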
diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs
index 2611e3a78..3c06676d7 100644
--- a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs
+++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs
@@ -45,24 +45,32 @@ impl_arith!(
Div, Custom, Custom, div;
Mul, Custom, Custom, mul;
Rem, Custom, Custom, rem;
+ Shl, Custom, Custom, shl;
+ Shr, Custom, Custom, shr;
Sub, Custom, Custom, sub;
Add, Custom, &Custom, add;
Div, Custom, &Custom, div;
Mul, Custom, &Custom, mul;
Rem, Custom, &Custom, rem;
+ Shl, Custom, &Custom, shl;
+ Shr, Custom, &Custom, shr;
Sub, Custom, &Custom, sub;
Add, &Custom, Custom, add;
Div, &Custom, Custom, div;
Mul, &Custom, Custom, mul;
Rem, &Custom, Custom, rem;
+ Shl, &Custom, Custom, shl;
+ Shr, &Custom, Custom, shr;
Sub, &Custom, Custom, sub;
Add, &Custom, &Custom, add;
Div, &Custom, &Custom, div;
Mul, &Custom, &Custom, mul;
Rem, &Custom, &Custom, rem;
+ Shl, &Custom, &Custom, shl;
+ Shr, &Custom, &Custom, shr;
Sub, &Custom, &Custom, sub;
);
@@ -71,24 +79,32 @@ impl_assign_arith!(
DivAssign, Custom, Custom, div_assign;
MulAssign, Custom, Custom, mul_assign;
RemAssign, Custom, Custom, rem_assign;
+ ShlAssign, Custom, Custom, shl_assign;
+ ShrAssign, Custom, Custom, shr_assign;
SubAssign, Custom, Custom, sub_assign;
AddAssign, Custom, &Custom, add_assign;
DivAssign, Custom, &Custom, div_assign;
MulAssign, Custom, &Custom, mul_assign;
RemAssign, Custom, &Custom, rem_assign;
+ ShlAssign, Custom, &Custom, shl_assign;
+ ShrAssign, Custom, &Custom, shr_assign;
SubAssign, Custom, &Custom, sub_assign;
AddAssign, &Custom, Custom, add_assign;
DivAssign, &Custom, Custom, div_assign;
MulAssign, &Custom, Custom, mul_assign;
RemAssign, &Custom, Custom, rem_assign;
+ ShlAssign, &Custom, Custom, shl_assign;
+ ShrAssign, &Custom, Custom, shr_assign;
SubAssign, &Custom, Custom, sub_assign;
AddAssign, &Custom, &Custom, add_assign;
DivAssign, &Custom, &Custom, div_assign;
MulAssign, &Custom, &Custom, mul_assign;
RemAssign, &Custom, &Custom, rem_assign;
+ ShlAssign, &Custom, &Custom, shl_assign;
+ ShrAssign, &Custom, &Custom, shr_assign;
SubAssign, &Custom, &Custom, sub_assign;
);
@@ -297,6 +313,10 @@ pub fn unknown_ops_or_runtime_ops_that_can_overflow() {
_custom %= &Custom;
_custom *= Custom;
_custom *= &Custom;
+ _custom >>= Custom;
+ _custom >>= &Custom;
+ _custom <<= Custom;
+ _custom <<= &Custom;
_custom += -Custom;
_custom += &-Custom;
_custom -= -Custom;
@@ -307,6 +327,10 @@ pub fn unknown_ops_or_runtime_ops_that_can_overflow() {
_custom %= &-Custom;
_custom *= -Custom;
_custom *= &-Custom;
+ _custom >>= -Custom;
+ _custom >>= &-Custom;
+ _custom <<= -Custom;
+ _custom <<= &-Custom;
// Binary
_n = _n + 1;
@@ -347,6 +371,10 @@ pub fn unknown_ops_or_runtime_ops_that_can_overflow() {
_custom = Custom + &Custom;
_custom = &Custom + Custom;
_custom = &Custom + &Custom;
+ _custom = _custom >> _custom;
+ _custom = _custom >> &_custom;
+ _custom = Custom << _custom;
+ _custom = &Custom << _custom;
// Unary
_n = -_n;
@@ -397,4 +425,8 @@ pub fn integer_arithmetic() {
i ^= i;
}
+pub fn issue_10583(a: u16) -> u16 {
+ 10 / a
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr b/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr
index 17a2448fb..2c8ee2884 100644
--- a/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr
+++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr
@@ -1,5 +1,5 @@
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:270:5
+ --> $DIR/arithmetic_side_effects.rs:286:5
|
LL | _n += 1;
| ^^^^^^^
@@ -7,592 +7,652 @@ LL | _n += 1;
= note: `-D clippy::arithmetic-side-effects` implied by `-D warnings`
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:271:5
+ --> $DIR/arithmetic_side_effects.rs:287:5
|
LL | _n += &1;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:272:5
+ --> $DIR/arithmetic_side_effects.rs:288:5
|
LL | _n -= 1;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:273:5
+ --> $DIR/arithmetic_side_effects.rs:289:5
|
LL | _n -= &1;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:274:5
+ --> $DIR/arithmetic_side_effects.rs:290:5
|
LL | _n /= 0;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:275:5
+ --> $DIR/arithmetic_side_effects.rs:291:5
|
LL | _n /= &0;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:276:5
+ --> $DIR/arithmetic_side_effects.rs:292:5
|
LL | _n %= 0;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:277:5
+ --> $DIR/arithmetic_side_effects.rs:293:5
|
LL | _n %= &0;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:278:5
+ --> $DIR/arithmetic_side_effects.rs:294:5
|
LL | _n *= 2;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:279:5
+ --> $DIR/arithmetic_side_effects.rs:295:5
|
LL | _n *= &2;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:280:5
+ --> $DIR/arithmetic_side_effects.rs:296:5
|
LL | _n += -1;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:281:5
+ --> $DIR/arithmetic_side_effects.rs:297:5
|
LL | _n += &-1;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:282:5
+ --> $DIR/arithmetic_side_effects.rs:298:5
|
LL | _n -= -1;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:283:5
+ --> $DIR/arithmetic_side_effects.rs:299:5
|
LL | _n -= &-1;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:284:5
+ --> $DIR/arithmetic_side_effects.rs:300:5
|
LL | _n /= -0;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:285:5
+ --> $DIR/arithmetic_side_effects.rs:301:5
|
LL | _n /= &-0;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:286:5
+ --> $DIR/arithmetic_side_effects.rs:302:5
|
LL | _n %= -0;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:287:5
+ --> $DIR/arithmetic_side_effects.rs:303:5
|
LL | _n %= &-0;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:288:5
+ --> $DIR/arithmetic_side_effects.rs:304:5
|
LL | _n *= -2;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:289:5
+ --> $DIR/arithmetic_side_effects.rs:305:5
|
LL | _n *= &-2;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:290:5
+ --> $DIR/arithmetic_side_effects.rs:306:5
|
LL | _custom += Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:291:5
+ --> $DIR/arithmetic_side_effects.rs:307:5
|
LL | _custom += &Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:292:5
+ --> $DIR/arithmetic_side_effects.rs:308:5
|
LL | _custom -= Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:293:5
+ --> $DIR/arithmetic_side_effects.rs:309:5
|
LL | _custom -= &Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:294:5
+ --> $DIR/arithmetic_side_effects.rs:310:5
|
LL | _custom /= Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:295:5
+ --> $DIR/arithmetic_side_effects.rs:311:5
|
LL | _custom /= &Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:296:5
+ --> $DIR/arithmetic_side_effects.rs:312:5
|
LL | _custom %= Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:297:5
+ --> $DIR/arithmetic_side_effects.rs:313:5
|
LL | _custom %= &Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:298:5
+ --> $DIR/arithmetic_side_effects.rs:314:5
|
LL | _custom *= Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:299:5
+ --> $DIR/arithmetic_side_effects.rs:315:5
|
LL | _custom *= &Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:300:5
+ --> $DIR/arithmetic_side_effects.rs:316:5
+ |
+LL | _custom >>= Custom;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:317:5
+ |
+LL | _custom >>= &Custom;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:318:5
+ |
+LL | _custom <<= Custom;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:319:5
+ |
+LL | _custom <<= &Custom;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:320:5
|
LL | _custom += -Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:301:5
+ --> $DIR/arithmetic_side_effects.rs:321:5
|
LL | _custom += &-Custom;
| ^^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:302:5
+ --> $DIR/arithmetic_side_effects.rs:322:5
|
LL | _custom -= -Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:303:5
+ --> $DIR/arithmetic_side_effects.rs:323:5
|
LL | _custom -= &-Custom;
| ^^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:304:5
+ --> $DIR/arithmetic_side_effects.rs:324:5
|
LL | _custom /= -Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:305:5
+ --> $DIR/arithmetic_side_effects.rs:325:5
|
LL | _custom /= &-Custom;
| ^^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:306:5
+ --> $DIR/arithmetic_side_effects.rs:326:5
|
LL | _custom %= -Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:307:5
+ --> $DIR/arithmetic_side_effects.rs:327:5
|
LL | _custom %= &-Custom;
| ^^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:308:5
+ --> $DIR/arithmetic_side_effects.rs:328:5
|
LL | _custom *= -Custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:309:5
+ --> $DIR/arithmetic_side_effects.rs:329:5
|
LL | _custom *= &-Custom;
| ^^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:312:10
+ --> $DIR/arithmetic_side_effects.rs:330:5
+ |
+LL | _custom >>= -Custom;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:331:5
+ |
+LL | _custom >>= &-Custom;
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:332:5
+ |
+LL | _custom <<= -Custom;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:333:5
+ |
+LL | _custom <<= &-Custom;
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:336:10
|
LL | _n = _n + 1;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:313:10
+ --> $DIR/arithmetic_side_effects.rs:337:10
|
LL | _n = _n + &1;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:314:10
+ --> $DIR/arithmetic_side_effects.rs:338:10
|
LL | _n = 1 + _n;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:315:10
+ --> $DIR/arithmetic_side_effects.rs:339:10
|
LL | _n = &1 + _n;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:316:10
+ --> $DIR/arithmetic_side_effects.rs:340:10
|
LL | _n = _n - 1;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:317:10
+ --> $DIR/arithmetic_side_effects.rs:341:10
|
LL | _n = _n - &1;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:318:10
+ --> $DIR/arithmetic_side_effects.rs:342:10
|
LL | _n = 1 - _n;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:319:10
+ --> $DIR/arithmetic_side_effects.rs:343:10
|
LL | _n = &1 - _n;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:320:10
+ --> $DIR/arithmetic_side_effects.rs:344:10
|
LL | _n = _n / 0;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:321:10
+ --> $DIR/arithmetic_side_effects.rs:345:10
|
LL | _n = _n / &0;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:322:10
+ --> $DIR/arithmetic_side_effects.rs:346:10
|
LL | _n = _n % 0;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:323:10
+ --> $DIR/arithmetic_side_effects.rs:347:10
|
LL | _n = _n % &0;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:324:10
+ --> $DIR/arithmetic_side_effects.rs:348:10
|
LL | _n = _n * 2;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:325:10
+ --> $DIR/arithmetic_side_effects.rs:349:10
|
LL | _n = _n * &2;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:326:10
+ --> $DIR/arithmetic_side_effects.rs:350:10
|
LL | _n = 2 * _n;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:327:10
+ --> $DIR/arithmetic_side_effects.rs:351:10
|
LL | _n = &2 * _n;
| ^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:328:10
+ --> $DIR/arithmetic_side_effects.rs:352:10
|
LL | _n = 23 + &85;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:329:10
+ --> $DIR/arithmetic_side_effects.rs:353:10
|
LL | _n = &23 + 85;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:330:10
+ --> $DIR/arithmetic_side_effects.rs:354:10
|
LL | _n = &23 + &85;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:331:15
+ --> $DIR/arithmetic_side_effects.rs:355:15
|
LL | _custom = _custom + _custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:332:15
+ --> $DIR/arithmetic_side_effects.rs:356:15
|
LL | _custom = _custom + &_custom;
| ^^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:333:15
+ --> $DIR/arithmetic_side_effects.rs:357:15
|
LL | _custom = Custom + _custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:334:15
+ --> $DIR/arithmetic_side_effects.rs:358:15
|
LL | _custom = &Custom + _custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:335:15
+ --> $DIR/arithmetic_side_effects.rs:359:15
|
LL | _custom = _custom - Custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:336:15
+ --> $DIR/arithmetic_side_effects.rs:360:15
|
LL | _custom = _custom - &Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:337:15
+ --> $DIR/arithmetic_side_effects.rs:361:15
|
LL | _custom = Custom - _custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:338:15
+ --> $DIR/arithmetic_side_effects.rs:362:15
|
LL | _custom = &Custom - _custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:339:15
+ --> $DIR/arithmetic_side_effects.rs:363:15
|
LL | _custom = _custom / Custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:340:15
+ --> $DIR/arithmetic_side_effects.rs:364:15
|
LL | _custom = _custom / &Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:341:15
+ --> $DIR/arithmetic_side_effects.rs:365:15
|
LL | _custom = _custom % Custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:342:15
+ --> $DIR/arithmetic_side_effects.rs:366:15
|
LL | _custom = _custom % &Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:343:15
+ --> $DIR/arithmetic_side_effects.rs:367:15
|
LL | _custom = _custom * Custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:344:15
+ --> $DIR/arithmetic_side_effects.rs:368:15
|
LL | _custom = _custom * &Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:345:15
+ --> $DIR/arithmetic_side_effects.rs:369:15
|
LL | _custom = Custom * _custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:346:15
+ --> $DIR/arithmetic_side_effects.rs:370:15
|
LL | _custom = &Custom * _custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:347:15
+ --> $DIR/arithmetic_side_effects.rs:371:15
|
LL | _custom = Custom + &Custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:348:15
+ --> $DIR/arithmetic_side_effects.rs:372:15
|
LL | _custom = &Custom + Custom;
| ^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:349:15
+ --> $DIR/arithmetic_side_effects.rs:373:15
|
LL | _custom = &Custom + &Custom;
| ^^^^^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:352:10
+ --> $DIR/arithmetic_side_effects.rs:374:15
+ |
+LL | _custom = _custom >> _custom;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:375:15
+ |
+LL | _custom = _custom >> &_custom;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:376:15
+ |
+LL | _custom = Custom << _custom;
+ | ^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:377:15
+ |
+LL | _custom = &Custom << _custom;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:380:10
|
LL | _n = -_n;
| ^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:353:10
+ --> $DIR/arithmetic_side_effects.rs:381:10
|
LL | _n = -&_n;
| ^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:354:15
+ --> $DIR/arithmetic_side_effects.rs:382:15
|
LL | _custom = -_custom;
| ^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:355:15
+ --> $DIR/arithmetic_side_effects.rs:383:15
|
LL | _custom = -&_custom;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:364:5
+ --> $DIR/arithmetic_side_effects.rs:392:5
|
LL | 1 + i;
| ^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:365:5
+ --> $DIR/arithmetic_side_effects.rs:393:5
|
LL | i * 2;
| ^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:367:5
+ --> $DIR/arithmetic_side_effects.rs:394:5
+ |
+LL | 1 % i / 2;
+ | ^^^^^
+
+error: arithmetic operation that can potentially result in unexpected side-effects
+ --> $DIR/arithmetic_side_effects.rs:395:5
|
LL | i - 2 + 2 - i;
| ^^^^^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:368:5
+ --> $DIR/arithmetic_side_effects.rs:396:5
|
LL | -i;
| ^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:369:5
- |
-LL | i >> 1;
- | ^^^^^^
-
-error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:370:5
- |
-LL | i << 1;
- | ^^^^^^
-
-error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:379:5
+ --> $DIR/arithmetic_side_effects.rs:407:5
|
LL | i += 1;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:380:5
+ --> $DIR/arithmetic_side_effects.rs:408:5
|
LL | i -= 1;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:381:5
+ --> $DIR/arithmetic_side_effects.rs:409:5
|
LL | i *= 2;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:383:5
+ --> $DIR/arithmetic_side_effects.rs:411:5
|
LL | i /= 0;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:385:5
+ --> $DIR/arithmetic_side_effects.rs:413:5
|
LL | i /= var1;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:386:5
+ --> $DIR/arithmetic_side_effects.rs:414:5
|
LL | i /= var2;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:388:5
+ --> $DIR/arithmetic_side_effects.rs:416:5
|
LL | i %= 0;
| ^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:390:5
+ --> $DIR/arithmetic_side_effects.rs:418:5
|
LL | i %= var1;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:391:5
+ --> $DIR/arithmetic_side_effects.rs:419:5
|
LL | i %= var2;
| ^^^^^^^^^
error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:392:5
+ --> $DIR/arithmetic_side_effects.rs:429:5
|
-LL | i <<= 3;
- | ^^^^^^^
-
-error: arithmetic operation that can potentially result in unexpected side-effects
- --> $DIR/arithmetic_side_effects.rs:393:5
- |
-LL | i >>= 2;
- | ^^^^^^^
+LL | 10 / a
+ | ^^^^^^
-error: aborting due to 99 previous errors
+error: aborting due to 109 previous errors
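A minimal sketch (not part of the diff) of why the new `issue_10583` case is reported: the divisor is a runtime value, so the division can panic. `checked_div` is shown purely as one hedged alternative, not as the fix the lint mandates:

// Sketch only: `a` is a runtime value, so `10 / a` panics when a == 0.
fn issue_10583_style(a: u16) -> u16 {
    10 / a
}

// A hypothetical explicit alternative that surfaces the zero case.
fn divide_checked(a: u16) -> Option<u16> {
    10u16.checked_div(a)
}

fn main() {
    assert_eq!(issue_10583_style(5), 2);
    assert_eq!(divide_checked(0), None);
    assert_eq!(divide_checked(5), Some(2));
}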
diff --git a/src/tools/clippy/tests/ui/as_conversions.rs b/src/tools/clippy/tests/ui/as_conversions.rs
index ba4394def..c50d4088b 100644
--- a/src/tools/clippy/tests/ui/as_conversions.rs
+++ b/src/tools/clippy/tests/ui/as_conversions.rs
@@ -1,20 +1,15 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::as_conversions)]
#![allow(clippy::borrow_as_ptr)]
-#[macro_use]
-extern crate macro_rules;
-
-fn with_external_macro() {
- as_conv_with_arg!(0u32 as u64);
- as_conv!();
-}
+extern crate proc_macros;
+use proc_macros::external;
fn main() {
let i = 0u32 as u64;
let j = &i as *const u64 as *mut u64;
- with_external_macro();
+ external!(0u32 as u64);
}
diff --git a/src/tools/clippy/tests/ui/as_conversions.stderr b/src/tools/clippy/tests/ui/as_conversions.stderr
index f5d59e1e5..54037a649 100644
--- a/src/tools/clippy/tests/ui/as_conversions.stderr
+++ b/src/tools/clippy/tests/ui/as_conversions.stderr
@@ -1,5 +1,5 @@
error: using a potentially dangerous silent `as` conversion
- --> $DIR/as_conversions.rs:15:13
+ --> $DIR/as_conversions.rs:10:13
|
LL | let i = 0u32 as u64;
| ^^^^^^^^^^^
@@ -8,7 +8,7 @@ LL | let i = 0u32 as u64;
= note: `-D clippy::as-conversions` implied by `-D warnings`
error: using a potentially dangerous silent `as` conversion
- --> $DIR/as_conversions.rs:17:13
+ --> $DIR/as_conversions.rs:12:13
|
LL | let j = &i as *const u64 as *mut u64;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -16,7 +16,7 @@ LL | let j = &i as *const u64 as *mut u64;
= help: consider using a safe wrapper for this conversion
error: using a potentially dangerous silent `as` conversion
- --> $DIR/as_conversions.rs:17:13
+ --> $DIR/as_conversions.rs:12:13
|
LL | let j = &i as *const u64 as *mut u64;
| ^^^^^^^^^^^^^^^^
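A minimal sketch (not part of the diff) of the "safe wrapper" alternatives the note above alludes to, assuming edition 2021 so `TryFrom` is already in the prelude:

// Sketch only: widening with `From` is lossless, narrowing with
// `TryFrom` makes possible truncation explicit instead of silent.
fn main() {
    let i: u32 = 7;
    let wide: u64 = u64::from(i); // cannot lose information
    let narrow = u8::try_from(wide); // Result, not a silent cast
    assert_eq!(wide, 7);
    assert_eq!(narrow.unwrap(), 7u8);
}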
diff --git a/src/tools/clippy/tests/ui/async_yields_async.fixed b/src/tools/clippy/tests/ui/async_yields_async.fixed
index 3cf380d2b..579a63ea4 100644
--- a/src/tools/clippy/tests/ui/async_yields_async.fixed
+++ b/src/tools/clippy/tests/ui/async_yields_async.fixed
@@ -2,6 +2,7 @@
#![feature(lint_reasons)]
#![feature(async_closure)]
#![warn(clippy::async_yields_async)]
+#![allow(clippy::redundant_async_block)]
use core::future::Future;
use core::pin::Pin;
diff --git a/src/tools/clippy/tests/ui/async_yields_async.rs b/src/tools/clippy/tests/ui/async_yields_async.rs
index dd4131b60..5aec2fb50 100644
--- a/src/tools/clippy/tests/ui/async_yields_async.rs
+++ b/src/tools/clippy/tests/ui/async_yields_async.rs
@@ -2,6 +2,7 @@
#![feature(lint_reasons)]
#![feature(async_closure)]
#![warn(clippy::async_yields_async)]
+#![allow(clippy::redundant_async_block)]
use core::future::Future;
use core::pin::Pin;
diff --git a/src/tools/clippy/tests/ui/async_yields_async.stderr b/src/tools/clippy/tests/ui/async_yields_async.stderr
index 22ce1c6f6..7f7253483 100644
--- a/src/tools/clippy/tests/ui/async_yields_async.stderr
+++ b/src/tools/clippy/tests/ui/async_yields_async.stderr
@@ -1,5 +1,5 @@
error: an async construct yields a type which is itself awaitable
- --> $DIR/async_yields_async.rs:39:9
+ --> $DIR/async_yields_async.rs:40:9
|
LL | let _h = async {
| _____________________-
@@ -19,7 +19,7 @@ LL + }.await
|
error: an async construct yields a type which is itself awaitable
- --> $DIR/async_yields_async.rs:44:9
+ --> $DIR/async_yields_async.rs:45:9
|
LL | let _i = async {
| ____________________-
@@ -32,7 +32,7 @@ LL | | };
| |_____- outer async construct
error: an async construct yields a type which is itself awaitable
- --> $DIR/async_yields_async.rs:50:9
+ --> $DIR/async_yields_async.rs:51:9
|
LL | let _j = async || {
| ________________________-
@@ -51,7 +51,7 @@ LL + }.await
|
error: an async construct yields a type which is itself awaitable
- --> $DIR/async_yields_async.rs:55:9
+ --> $DIR/async_yields_async.rs:56:9
|
LL | let _k = async || {
| _______________________-
@@ -64,7 +64,7 @@ LL | | };
| |_____- outer async construct
error: an async construct yields a type which is itself awaitable
- --> $DIR/async_yields_async.rs:57:23
+ --> $DIR/async_yields_async.rs:58:23
|
LL | let _l = async || CustomFutureType;
| ^^^^^^^^^^^^^^^^
@@ -74,7 +74,7 @@ LL | let _l = async || CustomFutureType;
| help: consider awaiting this value: `CustomFutureType.await`
error: an async construct yields a type which is itself awaitable
- --> $DIR/async_yields_async.rs:63:9
+ --> $DIR/async_yields_async.rs:64:9
|
LL | let _m = async || {
| _______________________-
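A minimal sketch (not part of the diff) of the pattern this lint targets, using `std::future::ready` as a stand-in for the test's `CustomFutureType`:

// Sketch only: the first block's output is itself a future (what the
// lint flags); awaiting inside instead yields the value, as the
// "consider awaiting this value" help above suggests.
use std::future::ready;

fn main() {
    let _yields_future = async { ready(1) };
    let _yields_value = async { ready(1).await };
}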
diff --git a/src/tools/clippy/tests/ui/author/blocks.stdout b/src/tools/clippy/tests/ui/author/blocks.stdout
index c6acf24c2..eb3e5189c 100644
--- a/src/tools/clippy/tests/ui/author/blocks.stdout
+++ b/src/tools/clippy/tests/ui/author/blocks.stdout
@@ -43,11 +43,7 @@ if let ExprKind::Block(block, None) = expr.kind
if let ExprKind::Closure(CaptureBy::Value, fn_decl, body_id, _, None) = expr.kind
&& let FnRetTy::DefaultReturn(_) = fn_decl.output
&& expr1 = &cx.tcx.hir().body(body_id).value
- && let ExprKind::Call(func, args) = expr1.kind
- && let ExprKind::Path(ref qpath) = func.kind
- && matches!(qpath, QPath::LangItem(LangItem::IdentityFuture, _))
- && args.len() == 1
- && let ExprKind::Closure(CaptureBy::Value, fn_decl1, body_id1, _, Some(Movability::Static)) = args[0].kind
+ && let ExprKind::Closure(CaptureBy::Value, fn_decl1, body_id1, _, Some(Movability::Static)) = expr1.kind
&& let FnRetTy::DefaultReturn(_) = fn_decl1.output
&& expr2 = &cx.tcx.hir().body(body_id1).value
&& let ExprKind::Block(block, None) = expr2.kind
diff --git a/src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs b/src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs
deleted file mode 100644
index 3d917e3dc..000000000
--- a/src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-#[macro_export]
-macro_rules! undocd_unsafe {
- () => {
- pub unsafe fn oy_vey() {
- unimplemented!();
- }
- };
-}
-#[macro_export]
-macro_rules! undocd_safe {
- () => {
- pub fn vey_oy() {
- unimplemented!();
- }
- };
-}
diff --git a/src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs b/src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs
deleted file mode 100644
index 1eb77c531..000000000
--- a/src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-#[macro_export]
-macro_rules! implicit_hasher_fn {
- () => {
- pub fn f(input: &HashMap<u32, u32>) {}
- };
-}
diff --git a/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs b/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs
index a13af5652..a9bb61451 100644
--- a/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs
+++ b/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs
@@ -3,21 +3,6 @@
//! Used to test that certain lints don't trigger in imported external macros
#[macro_export]
-macro_rules! foofoo {
- () => {
- loop {}
- };
-}
-
-#[macro_export]
-macro_rules! must_use_unit {
- () => {
- #[must_use]
- fn foo() {}
- };
-}
-
-#[macro_export]
macro_rules! try_err {
() => {
pub fn try_err_fn() -> Result<i32, i32> {
@@ -37,84 +22,6 @@ macro_rules! string_add {
}
#[macro_export]
-macro_rules! take_external {
- ($s:expr) => {
- std::mem::replace($s, Default::default())
- };
-}
-
-#[macro_export]
-macro_rules! option_env_unwrap_external {
- ($env: expr) => {
- option_env!($env).unwrap()
- };
- ($env: expr, $message: expr) => {
- option_env!($env).expect($message)
- };
-}
-
-#[macro_export]
-macro_rules! ref_arg_binding {
- () => {
- let ref _y = 42;
- };
-}
-
-#[macro_export]
-macro_rules! ref_arg_function {
- () => {
- fn fun_example(ref _x: usize) {}
- };
-}
-
-#[macro_export]
-macro_rules! as_conv_with_arg {
- (0u32 as u64) => {
- ()
- };
-}
-
-#[macro_export]
-macro_rules! as_conv {
- () => {
- 0u32 as u64
- };
-}
-
-#[macro_export]
-macro_rules! large_enum_variant {
- () => {
- enum LargeEnumInMacro {
- A(i32),
- B([i32; 8000]),
- }
- };
-}
-
-#[macro_export]
-macro_rules! field_reassign_with_default {
- () => {
- #[derive(Default)]
- struct A {
- pub i: i32,
- pub j: i64,
- }
- fn lint() {
- let mut a: A = Default::default();
- a.i = 42;
- a;
- }
- };
-}
-
-#[macro_export]
-macro_rules! default_numeric_fallback {
- () => {
- let x = 22;
- };
-}
-
-#[macro_export]
macro_rules! mut_mut {
() => {
let mut_mut_ty: &mut &mut u32 = &mut &mut 1u32;
@@ -122,49 +29,11 @@ macro_rules! mut_mut {
}
#[macro_export]
-macro_rules! ptr_as_ptr_cast {
- ($ptr: ident) => {
- $ptr as *const i32
- };
-}
-
-#[macro_export]
-macro_rules! manual_rem_euclid {
+macro_rules! issue_10421 {
() => {
- let value: i32 = 5;
- let _: i32 = ((value % 4) + 4) % 4;
- };
-}
-
-#[macro_export]
-macro_rules! equatable_if_let {
- ($a:ident) => {{ if let 2 = $a {} }};
-}
-
-#[macro_export]
-macro_rules! almost_complete_range {
- () => {
- let _ = 'a'..'z';
- let _ = 'A'..'Z';
- let _ = '0'..'9';
- };
-}
-
-#[macro_export]
-macro_rules! unsafe_macro {
- () => {
- unsafe {
- *core::ptr::null::<()>();
- *core::ptr::null::<()>();
- }
- };
-}
-
-#[macro_export]
-macro_rules! needless_lifetime {
- () => {
- fn needless_lifetime<'a>(x: &'a u8) -> &'a u8 {
- unimplemented!()
- }
+ let mut a = 1;
+ let mut b = 2;
+ a = b;
+ b = a;
};
}
diff --git a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
index ecb55d8cb..7ed8a28db 100644
--- a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
+++ b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
@@ -13,7 +13,7 @@ pub mod inner {
// RE-EXPORT
// this will stick in `inner` module
- pub use macro_rules::foofoo;
+ pub use macro_rules::mut_mut;
pub use macro_rules::try_err;
pub mod nested {
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs
deleted file mode 100644
index 8ea631f2b..000000000
--- a/src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs
+++ /dev/null
@@ -1,32 +0,0 @@
-// compile-flags: --emit=link
-// no-prefer-dynamic
-
-#![crate_type = "proc-macro"]
-
-extern crate proc_macro;
-
-use proc_macro::{token_stream::IntoIter, Group, Span, TokenStream, TokenTree};
-
-#[proc_macro]
-pub fn with_span(input: TokenStream) -> TokenStream {
- let mut iter = input.into_iter();
- let span = iter.next().unwrap().span();
- let mut res = TokenStream::new();
- write_with_span(span, iter, &mut res);
- res
-}
-
-fn write_with_span(s: Span, input: IntoIter, out: &mut TokenStream) {
- for mut tt in input {
- if let TokenTree::Group(g) = tt {
- let mut stream = TokenStream::new();
- write_with_span(s, g.stream().into_iter(), &mut stream);
- let mut group = Group::new(g.delimiter(), stream);
- group.set_span(s);
- out.extend([TokenTree::Group(group)]);
- } else {
- tt.set_span(s);
- out.extend([tt]);
- }
- }
-}
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs
new file mode 100644
index 000000000..3d5beab1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs
@@ -0,0 +1,474 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(let_chains)]
+#![feature(proc_macro_span)]
+#![allow(dead_code)]
+
+extern crate proc_macro;
+
+use core::mem;
+use proc_macro::{
+ token_stream::IntoIter,
+ Delimiter::{self, Brace, Parenthesis},
+ Group, Ident, Literal, Punct,
+ Spacing::{self, Alone, Joint},
+ Span, TokenStream, TokenTree as TT,
+};
+
+type Result<T> = core::result::Result<T, TokenStream>;
+
+/// Make a `compile_error!` pointing to the given span.
+fn make_error(msg: &str, span: Span) -> TokenStream {
+ TokenStream::from_iter([
+ TT::Ident(Ident::new("compile_error", span)),
+ TT::Punct(punct_with_span('!', Alone, span)),
+ TT::Group({
+ let mut msg = Literal::string(msg);
+ msg.set_span(span);
+ group_with_span(Parenthesis, TokenStream::from_iter([TT::Literal(msg)]), span)
+ }),
+ ])
+}
+
+fn expect_tt<T>(tt: Option<TT>, f: impl FnOnce(TT) -> Option<T>, expected: &str, span: Span) -> Result<T> {
+ match tt {
+ None => Err(make_error(
+ &format!("unexpected end of input, expected {expected}"),
+ span,
+ )),
+ Some(tt) => {
+ let span = tt.span();
+ match f(tt) {
+ Some(x) => Ok(x),
+ None => Err(make_error(&format!("unexpected token, expected {expected}"), span)),
+ }
+ },
+ }
+}
+
+fn punct_with_span(c: char, spacing: Spacing, span: Span) -> Punct {
+ let mut p = Punct::new(c, spacing);
+ p.set_span(span);
+ p
+}
+
+fn group_with_span(delimiter: Delimiter, stream: TokenStream, span: Span) -> Group {
+ let mut g = Group::new(delimiter, stream);
+ g.set_span(span);
+ g
+}
+
+/// Token used to escape the following token from the macro's span rules.
+const ESCAPE_CHAR: char = '$';
+
+/// Takes a single token followed by a sequence of tokens. Returns the sequence of tokens with their
+/// span set to that of the first token. Tokens may be escaped with either `$ident` or `$(tokens)`.
+#[proc_macro]
+pub fn with_span(input: TokenStream) -> TokenStream {
+ let mut iter = input.into_iter();
+ let span = iter.next().unwrap().span();
+ let mut res = TokenStream::new();
+ if let Err(e) = write_with_span(span, iter, &mut res) {
+ e
+ } else {
+ res
+ }
+}
+
+/// Takes a sequence of tokens and returns the tokens with the span set such that they appear to be
+/// from an external macro. Tokens may be escaped with either `$ident` or `$(tokens)`.
+#[proc_macro]
+pub fn external(input: TokenStream) -> TokenStream {
+ let mut res = TokenStream::new();
+ if let Err(e) = write_with_span(Span::mixed_site(), input.into_iter(), &mut res) {
+ e
+ } else {
+ res
+ }
+}
+
+/// Copies all the tokens, replacing all their spans with the given span. Tokens can be escaped
+/// either by `$ident` or `$(tokens)`.
+fn write_with_span(s: Span, mut input: IntoIter, out: &mut TokenStream) -> Result<()> {
+ while let Some(tt) = input.next() {
+ match tt {
+ TT::Punct(p) if p.as_char() == ESCAPE_CHAR => {
+ expect_tt(
+ input.next(),
+ |tt| match tt {
+ tt @ (TT::Ident(_) | TT::Literal(_)) => {
+ out.extend([tt]);
+ Some(())
+ },
+ TT::Punct(mut p) if p.as_char() == ESCAPE_CHAR => {
+ p.set_span(s);
+ out.extend([TT::Punct(p)]);
+ Some(())
+ },
+ TT::Group(g) if g.delimiter() == Parenthesis => {
+ out.extend([TT::Group(group_with_span(Delimiter::None, g.stream(), g.span()))]);
+ Some(())
+ },
+ _ => None,
+ },
+ "an ident, a literal, or parenthesized tokens",
+ p.span(),
+ )?;
+ },
+ TT::Group(g) => {
+ let mut stream = TokenStream::new();
+ write_with_span(s, g.stream().into_iter(), &mut stream)?;
+ out.extend([TT::Group(group_with_span(g.delimiter(), stream, s))]);
+ },
+ mut tt => {
+ tt.set_span(s);
+ out.extend([tt]);
+ },
+ }
+ }
+ Ok(())
+}
+
+/// Within the item this attribute is attached to, an `inline!` macro is available which expands the
+/// contained tokens as though they came from a macro expansion.
+///
+/// Within the `inline!` macro, any token preceded by `$` is passed as though it were an argument
+/// with an automatically chosen fragment specifier. `$ident` will be passed as `ident`, `$1` or
+/// `$"literal"` will be passed as `literal`, `$'lt` will be passed as `lifetime`, and `$(...)` will
+/// pass the contained tokens as a `tt` sequence (the wrapping parentheses are removed). If another
+/// specifier is required, it can be specified within parentheses like `$(@expr ...)`. This will
+/// expand the remaining tokens as a single argument.
+///
+/// Multiple `inline!` macros may be nested within each other. This will expand as nested macro
+/// calls. However, any arguments will be passed as though they came from the outermost context.
+#[proc_macro_attribute]
+pub fn inline_macros(args: TokenStream, input: TokenStream) -> TokenStream {
+ let mut args = args.into_iter();
+ let mac_name = match args.next() {
+ Some(TT::Ident(name)) => Some(name),
+ Some(tt) => {
+ return make_error(
+ "unexpected argument, expected either an ident or no arguments",
+ tt.span(),
+ );
+ },
+ None => None,
+ };
+ if let Some(tt) = args.next() {
+ return make_error(
+ "unexpected argument, expected either an ident or no arguments",
+ tt.span(),
+ );
+ };
+
+ let mac_name = if let Some(mac_name) = mac_name {
+ Ident::new(&format!("__inline_mac_{mac_name}"), Span::call_site())
+ } else {
+ let mut input = match LookaheadIter::new(input.clone().into_iter()) {
+ Some(x) => x,
+ None => return input,
+ };
+ loop {
+ match input.next() {
+ None => break Ident::new("__inline_mac", Span::call_site()),
+ Some(TT::Ident(kind)) => match &*kind.to_string() {
+ "impl" => break Ident::new("__inline_mac_impl", Span::call_site()),
+ kind @ ("struct" | "enum" | "union" | "fn" | "mod" | "trait" | "type" | "const" | "static") => {
+ if let TT::Ident(name) = &input.tt {
+ break Ident::new(&format!("__inline_mac_{kind}_{name}"), Span::call_site());
+ } else {
+ break Ident::new(&format!("__inline_mac_{kind}"), Span::call_site());
+ }
+ },
+ _ => {},
+ },
+ _ => {},
+ }
+ }
+ };
+
+ let mut expander = Expander::default();
+ let mut mac = MacWriter::new(mac_name);
+ if let Err(e) = expander.expand(input.into_iter(), &mut mac) {
+ return e;
+ }
+ let mut out = TokenStream::new();
+ mac.finish(&mut out);
+ out.extend(expander.expn);
+ out
+}
+
+/// Wraps a `TokenStream` iterator with a single token lookahead.
+struct LookaheadIter {
+ tt: TT,
+ iter: IntoIter,
+}
+impl LookaheadIter {
+ fn new(mut iter: IntoIter) -> Option<Self> {
+ iter.next().map(|tt| Self { tt, iter })
+ }
+
+ /// Gets the lookahead token, replacing it with the next token in the stream.
+ /// Note: If there isn't a next token, this will not return the lookahead token.
+ fn next(&mut self) -> Option<TT> {
+ self.iter.next().map(|tt| mem::replace(&mut self.tt, tt))
+ }
+}
+
+/// Builds the macro used to implement all the `inline!` macro calls.
+struct MacWriter {
+ name: Ident,
+ macros: TokenStream,
+ next_idx: usize,
+}
+impl MacWriter {
+ fn new(name: Ident) -> Self {
+ Self {
+ name,
+ macros: TokenStream::new(),
+ next_idx: 0,
+ }
+ }
+
+ /// Inserts a new `inline!` call.
+ fn insert(&mut self, name_span: Span, bang_span: Span, body: Group, expander: &mut Expander) -> Result<()> {
+ let idx = self.next_idx;
+ self.next_idx += 1;
+
+ let mut inner = Expander::for_arm(idx);
+ inner.expand(body.stream().into_iter(), self)?;
+ let new_arm = inner.arm.unwrap();
+
+ self.macros.extend([
+ TT::Group(Group::new(Parenthesis, new_arm.args_def)),
+ TT::Punct(Punct::new('=', Joint)),
+ TT::Punct(Punct::new('>', Alone)),
+ TT::Group(Group::new(Parenthesis, inner.expn)),
+ TT::Punct(Punct::new(';', Alone)),
+ ]);
+
+ expander.expn.extend([
+ TT::Ident({
+ let mut name = self.name.clone();
+ name.set_span(name_span);
+ name
+ }),
+ TT::Punct(punct_with_span('!', Alone, bang_span)),
+ ]);
+ let mut call_body = TokenStream::from_iter([TT::Literal(Literal::usize_unsuffixed(idx))]);
+ if let Some(arm) = expander.arm.as_mut() {
+ if !new_arm.args.is_empty() {
+ arm.add_sub_args(new_arm.args, &mut call_body);
+ }
+ } else {
+ call_body.extend(new_arm.args);
+ }
+ let mut g = Group::new(body.delimiter(), call_body);
+ g.set_span(body.span());
+ expander.expn.extend([TT::Group(g)]);
+ Ok(())
+ }
+
+ /// Creates the macro definition.
+ fn finish(self, out: &mut TokenStream) {
+ if self.next_idx != 0 {
+ out.extend([
+ TT::Ident(Ident::new("macro_rules", Span::call_site())),
+ TT::Punct(Punct::new('!', Alone)),
+ TT::Ident(self.name),
+ TT::Group(Group::new(Brace, self.macros)),
+ ])
+ }
+ }
+}
+
+struct MacroArm {
+ args_def: TokenStream,
+ args: Vec<TT>,
+}
+impl MacroArm {
+ fn add_single_arg_def(&mut self, kind: &str, dollar_span: Span, arg_span: Span, out: &mut TokenStream) {
+ let mut name = Ident::new(&format!("_{}", self.args.len()), Span::call_site());
+ self.args_def.extend([
+ TT::Punct(Punct::new('$', Alone)),
+ TT::Ident(name.clone()),
+ TT::Punct(Punct::new(':', Alone)),
+ TT::Ident(Ident::new(kind, Span::call_site())),
+ ]);
+ name.set_span(arg_span);
+ out.extend([TT::Punct(punct_with_span('$', Alone, dollar_span)), TT::Ident(name)]);
+ }
+
+ fn add_parenthesized_arg_def(&mut self, kind: Ident, dollar_span: Span, arg_span: Span, out: &mut TokenStream) {
+ let mut name = Ident::new(&format!("_{}", self.args.len()), Span::call_site());
+ self.args_def.extend([TT::Group(Group::new(
+ Parenthesis,
+ TokenStream::from_iter([
+ TT::Punct(Punct::new('$', Alone)),
+ TT::Ident(name.clone()),
+ TT::Punct(Punct::new(':', Alone)),
+ TT::Ident(kind),
+ ]),
+ ))]);
+ name.set_span(arg_span);
+ out.extend([TT::Punct(punct_with_span('$', Alone, dollar_span)), TT::Ident(name)]);
+ }
+
+ fn add_multi_arg_def(&mut self, dollar_span: Span, arg_span: Span, out: &mut TokenStream) {
+ let mut name = Ident::new(&format!("_{}", self.args.len()), Span::call_site());
+ self.args_def.extend([TT::Group(Group::new(
+ Parenthesis,
+ TokenStream::from_iter([
+ TT::Punct(Punct::new('$', Alone)),
+ TT::Group(Group::new(
+ Parenthesis,
+ TokenStream::from_iter([
+ TT::Punct(Punct::new('$', Alone)),
+ TT::Ident(name.clone()),
+ TT::Punct(Punct::new(':', Alone)),
+ TT::Ident(Ident::new("tt", Span::call_site())),
+ ]),
+ )),
+ TT::Punct(Punct::new('*', Alone)),
+ ]),
+ ))]);
+ name.set_span(arg_span);
+ out.extend([
+ TT::Punct(punct_with_span('$', Alone, dollar_span)),
+ TT::Group(group_with_span(
+ Parenthesis,
+ TokenStream::from_iter([TT::Punct(punct_with_span('$', Alone, dollar_span)), TT::Ident(name)]),
+ dollar_span,
+ )),
+ TT::Punct(punct_with_span('*', Alone, dollar_span)),
+ ]);
+ }
+
+ fn add_arg(&mut self, dollar_span: Span, tt: TT, input: &mut IntoIter, out: &mut TokenStream) -> Result<()> {
+ match tt {
+ TT::Punct(p) if p.as_char() == ESCAPE_CHAR => out.extend([TT::Punct(p)]),
+ TT::Punct(p) if p.as_char() == '\'' && p.spacing() == Joint => {
+ let lt_name = expect_tt(
+ input.next(),
+ |tt| match tt {
+ TT::Ident(x) => Some(x),
+ _ => None,
+ },
+ "lifetime name",
+ p.span(),
+ )?;
+ let arg_span = p.span().join(lt_name.span()).unwrap_or(p.span());
+ self.add_single_arg_def("lifetime", dollar_span, arg_span, out);
+ self.args.extend([TT::Punct(p), TT::Ident(lt_name)]);
+ },
+ TT::Ident(x) => {
+ self.add_single_arg_def("ident", dollar_span, x.span(), out);
+ self.args.push(TT::Ident(x));
+ },
+ TT::Literal(x) => {
+ self.add_single_arg_def("literal", dollar_span, x.span(), out);
+ self.args.push(TT::Literal(x));
+ },
+ TT::Group(g) if g.delimiter() == Parenthesis => {
+ let mut inner = g.stream().into_iter();
+ if let Some(TT::Punct(p)) = inner.next()
+ && p.as_char() == '@'
+ {
+ let kind = expect_tt(
+ inner.next(),
+ |tt| match tt {
+ TT::Ident(kind) => Some(kind),
+ _ => None,
+ },
+ "a macro fragment specifier",
+ p.span(),
+ )?;
+ self.add_parenthesized_arg_def(kind, dollar_span, g.span(), out);
+ self.args.push(TT::Group(group_with_span(Parenthesis, inner.collect(), g.span())))
+ } else {
+ self.add_multi_arg_def(dollar_span, g.span(), out);
+ self.args.push(TT::Group(g));
+ }
+ },
+ tt => return Err(make_error("unsupported escape", tt.span())),
+ };
+ Ok(())
+ }
+
+ fn add_sub_args(&mut self, args: Vec<TT>, out: &mut TokenStream) {
+ self.add_multi_arg_def(Span::call_site(), Span::call_site(), out);
+ self.args
+ .extend([TT::Group(Group::new(Parenthesis, TokenStream::from_iter(args)))]);
+ }
+}
+
+#[derive(Default)]
+struct Expander {
+ arm: Option<MacroArm>,
+ expn: TokenStream,
+}
+impl Expander {
+ fn for_arm(idx: usize) -> Self {
+ Self {
+ arm: Some(MacroArm {
+ args_def: TokenStream::from_iter([TT::Literal(Literal::usize_unsuffixed(idx))]),
+ args: Vec::new(),
+ }),
+ expn: TokenStream::new(),
+ }
+ }
+
+ fn write_tt(&mut self, tt: TT, mac: &mut MacWriter) -> Result<()> {
+ match tt {
+ TT::Group(g) => {
+ let outer = mem::take(&mut self.expn);
+ self.expand(g.stream().into_iter(), mac)?;
+ let inner = mem::replace(&mut self.expn, outer);
+ self.expn
+ .extend([TT::Group(group_with_span(g.delimiter(), inner, g.span()))]);
+ },
+ tt => self.expn.extend([tt]),
+ }
+ Ok(())
+ }
+
+ fn expand(&mut self, input: IntoIter, mac: &mut MacWriter) -> Result<()> {
+ let Some(mut input) = LookaheadIter::new(input) else {
+ return Ok(());
+ };
+ while let Some(tt) = input.next() {
+ if let TT::Punct(p) = &tt
+ && p.as_char() == ESCAPE_CHAR
+ && let Some(arm) = self.arm.as_mut()
+ {
+ arm.add_arg(p.span(), mem::replace(&mut input.tt, tt), &mut input.iter, &mut self.expn)?;
+ if input.next().is_none() {
+ return Ok(());
+ }
+ } else if let TT::Punct(p) = &input.tt
+ && p.as_char() == '!'
+ && let TT::Ident(name) = &tt
+ && name.to_string() == "inline"
+ {
+ let g = expect_tt(
+ input.iter.next(),
+ |tt| match tt {
+ TT::Group(g) => Some(g),
+ _ => None,
+ },
+ "macro arguments",
+ p.span(),
+ )?;
+ mac.insert(name.span(), p.span(), g, self)?;
+ if input.next().is_none() {
+ return Ok(());
+ }
+ } else {
+ self.write_tt(tt, mac)?;
+ }
+ }
+ self.write_tt(input.tt, mac)
+ }
+}
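
A minimal usage sketch of the expander above, mirroring the `default_numeric_fallback` tests later in this diff; it assumes a clippy UI test that pulls in the auxiliary `proc_macros` crate via `// aux-build:proc_macros.rs`:

    // Sketch only: how a test drives the `inline_macros` attribute defined above.
    extern crate proc_macros;
    use proc_macros::inline_macros;

    #[inline_macros]
    fn internal() {
        // The attribute rewrites this call into `__inline_mac_fn_internal!(0)`
        // and emits a matching `macro_rules! __inline_mac_fn_internal` arm,
        // so `22.` keeps a span inside the test crate and is still linted.
        inline!(let x = 22.;);
    }
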
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs
index f13733af3..b03c21262 100644
--- a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs
@@ -1,5 +1,5 @@
// this file solely exists to test constants defined in foreign crates.
-// As the most common case is the `http` crate, it replicates `http::HeadewrName`'s structure.
+// As the most common case is the `http` crate, it replicates `http::HeaderName`'s structure.
#![allow(clippy::declare_interior_mutable_const)]
#![allow(unused_tuple_struct_fields)]
diff --git a/src/tools/clippy/tests/ui/boxed_local.rs b/src/tools/clippy/tests/ui/boxed_local.rs
index 4639f00a8..79b6d33fc 100644
--- a/src/tools/clippy/tests/ui/boxed_local.rs
+++ b/src/tools/clippy/tests/ui/boxed_local.rs
@@ -1,4 +1,3 @@
-#![feature(box_syntax)]
#![feature(lint_reasons)]
#![allow(
clippy::borrowed_box,
@@ -34,7 +33,7 @@ fn ok_box_trait(boxed_trait: &Box<dyn Z>) {
}
fn warn_call() {
- let x = box A;
+ let x = Box::new(A);
x.foo();
}
@@ -43,41 +42,41 @@ fn warn_arg(x: Box<A>) {
}
fn nowarn_closure_arg() {
- let x = Some(box A);
+ let x = Some(Box::new(A));
x.map_or((), |x| take_ref(&x));
}
fn warn_rename_call() {
- let x = box A;
+ let x = Box::new(A);
let y = x;
y.foo(); // via autoderef
}
fn warn_notuse() {
- let bz = box A;
+ let bz = Box::new(A);
}
fn warn_pass() {
- let bz = box A;
+ let bz = Box::new(A);
take_ref(&bz); // via deref coercion
}
fn nowarn_return() -> Box<A> {
- box A // moved out, "escapes"
+ Box::new(A) // moved out, "escapes"
}
fn nowarn_move() {
- let bx = box A;
+ let bx = Box::new(A);
drop(bx) // moved in, "escapes"
}
fn nowarn_call() {
- let bx = box A;
+ let bx = Box::new(A);
bx.clone(); // method only available to Box, not via autoderef
}
fn nowarn_pass() {
- let bx = box A;
+ let bx = Box::new(A);
take_box(&bx); // fn needs &Box
}
@@ -86,30 +85,20 @@ fn take_ref(x: &A) {}
fn nowarn_ref_take() {
// false positive, should actually warn
- let x = box A;
+ let x = Box::new(A);
let y = &x;
take_box(y);
}
fn nowarn_match() {
- let x = box A; // moved into a match
+ let x = Box::new(A); // moved into a match
match x {
y => drop(y),
}
}
fn warn_match() {
- let x = box A;
- match &x {
- // not moved
- y => (),
- }
-}
-
-fn nowarn_large_array() {
- // should not warn, is large array
- // and should not be on stack
- let x = box [1; 10000];
+ let x = Box::new(A);
match &x {
// not moved
y => (),
diff --git a/src/tools/clippy/tests/ui/boxed_local.stderr b/src/tools/clippy/tests/ui/boxed_local.stderr
index 9036529f3..10d78fbc0 100644
--- a/src/tools/clippy/tests/ui/boxed_local.stderr
+++ b/src/tools/clippy/tests/ui/boxed_local.stderr
@@ -1,5 +1,5 @@
error: local variable doesn't need to be boxed here
- --> $DIR/boxed_local.rs:41:13
+ --> $DIR/boxed_local.rs:40:13
|
LL | fn warn_arg(x: Box<A>) {
| ^
@@ -7,19 +7,19 @@ LL | fn warn_arg(x: Box<A>) {
= note: `-D clippy::boxed-local` implied by `-D warnings`
error: local variable doesn't need to be boxed here
- --> $DIR/boxed_local.rs:132:12
+ --> $DIR/boxed_local.rs:121:12
|
LL | pub fn new(_needs_name: Box<PeekableSeekable<&()>>) -> () {}
| ^^^^^^^^^^^
error: local variable doesn't need to be boxed here
- --> $DIR/boxed_local.rs:196:44
+ --> $DIR/boxed_local.rs:185:44
|
LL | fn default_impl_x(self: Box<Self>, x: Box<u32>) -> u32 {
| ^
error: local variable doesn't need to be boxed here
- --> $DIR/boxed_local.rs:203:16
+ --> $DIR/boxed_local.rs:192:16
|
LL | fn foo(x: Box<u32>) {}
| ^
diff --git a/src/tools/clippy/tests/ui/cast.rs b/src/tools/clippy/tests/ui/cast.rs
index 8b2673c2a..a86b85706 100644
--- a/src/tools/clippy/tests/ui/cast.rs
+++ b/src/tools/clippy/tests/ui/cast.rs
@@ -29,6 +29,12 @@ fn main() {
1f64 as isize;
1f64 as usize;
1f32 as u32 as u16;
+ {
+ let _x: i8 = 1i32 as _;
+ 1f32 as i32;
+ 1f64 as i32;
+ 1f32 as u8;
+ }
// Test clippy::cast_possible_wrap
1u8 as i8;
1u16 as i16;
diff --git a/src/tools/clippy/tests/ui/cast.stderr b/src/tools/clippy/tests/ui/cast.stderr
index 451078de2..65ecf1aa3 100644
--- a/src/tools/clippy/tests/ui/cast.stderr
+++ b/src/tools/clippy/tests/ui/cast.stderr
@@ -44,10 +44,6 @@ LL | 1f32 as i32;
|
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
= note: `-D clippy::cast-possible-truncation` implied by `-D warnings`
-help: ... or use `try_from` and handle the error accordingly
- |
-LL | i32::try_from(1f32);
- | ~~~~~~~~~~~~~~~~~~~
error: casting `f32` to `u32` may truncate the value
--> $DIR/cast.rs:25:5
@@ -56,10 +52,6 @@ LL | 1f32 as u32;
| ^^^^^^^^^^^
|
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
-help: ... or use `try_from` and handle the error accordingly
- |
-LL | u32::try_from(1f32);
- | ~~~~~~~~~~~~~~~~~~~
error: casting `f32` to `u32` may lose the sign of the value
--> $DIR/cast.rs:25:5
@@ -76,10 +68,6 @@ LL | 1f64 as f32;
| ^^^^^^^^^^^
|
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
-help: ... or use `try_from` and handle the error accordingly
- |
-LL | f32::try_from(1f64);
- | ~~~~~~~~~~~~~~~~~~~
error: casting `i32` to `i8` may truncate the value
--> $DIR/cast.rs:27:5
@@ -112,10 +100,6 @@ LL | 1f64 as isize;
| ^^^^^^^^^^^^^
|
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
-help: ... or use `try_from` and handle the error accordingly
- |
-LL | isize::try_from(1f64);
- | ~~~~~~~~~~~~~~~~~~~~~
error: casting `f64` to `usize` may truncate the value
--> $DIR/cast.rs:30:5
@@ -124,10 +108,6 @@ LL | 1f64 as usize;
| ^^^^^^^^^^^^^
|
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
-help: ... or use `try_from` and handle the error accordingly
- |
-LL | usize::try_from(1f64);
- | ~~~~~~~~~~~~~~~~~~~~~
error: casting `f64` to `usize` may lose the sign of the value
--> $DIR/cast.rs:30:5
@@ -154,10 +134,6 @@ LL | 1f32 as u32 as u16;
| ^^^^^^^^^^^
|
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
-help: ... or use `try_from` and handle the error accordingly
- |
-LL | u32::try_from(1f32) as u16;
- | ~~~~~~~~~~~~~~~~~~~
error: casting `f32` to `u32` may lose the sign of the value
--> $DIR/cast.rs:31:5
@@ -165,8 +141,50 @@ error: casting `f32` to `u32` may lose the sign of the value
LL | 1f32 as u32 as u16;
| ^^^^^^^^^^^
+error: casting `i32` to `i8` may truncate the value
+ --> $DIR/cast.rs:33:22
+ |
+LL | let _x: i8 = 1i32 as _;
+ | ^^^^^^^^^
+ |
+ = help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
+help: ... or use `try_from` and handle the error accordingly
+ |
+LL | let _x: i8 = 1i32.try_into();
+ | ~~~~~~~~~~~~~~~
+
+error: casting `f32` to `i32` may truncate the value
+ --> $DIR/cast.rs:34:9
+ |
+LL | 1f32 as i32;
+ | ^^^^^^^^^^^
+ |
+ = help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
+
+error: casting `f64` to `i32` may truncate the value
+ --> $DIR/cast.rs:35:9
+ |
+LL | 1f64 as i32;
+ | ^^^^^^^^^^^
+ |
+ = help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
+
+error: casting `f32` to `u8` may truncate the value
+ --> $DIR/cast.rs:36:9
+ |
+LL | 1f32 as u8;
+ | ^^^^^^^^^^
+ |
+ = help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
+
+error: casting `f32` to `u8` may lose the sign of the value
+ --> $DIR/cast.rs:36:9
+ |
+LL | 1f32 as u8;
+ | ^^^^^^^^^^
+
error: casting `u8` to `i8` may wrap around the value
- --> $DIR/cast.rs:33:5
+ --> $DIR/cast.rs:39:5
|
LL | 1u8 as i8;
| ^^^^^^^^^
@@ -174,43 +192,43 @@ LL | 1u8 as i8;
= note: `-D clippy::cast-possible-wrap` implied by `-D warnings`
error: casting `u16` to `i16` may wrap around the value
- --> $DIR/cast.rs:34:5
+ --> $DIR/cast.rs:40:5
|
LL | 1u16 as i16;
| ^^^^^^^^^^^
error: casting `u32` to `i32` may wrap around the value
- --> $DIR/cast.rs:35:5
+ --> $DIR/cast.rs:41:5
|
LL | 1u32 as i32;
| ^^^^^^^^^^^
error: casting `u64` to `i64` may wrap around the value
- --> $DIR/cast.rs:36:5
+ --> $DIR/cast.rs:42:5
|
LL | 1u64 as i64;
| ^^^^^^^^^^^
error: casting `usize` to `isize` may wrap around the value
- --> $DIR/cast.rs:37:5
+ --> $DIR/cast.rs:43:5
|
LL | 1usize as isize;
| ^^^^^^^^^^^^^^^
error: casting `i32` to `u32` may lose the sign of the value
- --> $DIR/cast.rs:40:5
+ --> $DIR/cast.rs:46:5
|
LL | -1i32 as u32;
| ^^^^^^^^^^^^
error: casting `isize` to `usize` may lose the sign of the value
- --> $DIR/cast.rs:42:5
+ --> $DIR/cast.rs:48:5
|
LL | -1isize as usize;
| ^^^^^^^^^^^^^^^^
error: casting `i64` to `i8` may truncate the value
- --> $DIR/cast.rs:109:5
+ --> $DIR/cast.rs:115:5
|
LL | (-99999999999i64).min(1) as i8; // should be linted because signed
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -222,7 +240,7 @@ LL | i8::try_from((-99999999999i64).min(1)); // should be linted because sig
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: casting `u64` to `u8` may truncate the value
- --> $DIR/cast.rs:121:5
+ --> $DIR/cast.rs:127:5
|
LL | 999999u64.clamp(0, 256) as u8; // should still be linted
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -234,7 +252,7 @@ LL | u8::try_from(999999u64.clamp(0, 256)); // should still be linted
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: casting `main::E2` to `u8` may truncate the value
- --> $DIR/cast.rs:142:21
+ --> $DIR/cast.rs:148:21
|
LL | let _ = self as u8;
| ^^^^^^^^^^
@@ -246,7 +264,7 @@ LL | let _ = u8::try_from(self);
| ~~~~~~~~~~~~~~~~~~
error: casting `main::E2::B` to `u8` will truncate the value
- --> $DIR/cast.rs:143:21
+ --> $DIR/cast.rs:149:21
|
LL | let _ = Self::B as u8;
| ^^^^^^^^^^^^^
@@ -254,7 +272,7 @@ LL | let _ = Self::B as u8;
= note: `-D clippy::cast-enum-truncation` implied by `-D warnings`
error: casting `main::E5` to `i8` may truncate the value
- --> $DIR/cast.rs:179:21
+ --> $DIR/cast.rs:185:21
|
LL | let _ = self as i8;
| ^^^^^^^^^^
@@ -266,13 +284,13 @@ LL | let _ = i8::try_from(self);
| ~~~~~~~~~~~~~~~~~~
error: casting `main::E5::A` to `i8` will truncate the value
- --> $DIR/cast.rs:180:21
+ --> $DIR/cast.rs:186:21
|
LL | let _ = Self::A as i8;
| ^^^^^^^^^^^^^
error: casting `main::E6` to `i16` may truncate the value
- --> $DIR/cast.rs:194:21
+ --> $DIR/cast.rs:200:21
|
LL | let _ = self as i16;
| ^^^^^^^^^^^
@@ -284,7 +302,7 @@ LL | let _ = i16::try_from(self);
| ~~~~~~~~~~~~~~~~~~~
error: casting `main::E7` to `usize` may truncate the value on targets with 32-bit wide pointers
- --> $DIR/cast.rs:209:21
+ --> $DIR/cast.rs:215:21
|
LL | let _ = self as usize;
| ^^^^^^^^^^^^^
@@ -296,7 +314,7 @@ LL | let _ = usize::try_from(self);
| ~~~~~~~~~~~~~~~~~~~~~
error: casting `main::E10` to `u16` may truncate the value
- --> $DIR/cast.rs:250:21
+ --> $DIR/cast.rs:256:21
|
LL | let _ = self as u16;
| ^^^^^^^^^^^
@@ -308,7 +326,7 @@ LL | let _ = u16::try_from(self);
| ~~~~~~~~~~~~~~~~~~~
error: casting `u32` to `u8` may truncate the value
- --> $DIR/cast.rs:258:13
+ --> $DIR/cast.rs:264:13
|
LL | let c = (q >> 16) as u8;
| ^^^^^^^^^^^^^^^
@@ -316,11 +334,11 @@ LL | let c = (q >> 16) as u8;
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
help: ... or use `try_from` and handle the error accordingly
|
-LL | let c = u8::try_from((q >> 16));
- | ~~~~~~~~~~~~~~~~~~~~~~~
+LL | let c = u8::try_from(q >> 16);
+ | ~~~~~~~~~~~~~~~~~~~~~
error: casting `u32` to `u8` may truncate the value
- --> $DIR/cast.rs:261:13
+ --> $DIR/cast.rs:267:13
|
LL | let c = (q / 1000) as u8;
| ^^^^^^^^^^^^^^^^
@@ -328,8 +346,8 @@ LL | let c = (q / 1000) as u8;
= help: if this is intentional allow the lint with `#[allow(clippy::cast_possible_truncation)]` ...
help: ... or use `try_from` and handle the error accordingly
|
-LL | let c = u8::try_from((q / 1000));
- | ~~~~~~~~~~~~~~~~~~~~~~~~
+LL | let c = u8::try_from(q / 1000);
+ | ~~~~~~~~~~~~~~~~~~~~~~
-error: aborting due to 36 previous errors
+error: aborting due to 41 previous errors
diff --git a/src/tools/clippy/tests/ui/clear_with_drain.fixed b/src/tools/clippy/tests/ui/clear_with_drain.fixed
new file mode 100644
index 000000000..2d9545eee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clear_with_drain.fixed
@@ -0,0 +1,358 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::clear_with_drain)]
+
+use std::collections::{BinaryHeap, HashMap, HashSet, VecDeque};
+
+fn vec_range() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(0..v.len());
+
+ // Do not lint because iterator is used
+ let mut v = vec![1, 2, 3];
+ let n = v.drain(0..v.len()).count();
+
+ // Do not lint because iterator is assigned and used
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(usize::MIN..v.len());
+ let n = iter.count();
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.clear();
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.clear();
+}
+
+fn vec_range_from() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(0..);
+
+ // Do not lint because iterator is assigned and used
+ let mut v = vec![1, 2, 3];
+ let mut iter = v.drain(0..);
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut v = vec![1, 2, 3];
+ let next = v.drain(usize::MIN..).next();
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.clear();
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.clear();
+}
+
+fn vec_range_full() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(..);
+
+ // Do not lint because iterator is used
+ let mut v = vec![1, 2, 3];
+ for x in v.drain(..) {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.clear();
+}
+
+fn vec_range_to() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(..v.len());
+
+ // Do not lint because iterator is assigned and used
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(..v.len());
+ for x in iter {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.clear();
+}
+
+fn vec_partial_drains() {
+ // Do not lint any of these because the ranges are not full
+
+ let mut v = vec![1, 2, 3];
+ v.drain(1..);
+ let mut v = vec![1, 2, 3];
+ v.drain(1..).max();
+
+ let mut v = vec![1, 2, 3];
+ v.drain(..v.len() - 1);
+ let mut v = vec![1, 2, 3];
+ v.drain(..v.len() - 1).min();
+
+ let mut v = vec![1, 2, 3];
+ v.drain(1..v.len() - 1);
+ let mut v = vec![1, 2, 3];
+ let w: Vec<i8> = v.drain(1..v.len() - 1).collect();
+}
+
+fn vec_deque_range() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(0..deque.len());
+
+ // Do not lint because iterator is used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let n = deque.drain(0..deque.len()).count();
+
+ // Do not lint because iterator is assigned and used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(usize::MIN..deque.len());
+ let n = iter.count();
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.clear();
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.clear();
+}
+
+fn vec_deque_range_from() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(0..);
+
+ // Do not lint because iterator is assigned and used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let mut iter = deque.drain(0..);
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let next = deque.drain(usize::MIN..).next();
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.clear();
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.clear();
+}
+
+fn vec_deque_range_full() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(..);
+
+ // Do not lint because iterator is used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ for x in deque.drain(..) {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.clear();
+}
+
+fn vec_deque_range_to() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(..deque.len());
+
+ // Do not lint because iterator is assigned and used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(..deque.len());
+ for x in iter {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.clear();
+}
+
+fn vec_deque_partial_drains() {
+ // Do not lint any of these because the ranges are not full
+
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(1..);
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(1..).max();
+
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(..deque.len() - 1);
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(..deque.len() - 1).min();
+
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(1..deque.len() - 1);
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let w: Vec<i8> = deque.drain(1..deque.len() - 1).collect();
+}
+
+fn string_range() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(0..s.len());
+
+ // Do not lint because iterator is used
+ let mut s = String::from("Hello, world!");
+ let n = s.drain(0..s.len()).count();
+
+ // Do not lint because iterator is assigned and used
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(usize::MIN..s.len());
+ let n = iter.count();
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.clear();
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.clear();
+}
+
+fn string_range_from() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(0..);
+
+ // Do not lint because iterator is assigned and used
+ let mut s = String::from("Hello, world!");
+ let mut iter = s.drain(0..);
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut s = String::from("Hello, world!");
+ let next = s.drain(usize::MIN..).next();
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.clear();
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.clear();
+}
+
+fn string_range_full() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(..);
+
+ // Do not lint because iterator is used
+ let mut s = String::from("Hello, world!");
+ for x in s.drain(..) {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.clear();
+}
+
+fn string_range_to() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(..s.len());
+
+ // Do not lint because iterator is assigned and used
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(..s.len());
+ for x in iter {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.clear();
+}
+
+fn string_partial_drains() {
+ // Do not lint any of these because the ranges are not full
+
+ let mut s = String::from("Hello, world!");
+ s.drain(1..);
+ let mut s = String::from("Hello, world!");
+ s.drain(1..).max();
+
+ let mut s = String::from("Hello, world!");
+ s.drain(..s.len() - 1);
+ let mut s = String::from("Hello, world!");
+ s.drain(..s.len() - 1).min();
+
+ let mut s = String::from("Hello, world!");
+ s.drain(1..s.len() - 1);
+ let mut s = String::from("Hello, world!");
+ let w: String = s.drain(1..s.len() - 1).collect();
+}
+
+fn hash_set() {
+ // Do not lint because iterator is assigned
+ let mut set = HashSet::from([1, 2, 3]);
+ let iter = set.drain();
+
+ // Do not lint because iterator is assigned and used
+ let mut set = HashSet::from([1, 2, 3]);
+ let mut iter = set.drain();
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut set = HashSet::from([1, 2, 3]);
+ let next = set.drain().next();
+
+ // Do lint
+ let mut set = HashSet::from([1, 2, 3]);
+ set.clear();
+}
+
+fn hash_map() {
+ // Do not lint because iterator is assigned
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ let iter = map.drain();
+
+ // Do not lint because iterator is assigned and used
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ let mut iter = map.drain();
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ let next = map.drain().next();
+
+ // Do lint
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ map.clear();
+}
+
+fn binary_heap() {
+ // Do not lint because iterator is assigned
+ let mut heap = BinaryHeap::from([1, 2]);
+ let iter = heap.drain();
+
+ // Do not lint because iterator is assigned and used
+ let mut heap = BinaryHeap::from([1, 2]);
+ let mut iter = heap.drain();
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut heap = BinaryHeap::from([1, 2]);
+ let next = heap.drain().next();
+
+ // Do lint
+ let mut heap = BinaryHeap::from([1, 2]);
+ heap.clear();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/clear_with_drain.rs b/src/tools/clippy/tests/ui/clear_with_drain.rs
new file mode 100644
index 000000000..4d60ee46e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clear_with_drain.rs
@@ -0,0 +1,358 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::clear_with_drain)]
+
+use std::collections::{BinaryHeap, HashMap, HashSet, VecDeque};
+
+fn vec_range() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(0..v.len());
+
+ // Do not lint because iterator is used
+ let mut v = vec![1, 2, 3];
+ let n = v.drain(0..v.len()).count();
+
+ // Do not lint because iterator is assigned and used
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(usize::MIN..v.len());
+ let n = iter.count();
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.drain(0..v.len());
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.drain(usize::MIN..v.len());
+}
+
+fn vec_range_from() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(0..);
+
+ // Do not lint because iterator is assigned and used
+ let mut v = vec![1, 2, 3];
+ let mut iter = v.drain(0..);
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut v = vec![1, 2, 3];
+ let next = v.drain(usize::MIN..).next();
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.drain(0..);
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.drain(usize::MIN..);
+}
+
+fn vec_range_full() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(..);
+
+ // Do not lint because iterator is used
+ let mut v = vec![1, 2, 3];
+ for x in v.drain(..) {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.drain(..);
+}
+
+fn vec_range_to() {
+ // Do not lint because iterator is assigned
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(..v.len());
+
+ // Do not lint because iterator is assigned and used
+ let mut v = vec![1, 2, 3];
+ let iter = v.drain(..v.len());
+ for x in iter {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut v = vec![1, 2, 3];
+ v.drain(..v.len());
+}
+
+fn vec_partial_drains() {
+ // Do not lint any of these because the ranges are not full
+
+ let mut v = vec![1, 2, 3];
+ v.drain(1..);
+ let mut v = vec![1, 2, 3];
+ v.drain(1..).max();
+
+ let mut v = vec![1, 2, 3];
+ v.drain(..v.len() - 1);
+ let mut v = vec![1, 2, 3];
+ v.drain(..v.len() - 1).min();
+
+ let mut v = vec![1, 2, 3];
+ v.drain(1..v.len() - 1);
+ let mut v = vec![1, 2, 3];
+ let w: Vec<i8> = v.drain(1..v.len() - 1).collect();
+}
+
+fn vec_deque_range() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(0..deque.len());
+
+ // Do not lint because iterator is used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let n = deque.drain(0..deque.len()).count();
+
+ // Do not lint because iterator is assigned and used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(usize::MIN..deque.len());
+ let n = iter.count();
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(0..deque.len());
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(usize::MIN..deque.len());
+}
+
+fn vec_deque_range_from() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(0..);
+
+ // Do not lint because iterator is assigned and used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let mut iter = deque.drain(0..);
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let next = deque.drain(usize::MIN..).next();
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(0..);
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(usize::MIN..);
+}
+
+fn vec_deque_range_full() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(..);
+
+ // Do not lint because iterator is used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ for x in deque.drain(..) {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(..);
+}
+
+fn vec_deque_range_to() {
+ // Do not lint because iterator is assigned
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(..deque.len());
+
+ // Do not lint because iterator is assigned and used
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let iter = deque.drain(..deque.len());
+ for x in iter {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(..deque.len());
+}
+
+fn vec_deque_partial_drains() {
+ // Do not lint any of these because the ranges are not full
+
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(1..);
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(1..).max();
+
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(..deque.len() - 1);
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(..deque.len() - 1).min();
+
+ let mut deque = VecDeque::from([1, 2, 3]);
+ deque.drain(1..deque.len() - 1);
+ let mut deque = VecDeque::from([1, 2, 3]);
+ let w: Vec<i8> = deque.drain(1..deque.len() - 1).collect();
+}
+
+fn string_range() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(0..s.len());
+
+ // Do not lint because iterator is used
+ let mut s = String::from("Hello, world!");
+ let n = s.drain(0..s.len()).count();
+
+ // Do not lint because iterator is assigned and used
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(usize::MIN..s.len());
+ let n = iter.count();
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.drain(0..s.len());
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.drain(usize::MIN..s.len());
+}
+
+fn string_range_from() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(0..);
+
+ // Do not lint because iterator is assigned and used
+ let mut s = String::from("Hello, world!");
+ let mut iter = s.drain(0..);
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut s = String::from("Hello, world!");
+ let next = s.drain(usize::MIN..).next();
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.drain(0..);
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.drain(usize::MIN..);
+}
+
+fn string_range_full() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(..);
+
+ // Do not lint because iterator is used
+ let mut s = String::from("Hello, world!");
+ for x in s.drain(..) {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.drain(..);
+}
+
+fn string_range_to() {
+ // Do not lint because iterator is assigned
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(..s.len());
+
+ // Do not lint because iterator is assigned and used
+ let mut s = String::from("Hello, world!");
+ let iter = s.drain(..s.len());
+ for x in iter {
+ let y = format!("x = {x}");
+ }
+
+ // Do lint
+ let mut s = String::from("Hello, world!");
+ s.drain(..s.len());
+}
+
+fn string_partial_drains() {
+ // Do not lint any of these because the ranges are not full
+
+ let mut s = String::from("Hello, world!");
+ s.drain(1..);
+ let mut s = String::from("Hello, world!");
+ s.drain(1..).max();
+
+ let mut s = String::from("Hello, world!");
+ s.drain(..s.len() - 1);
+ let mut s = String::from("Hello, world!");
+ s.drain(..s.len() - 1).min();
+
+ let mut s = String::from("Hello, world!");
+ s.drain(1..s.len() - 1);
+ let mut s = String::from("Hello, world!");
+ let w: String = s.drain(1..s.len() - 1).collect();
+}
+
+fn hash_set() {
+ // Do not lint because iterator is assigned
+ let mut set = HashSet::from([1, 2, 3]);
+ let iter = set.drain();
+
+ // Do not lint because iterator is assigned and used
+ let mut set = HashSet::from([1, 2, 3]);
+ let mut iter = set.drain();
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut set = HashSet::from([1, 2, 3]);
+ let next = set.drain().next();
+
+ // Do lint
+ let mut set = HashSet::from([1, 2, 3]);
+ set.drain();
+}
+
+fn hash_map() {
+ // Do not lint because iterator is assigned
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ let iter = map.drain();
+
+ // Do not lint because iterator is assigned and used
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ let mut iter = map.drain();
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ let next = map.drain().next();
+
+ // Do lint
+ let mut map = HashMap::from([(1, "a"), (2, "b")]);
+ map.drain();
+}
+
+fn binary_heap() {
+ // Do not lint because iterator is assigned
+ let mut heap = BinaryHeap::from([1, 2]);
+ let iter = heap.drain();
+
+ // Do not lint because iterator is assigned and used
+ let mut heap = BinaryHeap::from([1, 2]);
+ let mut iter = heap.drain();
+ let next = iter.next();
+
+ // Do not lint because iterator is used
+ let mut heap = BinaryHeap::from([1, 2]);
+ let next = heap.drain().next();
+
+ // Do lint
+ let mut heap = BinaryHeap::from([1, 2]);
+ heap.drain();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/clear_with_drain.stderr b/src/tools/clippy/tests/ui/clear_with_drain.stderr
new file mode 100644
index 000000000..20158da11
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clear_with_drain.stderr
@@ -0,0 +1,130 @@
+error: `drain` used to clear a `Vec`
+ --> $DIR/clear_with_drain.rs:23:7
+ |
+LL | v.drain(0..v.len());
+ | ^^^^^^^^^^^^^^^^^ help: try: `clear()`
+ |
+ = note: `-D clippy::clear-with-drain` implied by `-D warnings`
+
+error: `drain` used to clear a `Vec`
+ --> $DIR/clear_with_drain.rs:27:7
+ |
+LL | v.drain(usize::MIN..v.len());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `Vec`
+ --> $DIR/clear_with_drain.rs:46:7
+ |
+LL | v.drain(0..);
+ | ^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `Vec`
+ --> $DIR/clear_with_drain.rs:50:7
+ |
+LL | v.drain(usize::MIN..);
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `Vec`
+ --> $DIR/clear_with_drain.rs:66:7
+ |
+LL | v.drain(..);
+ | ^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `Vec`
+ --> $DIR/clear_with_drain.rs:83:7
+ |
+LL | v.drain(..v.len());
+ | ^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `VecDeque`
+ --> $DIR/clear_with_drain.rs:121:11
+ |
+LL | deque.drain(0..deque.len());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `VecDeque`
+ --> $DIR/clear_with_drain.rs:125:11
+ |
+LL | deque.drain(usize::MIN..deque.len());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `VecDeque`
+ --> $DIR/clear_with_drain.rs:144:11
+ |
+LL | deque.drain(0..);
+ | ^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `VecDeque`
+ --> $DIR/clear_with_drain.rs:148:11
+ |
+LL | deque.drain(usize::MIN..);
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `VecDeque`
+ --> $DIR/clear_with_drain.rs:164:11
+ |
+LL | deque.drain(..);
+ | ^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `VecDeque`
+ --> $DIR/clear_with_drain.rs:181:11
+ |
+LL | deque.drain(..deque.len());
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `String`
+ --> $DIR/clear_with_drain.rs:219:7
+ |
+LL | s.drain(0..s.len());
+ | ^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `String`
+ --> $DIR/clear_with_drain.rs:223:7
+ |
+LL | s.drain(usize::MIN..s.len());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `String`
+ --> $DIR/clear_with_drain.rs:242:7
+ |
+LL | s.drain(0..);
+ | ^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `String`
+ --> $DIR/clear_with_drain.rs:246:7
+ |
+LL | s.drain(usize::MIN..);
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `String`
+ --> $DIR/clear_with_drain.rs:262:7
+ |
+LL | s.drain(..);
+ | ^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `String`
+ --> $DIR/clear_with_drain.rs:279:7
+ |
+LL | s.drain(..s.len());
+ | ^^^^^^^^^^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `HashSet`
+ --> $DIR/clear_with_drain.rs:317:9
+ |
+LL | set.drain();
+ | ^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `HashMap`
+ --> $DIR/clear_with_drain.rs:336:9
+ |
+LL | map.drain();
+ | ^^^^^^^ help: try: `clear()`
+
+error: `drain` used to clear a `BinaryHeap`
+ --> $DIR/clear_with_drain.rs:355:10
+ |
+LL | heap.drain();
+ | ^^^^^^^ help: try: `clear()`
+
+error: aborting due to 21 previous errors
+
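
Condensed from the `clear_with_drain` fixtures above (behaviour inferred from the `.rs`/`.fixed`/`.stderr` triple, not additional test code): the lint only fires when a full-range `drain` result is dropped unused, and the suggested replacement is `clear()`.

    fn sketch() {
        let mut v = vec![1, 2, 3];
        v.drain(..); // linted: full-range drain, iterator dropped; suggestion is `v.clear()`

        let mut v = vec![1, 2, 3];
        let n = v.drain(..).count(); // not linted: the returned iterator is used
        let _ = n;

        let mut v = vec![1, 2, 3];
        v.drain(1..); // not linted: the range does not cover the whole collection
    }
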
diff --git a/src/tools/clippy/tests/ui/collection_is_never_read.rs b/src/tools/clippy/tests/ui/collection_is_never_read.rs
new file mode 100644
index 000000000..01259a983
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collection_is_never_read.rs
@@ -0,0 +1,190 @@
+#![allow(unused)]
+#![warn(clippy::collection_is_never_read)]
+
+use std::collections::{HashMap, HashSet};
+
+fn main() {}
+
+fn not_a_collection() {
+ // TODO: Expand `collection_is_never_read` beyond collections?
+ let mut x = 10; // Ok
+ x += 1;
+}
+
+fn no_access_at_all() {
+ // Other lints should catch this.
+ let x = vec![1, 2, 3]; // Ok
+}
+
+fn write_without_read() {
+ // The main use case for `collection_is_never_read`.
+ let mut x = HashMap::new(); // WARNING
+ x.insert(1, 2);
+}
+
+fn read_without_write() {
+ let mut x = vec![1, 2, 3]; // Ok
+ let _ = x.len();
+}
+
+fn write_and_read() {
+ let mut x = vec![1, 2, 3]; // Ok
+ x.push(4);
+ let _ = x.len();
+}
+
+fn write_after_read() {
+ // TODO: Warn here, but this requires more extensive data flow analysis.
+ let mut x = vec![1, 2, 3]; // Ok
+ let _ = x.len();
+ x.push(4); // Pointless
+}
+
+fn write_before_reassign() {
+ // TODO: Warn here, but this requires more extensive data flow analysis.
+ let mut x = HashMap::new(); // Ok
+ x.insert(1, 2); // Pointless
+ x = HashMap::new();
+ let _ = x.len();
+}
+
+fn read_in_closure() {
+ let mut x = HashMap::new(); // Ok
+ x.insert(1, 2);
+ let _ = || {
+ let _ = x.len();
+ };
+}
+
+fn write_in_closure() {
+ let mut x = vec![1, 2, 3]; // WARNING
+ let _ = || {
+ x.push(4);
+ };
+}
+
+fn read_in_format() {
+ let mut x = HashMap::new(); // Ok
+ x.insert(1, 2);
+ format!("{x:?}");
+}
+
+fn shadowing_1() {
+ let x = HashMap::<usize, usize>::new(); // Ok
+ let _ = x.len();
+ let mut x = HashMap::new(); // WARNING
+ x.insert(1, 2);
+}
+
+fn shadowing_2() {
+ let mut x = HashMap::new(); // WARNING
+ x.insert(1, 2);
+ let x = HashMap::<usize, usize>::new(); // Ok
+ let _ = x.len();
+}
+
+#[allow(clippy::let_unit_value)]
+fn fake_read_1() {
+ let mut x = vec![1, 2, 3]; // WARNING
+ x.reverse();
+ let _: () = x.clear();
+}
+
+fn fake_read_2() {
+ let mut x = vec![1, 2, 3]; // WARNING
+ x.reverse();
+ println!("{:?}", x.push(5));
+}
+
+fn assignment() {
+ let mut x = vec![1, 2, 3]; // WARNING
+ let y = vec![4, 5, 6]; // Ok
+ x = y;
+}
+
+#[allow(clippy::self_assignment)]
+fn self_assignment() {
+ let mut x = vec![1, 2, 3]; // WARNING
+ x = x;
+}
+
+fn method_argument_but_not_target() {
+ struct MyStruct;
+ impl MyStruct {
+ fn my_method(&self, _argument: &[usize]) {}
+ }
+ let my_struct = MyStruct;
+
+ let mut x = vec![1, 2, 3]; // Ok
+ x.reverse();
+ my_struct.my_method(&x);
+}
+
+fn insert_is_not_a_read() {
+ let mut x = HashSet::new(); // WARNING
+ x.insert(5);
+}
+
+fn insert_is_a_read() {
+ let mut x = HashSet::new(); // Ok
+ if x.insert(5) {
+ println!("5 was inserted");
+ }
+}
+
+fn not_read_if_return_value_not_used() {
+ // `is_empty` does not modify the set, so it's a query. But since the return value is not used, the
+ // lint does not consider it a read here.
+ let x = vec![1, 2, 3]; // WARNING
+ x.is_empty();
+}
+
+fn extension_traits() {
+ trait VecExt<T> {
+ fn method_with_side_effect(&self);
+ fn method_without_side_effect(&self);
+ }
+
+ impl<T> VecExt<T> for Vec<T> {
+ fn method_with_side_effect(&self) {
+ println!("my length: {}", self.len());
+ }
+ fn method_without_side_effect(&self) {}
+ }
+
+ let x = vec![1, 2, 3]; // Ok
+ x.method_with_side_effect();
+
+ let y = vec![1, 2, 3]; // Ok (false negative)
+ y.method_without_side_effect();
+}
+
+fn function_argument() {
+ #[allow(clippy::ptr_arg)]
+ fn foo<T>(v: &Vec<T>) -> usize {
+ v.len()
+ }
+
+ let x = vec![1, 2, 3]; // Ok
+ foo(&x);
+}
+
+fn string() {
+ // Do lint (write without read)
+ let mut s = String::new();
+ s.push_str("Hello, World!");
+
+ // Do not lint (read without write)
+ let mut s = String::from("Hello, World!");
+ let _ = s.len();
+
+ // Do not lint (write and read)
+ let mut s = String::from("Hello, World!");
+ s.push_str("foo, bar");
+ let _ = s.len();
+
+ // Do lint the first line, but not the second
+ let mut s = String::from("Hello, World!");
+ let t = String::from("foo, bar");
+ s = t;
+}
diff --git a/src/tools/clippy/tests/ui/collection_is_never_read.stderr b/src/tools/clippy/tests/ui/collection_is_never_read.stderr
new file mode 100644
index 000000000..cf51a5368
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collection_is_never_read.stderr
@@ -0,0 +1,76 @@
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:21:5
+ |
+LL | let mut x = HashMap::new(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::collection-is-never-read` implied by `-D warnings`
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:60:5
+ |
+LL | let mut x = vec![1, 2, 3]; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:75:5
+ |
+LL | let mut x = HashMap::new(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:80:5
+ |
+LL | let mut x = HashMap::new(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:88:5
+ |
+LL | let mut x = vec![1, 2, 3]; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:94:5
+ |
+LL | let mut x = vec![1, 2, 3]; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:100:5
+ |
+LL | let mut x = vec![1, 2, 3]; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:107:5
+ |
+LL | let mut x = vec![1, 2, 3]; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:124:5
+ |
+LL | let mut x = HashSet::new(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:138:5
+ |
+LL | let x = vec![1, 2, 3]; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:174:5
+ |
+LL | let mut s = String::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: collection is never read
+ --> $DIR/collection_is_never_read.rs:187:5
+ |
+LL | let mut s = String::from("Hello, World!");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 12 previous errors
+
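
The core distinction exercised by the `collection_is_never_read` fixtures above, condensed into a sketch:

    use std::collections::HashMap;

    fn sketch() {
        // Linted: the map is only ever written to, never read back.
        let mut write_only = HashMap::new();
        write_only.insert(1, 2);

        // Not linted: a read (`len`) observes the collection, so it counts as used.
        let mut read_back = HashMap::new();
        read_back.insert(1, 2);
        let _ = read_back.len();
    }
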
diff --git a/src/tools/clippy/tests/ui/crashes/ice-10148.rs b/src/tools/clippy/tests/ui/crashes/ice-10148.rs
new file mode 100644
index 000000000..1ab3570c9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-10148.rs
@@ -0,0 +1,9 @@
+// aux-build:../../auxiliary/proc_macros.rs
+
+extern crate proc_macros;
+
+use proc_macros::with_span;
+
+fn main() {
+ println!(with_span!(""something ""));
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-10148.stderr b/src/tools/clippy/tests/ui/crashes/ice-10148.stderr
new file mode 100644
index 000000000..f23e4433f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-10148.stderr
@@ -0,0 +1,12 @@
+error: empty string literal in `println!`
+ --> $DIR/ice-10148.rs:8:5
+ |
+LL | println!(with_span!(""something ""));
+ | ^^^^^^^^^^^^^^^^^^^^-----------^^^^^
+ | |
+ | help: remove the empty string
+ |
+ = note: `-D clippy::println-empty-string` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6179.rs b/src/tools/clippy/tests/ui/crashes/ice-6179.rs
index 4fe92d356..ce1895851 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6179.rs
+++ b/src/tools/clippy/tests/ui/crashes/ice-6179.rs
@@ -2,7 +2,7 @@
//! The ICE is mainly caused by using `hir_ty_to_ty`. See the discussion in the PR for details.
#![warn(clippy::use_self)]
-#![allow(dead_code)]
+#![allow(dead_code, clippy::let_with_type_underscore)]
struct Foo;
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6252.stderr b/src/tools/clippy/tests/ui/crashes/ice-6252.stderr
index efdd56dd4..4787282f5 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6252.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-6252.stderr
@@ -6,11 +6,11 @@ LL | _n: PhantomData,
|
help: consider importing one of these items
|
-LL | use core::marker::PhantomData;
+LL + use core::marker::PhantomData;
|
-LL | use serde::__private::PhantomData;
+LL + use serde::__private::PhantomData;
|
-LL | use std::marker::PhantomData;
+LL + use std::marker::PhantomData;
|
error[E0412]: cannot find type `VAL` in this scope
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6254.stderr b/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
index 22d82a30c..263c27d3d 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
@@ -6,6 +6,8 @@ LL | FOO_REF_REF => {},
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #62411 <https://github.com/rust-lang/rust/issues/62411>
+ = note: the traits must be derived, manual `impl`s are not sufficient
+ = note: see https://doc.rust-lang.org/stable/std/marker/trait.StructuralEq.html for details
= note: `-D indirect-structural-match` implied by `-D warnings`
error: aborting due to previous error
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed
index a370ccc76..42c15d6a7 100644
--- a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::default_numeric_fallback)]
#![allow(
@@ -9,11 +9,12 @@
clippy::unnecessary_operation,
clippy::branches_sharing_code,
clippy::match_single_binding,
- clippy::let_unit_value
+ clippy::let_unit_value,
+ clippy::let_with_type_underscore
)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
mod basic_expr {
fn test() {
@@ -166,20 +167,17 @@ mod method_calls {
}
mod in_macro {
- macro_rules! internal_macro {
- () => {
- let x = 22.0_f64;
- };
- }
+ use super::*;
// Should lint in internal macro.
+ #[inline_macros]
fn internal() {
- internal_macro!();
+ inline!(let x = 22.0_f64;);
}
// Should NOT lint in external macro.
fn external() {
- default_numeric_fallback!();
+ external!(let x = 22.;);
}
}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs
index 2476fe951..7da7ea254 100644
--- a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::default_numeric_fallback)]
#![allow(
@@ -9,11 +9,12 @@
clippy::unnecessary_operation,
clippy::branches_sharing_code,
clippy::match_single_binding,
- clippy::let_unit_value
+ clippy::let_unit_value,
+ clippy::let_with_type_underscore
)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
mod basic_expr {
fn test() {
@@ -166,20 +167,17 @@ mod method_calls {
}
mod in_macro {
- macro_rules! internal_macro {
- () => {
- let x = 22.;
- };
- }
+ use super::*;
// Should lint in internal macro.
+ #[inline_macros]
fn internal() {
- internal_macro!();
+ inline!(let x = 22.;);
}
// Should NOT lint in external macro.
fn external() {
- default_numeric_fallback!();
+ external!(let x = 22.;);
}
}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr
index 5df2f6423..b949cd1d5 100644
--- a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr
@@ -1,5 +1,5 @@
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:21:17
+ --> $DIR/default_numeric_fallback_f64.rs:22:17
|
LL | let x = 0.12;
| ^^^^ help: consider adding suffix: `0.12_f64`
@@ -7,147 +7,144 @@ LL | let x = 0.12;
= note: `-D clippy::default-numeric-fallback` implied by `-D warnings`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:22:18
+ --> $DIR/default_numeric_fallback_f64.rs:23:18
|
LL | let x = [1., 2., 3.];
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:22:22
+ --> $DIR/default_numeric_fallback_f64.rs:23:22
|
LL | let x = [1., 2., 3.];
| ^^ help: consider adding suffix: `2.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:22:26
+ --> $DIR/default_numeric_fallback_f64.rs:23:26
|
LL | let x = [1., 2., 3.];
| ^^ help: consider adding suffix: `3.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:23:28
+ --> $DIR/default_numeric_fallback_f64.rs:24:28
|
LL | let x = if true { (1., 2.) } else { (3., 4.) };
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:23:32
+ --> $DIR/default_numeric_fallback_f64.rs:24:32
|
LL | let x = if true { (1., 2.) } else { (3., 4.) };
| ^^ help: consider adding suffix: `2.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:23:46
+ --> $DIR/default_numeric_fallback_f64.rs:24:46
|
LL | let x = if true { (1., 2.) } else { (3., 4.) };
| ^^ help: consider adding suffix: `3.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:23:50
+ --> $DIR/default_numeric_fallback_f64.rs:24:50
|
LL | let x = if true { (1., 2.) } else { (3., 4.) };
| ^^ help: consider adding suffix: `4.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:24:23
+ --> $DIR/default_numeric_fallback_f64.rs:25:23
|
LL | let x = match 1. {
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:25:18
+ --> $DIR/default_numeric_fallback_f64.rs:26:18
|
LL | _ => 1.,
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:44:21
+ --> $DIR/default_numeric_fallback_f64.rs:45:21
|
LL | let y = 1.;
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:52:21
+ --> $DIR/default_numeric_fallback_f64.rs:53:21
|
LL | let y = 1.;
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:58:21
+ --> $DIR/default_numeric_fallback_f64.rs:59:21
|
LL | let y = 1.;
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:66:21
+ --> $DIR/default_numeric_fallback_f64.rs:67:21
|
LL | let y = 1.;
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:78:9
+ --> $DIR/default_numeric_fallback_f64.rs:79:9
|
LL | 1.
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:84:27
+ --> $DIR/default_numeric_fallback_f64.rs:85:27
|
LL | let f = || -> _ { 1. };
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:88:29
+ --> $DIR/default_numeric_fallback_f64.rs:89:29
|
LL | let f = || -> f64 { 1. };
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:102:21
+ --> $DIR/default_numeric_fallback_f64.rs:103:21
|
LL | generic_arg(1.);
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:105:32
+ --> $DIR/default_numeric_fallback_f64.rs:106:32
|
LL | let x: _ = generic_arg(1.);
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:123:28
+ --> $DIR/default_numeric_fallback_f64.rs:124:28
|
LL | GenericStruct { x: 1. };
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:126:36
+ --> $DIR/default_numeric_fallback_f64.rs:127:36
|
LL | let _ = GenericStruct { x: 1. };
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:144:24
+ --> $DIR/default_numeric_fallback_f64.rs:145:24
|
LL | GenericEnum::X(1.);
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:164:23
+ --> $DIR/default_numeric_fallback_f64.rs:165:23
|
LL | s.generic_arg(1.);
| ^^ help: consider adding suffix: `1.0_f64`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_f64.rs:171:21
+ --> $DIR/default_numeric_fallback_f64.rs:175:25
|
-LL | let x = 22.;
- | ^^^ help: consider adding suffix: `22.0_f64`
-...
-LL | internal_macro!();
- | ----------------- in this macro invocation
+LL | inline!(let x = 22.;);
+ | ^^^ help: consider adding suffix: `22.0_f64`
|
- = note: this error originates in the macro `internal_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_internal` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 24 previous errors
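Illustrative sketch (not from the test files): every expectation in the f64/i32 stderr files above exercises the same shape — a literal whose type is left to the default numeric fallback, with the fix being an explicit suffix. A minimal Rust example with made-up names:

fn sketch() {
    // Nothing constrains the literal, so it falls back to f64;
    // clippy::default_numeric_fallback points here.
    let _implicit = 22.;
    // The shape of the suggested fix (`consider adding suffix`).
    let _explicit = 22.0_f64;
}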
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed
index 3f4994f04..b7485b73d 100644
--- a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![feature(lint_reasons)]
#![warn(clippy::default_numeric_fallback)]
@@ -9,11 +9,12 @@
clippy::no_effect,
clippy::unnecessary_operation,
clippy::branches_sharing_code,
- clippy::let_unit_value
+ clippy::let_unit_value,
+ clippy::let_with_type_underscore
)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
mod basic_expr {
fn test() {
@@ -167,20 +168,17 @@ mod method_calls {
}
mod in_macro {
- macro_rules! internal_macro {
- () => {
- let x = 22_i32;
- };
- }
+ use super::*;
// Should lint in internal macro.
+ #[inline_macros]
fn internal() {
- internal_macro!();
+ inline!(let x = 22_i32;);
}
// Should NOT lint in external macro.
fn external() {
- default_numeric_fallback!();
+ external!(let x = 22;);
}
}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs
index 2df0e0978..7307d3135 100644
--- a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![feature(lint_reasons)]
#![warn(clippy::default_numeric_fallback)]
@@ -9,11 +9,12 @@
clippy::no_effect,
clippy::unnecessary_operation,
clippy::branches_sharing_code,
- clippy::let_unit_value
+ clippy::let_unit_value,
+ clippy::let_with_type_underscore
)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
mod basic_expr {
fn test() {
@@ -167,20 +168,17 @@ mod method_calls {
}
mod in_macro {
- macro_rules! internal_macro {
- () => {
- let x = 22;
- };
- }
+ use super::*;
// Should lint in internal macro.
+ #[inline_macros]
fn internal() {
- internal_macro!();
+ inline!(let x = 22;);
}
// Should NOT lint in external macro.
fn external() {
- default_numeric_fallback!();
+ external!(let x = 22;);
}
}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr
index 6f219c3fc..48cd28102 100644
--- a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr
@@ -1,5 +1,5 @@
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:21:17
+ --> $DIR/default_numeric_fallback_i32.rs:22:17
|
LL | let x = 22;
| ^^ help: consider adding suffix: `22_i32`
@@ -7,159 +7,156 @@ LL | let x = 22;
= note: `-D clippy::default-numeric-fallback` implied by `-D warnings`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:22:18
+ --> $DIR/default_numeric_fallback_i32.rs:23:18
|
LL | let x = [1, 2, 3];
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:22:21
+ --> $DIR/default_numeric_fallback_i32.rs:23:21
|
LL | let x = [1, 2, 3];
| ^ help: consider adding suffix: `2_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:22:24
+ --> $DIR/default_numeric_fallback_i32.rs:23:24
|
LL | let x = [1, 2, 3];
| ^ help: consider adding suffix: `3_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:23:28
+ --> $DIR/default_numeric_fallback_i32.rs:24:28
|
LL | let x = if true { (1, 2) } else { (3, 4) };
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:23:31
+ --> $DIR/default_numeric_fallback_i32.rs:24:31
|
LL | let x = if true { (1, 2) } else { (3, 4) };
| ^ help: consider adding suffix: `2_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:23:44
+ --> $DIR/default_numeric_fallback_i32.rs:24:44
|
LL | let x = if true { (1, 2) } else { (3, 4) };
| ^ help: consider adding suffix: `3_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:23:47
+ --> $DIR/default_numeric_fallback_i32.rs:24:47
|
LL | let x = if true { (1, 2) } else { (3, 4) };
| ^ help: consider adding suffix: `4_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:24:23
+ --> $DIR/default_numeric_fallback_i32.rs:25:23
|
LL | let x = match 1 {
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:25:13
+ --> $DIR/default_numeric_fallback_i32.rs:26:13
|
LL | 1 => 1,
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:25:18
+ --> $DIR/default_numeric_fallback_i32.rs:26:18
|
LL | 1 => 1,
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:26:18
+ --> $DIR/default_numeric_fallback_i32.rs:27:18
|
LL | _ => 2,
| ^ help: consider adding suffix: `2_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:45:21
+ --> $DIR/default_numeric_fallback_i32.rs:46:21
|
LL | let y = 1;
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:53:21
+ --> $DIR/default_numeric_fallback_i32.rs:54:21
|
LL | let y = 1;
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:59:21
+ --> $DIR/default_numeric_fallback_i32.rs:60:21
|
LL | let y = 1;
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:67:21
+ --> $DIR/default_numeric_fallback_i32.rs:68:21
|
LL | let y = 1;
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:79:9
+ --> $DIR/default_numeric_fallback_i32.rs:80:9
|
LL | 1
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:85:27
+ --> $DIR/default_numeric_fallback_i32.rs:86:27
|
LL | let f = || -> _ { 1 };
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:89:29
+ --> $DIR/default_numeric_fallback_i32.rs:90:29
|
LL | let f = || -> i32 { 1 };
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:103:21
+ --> $DIR/default_numeric_fallback_i32.rs:104:21
|
LL | generic_arg(1);
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:106:32
+ --> $DIR/default_numeric_fallback_i32.rs:107:32
|
LL | let x: _ = generic_arg(1);
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:124:28
+ --> $DIR/default_numeric_fallback_i32.rs:125:28
|
LL | GenericStruct { x: 1 };
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:127:36
+ --> $DIR/default_numeric_fallback_i32.rs:128:36
|
LL | let _ = GenericStruct { x: 1 };
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:145:24
+ --> $DIR/default_numeric_fallback_i32.rs:146:24
|
LL | GenericEnum::X(1);
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:165:23
+ --> $DIR/default_numeric_fallback_i32.rs:166:23
|
LL | s.generic_arg(1);
| ^ help: consider adding suffix: `1_i32`
error: default numeric fallback might occur
- --> $DIR/default_numeric_fallback_i32.rs:172:21
+ --> $DIR/default_numeric_fallback_i32.rs:176:25
|
-LL | let x = 22;
- | ^^ help: consider adding suffix: `22_i32`
-...
-LL | internal_macro!();
- | ----------------- in this macro invocation
+LL | inline!(let x = 22;);
+ | ^^ help: consider adding suffix: `22_i32`
|
- = note: this error originates in the macro `internal_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_internal` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 26 previous errors
diff --git a/src/tools/clippy/tests/ui/default_trait_access.fixed b/src/tools/clippy/tests/ui/default_trait_access.fixed
index 5640599d4..7842ef3ec 100644
--- a/src/tools/clippy/tests/ui/default_trait_access.fixed
+++ b/src/tools/clippy/tests/ui/default_trait_access.fixed
@@ -1,12 +1,12 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![deny(clippy::default_trait_access)]
#![allow(dead_code, unused_imports)]
#![allow(clippy::uninlined_format_args)]
-extern crate proc_macro_with_span;
+extern crate proc_macros;
-use proc_macro_with_span::with_span;
+use proc_macros::with_span;
use std::default;
use std::default::Default as D2;
use std::string;
diff --git a/src/tools/clippy/tests/ui/default_trait_access.rs b/src/tools/clippy/tests/ui/default_trait_access.rs
index 11d4bc5c5..cbb3e59c9 100644
--- a/src/tools/clippy/tests/ui/default_trait_access.rs
+++ b/src/tools/clippy/tests/ui/default_trait_access.rs
@@ -1,12 +1,12 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![deny(clippy::default_trait_access)]
#![allow(dead_code, unused_imports)]
#![allow(clippy::uninlined_format_args)]
-extern crate proc_macro_with_span;
+extern crate proc_macros;
-use proc_macro_with_span::with_span;
+use proc_macros::with_span;
use std::default;
use std::default::Default as D2;
use std::string;
diff --git a/src/tools/clippy/tests/ui/deref_addrof.fixed b/src/tools/clippy/tests/ui/deref_addrof.fixed
index 2f489deb1..ca5c03304 100644
--- a/src/tools/clippy/tests/ui/deref_addrof.fixed
+++ b/src/tools/clippy/tests/ui/deref_addrof.fixed
@@ -1,7 +1,12 @@
// run-rustfix
+// aux-build:proc_macros.rs
+
#![allow(clippy::return_self_not_must_use)]
#![warn(clippy::deref_addrof)]
+extern crate proc_macros;
+use proc_macros::inline_macros;
+
fn get_number() -> usize {
10
}
@@ -41,28 +46,15 @@ fn main() {
let _ = unsafe { *core::ptr::addr_of!(a) };
}
-#[rustfmt::skip]
-macro_rules! m {
- ($visitor: expr) => {
- $visitor
- };
-}
-
-#[rustfmt::skip]
-macro_rules! m_mut {
- ($visitor: expr) => {
- $visitor
- };
-}
-
#[derive(Copy, Clone)]
pub struct S;
+#[inline_macros]
impl S {
pub fn f(&self) -> &Self {
- m!(self)
+ inline!($(@expr self))
}
#[allow(unused_mut)] // mut will be unused, once the macro is fixed
pub fn f_mut(mut self) -> Self {
- m_mut!(self)
+ inline!($(@expr self))
}
}
diff --git a/src/tools/clippy/tests/ui/deref_addrof.rs b/src/tools/clippy/tests/ui/deref_addrof.rs
index 49f360b9a..3db5fafe9 100644
--- a/src/tools/clippy/tests/ui/deref_addrof.rs
+++ b/src/tools/clippy/tests/ui/deref_addrof.rs
@@ -1,7 +1,12 @@
// run-rustfix
+// aux-build:proc_macros.rs
+
#![allow(clippy::return_self_not_must_use)]
#![warn(clippy::deref_addrof)]
+extern crate proc_macros;
+use proc_macros::inline_macros;
+
fn get_number() -> usize {
10
}
@@ -41,28 +46,15 @@ fn main() {
let _ = unsafe { *core::ptr::addr_of!(a) };
}
-#[rustfmt::skip]
-macro_rules! m {
- ($visitor: expr) => {
- *& $visitor
- };
-}
-
-#[rustfmt::skip]
-macro_rules! m_mut {
- ($visitor: expr) => {
- *& mut $visitor
- };
-}
-
#[derive(Copy, Clone)]
pub struct S;
+#[inline_macros]
impl S {
pub fn f(&self) -> &Self {
- m!(self)
+ inline!(*& $(@expr self))
}
#[allow(unused_mut)] // mut will be unused, once the macro is fixed
pub fn f_mut(mut self) -> Self {
- m_mut!(self)
+ inline!(*&mut $(@expr self))
}
}
diff --git a/src/tools/clippy/tests/ui/deref_addrof.stderr b/src/tools/clippy/tests/ui/deref_addrof.stderr
index 75371fcdb..e0287522f 100644
--- a/src/tools/clippy/tests/ui/deref_addrof.stderr
+++ b/src/tools/clippy/tests/ui/deref_addrof.stderr
@@ -1,5 +1,5 @@
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:19:13
+ --> $DIR/deref_addrof.rs:24:13
|
LL | let b = *&a;
| ^^^ help: try this: `a`
@@ -7,68 +7,62 @@ LL | let b = *&a;
= note: `-D clippy::deref-addrof` implied by `-D warnings`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:21:13
+ --> $DIR/deref_addrof.rs:26:13
|
LL | let b = *&get_number();
| ^^^^^^^^^^^^^^ help: try this: `get_number()`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:26:13
+ --> $DIR/deref_addrof.rs:31:13
|
LL | let b = *&bytes[1..2][0];
| ^^^^^^^^^^^^^^^^ help: try this: `bytes[1..2][0]`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:30:13
+ --> $DIR/deref_addrof.rs:35:13
|
LL | let b = *&(a);
| ^^^^^ help: try this: `(a)`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:32:13
+ --> $DIR/deref_addrof.rs:37:13
|
LL | let b = *(&a);
| ^^^^^ help: try this: `a`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:35:13
+ --> $DIR/deref_addrof.rs:40:13
|
LL | let b = *((&a));
| ^^^^^^^ help: try this: `a`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:37:13
+ --> $DIR/deref_addrof.rs:42:13
|
LL | let b = *&&a;
| ^^^^ help: try this: `&a`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:39:14
+ --> $DIR/deref_addrof.rs:44:14
|
LL | let b = **&aref;
| ^^^^^^ help: try this: `aref`
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:47:9
+ --> $DIR/deref_addrof.rs:54:17
|
-LL | *& $visitor
- | ^^^^^^^^^^^ help: try this: `$visitor`
-...
-LL | m!(self)
- | -------- in this macro invocation
+LL | inline!(*& $(@expr self))
+ | ^^^^^^^^^^^^^^^^ help: try this: `$(@expr self)`
|
- = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_impl` (in Nightly builds, run with -Z macro-backtrace for more info)
error: immediately dereferencing a reference
- --> $DIR/deref_addrof.rs:54:9
+ --> $DIR/deref_addrof.rs:58:17
|
-LL | *& mut $visitor
- | ^^^^^^^^^^^^^^^ help: try this: `$visitor`
-...
-LL | m_mut!(self)
- | ------------ in this macro invocation
+LL | inline!(*&mut $(@expr self))
+ | ^^^^^^^^^^^^^^^^^^^ help: try this: `$(@expr self)`
|
- = note: this error originates in the macro `m_mut` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_impl` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 10 previous errors
diff --git a/src/tools/clippy/tests/ui/deref_addrof_macro.rs b/src/tools/clippy/tests/ui/deref_addrof_macro.rs
index dcebd6c6e..57c0be3f5 100644
--- a/src/tools/clippy/tests/ui/deref_addrof_macro.rs
+++ b/src/tools/clippy/tests/ui/deref_addrof_macro.rs
@@ -1,10 +1,13 @@
-macro_rules! m {
- ($($x:tt),*) => { &[$(($x, stringify!(x)),)*] };
-}
+// aux-build:proc_macros.rs
+
+#![warn(clippy::deref_addrof)]
+
+extern crate proc_macros;
-#[warn(clippy::deref_addrof)]
-fn f() -> [(i32, &'static str); 3] {
- *m![1, 2, 3] // should be fine
+#[proc_macros::inline_macros]
+fn f() -> i32 {
+ // should be fine
+ *inline!(&$1)
}
fn main() {}
diff --git a/src/tools/clippy/tests/ui/derivable_impls.fixed b/src/tools/clippy/tests/ui/derivable_impls.fixed
index ee8456f5d..89ec33a0d 100644
--- a/src/tools/clippy/tests/ui/derivable_impls.fixed
+++ b/src/tools/clippy/tests/ui/derivable_impls.fixed
@@ -231,4 +231,41 @@ impl Default for NonExhaustiveEnum {
}
}
+// https://github.com/rust-lang/rust-clippy/issues/10396
+
+#[derive(Default)]
+struct DefaultType;
+
+struct GenericType<T = DefaultType> {
+ t: T,
+}
+
+impl Default for GenericType {
+ fn default() -> Self {
+ Self { t: Default::default() }
+ }
+}
+
+struct InnerGenericType<T> {
+ t: T,
+}
+
+impl Default for InnerGenericType<DefaultType> {
+ fn default() -> Self {
+ Self { t: Default::default() }
+ }
+}
+
+struct OtherGenericType<T = DefaultType> {
+ inner: InnerGenericType<T>,
+}
+
+impl Default for OtherGenericType {
+ fn default() -> Self {
+ Self {
+ inner: Default::default(),
+ }
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/derivable_impls.rs b/src/tools/clippy/tests/ui/derivable_impls.rs
index 14af419bc..def6e4116 100644
--- a/src/tools/clippy/tests/ui/derivable_impls.rs
+++ b/src/tools/clippy/tests/ui/derivable_impls.rs
@@ -267,4 +267,41 @@ impl Default for NonExhaustiveEnum {
}
}
+// https://github.com/rust-lang/rust-clippy/issues/10396
+
+#[derive(Default)]
+struct DefaultType;
+
+struct GenericType<T = DefaultType> {
+ t: T,
+}
+
+impl Default for GenericType {
+ fn default() -> Self {
+ Self { t: Default::default() }
+ }
+}
+
+struct InnerGenericType<T> {
+ t: T,
+}
+
+impl Default for InnerGenericType<DefaultType> {
+ fn default() -> Self {
+ Self { t: Default::default() }
+ }
+}
+
+struct OtherGenericType<T = DefaultType> {
+ inner: InnerGenericType<T>,
+}
+
+impl Default for OtherGenericType {
+ fn default() -> Self {
+ Self {
+ inner: Default::default(),
+ }
+ }
+}
+
fn main() {}
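A note on the issue 10396 cases above: `#[derive(Default)]` on a type with a default type parameter expands, roughly, to a blanket impl over every `T: Default`, while the manual impls in the test only cover the defaulted parameter, so suggesting the derive would change the API. Hedged sketch, names illustrative:

#[derive(Default)]
struct Derived<T = u32> {
    t: T,
}
// The derive expands to (approximately):
//     impl<T: Default> Default for Derived<T> { .. }
// i.e. an impl for every `T: Default`, not just the default `u32`,
// which is why a manual `impl Default for Derived` is not equivalent
// to the derive and clippy::derivable_impls leaves it alone.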
diff --git a/src/tools/clippy/tests/ui/derivable_impls.stderr b/src/tools/clippy/tests/ui/derivable_impls.stderr
index 81963c3be..8089f5ea0 100644
--- a/src/tools/clippy/tests/ui/derivable_impls.stderr
+++ b/src/tools/clippy/tests/ui/derivable_impls.stderr
@@ -14,7 +14,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | struct FooDefault<'a> {
|
error: this `impl` can be derived
@@ -30,7 +31,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | struct TupleDefault(bool, i32, u64);
|
error: this `impl` can be derived
@@ -46,7 +48,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | struct StrDefault<'a>(&'a str);
|
error: this `impl` can be derived
@@ -62,7 +65,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | struct Y(u32);
|
error: this `impl` can be derived
@@ -78,7 +82,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | struct WithoutSelfCurly {
|
error: this `impl` can be derived
@@ -94,7 +99,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | struct WithoutSelfParan(bool);
|
error: this `impl` can be derived
@@ -110,7 +116,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | pub struct RepeatDefault1 {
|
error: this `impl` can be derived
@@ -126,7 +133,8 @@ LL | | }
= help: remove the manual implementation...
help: ...and instead derive it...
|
-LL | #[derive(Default)]
+LL + #[derive(Default)]
+LL | pub enum SimpleEnum {
|
help: ...and mark the default variant
|
diff --git a/src/tools/clippy/tests/ui/doc_unsafe.rs b/src/tools/clippy/tests/ui/doc_unsafe.rs
index b91f7aa0d..30674ce37 100644
--- a/src/tools/clippy/tests/ui/doc_unsafe.rs
+++ b/src/tools/clippy/tests/ui/doc_unsafe.rs
@@ -1,9 +1,9 @@
-// aux-build:doc_unsafe_macros.rs
+// aux-build:proc_macros.rs
#![allow(clippy::let_unit_value)]
-#[macro_use]
-extern crate doc_unsafe_macros;
+extern crate proc_macros;
+use proc_macros::external;
/// This is not sufficiently documented
pub unsafe fn destroy_the_planet() {
@@ -105,7 +105,11 @@ macro_rules! very_unsafe {
very_unsafe!();
// we don't lint code from external macros
-undocd_unsafe!();
+external! {
+ pub unsafe fn oy_vey() {
+ unimplemented!();
+ }
+}
fn main() {
unsafe {
diff --git a/src/tools/clippy/tests/ui/double_must_use.rs b/src/tools/clippy/tests/ui/double_must_use.rs
index 05e087b08..26a387b3c 100644
--- a/src/tools/clippy/tests/ui/double_must_use.rs
+++ b/src/tools/clippy/tests/ui/double_must_use.rs
@@ -21,6 +21,17 @@ pub fn must_use_with_note() -> Result<(), ()> {
unimplemented!();
}
+// vvvv Should not lint (#10486)
+#[must_use]
+async fn async_must_use() -> usize {
+ unimplemented!();
+}
+
+#[must_use]
+async fn async_must_use_result() -> Result<(), ()> {
+ Ok(())
+}
+
fn main() {
must_use_result();
must_use_tuple();
diff --git a/src/tools/clippy/tests/ui/double_must_use.stderr b/src/tools/clippy/tests/ui/double_must_use.stderr
index 3d34557a8..49ab2ea3e 100644
--- a/src/tools/clippy/tests/ui/double_must_use.stderr
+++ b/src/tools/clippy/tests/ui/double_must_use.stderr
@@ -23,5 +23,13 @@ LL | pub fn must_use_array() -> [Result<(), ()>; 1] {
|
= help: either add some descriptive text or remove the attribute
-error: aborting due to 3 previous errors
+error: this function has an empty `#[must_use]` attribute, but returns a type already marked as `#[must_use]`
+ --> $DIR/double_must_use.rs:31:1
+ |
+LL | async fn async_must_use_result() -> Result<(), ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: either add some descriptive text or remove the attribute
+
+error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/empty_loop.rs b/src/tools/clippy/tests/ui/empty_loop.rs
index 8fd7697eb..6a8e6b550 100644
--- a/src/tools/clippy/tests/ui/empty_loop.rs
+++ b/src/tools/clippy/tests/ui/empty_loop.rs
@@ -1,9 +1,9 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::empty_loop)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
fn should_trigger() {
loop {}
@@ -16,6 +16,7 @@ fn should_trigger() {
}
}
+#[inline_macros]
fn should_not_trigger() {
loop {
panic!("This is fine")
@@ -38,14 +39,10 @@ fn should_not_trigger() {
loop {}
// We don't lint loops inside macros
- macro_rules! foo {
- () => {
- loop {}
- };
- }
+ inline!(loop {});
// We don't lint external macros
- foofoo!()
+ external!(loop {});
}
fn main() {}
diff --git a/src/tools/clippy/tests/ui/equatable_if_let.fixed b/src/tools/clippy/tests/ui/equatable_if_let.fixed
index 9af2ba962..007702ab5 100644
--- a/src/tools/clippy/tests/ui/equatable_if_let.fixed
+++ b/src/tools/clippy/tests/ui/equatable_if_let.fixed
@@ -1,11 +1,11 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![allow(unused_variables, dead_code, clippy::derive_partial_eq_without_eq)]
#![warn(clippy::equatable_if_let)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
use std::cmp::Ordering;
@@ -44,6 +44,7 @@ impl PartialEq for NotStructuralEq {
}
}
+#[inline_macros]
fn main() {
let a = 2;
let b = 3;
@@ -78,14 +79,9 @@ fn main() {
if Some(g) == Some(NotStructuralEq::A) {}
if matches!(h, NoPartialEqStruct { a: 2, b: false }) {}
- macro_rules! m1 {
- (x) => {
- "abc"
- };
- }
- if "abc" == m1!(x) {
+ if "abc" == inline!("abc") {
println!("OK");
}
- equatable_if_let!(a);
+ external!({ if let 2 = $a {} });
}
diff --git a/src/tools/clippy/tests/ui/equatable_if_let.rs b/src/tools/clippy/tests/ui/equatable_if_let.rs
index c3626c081..3bda79776 100644
--- a/src/tools/clippy/tests/ui/equatable_if_let.rs
+++ b/src/tools/clippy/tests/ui/equatable_if_let.rs
@@ -1,11 +1,11 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![allow(unused_variables, dead_code, clippy::derive_partial_eq_without_eq)]
#![warn(clippy::equatable_if_let)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
use std::cmp::Ordering;
@@ -44,6 +44,7 @@ impl PartialEq for NotStructuralEq {
}
}
+#[inline_macros]
fn main() {
let a = 2;
let b = 3;
@@ -78,14 +79,9 @@ fn main() {
if let Some(NotStructuralEq::A) = Some(g) {}
if let NoPartialEqStruct { a: 2, b: false } = h {}
- macro_rules! m1 {
- (x) => {
- "abc"
- };
- }
- if let m1!(x) = "abc" {
+ if let inline!("abc") = "abc" {
println!("OK");
}
- equatable_if_let!(a);
+ external!({ if let 2 = $a {} });
}
diff --git a/src/tools/clippy/tests/ui/equatable_if_let.stderr b/src/tools/clippy/tests/ui/equatable_if_let.stderr
index 40ca75b8d..a72d87bb7 100644
--- a/src/tools/clippy/tests/ui/equatable_if_let.stderr
+++ b/src/tools/clippy/tests/ui/equatable_if_let.stderr
@@ -1,5 +1,5 @@
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:59:8
+ --> $DIR/equatable_if_let.rs:60:8
|
LL | if let 2 = a {}
| ^^^^^^^^^ help: try: `a == 2`
@@ -7,82 +7,82 @@ LL | if let 2 = a {}
= note: `-D clippy::equatable-if-let` implied by `-D warnings`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:60:8
+ --> $DIR/equatable_if_let.rs:61:8
|
LL | if let Ordering::Greater = a.cmp(&b) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.cmp(&b) == Ordering::Greater`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:61:8
+ --> $DIR/equatable_if_let.rs:62:8
|
LL | if let Some(2) = c {}
| ^^^^^^^^^^^^^^^ help: try: `c == Some(2)`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:62:8
+ --> $DIR/equatable_if_let.rs:63:8
|
LL | if let Struct { a: 2, b: false } = d {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `d == (Struct { a: 2, b: false })`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:63:8
+ --> $DIR/equatable_if_let.rs:64:8
|
LL | if let Enum::TupleVariant(32, 64) = e {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `e == Enum::TupleVariant(32, 64)`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:64:8
+ --> $DIR/equatable_if_let.rs:65:8
|
LL | if let Enum::RecordVariant { a: 64, b: 32 } = e {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `e == (Enum::RecordVariant { a: 64, b: 32 })`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:65:8
+ --> $DIR/equatable_if_let.rs:66:8
|
LL | if let Enum::UnitVariant = e {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `e == Enum::UnitVariant`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:66:8
+ --> $DIR/equatable_if_let.rs:67:8
|
LL | if let (Enum::UnitVariant, &Struct { a: 2, b: false }) = (e, &d) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(e, &d) == (Enum::UnitVariant, &Struct { a: 2, b: false })`
error: this pattern matching can be expressed using `matches!`
- --> $DIR/equatable_if_let.rs:75:8
+ --> $DIR/equatable_if_let.rs:76:8
|
LL | if let NotPartialEq::A = f {}
| ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(f, NotPartialEq::A)`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:76:8
+ --> $DIR/equatable_if_let.rs:77:8
|
LL | if let NotStructuralEq::A = g {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `g == NotStructuralEq::A`
error: this pattern matching can be expressed using `matches!`
- --> $DIR/equatable_if_let.rs:77:8
+ --> $DIR/equatable_if_let.rs:78:8
|
LL | if let Some(NotPartialEq::A) = Some(f) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(Some(f), Some(NotPartialEq::A))`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:78:8
+ --> $DIR/equatable_if_let.rs:79:8
|
LL | if let Some(NotStructuralEq::A) = Some(g) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(g) == Some(NotStructuralEq::A)`
error: this pattern matching can be expressed using `matches!`
- --> $DIR/equatable_if_let.rs:79:8
+ --> $DIR/equatable_if_let.rs:80:8
|
LL | if let NoPartialEqStruct { a: 2, b: false } = h {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(h, NoPartialEqStruct { a: 2, b: false })`
error: this pattern matching can be expressed using equality
- --> $DIR/equatable_if_let.rs:86:8
+ --> $DIR/equatable_if_let.rs:82:8
|
-LL | if let m1!(x) = "abc" {
- | ^^^^^^^^^^^^^^^^^^ help: try: `"abc" == m1!(x)`
+LL | if let inline!("abc") = "abc" {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"abc" == inline!("abc")`
error: aborting due to 14 previous errors
diff --git a/src/tools/clippy/tests/ui/erasing_op.rs b/src/tools/clippy/tests/ui/erasing_op.rs
index ae2fad008..74985029e 100644
--- a/src/tools/clippy/tests/ui/erasing_op.rs
+++ b/src/tools/clippy/tests/ui/erasing_op.rs
@@ -31,9 +31,7 @@ impl core::ops::Mul<i32> for Vec1 {
#[allow(clippy::no_effect)]
#[warn(clippy::erasing_op)]
-fn main() {
- let x: u8 = 0;
-
+fn test(x: u8) {
x * 0;
0 & x;
0 / x;
@@ -41,3 +39,7 @@ fn main() {
0 * Vec1 { x: 5 };
Vec1 { x: 5 } * 0;
}
+
+fn main() {
+ test(0)
+}
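Before the stderr expectations below, a minimal sketch of what clippy::erasing_op flags (my own example, not taken from the test):

fn always_zero(x: u8) -> u8 {
    // Both expressions evaluate to 0 no matter what `x` is,
    // which is exactly the pattern the lint warns about.
    let masked = 0 & x;
    masked * 0
}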
diff --git a/src/tools/clippy/tests/ui/erasing_op.stderr b/src/tools/clippy/tests/ui/erasing_op.stderr
index 165ed9bfe..979412523 100644
--- a/src/tools/clippy/tests/ui/erasing_op.stderr
+++ b/src/tools/clippy/tests/ui/erasing_op.stderr
@@ -1,5 +1,5 @@
error: this operation will always return zero. This is likely not the intended outcome
- --> $DIR/erasing_op.rs:37:5
+ --> $DIR/erasing_op.rs:35:5
|
LL | x * 0;
| ^^^^^
@@ -7,25 +7,25 @@ LL | x * 0;
= note: `-D clippy::erasing-op` implied by `-D warnings`
error: this operation will always return zero. This is likely not the intended outcome
- --> $DIR/erasing_op.rs:38:5
+ --> $DIR/erasing_op.rs:36:5
|
LL | 0 & x;
| ^^^^^
error: this operation will always return zero. This is likely not the intended outcome
- --> $DIR/erasing_op.rs:39:5
+ --> $DIR/erasing_op.rs:37:5
|
LL | 0 / x;
| ^^^^^
error: this operation will always return zero. This is likely not the intended outcome
- --> $DIR/erasing_op.rs:41:5
+ --> $DIR/erasing_op.rs:39:5
|
LL | 0 * Vec1 { x: 5 };
| ^^^^^^^^^^^^^^^^^
error: this operation will always return zero. This is likely not the intended outcome
- --> $DIR/erasing_op.rs:42:5
+ --> $DIR/erasing_op.rs:40:5
|
LL | Vec1 { x: 5 } * 0;
| ^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/extra_unused_type_parameters.fixed b/src/tools/clippy/tests/ui/extra_unused_type_parameters.fixed
new file mode 100644
index 000000000..19e718625
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extra_unused_type_parameters.fixed
@@ -0,0 +1,105 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_lifetimes)]
+#![warn(clippy::extra_unused_type_parameters)]
+
+fn unused_ty(x: u8) {
+ unimplemented!()
+}
+
+fn unused_multi(x: u8) {
+ unimplemented!()
+}
+
+fn unused_with_lt<'a>(x: &'a u8) {
+ unimplemented!()
+}
+
+fn used_ty<T>(x: T, y: u8) {}
+
+fn used_ref<'a, T>(x: &'a T) {}
+
+fn used_ret<T: Default>(x: u8) -> T {
+ T::default()
+}
+
+fn unused_bounded<U>(x: U) {
+ unimplemented!();
+}
+
+fn some_unused<B, C>(b: B, c: C) {
+ unimplemented!();
+}
+
+fn used_opaque<A>(iter: impl Iterator<Item = A>) -> usize {
+ iter.count()
+}
+
+fn used_ret_opaque<A>() -> impl Iterator<Item = A> {
+ std::iter::empty()
+}
+
+fn used_vec_box<T>(x: Vec<Box<T>>) {}
+
+fn used_body<T: Default + ToString>() -> String {
+ T::default().to_string()
+}
+
+fn used_closure<T: Default + ToString>() -> impl Fn() {
+ || println!("{}", T::default().to_string())
+}
+
+struct S;
+
+impl S {
+ fn unused_ty_impl(&self) {
+ unimplemented!()
+ }
+}
+
+// Don't lint on trait methods
+trait Foo {
+ fn bar<T>(&self);
+}
+
+impl Foo for S {
+ fn bar<T>(&self) {}
+}
+
+fn skip_index<A, Iter>(iter: Iter, index: usize) -> impl Iterator<Item = A>
+where
+ Iter: Iterator<Item = A>,
+{
+ iter.enumerate()
+ .filter_map(move |(i, a)| if i == index { None } else { Some(a) })
+}
+
+fn unused_opaque(dummy: impl Default) {
+ unimplemented!()
+}
+
+mod unexported_trait_bounds {
+ mod private {
+ pub trait Private {}
+ }
+
+ fn priv_trait_bound<T: private::Private>() {
+ unimplemented!();
+ }
+
+ fn unused_with_priv_trait_bound<T: private::Private>() {
+ unimplemented!();
+ }
+}
+
+mod issue10319 {
+ fn assert_send<T: Send>() {}
+
+ fn assert_send_where<T>()
+ where
+ T: Send,
+ {
+ }
+}
+
+fn main() {}
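As a quick illustration of the suggestion whose wording changes in the stderr below (`consider removing the parameter`), assuming the parameter appears nowhere in the signature or body:

// Before: `T` is declared but never used.
fn with_unused<T>(x: u8) -> u8 {
    x
}

// After applying the lint's removal suggestion.
fn without_unused(x: u8) -> u8 {
    x
}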
diff --git a/src/tools/clippy/tests/ui/extra_unused_type_parameters.rs b/src/tools/clippy/tests/ui/extra_unused_type_parameters.rs
index 480174342..e53bb587e 100644
--- a/src/tools/clippy/tests/ui/extra_unused_type_parameters.rs
+++ b/src/tools/clippy/tests/ui/extra_unused_type_parameters.rs
@@ -1,3 +1,5 @@
+// run-rustfix
+
#![allow(unused, clippy::needless_lifetimes)]
#![warn(clippy::extra_unused_type_parameters)]
@@ -21,14 +23,7 @@ fn used_ret<T: Default>(x: u8) -> T {
T::default()
}
-fn unused_bounded<T: Default, U>(x: U) {
- unimplemented!();
-}
-
-fn unused_where_clause<T, U>(x: U)
-where
- T: Default,
-{
+fn unused_bounded<T: Default, U, V: Default>(x: U) {
unimplemented!();
}
diff --git a/src/tools/clippy/tests/ui/extra_unused_type_parameters.stderr b/src/tools/clippy/tests/ui/extra_unused_type_parameters.stderr
index 86c88fc9b..c042a5a22 100644
--- a/src/tools/clippy/tests/ui/extra_unused_type_parameters.stderr
+++ b/src/tools/clippy/tests/ui/extra_unused_type_parameters.stderr
@@ -1,75 +1,64 @@
-error: type parameter goes unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:4:13
+error: type parameter `T` goes unused in function definition
+ --> $DIR/extra_unused_type_parameters.rs:6:13
|
LL | fn unused_ty<T>(x: u8) {
- | ^^^
+ | ^^^ help: consider removing the parameter
|
- = help: consider removing the parameter
= note: `-D clippy::extra-unused-type-parameters` implied by `-D warnings`
-error: type parameters go unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:8:16
+error: type parameters go unused in function definition: T, U
+ --> $DIR/extra_unused_type_parameters.rs:10:16
|
LL | fn unused_multi<T, U>(x: u8) {
- | ^^^^^^
- |
- = help: consider removing the parameters
+ | ^^^^^^ help: consider removing the parameters
-error: type parameter goes unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:12:23
+error: type parameter `T` goes unused in function definition
+ --> $DIR/extra_unused_type_parameters.rs:14:21
|
LL | fn unused_with_lt<'a, T>(x: &'a u8) {
- | ^
- |
- = help: consider removing the parameter
+ | ^^^ help: consider removing the parameter
-error: type parameter goes unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:24:19
+error: type parameters go unused in function definition: T, V
+ --> $DIR/extra_unused_type_parameters.rs:26:19
|
-LL | fn unused_bounded<T: Default, U>(x: U) {
- | ^^^^^^^^^^^
+LL | fn unused_bounded<T: Default, U, V: Default>(x: U) {
+ | ^^^^^^^^^^^^ ^^^^^^^^^^^^
|
- = help: consider removing the parameter
-
-error: type parameter goes unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:28:24
+help: consider removing the parameters
|
-LL | fn unused_where_clause<T, U>(x: U)
- | ^^
+LL - fn unused_bounded<T: Default, U, V: Default>(x: U) {
+LL + fn unused_bounded<U>(x: U) {
|
- = help: consider removing the parameter
-error: type parameters go unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:35:16
+error: type parameters go unused in function definition: A, D, E
+ --> $DIR/extra_unused_type_parameters.rs:30:16
|
LL | fn some_unused<A, B, C, D: Iterator<Item = (B, C)>, E>(b: B, c: C) {
- | ^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^
+ | ^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider removing the parameters
+ |
+LL - fn some_unused<A, B, C, D: Iterator<Item = (B, C)>, E>(b: B, c: C) {
+LL + fn some_unused<B, C>(b: B, c: C) {
|
- = help: consider removing the parameters
-error: type parameter goes unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:60:22
+error: type parameter `T` goes unused in function definition
+ --> $DIR/extra_unused_type_parameters.rs:55:22
|
LL | fn unused_ty_impl<T>(&self) {
- | ^^^
- |
- = help: consider removing the parameter
+ | ^^^ help: consider removing the parameter
-error: type parameters go unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:82:17
+error: type parameters go unused in function definition: A, B
+ --> $DIR/extra_unused_type_parameters.rs:77:17
|
LL | fn unused_opaque<A, B>(dummy: impl Default) {
- | ^^^^^^
- |
- = help: consider removing the parameters
+ | ^^^^^^ help: consider removing the parameters
-error: type parameter goes unused in function definition
- --> $DIR/extra_unused_type_parameters.rs:95:58
+error: type parameter `U` goes unused in function definition
+ --> $DIR/extra_unused_type_parameters.rs:90:56
|
LL | fn unused_with_priv_trait_bound<T: private::Private, U>() {
- | ^
- |
- = help: consider removing the parameter
+ | ^^^ help: consider removing the parameter
-error: aborting due to 9 previous errors
+error: aborting due to 8 previous errors
diff --git a/src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.rs b/src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.rs
new file mode 100644
index 000000000..10b39aa8f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.rs
@@ -0,0 +1,24 @@
+#![warn(clippy::extra_unused_type_parameters)]
+
+fn unused_where_clause<T, U>(x: U)
+where
+ T: Default,
+{
+ unimplemented!();
+}
+
+fn unused_multi_where_clause<T, U, V: Default>(x: U)
+where
+ T: Default,
+{
+ unimplemented!();
+}
+
+fn unused_all_where_clause<T, U: Default, V: Default>()
+where
+ T: Default,
+{
+ unimplemented!();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.stderr b/src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.stderr
new file mode 100644
index 000000000..a9580cc89
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extra_unused_type_parameters_unfixable.stderr
@@ -0,0 +1,27 @@
+error: type parameter `T` goes unused in function definition
+ --> $DIR/extra_unused_type_parameters_unfixable.rs:3:24
+ |
+LL | fn unused_where_clause<T, U>(x: U)
+ | ^
+ |
+ = help: consider removing the parameter
+ = note: `-D clippy::extra-unused-type-parameters` implied by `-D warnings`
+
+error: type parameters go unused in function definition: T, V
+ --> $DIR/extra_unused_type_parameters_unfixable.rs:10:30
+ |
+LL | fn unused_multi_where_clause<T, U, V: Default>(x: U)
+ | ^ ^^^^^^^^^^
+ |
+ = help: consider removing the parameters
+
+error: type parameters go unused in function definition: T, U, V
+ --> $DIR/extra_unused_type_parameters_unfixable.rs:17:28
+ |
+LL | fn unused_all_where_clause<T, U: Default, V: Default>()
+ | ^ ^^^^^^^^^^ ^^^^^^^^^^
+ |
+ = help: consider removing the parameters
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/field_reassign_with_default.rs b/src/tools/clippy/tests/ui/field_reassign_with_default.rs
index 1f989bb12..0e208b3ed 100644
--- a/src/tools/clippy/tests/ui/field_reassign_with_default.rs
+++ b/src/tools/clippy/tests/ui/field_reassign_with_default.rs
@@ -1,12 +1,12 @@
// aux-build:proc_macro_derive.rs
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::field_reassign_with_default)]
#[macro_use]
extern crate proc_macro_derive;
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
// Don't lint on derives that derive `Default`
// See https://github.com/rust-lang/rust-clippy/issues/6545
@@ -36,14 +36,6 @@ struct D {
b: Option<i32>,
}
-macro_rules! m {
- ($key:ident: $value:tt) => {{
- let mut data = $crate::D::default();
- data.$key = Some($value);
- data
- }};
-}
-
/// Implements .next() that returns a different number each time.
struct SideEffect(i32);
@@ -57,6 +49,7 @@ impl SideEffect {
}
}
+#[inline_macros]
fn main() {
// wrong, produces first error in stderr
let mut a: A = Default::default();
@@ -150,7 +143,18 @@ fn main() {
a.i = vec![1];
// Don't lint in external macros
- field_reassign_with_default!();
+ external! {
+ #[derive(Default)]
+ struct A {
+ pub i: i32,
+ pub j: i64,
+ }
+ fn lint() {
+ let mut a: A = Default::default();
+ a.i = 42;
+ a;
+ }
+ }
// be sure suggestion is correct with generics
let mut a: Wrapper<bool> = Default::default();
@@ -160,9 +164,11 @@ fn main() {
a.i = 42;
// Don't lint in macros
- m! {
- a: 42
- };
+ inline!(
+ let mut data = $crate::D::default();
+ data.$a = Some($42);
+ data
+ );
}
mod m {
diff --git a/src/tools/clippy/tests/ui/field_reassign_with_default.stderr b/src/tools/clippy/tests/ui/field_reassign_with_default.stderr
index 710bb66a4..da74f9ef9 100644
--- a/src/tools/clippy/tests/ui/field_reassign_with_default.stderr
+++ b/src/tools/clippy/tests/ui/field_reassign_with_default.stderr
@@ -1,132 +1,132 @@
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:63:5
+ --> $DIR/field_reassign_with_default.rs:56:5
|
LL | a.i = 42;
| ^^^^^^^^^
|
note: consider initializing the variable with `main::A { i: 42, ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:62:5
+ --> $DIR/field_reassign_with_default.rs:55:5
|
LL | let mut a: A = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: `-D clippy::field-reassign-with-default` implied by `-D warnings`
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:103:5
+ --> $DIR/field_reassign_with_default.rs:96:5
|
LL | a.j = 43;
| ^^^^^^^^^
|
note: consider initializing the variable with `main::A { j: 43, i: 42 }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:102:5
+ --> $DIR/field_reassign_with_default.rs:95:5
|
LL | let mut a: A = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:108:5
+ --> $DIR/field_reassign_with_default.rs:101:5
|
LL | a.i = 42;
| ^^^^^^^^^
|
note: consider initializing the variable with `main::A { i: 42, j: 44 }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:107:5
+ --> $DIR/field_reassign_with_default.rs:100:5
|
LL | let mut a: A = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:114:5
+ --> $DIR/field_reassign_with_default.rs:107:5
|
LL | a.i = 42;
| ^^^^^^^^^
|
note: consider initializing the variable with `main::A { i: 42, ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:113:5
+ --> $DIR/field_reassign_with_default.rs:106:5
|
LL | let mut a = A::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:124:5
+ --> $DIR/field_reassign_with_default.rs:117:5
|
LL | a.i = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `main::A { i: Default::default(), ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:123:5
+ --> $DIR/field_reassign_with_default.rs:116:5
|
LL | let mut a: A = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:128:5
+ --> $DIR/field_reassign_with_default.rs:121:5
|
LL | a.i = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `main::A { i: Default::default(), j: 45 }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:127:5
+ --> $DIR/field_reassign_with_default.rs:120:5
|
LL | let mut a: A = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:150:5
+ --> $DIR/field_reassign_with_default.rs:143:5
|
LL | a.i = vec![1];
| ^^^^^^^^^^^^^^
|
note: consider initializing the variable with `C { i: vec![1], ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:149:5
+ --> $DIR/field_reassign_with_default.rs:142:5
|
LL | let mut a: C = C::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:157:5
+ --> $DIR/field_reassign_with_default.rs:161:5
|
LL | a.i = true;
| ^^^^^^^^^^^
|
note: consider initializing the variable with `Wrapper::<bool> { i: true }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:156:5
+ --> $DIR/field_reassign_with_default.rs:160:5
|
LL | let mut a: Wrapper<bool> = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:160:5
+ --> $DIR/field_reassign_with_default.rs:164:5
|
LL | a.i = 42;
| ^^^^^^^^^
|
note: consider initializing the variable with `WrapperMulti::<i32, i64> { i: 42, ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:159:5
+ --> $DIR/field_reassign_with_default.rs:163:5
|
LL | let mut a: WrapperMulti<i32, i64> = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:229:13
+ --> $DIR/field_reassign_with_default.rs:235:13
|
LL | f.name = name.len();
| ^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `issue6312::ImplDropAllCopy { name: name.len(), ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:228:13
+ --> $DIR/field_reassign_with_default.rs:234:13
|
LL | let mut f = ImplDropAllCopy::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: field assignment outside of initializer for an instance created with Default::default()
- --> $DIR/field_reassign_with_default.rs:245:13
+ --> $DIR/field_reassign_with_default.rs:251:13
|
LL | f.name = name.len();
| ^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `issue6312::NoDropAllCopy { name: name.len(), ..Default::default() }` and removing relevant reassignments
- --> $DIR/field_reassign_with_default.rs:244:13
+ --> $DIR/field_reassign_with_default.rs:250:13
|
LL | let mut f = NoDropAllCopy::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/format.fixed b/src/tools/clippy/tests/ui/format.fixed
index cd2f70ee8..beedf2c1d 100644
--- a/src/tools/clippy/tests/ui/format.fixed
+++ b/src/tools/clippy/tests/ui/format.fixed
@@ -1,5 +1,4 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
#![warn(clippy::useless_format)]
#![allow(
unused_tuple_struct_fields,
@@ -10,8 +9,6 @@
clippy::uninlined_format_args
)]
-extern crate proc_macro_with_span;
-
struct Foo(pub String);
macro_rules! foo {
@@ -90,7 +87,4 @@ fn main() {
let _ = abc.to_string();
let xx = "xx";
let _ = xx.to_string();
-
- // Issue #10148
- println!(proc_macro_with_span::with_span!(""something ""));
}
diff --git a/src/tools/clippy/tests/ui/format.rs b/src/tools/clippy/tests/ui/format.rs
index c22345a79..e805f1818 100644
--- a/src/tools/clippy/tests/ui/format.rs
+++ b/src/tools/clippy/tests/ui/format.rs
@@ -1,5 +1,4 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
#![warn(clippy::useless_format)]
#![allow(
unused_tuple_struct_fields,
@@ -10,8 +9,6 @@
clippy::uninlined_format_args
)]
-extern crate proc_macro_with_span;
-
struct Foo(pub String);
macro_rules! foo {
@@ -92,7 +89,4 @@ fn main() {
let _ = format!("{abc}");
let xx = "xx";
let _ = format!("{xx}");
-
- // Issue #10148
- println!(proc_macro_with_span::with_span!(""something ""));
}
diff --git a/src/tools/clippy/tests/ui/format.stderr b/src/tools/clippy/tests/ui/format.stderr
index a0e5d5c8a..0ef0ac655 100644
--- a/src/tools/clippy/tests/ui/format.stderr
+++ b/src/tools/clippy/tests/ui/format.stderr
@@ -1,5 +1,5 @@
error: useless use of `format!`
- --> $DIR/format.rs:22:5
+ --> $DIR/format.rs:19:5
|
LL | format!("foo");
| ^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()`
@@ -7,19 +7,19 @@ LL | format!("foo");
= note: `-D clippy::useless-format` implied by `-D warnings`
error: useless use of `format!`
- --> $DIR/format.rs:23:5
+ --> $DIR/format.rs:20:5
|
LL | format!("{{}}");
| ^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"{}".to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:24:5
+ --> $DIR/format.rs:21:5
|
LL | format!("{{}} abc {{}}");
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"{} abc {}".to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:25:5
+ --> $DIR/format.rs:22:5
|
LL | / format!(
LL | | r##"foo {{}}
@@ -34,67 +34,67 @@ LL ~ " bar"##.to_string();
|
error: useless use of `format!`
- --> $DIR/format.rs:30:13
+ --> $DIR/format.rs:27:13
|
LL | let _ = format!("");
| ^^^^^^^^^^^ help: consider using `String::new()`: `String::new()`
error: useless use of `format!`
- --> $DIR/format.rs:32:5
+ --> $DIR/format.rs:29:5
|
LL | format!("{}", "foo");
| ^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:40:5
+ --> $DIR/format.rs:37:5
|
LL | format!("{}", arg);
| ^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `arg.to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:70:5
+ --> $DIR/format.rs:67:5
|
LL | format!("{}", 42.to_string());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `42.to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:72:5
+ --> $DIR/format.rs:69:5
|
LL | format!("{}", x.display().to_string());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.display().to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:76:18
+ --> $DIR/format.rs:73:18
|
LL | let _ = Some(format!("{}", a + "bar"));
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `a + "bar"`
error: useless use of `format!`
- --> $DIR/format.rs:80:22
+ --> $DIR/format.rs:77:22
|
LL | let _s: String = format!("{}", &*v.join("/n"));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `(&*v.join("/n")).to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:86:13
+ --> $DIR/format.rs:83:13
|
LL | let _ = format!("{x}");
| ^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:88:13
+ --> $DIR/format.rs:85:13
|
LL | let _ = format!("{y}", y = x);
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:92:13
+ --> $DIR/format.rs:89:13
|
LL | let _ = format!("{abc}");
| ^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `abc.to_string()`
error: useless use of `format!`
- --> $DIR/format.rs:94:13
+ --> $DIR/format.rs:91:13
|
LL | let _ = format!("{xx}");
| ^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `xx.to_string()`
diff --git a/src/tools/clippy/tests/ui/format_args_unfixable.rs b/src/tools/clippy/tests/ui/format_args_unfixable.rs
index eb0ac15bf..423bfaf97 100644
--- a/src/tools/clippy/tests/ui/format_args_unfixable.rs
+++ b/src/tools/clippy/tests/ui/format_args_unfixable.rs
@@ -1,4 +1,5 @@
#![warn(clippy::format_in_format_args, clippy::to_string_in_format_args)]
+#![allow(unused)]
#![allow(clippy::assertions_on_constants, clippy::eq_op, clippy::uninlined_format_args)]
use std::io::{stdout, Error, ErrorKind, Write};
@@ -57,3 +58,46 @@ fn main() {
my_macro!();
println!("error: {}", my_other_macro!());
}
+
+macro_rules! _internal {
+ ($($args:tt)*) => {
+ println!("{}", format_args!($($args)*))
+ };
+}
+
+macro_rules! my_println2 {
+ ($target:expr, $($args:tt)+) => {{
+ if $target {
+ _internal!($($args)+)
+ }
+ }};
+}
+
+macro_rules! my_println2_args {
+ ($target:expr, $($args:tt)+) => {{
+ if $target {
+ _internal!("foo: {}", format_args!($($args)+))
+ }
+ }};
+}
+
+fn test2() {
+ let error = Error::new(ErrorKind::Other, "bad thing");
+
+ // None of these should be linted without the config change
+ my_println2!(true, "error: {}", format!("something failed at {}", Location::caller()));
+ my_println2!(
+ true,
+ "{}: {}",
+ error,
+ format!("something failed at {}", Location::caller())
+ );
+
+ my_println2_args!(true, "error: {}", format!("something failed at {}", Location::caller()));
+ my_println2_args!(
+ true,
+ "{}: {}",
+ error,
+ format!("something failed at {}", Location::caller())
+ );
+}
diff --git a/src/tools/clippy/tests/ui/format_args_unfixable.stderr b/src/tools/clippy/tests/ui/format_args_unfixable.stderr
index b291d475a..c1be48c3b 100644
--- a/src/tools/clippy/tests/ui/format_args_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/format_args_unfixable.stderr
@@ -1,5 +1,5 @@
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:25:5
+ --> $DIR/format_args_unfixable.rs:26:5
|
LL | println!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -9,7 +9,7 @@ LL | println!("error: {}", format!("something failed at {}", Location::calle
= note: `-D clippy::format-in-format-args` implied by `-D warnings`
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:26:5
+ --> $DIR/format_args_unfixable.rs:27:5
|
LL | println!("{}: {}", error, format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -18,7 +18,7 @@ LL | println!("{}: {}", error, format!("something failed at {}", Location::c
= help: or consider changing `format!` to `format_args!`
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:27:5
+ --> $DIR/format_args_unfixable.rs:28:5
|
LL | println!("{:?}: {}", error, format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -27,7 +27,7 @@ LL | println!("{:?}: {}", error, format!("something failed at {}", Location:
= help: or consider changing `format!` to `format_args!`
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:28:5
+ --> $DIR/format_args_unfixable.rs:29:5
|
LL | println!("{{}}: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -36,7 +36,7 @@ LL | println!("{{}}: {}", format!("something failed at {}", Location::caller
= help: or consider changing `format!` to `format_args!`
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:29:5
+ --> $DIR/format_args_unfixable.rs:30:5
|
LL | println!(r#"error: "{}""#, format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -45,7 +45,7 @@ LL | println!(r#"error: "{}""#, format!("something failed at {}", Location::
= help: or consider changing `format!` to `format_args!`
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:30:5
+ --> $DIR/format_args_unfixable.rs:31:5
|
LL | println!("error: {}", format!(r#"something failed at "{}""#, Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -54,7 +54,7 @@ LL | println!("error: {}", format!(r#"something failed at "{}""#, Location::
= help: or consider changing `format!` to `format_args!`
error: `format!` in `println!` args
- --> $DIR/format_args_unfixable.rs:31:5
+ --> $DIR/format_args_unfixable.rs:32:5
|
LL | println!("error: {}", format!("something failed at {} {0}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -63,7 +63,7 @@ LL | println!("error: {}", format!("something failed at {} {0}", Location::c
= help: or consider changing `format!` to `format_args!`
error: `format!` in `format!` args
- --> $DIR/format_args_unfixable.rs:32:13
+ --> $DIR/format_args_unfixable.rs:33:13
|
LL | let _ = format!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -72,7 +72,7 @@ LL | let _ = format!("error: {}", format!("something failed at {}", Location
= help: or consider changing `format!` to `format_args!`
error: `format!` in `write!` args
- --> $DIR/format_args_unfixable.rs:33:13
+ --> $DIR/format_args_unfixable.rs:34:13
|
LL | let _ = write!(
| _____________^
@@ -86,7 +86,7 @@ LL | | );
= help: or consider changing `format!` to `format_args!`
error: `format!` in `writeln!` args
- --> $DIR/format_args_unfixable.rs:38:13
+ --> $DIR/format_args_unfixable.rs:39:13
|
LL | let _ = writeln!(
| _____________^
@@ -100,7 +100,7 @@ LL | | );
= help: or consider changing `format!` to `format_args!`
error: `format!` in `print!` args
- --> $DIR/format_args_unfixable.rs:43:5
+ --> $DIR/format_args_unfixable.rs:44:5
|
LL | print!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -109,7 +109,7 @@ LL | print!("error: {}", format!("something failed at {}", Location::caller(
= help: or consider changing `format!` to `format_args!`
error: `format!` in `eprint!` args
- --> $DIR/format_args_unfixable.rs:44:5
+ --> $DIR/format_args_unfixable.rs:45:5
|
LL | eprint!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -118,7 +118,7 @@ LL | eprint!("error: {}", format!("something failed at {}", Location::caller
= help: or consider changing `format!` to `format_args!`
error: `format!` in `eprintln!` args
- --> $DIR/format_args_unfixable.rs:45:5
+ --> $DIR/format_args_unfixable.rs:46:5
|
LL | eprintln!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -127,7 +127,7 @@ LL | eprintln!("error: {}", format!("something failed at {}", Location::call
= help: or consider changing `format!` to `format_args!`
error: `format!` in `format_args!` args
- --> $DIR/format_args_unfixable.rs:46:13
+ --> $DIR/format_args_unfixable.rs:47:13
|
LL | let _ = format_args!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -136,7 +136,7 @@ LL | let _ = format_args!("error: {}", format!("something failed at {}", Loc
= help: or consider changing `format!` to `format_args!`
error: `format!` in `assert!` args
- --> $DIR/format_args_unfixable.rs:47:5
+ --> $DIR/format_args_unfixable.rs:48:5
|
LL | assert!(true, "error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -145,7 +145,7 @@ LL | assert!(true, "error: {}", format!("something failed at {}", Location::
= help: or consider changing `format!` to `format_args!`
error: `format!` in `assert_eq!` args
- --> $DIR/format_args_unfixable.rs:48:5
+ --> $DIR/format_args_unfixable.rs:49:5
|
LL | assert_eq!(0, 0, "error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -154,7 +154,7 @@ LL | assert_eq!(0, 0, "error: {}", format!("something failed at {}", Locatio
= help: or consider changing `format!` to `format_args!`
error: `format!` in `assert_ne!` args
- --> $DIR/format_args_unfixable.rs:49:5
+ --> $DIR/format_args_unfixable.rs:50:5
|
LL | assert_ne!(0, 0, "error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -163,7 +163,7 @@ LL | assert_ne!(0, 0, "error: {}", format!("something failed at {}", Locatio
= help: or consider changing `format!` to `format_args!`
error: `format!` in `panic!` args
- --> $DIR/format_args_unfixable.rs:50:5
+ --> $DIR/format_args_unfixable.rs:51:5
|
LL | panic!("error: {}", format!("something failed at {}", Location::caller()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/ifs_same_cond.rs b/src/tools/clippy/tests/ui/ifs_same_cond.rs
index 9850fc091..9ce9a8762 100644
--- a/src/tools/clippy/tests/ui/ifs_same_cond.rs
+++ b/src/tools/clippy/tests/ui/ifs_same_cond.rs
@@ -43,4 +43,30 @@ fn ifs_same_cond() {
}
}
+fn issue10272() {
+ let a = String::from("ha");
+ if a.contains("ah") {
+ } else if a.contains("ah") {
+ // Trigger this lint
+ } else if a.contains("ha") {
+ } else if a == "wow" {
+ }
+
+ let p: *mut i8 = std::ptr::null_mut();
+ if p.is_null() {
+ } else if p.align_offset(0) == 0 {
+ } else if p.is_null() {
+ // ok, p is a mutable pointer
+ } else {
+ }
+
+ let x = std::cell::Cell::new(true);
+ if x.get() {
+ } else if !x.take() {
+ } else if x.get() {
+ // ok, x is an interior mutable type
+ } else {
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/ifs_same_cond.stderr b/src/tools/clippy/tests/ui/ifs_same_cond.stderr
index 411308732..9519f6904 100644
--- a/src/tools/clippy/tests/ui/ifs_same_cond.stderr
+++ b/src/tools/clippy/tests/ui/ifs_same_cond.stderr
@@ -35,5 +35,17 @@ note: same as this
LL | if 2 * a == 1 {
| ^^^^^^^^^^
-error: aborting due to 3 previous errors
+error: this `if` has the same condition as a previous `if`
+ --> $DIR/ifs_same_cond.rs:49:15
+ |
+LL | } else if a.contains("ah") {
+ | ^^^^^^^^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/ifs_same_cond.rs:48:8
+ |
+LL | if a.contains("ah") {
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/impl_trait_in_params.stderr b/src/tools/clippy/tests/ui/impl_trait_in_params.stderr
index acfcc2144..803837435 100644
--- a/src/tools/clippy/tests/ui/impl_trait_in_params.stderr
+++ b/src/tools/clippy/tests/ui/impl_trait_in_params.stderr
@@ -5,7 +5,7 @@ LL | pub fn a(_: impl Trait) {}
| ^^^^^^^^^^
|
= note: `-D clippy::impl-trait-in-params` implied by `-D warnings`
-help: add a type paremeter
+help: add a type parameter
|
LL | pub fn a<{ /* Generic name */ }: Trait>(_: impl Trait) {}
| +++++++++++++++++++++++++++++++
@@ -16,7 +16,7 @@ error: '`impl Trait` used as a function parameter'
LL | pub fn c<C: Trait>(_: C, _: impl Trait) {}
| ^^^^^^^^^^
|
-help: add a type paremeter
+help: add a type parameter
|
LL | pub fn c<C: Trait, { /* Generic name */ }: Trait>(_: C, _: impl Trait) {}
| +++++++++++++++++++++++++++++++
diff --git a/src/tools/clippy/tests/ui/implicit_clone.fixed b/src/tools/clippy/tests/ui/implicit_clone.fixed
index 51b1afbe5..8ccc3da7b 100644
--- a/src/tools/clippy/tests/ui/implicit_clone.fixed
+++ b/src/tools/clippy/tests/ui/implicit_clone.fixed
@@ -87,7 +87,7 @@ fn main() {
let kitten = Kitten {};
let _ = kitten.clone();
let _ = own_same_from_ref(&kitten);
- // this shouln't lint
+ // this shouldn't lint
let _ = kitten.to_vec();
// we expect no lints for this
diff --git a/src/tools/clippy/tests/ui/implicit_clone.rs b/src/tools/clippy/tests/ui/implicit_clone.rs
index 8a9027433..593333126 100644
--- a/src/tools/clippy/tests/ui/implicit_clone.rs
+++ b/src/tools/clippy/tests/ui/implicit_clone.rs
@@ -87,7 +87,7 @@ fn main() {
let kitten = Kitten {};
let _ = kitten.to_owned();
let _ = own_same_from_ref(&kitten);
- // this shouln't lint
+ // this shouldn't lint
let _ = kitten.to_vec();
// we expect no lints for this
diff --git a/src/tools/clippy/tests/ui/implicit_hasher.rs b/src/tools/clippy/tests/ui/implicit_hasher.rs
index fd96ca3f4..35d08a07b 100644
--- a/src/tools/clippy/tests/ui/implicit_hasher.rs
+++ b/src/tools/clippy/tests/ui/implicit_hasher.rs
@@ -1,9 +1,11 @@
-// aux-build:implicit_hasher_macros.rs
+// aux-build:proc_macros.rs
+
#![deny(clippy::implicit_hasher)]
#![allow(unused)]
#[macro_use]
-extern crate implicit_hasher_macros;
+extern crate proc_macros;
+use proc_macros::external;
use std::cmp::Eq;
use std::collections::{HashMap, HashSet};
@@ -68,22 +70,19 @@ impl<S: BuildHasher + Default> Foo<i64> for HashSet<String, S> {
pub fn foo(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
-macro_rules! gen {
- (impl) => {
+#[proc_macros::inline_macros]
+pub mod gen {
+ use super::*;
+ inline! {
impl<K: Hash + Eq, V> Foo<u8> for HashMap<K, V> {
fn make() -> (Self, Self) {
(HashMap::new(), HashMap::with_capacity(10))
}
}
- };
- (fn $name:ident) => {
- pub fn $name(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
- };
+ pub fn bar(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ }
}
-#[rustfmt::skip]
-gen!(impl);
-gen!(fn bar);
// When the macro is in a different file, the suggestion spans can't be combined properly
// and should not cause an ICE
@@ -94,7 +93,9 @@ pub mod test_macro;
__implicit_hasher_test_macro!(impl<K, V> for HashMap<K, V> where V: test_macro::A);
// #4260
-implicit_hasher_fn!();
+external! {
+ pub fn f(input: &HashMap<u32, u32>) {}
+}
// #7712
pub async fn election_vote(_data: HashMap<i32, i32>) {}
diff --git a/src/tools/clippy/tests/ui/implicit_hasher.stderr b/src/tools/clippy/tests/ui/implicit_hasher.stderr
index 59b0fba2a..83b46de2e 100644
--- a/src/tools/clippy/tests/ui/implicit_hasher.stderr
+++ b/src/tools/clippy/tests/ui/implicit_hasher.stderr
@@ -1,11 +1,11 @@
error: impl for `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:16:35
+ --> $DIR/implicit_hasher.rs:18:35
|
LL | impl<K: Hash + Eq, V> Foo<i8> for HashMap<K, V> {
| ^^^^^^^^^^^^^
|
note: the lint level is defined here
- --> $DIR/implicit_hasher.rs:2:9
+ --> $DIR/implicit_hasher.rs:3:9
|
LL | #![deny(clippy::implicit_hasher)]
| ^^^^^^^^^^^^^^^^^^^^^^^
@@ -19,7 +19,7 @@ LL | (HashMap::default(), HashMap::with_capacity_and_hasher(10, Default:
| ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: impl for `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:25:36
+ --> $DIR/implicit_hasher.rs:27:36
|
LL | impl<K: Hash + Eq, V> Foo<i8> for (HashMap<K, V>,) {
| ^^^^^^^^^^^^^
@@ -34,7 +34,7 @@ LL | ((HashMap::default(),), (HashMap::with_capacity_and_hasher(10, Defa
| ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: impl for `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:30:19
+ --> $DIR/implicit_hasher.rs:32:19
|
LL | impl Foo<i16> for HashMap<String, String> {
| ^^^^^^^^^^^^^^^^^^^^^^^
@@ -49,7 +49,7 @@ LL | (HashMap::default(), HashMap::with_capacity_and_hasher(10, Default:
| ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: impl for `HashSet` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:47:32
+ --> $DIR/implicit_hasher.rs:49:32
|
LL | impl<T: Hash + Eq> Foo<i8> for HashSet<T> {
| ^^^^^^^^^^
@@ -64,7 +64,7 @@ LL | (HashSet::default(), HashSet::with_capacity_and_hasher(10, Default:
| ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: impl for `HashSet` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:52:19
+ --> $DIR/implicit_hasher.rs:54:19
|
LL | impl Foo<i16> for HashSet<String> {
| ^^^^^^^^^^^^^^^
@@ -79,7 +79,7 @@ LL | (HashSet::default(), HashSet::with_capacity_and_hasher(10, Default:
| ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: parameter of type `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:69:23
+ --> $DIR/implicit_hasher.rs:71:23
|
LL | pub fn foo(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
| ^^^^^^^^^^^^^^^^^
@@ -90,7 +90,7 @@ LL | pub fn foo<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32, S>, _s
| +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~
error: parameter of type `HashSet` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:69:53
+ --> $DIR/implicit_hasher.rs:71:53
|
LL | pub fn foo(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
| ^^^^^^^^^^^^
@@ -101,15 +101,12 @@ LL | pub fn foo<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32>, _set:
| +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~
error: impl for `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:73:43
+ --> $DIR/implicit_hasher.rs:77:43
|
LL | impl<K: Hash + Eq, V> Foo<u8> for HashMap<K, V> {
| ^^^^^^^^^^^^^
-...
-LL | gen!(impl);
- | ---------- in this macro invocation
|
- = note: this error originates in the macro `gen` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_mod_gen` (in Nightly builds, run with -Z macro-backtrace for more info)
help: consider adding a type parameter
|
LL | impl<K: Hash + Eq, V, S: ::std::hash::BuildHasher + Default> Foo<u8> for HashMap<K, V, S> {
@@ -120,37 +117,31 @@ LL | (HashMap::default(), HashMap::with_capacity_and_hasher(10,
| ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: parameter of type `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:81:33
+ --> $DIR/implicit_hasher.rs:83:31
|
-LL | pub fn $name(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
- | ^^^^^^^^^^^^^^^^^
-...
-LL | gen!(fn bar);
- | ------------ in this macro invocation
+LL | pub fn bar(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ | ^^^^^^^^^^^^^^^^^
|
- = note: this error originates in the macro `gen` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_mod_gen` (in Nightly builds, run with -Z macro-backtrace for more info)
help: consider adding a type parameter
|
-LL | pub fn $name<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32, S>, _set: &mut HashSet<i32>) {}
- | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~
+LL | pub fn bar<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32, S>, _set: &mut HashSet<i32>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~
error: parameter of type `HashSet` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:81:63
+ --> $DIR/implicit_hasher.rs:83:61
|
-LL | pub fn $name(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
- | ^^^^^^^^^^^^
-...
-LL | gen!(fn bar);
- | ------------ in this macro invocation
+LL | pub fn bar(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ | ^^^^^^^^^^^^
|
- = note: this error originates in the macro `gen` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_mod_gen` (in Nightly builds, run with -Z macro-backtrace for more info)
help: consider adding a type parameter
|
-LL | pub fn $name<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32, S>) {}
- | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~
+LL | pub fn bar<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32, S>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~
error: parameter of type `HashMap` should be generalized over different hashers
- --> $DIR/implicit_hasher.rs:100:35
+ --> $DIR/implicit_hasher.rs:101:35
|
LL | pub async fn election_vote(_data: HashMap<i32, i32>) {}
| ^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed
index 74ba2f1c5..5aaa00f85 100644
--- a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed
+++ b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed
@@ -1,10 +1,14 @@
// run-rustfix
+// aux-build:proc_macros.rs
+
#![warn(clippy::inconsistent_struct_constructor)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::unnecessary_operation)]
#![allow(clippy::no_effect)]
#![allow(dead_code)]
+extern crate proc_macros;
+
#[derive(Default)]
struct Foo {
x: i32,
@@ -12,18 +16,10 @@ struct Foo {
z: i32,
}
-macro_rules! new_foo {
- () => {
- let x = 1;
- let y = 1;
- let z = 1;
- Foo { y, x, z }
- };
-}
-
mod without_base {
use super::Foo;
+ #[proc_macros::inline_macros]
fn test() {
let x = 1;
let y = 1;
@@ -34,7 +30,12 @@ mod without_base {
// Should NOT lint.
// issue #7069.
- new_foo!();
+ inline!({
+ let x = 1;
+ let y = 1;
+ let z = 1;
+ Foo { y, x, z }
+ });
// Should NOT lint because the order is the same as in the definition.
Foo { x, y, z };
diff --git a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs
index ba96e1e33..2b2dd7f59 100644
--- a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs
+++ b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs
@@ -1,10 +1,14 @@
// run-rustfix
+// aux-build:proc_macros.rs
+
#![warn(clippy::inconsistent_struct_constructor)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::unnecessary_operation)]
#![allow(clippy::no_effect)]
#![allow(dead_code)]
+extern crate proc_macros;
+
#[derive(Default)]
struct Foo {
x: i32,
@@ -12,18 +16,10 @@ struct Foo {
z: i32,
}
-macro_rules! new_foo {
- () => {
- let x = 1;
- let y = 1;
- let z = 1;
- Foo { y, x, z }
- };
-}
-
mod without_base {
use super::Foo;
+ #[proc_macros::inline_macros]
fn test() {
let x = 1;
let y = 1;
@@ -34,7 +30,12 @@ mod without_base {
// Should NOT lint.
// issue #7069.
- new_foo!();
+ inline!({
+ let x = 1;
+ let y = 1;
+ let z = 1;
+ Foo { y, x, z }
+ });
// Should NOT lint because the order is the same as in the definition.
Foo { x, y, z };
diff --git a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr
index c90189e96..785a6dc9d 100644
--- a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr
+++ b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr
@@ -1,5 +1,5 @@
error: struct constructor field order is inconsistent with struct definition field order
- --> $DIR/inconsistent_struct_constructor.rs:33:9
+ --> $DIR/inconsistent_struct_constructor.rs:29:9
|
LL | Foo { y, x, z };
| ^^^^^^^^^^^^^^^ help: try: `Foo { x, y, z }`
@@ -7,7 +7,7 @@ LL | Foo { y, x, z };
= note: `-D clippy::inconsistent-struct-constructor` implied by `-D warnings`
error: struct constructor field order is inconsistent with struct definition field order
- --> $DIR/inconsistent_struct_constructor.rs:55:9
+ --> $DIR/inconsistent_struct_constructor.rs:56:9
|
LL | / Foo {
LL | | z,
diff --git a/src/tools/clippy/tests/ui/integer_arithmetic.rs b/src/tools/clippy/tests/ui/integer_arithmetic.rs
index 67f24b454..8dfdee662 100644
--- a/src/tools/clippy/tests/ui/integer_arithmetic.rs
+++ b/src/tools/clippy/tests/ui/integer_arithmetic.rs
@@ -4,7 +4,7 @@
#[rustfmt::skip]
fn main() {
let mut i = 1i32;
- let mut var1 = 0i32;
+ let mut var1 = 13i32;
let mut var2 = -1i32;
1 + i;
i * 2;
diff --git a/src/tools/clippy/tests/ui/item_after_statement.rs b/src/tools/clippy/tests/ui/items_after_statement.rs
index 5e92dcab1..f12cb8f22 100644
--- a/src/tools/clippy/tests/ui/item_after_statement.rs
+++ b/src/tools/clippy/tests/ui/items_after_statement.rs
@@ -51,3 +51,20 @@ fn semicolon() {
let _ = S::new(3);
}
+
+fn item_from_macro() {
+ macro_rules! static_assert_size {
+ ($ty:ty, $size:expr) => {
+ const _: [(); $size] = [(); ::std::mem::size_of::<$ty>()];
+ };
+ }
+
+ let _ = 1;
+ static_assert_size!(u32, 4);
+}
+
+fn allow_attribute() {
+ let _ = 1;
+ #[allow(clippy::items_after_statements)]
+ const _: usize = 1;
+}
diff --git a/src/tools/clippy/tests/ui/item_after_statement.stderr b/src/tools/clippy/tests/ui/items_after_statement.stderr
index 2523c53ac..f69635a97 100644
--- a/src/tools/clippy/tests/ui/item_after_statement.stderr
+++ b/src/tools/clippy/tests/ui/items_after_statement.stderr
@@ -1,5 +1,5 @@
error: adding items after statements is confusing, since items exist from the start of the scope
- --> $DIR/item_after_statement.rs:13:5
+ --> $DIR/items_after_statement.rs:13:5
|
LL | / fn foo() {
LL | | println!("foo");
@@ -9,7 +9,7 @@ LL | | }
= note: `-D clippy::items-after-statements` implied by `-D warnings`
error: adding items after statements is confusing, since items exist from the start of the scope
- --> $DIR/item_after_statement.rs:20:5
+ --> $DIR/items_after_statement.rs:20:5
|
LL | / fn foo() {
LL | | println!("foo");
@@ -17,7 +17,7 @@ LL | | }
| |_____^
error: adding items after statements is confusing, since items exist from the start of the scope
- --> $DIR/item_after_statement.rs:33:13
+ --> $DIR/items_after_statement.rs:33:13
|
LL | / fn say_something() {
LL | | println!("something");
diff --git a/src/tools/clippy/tests/ui/large_enum_variant.rs b/src/tools/clippy/tests/ui/large_enum_variant.rs
index 3b96f09d7..f09f8ae0c 100644
--- a/src/tools/clippy/tests/ui/large_enum_variant.rs
+++ b/src/tools/clippy/tests/ui/large_enum_variant.rs
@@ -1,11 +1,11 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![allow(dead_code)]
#![allow(unused_variables)]
#![warn(clippy::large_enum_variant)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::external;
enum LargeEnum {
A(i32),
@@ -155,5 +155,10 @@ enum LargeEnumOfConst {
}
fn main() {
- large_enum_variant!();
+ external!(
+ enum LargeEnumInMacro {
+ A(i32),
+ B([i32; 8000]),
+ }
+ );
}
diff --git a/src/tools/clippy/tests/ui/large_futures.rs b/src/tools/clippy/tests/ui/large_futures.rs
new file mode 100644
index 000000000..4a8ba995d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_futures.rs
@@ -0,0 +1,61 @@
+#![feature(generators)]
+#![warn(clippy::large_futures)]
+#![allow(clippy::future_not_send)]
+#![allow(clippy::manual_async_fn)]
+
+async fn big_fut(_arg: [u8; 1024 * 16]) {}
+
+async fn wait() {
+ let f = async {
+ big_fut([0u8; 1024 * 16]).await;
+ };
+ f.await
+}
+async fn calls_fut(fut: impl std::future::Future<Output = ()>) {
+ loop {
+ wait().await;
+ if true {
+ return fut.await;
+ } else {
+ wait().await;
+ }
+ }
+}
+
+pub async fn test() {
+ let fut = big_fut([0u8; 1024 * 16]);
+ foo().await;
+ calls_fut(fut).await;
+}
+
+pub fn foo() -> impl std::future::Future<Output = ()> {
+ async {
+ let x = [0i32; 1024 * 16];
+ async {}.await;
+ dbg!(x);
+ }
+}
+
+pub async fn lines() {
+ async {
+ let x = [0i32; 1024 * 16];
+ async {}.await;
+ println!("{:?}", x);
+ }
+ .await;
+}
+
+pub async fn macro_expn() {
+ macro_rules! macro_ {
+ () => {
+ async {
+ let x = [0i32; 1024 * 16];
+ async {}.await;
+ println!("macro: {:?}", x);
+ }
+ };
+ }
+ macro_!().await
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/large_futures.stderr b/src/tools/clippy/tests/ui/large_futures.stderr
new file mode 100644
index 000000000..67e0fceff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_futures.stderr
@@ -0,0 +1,82 @@
+error: large future with a size of 16385 bytes
+ --> $DIR/large_futures.rs:10:9
+ |
+LL | big_fut([0u8; 1024 * 16]).await;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Box::pin` on it: `Box::pin(big_fut([0u8; 1024 * 16]))`
+ |
+ = note: `-D clippy::large-futures` implied by `-D warnings`
+
+error: large future with a size of 16386 bytes
+ --> $DIR/large_futures.rs:12:5
+ |
+LL | f.await
+ | ^ help: consider `Box::pin` on it: `Box::pin(f)`
+
+error: large future with a size of 16387 bytes
+ --> $DIR/large_futures.rs:16:9
+ |
+LL | wait().await;
+ | ^^^^^^ help: consider `Box::pin` on it: `Box::pin(wait())`
+
+error: large future with a size of 16387 bytes
+ --> $DIR/large_futures.rs:20:13
+ |
+LL | wait().await;
+ | ^^^^^^ help: consider `Box::pin` on it: `Box::pin(wait())`
+
+error: large future with a size of 65540 bytes
+ --> $DIR/large_futures.rs:27:5
+ |
+LL | foo().await;
+ | ^^^^^ help: consider `Box::pin` on it: `Box::pin(foo())`
+
+error: large future with a size of 49159 bytes
+ --> $DIR/large_futures.rs:28:5
+ |
+LL | calls_fut(fut).await;
+ | ^^^^^^^^^^^^^^ help: consider `Box::pin` on it: `Box::pin(calls_fut(fut))`
+
+error: large future with a size of 65540 bytes
+ --> $DIR/large_futures.rs:40:5
+ |
+LL | / async {
+LL | | let x = [0i32; 1024 * 16];
+LL | | async {}.await;
+LL | | println!("{:?}", x);
+LL | | }
+ | |_____^
+ |
+help: consider `Box::pin` on it
+ |
+LL ~ Box::pin(async {
+LL + let x = [0i32; 1024 * 16];
+LL + async {}.await;
+LL + println!("{:?}", x);
+LL + })
+ |
+
+error: large future with a size of 65540 bytes
+ --> $DIR/large_futures.rs:51:13
+ |
+LL | / async {
+LL | | let x = [0i32; 1024 * 16];
+LL | | async {}.await;
+LL | | println!("macro: {:?}", x);
+LL | | }
+ | |_____________^
+...
+LL | macro_!().await
+ | --------- in this macro invocation
+ |
+ = note: this error originates in the macro `macro_` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: consider `Box::pin` on it
+ |
+LL ~ Box::pin(async {
+LL + let x = [0i32; 1024 * 16];
+LL + async {}.await;
+LL + println!("macro: {:?}", x);
+LL + })
+ |
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/len_without_is_empty.rs b/src/tools/clippy/tests/ui/len_without_is_empty.rs
index b5dec6c46..52aabefae 100644
--- a/src/tools/clippy/tests/ui/len_without_is_empty.rs
+++ b/src/tools/clippy/tests/ui/len_without_is_empty.rs
@@ -282,6 +282,87 @@ impl AsyncLen {
}
}
+// issue #7232
+pub struct AsyncLenWithoutIsEmpty;
+impl AsyncLenWithoutIsEmpty {
+ pub async fn async_task(&self) -> bool {
+ true
+ }
+
+ pub async fn len(&self) -> usize {
+ usize::from(!self.async_task().await)
+ }
+}
+
+// issue #7232
+pub struct AsyncOptionLenWithoutIsEmpty;
+impl AsyncOptionLenWithoutIsEmpty {
+ async fn async_task(&self) -> bool {
+ true
+ }
+
+ pub async fn len(&self) -> Option<usize> {
+ None
+ }
+}
+
+// issue #7232
+pub struct AsyncOptionLenNonIntegral;
+impl AsyncOptionLenNonIntegral {
+ // don't lint
+ pub async fn len(&self) -> Option<String> {
+ None
+ }
+}
+
+// issue #7232
+pub struct AsyncResultLenWithoutIsEmpty;
+impl AsyncResultLenWithoutIsEmpty {
+ async fn async_task(&self) -> bool {
+ true
+ }
+
+ pub async fn len(&self) -> Result<usize, ()> {
+ Err(())
+ }
+}
+
+// issue #7232
+pub struct AsyncOptionLen;
+impl AsyncOptionLen {
+ async fn async_task(&self) -> bool {
+ true
+ }
+
+ pub async fn len(&self) -> Result<usize, ()> {
+ Err(())
+ }
+
+ pub async fn is_empty(&self) -> bool {
+ true
+ }
+}
+
+pub struct AsyncLenSyncIsEmpty;
+impl AsyncLenSyncIsEmpty {
+ pub async fn len(&self) -> u32 {
+ 0
+ }
+
+ pub fn is_empty(&self) -> bool {
+ true
+ }
+}
+
+// issue #9520
+pub struct NonStandardLen;
+impl NonStandardLen {
+ // don't lint
+ pub fn len(&self, something: usize) -> usize {
+ something
+ }
+}
+
// issue #9520
pub struct NonStandardLenAndIsEmptySignature;
impl NonStandardLenAndIsEmptySignature {
@@ -328,4 +409,15 @@ impl NonStandardSignatureWithGenerics {
}
}
+pub struct DifferingErrors;
+impl DifferingErrors {
+ pub fn len(&self) -> Result<usize, u8> {
+ Ok(0)
+ }
+
+ pub fn is_empty(&self) -> Result<bool, u16> {
+ Ok(true)
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/len_without_is_empty.stderr b/src/tools/clippy/tests/ui/len_without_is_empty.stderr
index 8e890e2e2..1bce1734b 100644
--- a/src/tools/clippy/tests/ui/len_without_is_empty.stderr
+++ b/src/tools/clippy/tests/ui/len_without_is_empty.stderr
@@ -119,5 +119,23 @@ LL | pub fn len(&self) -> Result<usize, ()> {
|
= help: use a custom `Error` type instead
-error: aborting due to 12 previous errors
+error: struct `AsyncLenWithoutIsEmpty` has a public `len` method, but no `is_empty` method
+ --> $DIR/len_without_is_empty.rs:292:5
+ |
+LL | pub async fn len(&self) -> usize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: struct `AsyncOptionLenWithoutIsEmpty` has a public `len` method, but no `is_empty` method
+ --> $DIR/len_without_is_empty.rs:304:5
+ |
+LL | pub async fn len(&self) -> Option<usize> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: struct `AsyncResultLenWithoutIsEmpty` has a public `len` method, but no `is_empty` method
+ --> $DIR/len_without_is_empty.rs:325:5
+ |
+LL | pub async fn len(&self) -> Result<usize, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 15 previous errors
diff --git a/src/tools/clippy/tests/ui/let_unit.fixed b/src/tools/clippy/tests/ui/let_unit.fixed
index 6343cff0f..76ff0645f 100644
--- a/src/tools/clippy/tests/ui/let_unit.fixed
+++ b/src/tools/clippy/tests/ui/let_unit.fixed
@@ -175,3 +175,7 @@ fn attributes() {
#[expect(clippy::let_unit_value)]
let _ = f();
}
+
+async fn issue10433() {
+ let _pending: () = std::future::pending().await;
+}
diff --git a/src/tools/clippy/tests/ui/let_unit.rs b/src/tools/clippy/tests/ui/let_unit.rs
index c9bb2849f..895ccfe36 100644
--- a/src/tools/clippy/tests/ui/let_unit.rs
+++ b/src/tools/clippy/tests/ui/let_unit.rs
@@ -175,3 +175,7 @@ fn attributes() {
#[expect(clippy::let_unit_value)]
let _ = f();
}
+
+async fn issue10433() {
+ let _pending: () = std::future::pending().await;
+}
diff --git a/src/tools/clippy/tests/ui/let_with_type_underscore.rs b/src/tools/clippy/tests/ui/let_with_type_underscore.rs
new file mode 100644
index 000000000..175718b94
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_with_type_underscore.rs
@@ -0,0 +1,19 @@
+#![allow(unused)]
+#![warn(clippy::let_with_type_underscore)]
+#![allow(clippy::let_unit_value)]
+
+fn func() -> &'static str {
+ ""
+}
+
+fn main() {
+ // Will lint
+ let x: _ = 1;
+ let _: _ = 2;
+ let x: _ = func();
+
+ let x = 1; // Will not lint, Rust infers this to an integer before Clippy
+ let x = func();
+ let x: Vec<_> = Vec::<u32>::new();
+ let x: [_; 1] = [1];
+}
diff --git a/src/tools/clippy/tests/ui/let_with_type_underscore.stderr b/src/tools/clippy/tests/ui/let_with_type_underscore.stderr
new file mode 100644
index 000000000..16bf83c70
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_with_type_underscore.stderr
@@ -0,0 +1,39 @@
+error: variable declared with type underscore
+ --> $DIR/let_with_type_underscore.rs:11:5
+ |
+LL | let x: _ = 1;
+ | ^^^^^^^^^^^^^
+ |
+help: remove the explicit type `_` declaration
+ --> $DIR/let_with_type_underscore.rs:11:10
+ |
+LL | let x: _ = 1;
+ | ^^^
+ = note: `-D clippy::let-with-type-underscore` implied by `-D warnings`
+
+error: variable declared with type underscore
+ --> $DIR/let_with_type_underscore.rs:12:5
+ |
+LL | let _: _ = 2;
+ | ^^^^^^^^^^^^^
+ |
+help: remove the explicit type `_` declaration
+ --> $DIR/let_with_type_underscore.rs:12:10
+ |
+LL | let _: _ = 2;
+ | ^^^
+
+error: variable declared with type underscore
+ --> $DIR/let_with_type_underscore.rs:13:5
+ |
+LL | let x: _ = func();
+ | ^^^^^^^^^^^^^^^^^^
+ |
+help: remove the explicit type `_` declaration
+ --> $DIR/let_with_type_underscore.rs:13:10
+ |
+LL | let x: _ = func();
+ | ^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed b/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed
new file mode 100644
index 000000000..f4033cd8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/lines_filter_map_ok.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+
+#![allow(unused, clippy::map_identity)]
+#![warn(clippy::lines_filter_map_ok)]
+
+use std::io::{self, BufRead, BufReader};
+
+fn main() -> io::Result<()> {
+ let f = std::fs::File::open("/")?;
+ // Lint
+ BufReader::new(f).lines().map_while(Result::ok).for_each(|_| ());
+ // Lint
+ let f = std::fs::File::open("/")?;
+ BufReader::new(f).lines().map_while(Result::ok).for_each(|_| ());
+ let s = "foo\nbar\nbaz\n";
+ // Lint
+ io::stdin().lines().map_while(Result::ok).for_each(|_| ());
+ // Lint
+ io::stdin().lines().map_while(Result::ok).for_each(|_| ());
+ // Do not lint (not a `Lines` iterator)
+ io::stdin()
+ .lines()
+ .map(std::convert::identity)
+ .filter_map(|x| x.ok())
+ .for_each(|_| ());
+ // Do not lint (not a `Result::ok()` extractor)
+ io::stdin().lines().filter_map(|x| x.err()).for_each(|_| ());
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/lines_filter_map_ok.rs b/src/tools/clippy/tests/ui/lines_filter_map_ok.rs
new file mode 100644
index 000000000..7e11816b2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/lines_filter_map_ok.rs
@@ -0,0 +1,29 @@
+// run-rustfix
+
+#![allow(unused, clippy::map_identity)]
+#![warn(clippy::lines_filter_map_ok)]
+
+use std::io::{self, BufRead, BufReader};
+
+fn main() -> io::Result<()> {
+ let f = std::fs::File::open("/")?;
+ // Lint
+ BufReader::new(f).lines().filter_map(Result::ok).for_each(|_| ());
+ // Lint
+ let f = std::fs::File::open("/")?;
+ BufReader::new(f).lines().flat_map(Result::ok).for_each(|_| ());
+ let s = "foo\nbar\nbaz\n";
+ // Lint
+ io::stdin().lines().filter_map(Result::ok).for_each(|_| ());
+ // Lint
+ io::stdin().lines().filter_map(|x| x.ok()).for_each(|_| ());
+ // Do not lint (not a `Lines` iterator)
+ io::stdin()
+ .lines()
+ .map(std::convert::identity)
+ .filter_map(|x| x.ok())
+ .for_each(|_| ());
+ // Do not lint (not a `Result::ok()` extractor)
+ io::stdin().lines().filter_map(|x| x.err()).for_each(|_| ());
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr b/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr
new file mode 100644
index 000000000..cddd403d5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/lines_filter_map_ok.stderr
@@ -0,0 +1,51 @@
+error: `filter_map()` will run forever if the iterator repeatedly produces an `Err`
+ --> $DIR/lines_filter_map_ok.rs:11:31
+ |
+LL | BufReader::new(f).lines().filter_map(Result::ok).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `map_while(Result::ok)`
+ |
+note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
+ --> $DIR/lines_filter_map_ok.rs:11:5
+ |
+LL | BufReader::new(f).lines().filter_map(Result::ok).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: `-D clippy::lines-filter-map-ok` implied by `-D warnings`
+
+error: `flat_map()` will run forever if the iterator repeatedly produces an `Err`
+ --> $DIR/lines_filter_map_ok.rs:14:31
+ |
+LL | BufReader::new(f).lines().flat_map(Result::ok).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^ help: replace with: `map_while(Result::ok)`
+ |
+note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
+ --> $DIR/lines_filter_map_ok.rs:14:5
+ |
+LL | BufReader::new(f).lines().flat_map(Result::ok).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `filter_map()` will run forever if the iterator repeatedly produces an `Err`
+ --> $DIR/lines_filter_map_ok.rs:17:25
+ |
+LL | io::stdin().lines().filter_map(Result::ok).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `map_while(Result::ok)`
+ |
+note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
+ --> $DIR/lines_filter_map_ok.rs:17:5
+ |
+LL | io::stdin().lines().filter_map(Result::ok).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: `filter_map()` will run forever if the iterator repeatedly produces an `Err`
+ --> $DIR/lines_filter_map_ok.rs:19:25
+ |
+LL | io::stdin().lines().filter_map(|x| x.ok()).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `map_while(Result::ok)`
+ |
+note: this expression returning a `std::io::Lines` may produce an infinite number of `Err` in case of a read error
+ --> $DIR/lines_filter_map_ok.rs:19:5
+ |
+LL | io::stdin().lines().filter_map(|x| x.ok()).for_each(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.fixed b/src/tools/clippy/tests/ui/macro_use_imports.fixed
index e612480d2..a395e4f56 100644
--- a/src/tools/clippy/tests/ui/macro_use_imports.fixed
+++ b/src/tools/clippy/tests/ui/macro_use_imports.fixed
@@ -16,11 +16,11 @@ extern crate macro_use_helper as mac;
extern crate proc_macro_derive as mini_mac;
mod a {
- use mac::{pub_macro, function_macro, ty_macro, inner_mod_macro, pub_in_private_macro};
+ use mac::{pub_macro, inner_mod_macro, function_macro, ty_macro, pub_in_private_macro};
use mac;
use mini_mac::ClippyMiniMacroTest;
use mini_mac;
- use mac::{inner::foofoo, inner::try_err};
+ use mac::{inner::mut_mut, inner::try_err};
use mac::inner;
use mac::inner::nested::string_add;
use mac::inner::nested;
@@ -36,7 +36,7 @@ mod a {
let v: ty_macro!() = Vec::default();
inner::try_err!();
- inner::foofoo!();
+ inner::mut_mut!();
nested::string_add!();
}
}
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.rs b/src/tools/clippy/tests/ui/macro_use_imports.rs
index b34817cc3..b1a287332 100644
--- a/src/tools/clippy/tests/ui/macro_use_imports.rs
+++ b/src/tools/clippy/tests/ui/macro_use_imports.rs
@@ -36,7 +36,7 @@ mod a {
let v: ty_macro!() = Vec::default();
inner::try_err!();
- inner::foofoo!();
+ inner::mut_mut!();
nested::string_add!();
}
}
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.stderr b/src/tools/clippy/tests/ui/macro_use_imports.stderr
index 61843124c..6fd338cef 100644
--- a/src/tools/clippy/tests/ui/macro_use_imports.stderr
+++ b/src/tools/clippy/tests/ui/macro_use_imports.stderr
@@ -16,13 +16,13 @@ error: `macro_use` attributes are no longer needed in the Rust 2018 edition
--> $DIR/macro_use_imports.rs:23:5
|
LL | #[macro_use]
- | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{inner::foofoo, inner::try_err};`
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{inner::mut_mut, inner::try_err};`
error: `macro_use` attributes are no longer needed in the Rust 2018 edition
--> $DIR/macro_use_imports.rs:19:5
|
LL | #[macro_use]
- | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{pub_macro, function_macro, ty_macro, inner_mod_macro, pub_in_private_macro};`
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{pub_macro, inner_mod_macro, function_macro, ty_macro, pub_in_private_macro};`
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/macro_use_imports_expect.rs b/src/tools/clippy/tests/ui/macro_use_imports_expect.rs
index 8a1b05da9..5aac5af26 100644
--- a/src/tools/clippy/tests/ui/macro_use_imports_expect.rs
+++ b/src/tools/clippy/tests/ui/macro_use_imports_expect.rs
@@ -39,7 +39,7 @@ mod a {
let v: ty_macro!() = Vec::default();
inner::try_err!();
- inner::foofoo!();
+ inner::mut_mut!();
nested::string_add!();
}
}
diff --git a/src/tools/clippy/tests/ui/manual_async_fn.fixed b/src/tools/clippy/tests/ui/manual_async_fn.fixed
index b7e46a4a8..5cc4a43af 100644
--- a/src/tools/clippy/tests/ui/manual_async_fn.fixed
+++ b/src/tools/clippy/tests/ui/manual_async_fn.fixed
@@ -107,4 +107,10 @@ mod issue_5765 {
}
}
+pub async fn issue_10450() -> i32 { 42 }
+
+pub(crate) async fn issue_10450_2() -> i32 { 42 }
+
+pub(self) async fn issue_10450_3() -> i32 { 42 }
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_async_fn.rs b/src/tools/clippy/tests/ui/manual_async_fn.rs
index b05429da6..ba504b8a8 100644
--- a/src/tools/clippy/tests/ui/manual_async_fn.rs
+++ b/src/tools/clippy/tests/ui/manual_async_fn.rs
@@ -127,4 +127,16 @@ mod issue_5765 {
}
}
+pub fn issue_10450() -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+pub(crate) fn issue_10450_2() -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+pub(self) fn issue_10450_3() -> impl Future<Output = i32> {
+ async { 42 }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_async_fn.stderr b/src/tools/clippy/tests/ui/manual_async_fn.stderr
index 0a903ed6f..f5ee3eb7c 100644
--- a/src/tools/clippy/tests/ui/manual_async_fn.stderr
+++ b/src/tools/clippy/tests/ui/manual_async_fn.stderr
@@ -161,5 +161,50 @@ help: move the body of the async block to the enclosing function
LL | fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b { 42 }
| ~~~~~~
-error: aborting due to 10 previous errors
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:130:1
+ |
+LL | pub fn issue_10450() -> impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | pub async fn issue_10450() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | pub fn issue_10450() -> impl Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:134:1
+ |
+LL | pub(crate) fn issue_10450_2() -> impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | pub(crate) async fn issue_10450_2() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | pub(crate) fn issue_10450_2() -> impl Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:138:1
+ |
+LL | pub(self) fn issue_10450_3() -> impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | pub(self) async fn issue_10450_3() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | pub(self) fn issue_10450_3() -> impl Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: aborting due to 13 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_clamp.rs b/src/tools/clippy/tests/ui/manual_clamp.rs
index f7902e6fd..cdfd8e4c3 100644
--- a/src/tools/clippy/tests/ui/manual_clamp.rs
+++ b/src/tools/clippy/tests/ui/manual_clamp.rs
@@ -326,3 +326,22 @@ fn msrv_1_50() {
input
};
}
+
+const fn _const() {
+ let (input, min, max) = (0, -1, 2);
+ let _ = if input < min {
+ min
+ } else if input > max {
+ max
+ } else {
+ input
+ };
+
+ let mut x = input;
+ if max < x {
+ let x = max;
+ }
+ if min > x {
+ x = min;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/manual_main_separator_str.fixed b/src/tools/clippy/tests/ui/manual_main_separator_str.fixed
new file mode 100644
index 000000000..50f46d6b3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_main_separator_str.fixed
@@ -0,0 +1,39 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::manual_main_separator_str)]
+
+use std::path::MAIN_SEPARATOR;
+
+fn len(s: &str) -> usize {
+ s.len()
+}
+
+struct U<'a> {
+ f: &'a str,
+ g: &'a String,
+}
+
+struct V<T> {
+ f: T,
+}
+
+fn main() {
+ // Should lint
+ let _: &str = std::path::MAIN_SEPARATOR_STR;
+ let _ = len(std::path::MAIN_SEPARATOR_STR);
+ let _: Vec<u16> = std::path::MAIN_SEPARATOR_STR.encode_utf16().collect();
+
+ // Should lint for field `f` only
+ let _ = U {
+ f: std::path::MAIN_SEPARATOR_STR,
+ g: &MAIN_SEPARATOR.to_string(),
+ };
+
+ // Should not lint
+ let _: &String = &MAIN_SEPARATOR.to_string();
+ let _ = &MAIN_SEPARATOR.to_string();
+ let _ = V {
+ f: &MAIN_SEPARATOR.to_string(),
+ };
+}
diff --git a/src/tools/clippy/tests/ui/manual_main_separator_str.rs b/src/tools/clippy/tests/ui/manual_main_separator_str.rs
new file mode 100644
index 000000000..2dbb9e661
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_main_separator_str.rs
@@ -0,0 +1,39 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::manual_main_separator_str)]
+
+use std::path::MAIN_SEPARATOR;
+
+fn len(s: &str) -> usize {
+ s.len()
+}
+
+struct U<'a> {
+ f: &'a str,
+ g: &'a String,
+}
+
+struct V<T> {
+ f: T,
+}
+
+fn main() {
+ // Should lint
+ let _: &str = &MAIN_SEPARATOR.to_string();
+ let _ = len(&MAIN_SEPARATOR.to_string());
+ let _: Vec<u16> = MAIN_SEPARATOR.to_string().encode_utf16().collect();
+
+ // Should lint for field `f` only
+ let _ = U {
+ f: &MAIN_SEPARATOR.to_string(),
+ g: &MAIN_SEPARATOR.to_string(),
+ };
+
+ // Should not lint
+ let _: &String = &MAIN_SEPARATOR.to_string();
+ let _ = &MAIN_SEPARATOR.to_string();
+ let _ = V {
+ f: &MAIN_SEPARATOR.to_string(),
+ };
+}
diff --git a/src/tools/clippy/tests/ui/manual_main_separator_str.stderr b/src/tools/clippy/tests/ui/manual_main_separator_str.stderr
new file mode 100644
index 000000000..e6cefde66
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_main_separator_str.stderr
@@ -0,0 +1,28 @@
+error: taking a reference on `std::path::MAIN_SEPARATOR` conversion to `String`
+ --> $DIR/manual_main_separator_str.rs:23:19
+ |
+LL | let _: &str = &MAIN_SEPARATOR.to_string();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `std::path::MAIN_SEPARATOR_STR`
+ |
+ = note: `-D clippy::manual-main-separator-str` implied by `-D warnings`
+
+error: taking a reference on `std::path::MAIN_SEPARATOR` conversion to `String`
+ --> $DIR/manual_main_separator_str.rs:24:17
+ |
+LL | let _ = len(&MAIN_SEPARATOR.to_string());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `std::path::MAIN_SEPARATOR_STR`
+
+error: taking a reference on `std::path::MAIN_SEPARATOR` conversion to `String`
+ --> $DIR/manual_main_separator_str.rs:25:23
+ |
+LL | let _: Vec<u16> = MAIN_SEPARATOR.to_string().encode_utf16().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `std::path::MAIN_SEPARATOR_STR`
+
+error: taking a reference on `std::path::MAIN_SEPARATOR` conversion to `String`
+ --> $DIR/manual_main_separator_str.rs:29:12
+ |
+LL | f: &MAIN_SEPARATOR.to_string(),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `std::path::MAIN_SEPARATOR_STR`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_rem_euclid.fixed b/src/tools/clippy/tests/ui/manual_rem_euclid.fixed
index 4cdc0546a..1f6df1b0a 100644
--- a/src/tools/clippy/tests/ui/manual_rem_euclid.fixed
+++ b/src/tools/clippy/tests/ui/manual_rem_euclid.fixed
@@ -1,18 +1,13 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::manual_rem_euclid)]
+#![allow(clippy::let_with_type_underscore)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! internal_rem_euclid {
- () => {
- let value: i32 = 5;
- let _: i32 = value.rem_euclid(4);
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
let value: i32 = 5;
@@ -38,10 +33,16 @@ fn main() {
let _: i32 = ((4 % value) + 4) % 4;
// Lint in internal macros
- internal_rem_euclid!();
+ inline!(
+ let value: i32 = 5;
+ let _: i32 = value.rem_euclid(4);
+ );
// Do not lint in external macros
- manual_rem_euclid!();
+ external!(
+ let value: i32 = 5;
+ let _: i32 = ((value % 4) + 4) % 4;
+ );
}
// Should lint for params too
diff --git a/src/tools/clippy/tests/ui/manual_rem_euclid.rs b/src/tools/clippy/tests/ui/manual_rem_euclid.rs
index 58a9e20f3..b275e8a38 100644
--- a/src/tools/clippy/tests/ui/manual_rem_euclid.rs
+++ b/src/tools/clippy/tests/ui/manual_rem_euclid.rs
@@ -1,18 +1,13 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::manual_rem_euclid)]
+#![allow(clippy::let_with_type_underscore)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! internal_rem_euclid {
- () => {
- let value: i32 = 5;
- let _: i32 = ((value % 4) + 4) % 4;
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
let value: i32 = 5;
@@ -38,10 +33,16 @@ fn main() {
let _: i32 = ((4 % value) + 4) % 4;
// Lint in internal macros
- internal_rem_euclid!();
+ inline!(
+ let value: i32 = 5;
+ let _: i32 = ((value % 4) + 4) % 4;
+ );
// Do not lint in external macros
- manual_rem_euclid!();
+ external!(
+ let value: i32 = 5;
+ let _: i32 = ((value % 4) + 4) % 4;
+ );
}
// Should lint for params too
diff --git a/src/tools/clippy/tests/ui/manual_rem_euclid.stderr b/src/tools/clippy/tests/ui/manual_rem_euclid.stderr
index e3122a588..a43707f89 100644
--- a/src/tools/clippy/tests/ui/manual_rem_euclid.stderr
+++ b/src/tools/clippy/tests/ui/manual_rem_euclid.stderr
@@ -1,5 +1,5 @@
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:19:18
+ --> $DIR/manual_rem_euclid.rs:14:18
|
LL | let _: i32 = ((value % 4) + 4) % 4;
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
@@ -7,60 +7,57 @@ LL | let _: i32 = ((value % 4) + 4) % 4;
= note: `-D clippy::manual-rem-euclid` implied by `-D warnings`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:20:18
+ --> $DIR/manual_rem_euclid.rs:15:18
|
LL | let _: i32 = (4 + (value % 4)) % 4;
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:21:18
+ --> $DIR/manual_rem_euclid.rs:16:18
|
LL | let _: i32 = (value % 4 + 4) % 4;
| ^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:22:18
+ --> $DIR/manual_rem_euclid.rs:17:18
|
LL | let _: i32 = (4 + value % 4) % 4;
| ^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:23:22
+ --> $DIR/manual_rem_euclid.rs:18:22
|
LL | let _: i32 = 1 + (4 + value % 4) % 4;
| ^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:12:22
+ --> $DIR/manual_rem_euclid.rs:38:22
|
LL | let _: i32 = ((value % 4) + 4) % 4;
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
-...
-LL | internal_rem_euclid!();
- | ---------------------- in this macro invocation
|
- = note: this error originates in the macro `internal_rem_euclid` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:49:5
+ --> $DIR/manual_rem_euclid.rs:50:5
|
LL | ((num % 4) + 4) % 4
| ^^^^^^^^^^^^^^^^^^^ help: consider using: `num.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:54:5
+ --> $DIR/manual_rem_euclid.rs:55:5
|
LL | ((num % 4) + 4) % 4
| ^^^^^^^^^^^^^^^^^^^ help: consider using: `num.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:66:18
+ --> $DIR/manual_rem_euclid.rs:67:18
|
LL | let _: i32 = ((x % 4) + 4) % 4;
| ^^^^^^^^^^^^^^^^^ help: consider using: `x.rem_euclid(4)`
error: manual `rem_euclid` implementation
- --> $DIR/manual_rem_euclid.rs:79:18
+ --> $DIR/manual_rem_euclid.rs:80:18
|
LL | let _: i32 = ((x % 4) + 4) % 4;
| ^^^^^^^^^^^^^^^^^ help: consider using: `x.rem_euclid(4)`
diff --git a/src/tools/clippy/tests/ui/manual_slice_size_calculation.rs b/src/tools/clippy/tests/ui/manual_slice_size_calculation.rs
new file mode 100644
index 000000000..5082f931f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_slice_size_calculation.rs
@@ -0,0 +1,36 @@
+#![allow(unused)]
+#![warn(clippy::manual_slice_size_calculation)]
+
+use core::mem::{align_of, size_of};
+
+fn main() {
+ let v_i32 = Vec::<i32>::new();
+ let s_i32 = v_i32.as_slice();
+
+ // True positives:
+ let _ = s_i32.len() * size_of::<i32>(); // WARNING
+ let _ = size_of::<i32>() * s_i32.len(); // WARNING
+ let _ = size_of::<i32>() * s_i32.len() * 5; // WARNING
+
+ let len = s_i32.len();
+ let size = size_of::<i32>();
+ let _ = len * size_of::<i32>(); // WARNING
+ let _ = s_i32.len() * size; // WARNING
+ let _ = len * size; // WARNING
+
+ // True negatives:
+ let _ = size_of::<i32>() + s_i32.len(); // Ok, not a multiplication
+ let _ = size_of::<i32>() * s_i32.partition_point(|_| true); // Ok, not len()
+ let _ = size_of::<i32>() * v_i32.len(); // Ok, not a slice
+ let _ = align_of::<i32>() * s_i32.len(); // Ok, not size_of()
+ let _ = size_of::<u32>() * s_i32.len(); // Ok, different types
+
+ // False negatives:
+ let _ = 5 * size_of::<i32>() * s_i32.len(); // Ok (MISSED OPPORTUNITY)
+ let _ = size_of::<i32>() * 5 * s_i32.len(); // Ok (MISSED OPPORTUNITY)
+}
+
+const fn _const(s_i32: &[i32]) {
+ // True negative:
+ let _ = s_i32.len() * size_of::<i32>(); // Ok, can't use size_of_val in const
+}
diff --git a/src/tools/clippy/tests/ui/manual_slice_size_calculation.stderr b/src/tools/clippy/tests/ui/manual_slice_size_calculation.stderr
new file mode 100644
index 000000000..4a24fc60a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_slice_size_calculation.stderr
@@ -0,0 +1,51 @@
+error: manual slice size calculation
+ --> $DIR/manual_slice_size_calculation.rs:11:13
+ |
+LL | let _ = s_i32.len() * size_of::<i32>(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using std::mem::size_of_value instead
+ = note: `-D clippy::manual-slice-size-calculation` implied by `-D warnings`
+
+error: manual slice size calculation
+ --> $DIR/manual_slice_size_calculation.rs:12:13
+ |
+LL | let _ = size_of::<i32>() * s_i32.len(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using std::mem::size_of_value instead
+
+error: manual slice size calculation
+ --> $DIR/manual_slice_size_calculation.rs:13:13
+ |
+LL | let _ = size_of::<i32>() * s_i32.len() * 5; // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using std::mem::size_of_value instead
+
+error: manual slice size calculation
+ --> $DIR/manual_slice_size_calculation.rs:17:13
+ |
+LL | let _ = len * size_of::<i32>(); // WARNING
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using std::mem::size_of_value instead
+
+error: manual slice size calculation
+ --> $DIR/manual_slice_size_calculation.rs:18:13
+ |
+LL | let _ = s_i32.len() * size; // WARNING
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using std::mem::size_of_value instead
+
+error: manual slice size calculation
+ --> $DIR/manual_slice_size_calculation.rs:19:13
+ |
+LL | let _ = len * size; // WARNING
+ | ^^^^^^^^^^
+ |
+ = help: consider using std::mem::size_of_value instead
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_result_ok.fixed b/src/tools/clippy/tests/ui/match_result_ok.fixed
index 8b91b9854..10ae1ee52 100644
--- a/src/tools/clippy/tests/ui/match_result_ok.fixed
+++ b/src/tools/clippy/tests/ui/match_result_ok.fixed
@@ -16,7 +16,7 @@ fn str_to_int_ok(x: &str) -> i32 {
#[rustfmt::skip]
fn strange_some_no_else(x: &str) -> i32 {
{
- if let Ok(y) = x . parse() {
+ if let Ok(y) = x . parse() {
return y;
};
0
diff --git a/src/tools/clippy/tests/ui/match_result_ok.stderr b/src/tools/clippy/tests/ui/match_result_ok.stderr
index 98a95705c..cbdc56aa2 100644
--- a/src/tools/clippy/tests/ui/match_result_ok.stderr
+++ b/src/tools/clippy/tests/ui/match_result_ok.stderr
@@ -18,7 +18,7 @@ LL | if let Some(y) = x . parse() . ok () {
|
help: consider matching on `Ok(y)` and removing the call to `ok` instead
|
-LL | if let Ok(y) = x . parse() {
+LL | if let Ok(y) = x . parse() {
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: matching on `Some` with `ok()` is redundant
diff --git a/src/tools/clippy/tests/ui/match_single_binding.fixed b/src/tools/clippy/tests/ui/match_single_binding.fixed
index 6cfb6661a..201301cc9 100644
--- a/src/tools/clippy/tests/ui/match_single_binding.fixed
+++ b/src/tools/clippy/tests/ui/match_single_binding.fixed
@@ -1,7 +1,12 @@
// run-rustfix
#![warn(clippy::match_single_binding)]
-#![allow(unused_variables)]
-#![allow(clippy::toplevel_ref_arg, clippy::uninlined_format_args)]
+#![allow(
+ unused,
+ clippy::let_unit_value,
+ clippy::no_effect,
+ clippy::toplevel_ref_arg,
+ clippy::uninlined_format_args
+)]
struct Point {
x: i32,
@@ -109,10 +114,9 @@ fn main() {
// Lint
let x = 1;
- println!("Not an array index start");
+ println!("Not an array index start")
}
-#[allow(dead_code)]
fn issue_8723() {
let (mut val, idx) = ("a b", 1);
@@ -125,16 +129,15 @@ fn issue_8723() {
let _ = val;
}
-#[allow(dead_code)]
+fn side_effects() {}
+
fn issue_9575() {
- fn side_effects() {}
let _ = || {
side_effects();
- println!("Needs curlies");
+ println!("Needs curlies")
};
}
-#[allow(dead_code)]
fn issue_9725(r: Option<u32>) {
let x = r;
match x {
@@ -146,3 +149,25 @@ fn issue_9725(r: Option<u32>) {
},
};
}
+
+fn issue_10447() -> usize {
+ ();
+
+ let a = ();
+
+ side_effects();
+
+ let b = side_effects();
+
+ println!("1");
+
+ let c = println!("1");
+
+ let in_expr = [
+ (),
+ side_effects(),
+ println!("1"),
+ ];
+
+ 2
+}
diff --git a/src/tools/clippy/tests/ui/match_single_binding.rs b/src/tools/clippy/tests/ui/match_single_binding.rs
index f188aeb5f..8b047b19c 100644
--- a/src/tools/clippy/tests/ui/match_single_binding.rs
+++ b/src/tools/clippy/tests/ui/match_single_binding.rs
@@ -1,7 +1,12 @@
// run-rustfix
#![warn(clippy::match_single_binding)]
-#![allow(unused_variables)]
-#![allow(clippy::toplevel_ref_arg, clippy::uninlined_format_args)]
+#![allow(
+ unused,
+ clippy::let_unit_value,
+ clippy::no_effect,
+ clippy::toplevel_ref_arg,
+ clippy::uninlined_format_args
+)]
struct Point {
x: i32,
@@ -127,7 +132,6 @@ fn main() {
}
}
-#[allow(dead_code)]
fn issue_8723() {
let (mut val, idx) = ("a b", 1);
@@ -141,15 +145,14 @@ fn issue_8723() {
let _ = val;
}
-#[allow(dead_code)]
+fn side_effects() {}
+
fn issue_9575() {
- fn side_effects() {}
let _ = || match side_effects() {
_ => println!("Needs curlies"),
};
}
-#[allow(dead_code)]
fn issue_9725(r: Option<u32>) {
match r {
x => match x {
@@ -162,3 +165,43 @@ fn issue_9725(r: Option<u32>) {
},
};
}
+
+fn issue_10447() -> usize {
+ match 1 {
+ _ => (),
+ }
+
+ let a = match 1 {
+ _ => (),
+ };
+
+ match 1 {
+ _ => side_effects(),
+ }
+
+ let b = match 1 {
+ _ => side_effects(),
+ };
+
+ match 1 {
+ _ => println!("1"),
+ }
+
+ let c = match 1 {
+ _ => println!("1"),
+ };
+
+ let in_expr = [
+ match 1 {
+ _ => (),
+ },
+ match 1 {
+ _ => side_effects(),
+ },
+ match 1 {
+ _ => println!("1"),
+ },
+ ];
+
+ 2
+}
diff --git a/src/tools/clippy/tests/ui/match_single_binding.stderr b/src/tools/clippy/tests/ui/match_single_binding.stderr
index e960d64ad..9d16af76c 100644
--- a/src/tools/clippy/tests/ui/match_single_binding.stderr
+++ b/src/tools/clippy/tests/ui/match_single_binding.stderr
@@ -1,5 +1,5 @@
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:28:5
+ --> $DIR/match_single_binding.rs:33:5
|
LL | / match (a, b, c) {
LL | | (x, y, z) => {
@@ -18,7 +18,7 @@ LL + }
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:34:5
+ --> $DIR/match_single_binding.rs:39:5
|
LL | / match (a, b, c) {
LL | | (x, y, z) => println!("{} {} {}", x, y, z),
@@ -32,7 +32,7 @@ LL + println!("{} {} {}", x, y, z);
|
error: this match could be replaced by its body itself
- --> $DIR/match_single_binding.rs:51:5
+ --> $DIR/match_single_binding.rs:56:5
|
LL | / match a {
LL | | _ => println!("whatever"),
@@ -40,7 +40,7 @@ LL | | }
| |_____^ help: consider using the match body instead: `println!("whatever");`
error: this match could be replaced by its body itself
- --> $DIR/match_single_binding.rs:55:5
+ --> $DIR/match_single_binding.rs:60:5
|
LL | / match a {
LL | | _ => {
@@ -59,7 +59,7 @@ LL + }
|
error: this match could be replaced by its body itself
- --> $DIR/match_single_binding.rs:62:5
+ --> $DIR/match_single_binding.rs:67:5
|
LL | / match a {
LL | | _ => {
@@ -81,7 +81,7 @@ LL + }
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:72:5
+ --> $DIR/match_single_binding.rs:77:5
|
LL | / match p {
LL | | Point { x, y } => println!("Coords: ({}, {})", x, y),
@@ -95,7 +95,7 @@ LL + println!("Coords: ({}, {})", x, y);
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:76:5
+ --> $DIR/match_single_binding.rs:81:5
|
LL | / match p {
LL | | Point { x: x1, y: y1 } => println!("Coords: ({}, {})", x1, y1),
@@ -109,7 +109,7 @@ LL + println!("Coords: ({}, {})", x1, y1);
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:81:5
+ --> $DIR/match_single_binding.rs:86:5
|
LL | / match x {
LL | | ref r => println!("Got a reference to {}", r),
@@ -123,7 +123,7 @@ LL + println!("Got a reference to {}", r);
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:86:5
+ --> $DIR/match_single_binding.rs:91:5
|
LL | / match x {
LL | | ref mut mr => println!("Got a mutable reference to {}", mr),
@@ -137,7 +137,7 @@ LL + println!("Got a mutable reference to {}", mr);
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:90:5
+ --> $DIR/match_single_binding.rs:95:5
|
LL | / let product = match coords() {
LL | | Point { x, y } => x * y,
@@ -151,7 +151,7 @@ LL + let product = x * y;
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:98:18
+ --> $DIR/match_single_binding.rs:103:18
|
LL | .map(|i| match i.unwrap() {
| __________________^
@@ -168,16 +168,16 @@ LL ~ })
|
error: this match could be replaced by its body itself
- --> $DIR/match_single_binding.rs:124:5
+ --> $DIR/match_single_binding.rs:129:5
|
LL | / match x {
LL | | // =>
LL | | _ => println!("Not an array index start"),
LL | | }
- | |_____^ help: consider using the match body instead: `println!("Not an array index start");`
+ | |_____^ help: consider using the match body instead: `println!("Not an array index start")`
error: this assignment could be simplified
- --> $DIR/match_single_binding.rs:134:5
+ --> $DIR/match_single_binding.rs:138:5
|
LL | / val = match val.split_at(idx) {
LL | | (pre, suf) => {
@@ -197,7 +197,7 @@ LL ~ };
|
error: this match could be replaced by its scrutinee and body
- --> $DIR/match_single_binding.rs:147:16
+ --> $DIR/match_single_binding.rs:151:16
|
LL | let _ = || match side_effects() {
| ________________^
@@ -209,12 +209,12 @@ help: consider using the scrutinee and body instead
|
LL ~ let _ = || {
LL + side_effects();
-LL + println!("Needs curlies");
+LL + println!("Needs curlies")
LL ~ };
|
error: this match could be written as a `let` statement
- --> $DIR/match_single_binding.rs:154:5
+ --> $DIR/match_single_binding.rs:157:5
|
LL | / match r {
LL | | x => match x {
@@ -238,5 +238,80 @@ LL + },
LL ~ };
|
-error: aborting due to 15 previous errors
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:170:5
+ |
+LL | / match 1 {
+LL | | _ => (),
+LL | | }
+ | |_____^ help: consider using the match body instead: `();`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:174:13
+ |
+LL | let a = match 1 {
+ | _____________^
+LL | | _ => (),
+LL | | };
+ | |_____^ help: consider using the match body instead: `()`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:178:5
+ |
+LL | / match 1 {
+LL | | _ => side_effects(),
+LL | | }
+ | |_____^ help: consider using the match body instead: `side_effects();`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:182:13
+ |
+LL | let b = match 1 {
+ | _____________^
+LL | | _ => side_effects(),
+LL | | };
+ | |_____^ help: consider using the match body instead: `side_effects()`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:186:5
+ |
+LL | / match 1 {
+LL | | _ => println!("1"),
+LL | | }
+ | |_____^ help: consider using the match body instead: `println!("1");`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:190:13
+ |
+LL | let c = match 1 {
+ | _____________^
+LL | | _ => println!("1"),
+LL | | };
+ | |_____^ help: consider using the match body instead: `println!("1")`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:195:9
+ |
+LL | / match 1 {
+LL | | _ => (),
+LL | | },
+ | |_________^ help: consider using the match body instead: `()`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:198:9
+ |
+LL | / match 1 {
+LL | | _ => side_effects(),
+LL | | },
+ | |_________^ help: consider using the match body instead: `side_effects()`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:201:9
+ |
+LL | / match 1 {
+LL | | _ => println!("1"),
+LL | | },
+ | |_________^ help: consider using the match body instead: `println!("1")`
+
+error: aborting due to 24 previous errors
diff --git a/src/tools/clippy/tests/ui/match_single_binding2.fixed b/src/tools/clippy/tests/ui/match_single_binding2.fixed
index 6a7db67e3..e3cf56a42 100644
--- a/src/tools/clippy/tests/ui/match_single_binding2.fixed
+++ b/src/tools/clippy/tests/ui/match_single_binding2.fixed
@@ -30,7 +30,7 @@ fn main() {
#[rustfmt::skip]
Some((first, _second)) => {
let (a, b) = get_tup();
- println!("a {:?} and b {:?}", a, b);
+ println!("a {:?} and b {:?}", a, b)
},
None => println!("nothing"),
}
@@ -49,5 +49,5 @@ fn main() {
0 => 1,
_ => 2,
};
- println!("Single branch");
+ println!("Single branch")
}
diff --git a/src/tools/clippy/tests/ui/match_single_binding2.stderr b/src/tools/clippy/tests/ui/match_single_binding2.stderr
index 22bf7d8be..e180b93e7 100644
--- a/src/tools/clippy/tests/ui/match_single_binding2.stderr
+++ b/src/tools/clippy/tests/ui/match_single_binding2.stderr
@@ -27,7 +27,7 @@ LL | | }
help: consider using a `let` statement
|
LL ~ let (a, b) = get_tup();
-LL + println!("a {:?} and b {:?}", a, b);
+LL + println!("a {:?} and b {:?}", a, b)
|
error: this match could be replaced by its scrutinee and body
@@ -61,7 +61,7 @@ LL ~ match x {
LL + 0 => 1,
LL + _ => 2,
LL + };
-LL + println!("Single branch");
+LL + println!("Single branch")
|
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/mem_replace.fixed b/src/tools/clippy/tests/ui/mem_replace.fixed
index 874d55843..7fd340173 100644
--- a/src/tools/clippy/tests/ui/mem_replace.fixed
+++ b/src/tools/clippy/tests/ui/mem_replace.fixed
@@ -90,3 +90,37 @@ fn msrv_1_40() {
let mut s = String::from("foo");
let _ = std::mem::take(&mut s);
}
+
+fn issue9824() {
+ struct Foo<'a>(Option<&'a str>);
+ impl<'a> std::ops::Deref for Foo<'a> {
+ type Target = Option<&'a str>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ impl<'a> std::ops::DerefMut for Foo<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+ }
+
+ struct Bar {
+ opt: Option<u8>,
+ val: String,
+ }
+
+ let mut f = Foo(Some("foo"));
+ let mut b = Bar {
+ opt: Some(1),
+ val: String::from("bar"),
+ };
+
+ // replace option with none
+ let _ = f.0.take();
+ let _ = (*f).take();
+ let _ = b.opt.take();
+ // replace with default
+ let _ = std::mem::take(&mut b.val);
+}
diff --git a/src/tools/clippy/tests/ui/mem_replace.rs b/src/tools/clippy/tests/ui/mem_replace.rs
index f4f3bff51..fa2903add 100644
--- a/src/tools/clippy/tests/ui/mem_replace.rs
+++ b/src/tools/clippy/tests/ui/mem_replace.rs
@@ -90,3 +90,37 @@ fn msrv_1_40() {
let mut s = String::from("foo");
let _ = std::mem::replace(&mut s, String::default());
}
+
+fn issue9824() {
+ struct Foo<'a>(Option<&'a str>);
+ impl<'a> std::ops::Deref for Foo<'a> {
+ type Target = Option<&'a str>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ impl<'a> std::ops::DerefMut for Foo<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+ }
+
+ struct Bar {
+ opt: Option<u8>,
+ val: String,
+ }
+
+ let mut f = Foo(Some("foo"));
+ let mut b = Bar {
+ opt: Some(1),
+ val: String::from("bar"),
+ };
+
+ // replace option with none
+ let _ = std::mem::replace(&mut f.0, None);
+ let _ = std::mem::replace(&mut *f, None);
+ let _ = std::mem::replace(&mut b.opt, None);
+ // replace with default
+ let _ = std::mem::replace(&mut b.val, String::default());
+}
diff --git a/src/tools/clippy/tests/ui/mem_replace.stderr b/src/tools/clippy/tests/ui/mem_replace.stderr
index caa127f76..58b57be75 100644
--- a/src/tools/clippy/tests/ui/mem_replace.stderr
+++ b/src/tools/clippy/tests/ui/mem_replace.stderr
@@ -122,5 +122,29 @@ error: replacing a value of type `T` with `T::default()` is better expressed usi
LL | let _ = std::mem::replace(&mut s, String::default());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut s)`
-error: aborting due to 20 previous errors
+error: replacing an `Option` with `None`
+ --> $DIR/mem_replace.rs:121:13
+ |
+LL | let _ = std::mem::replace(&mut f.0, None);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Option::take()` instead: `f.0.take()`
+
+error: replacing an `Option` with `None`
+ --> $DIR/mem_replace.rs:122:13
+ |
+LL | let _ = std::mem::replace(&mut *f, None);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Option::take()` instead: `(*f).take()`
+
+error: replacing an `Option` with `None`
+ --> $DIR/mem_replace.rs:123:13
+ |
+LL | let _ = std::mem::replace(&mut b.opt, None);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Option::take()` instead: `b.opt.take()`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:125:13
+ |
+LL | let _ = std::mem::replace(&mut b.val, String::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut b.val)`
+
+error: aborting due to 24 previous errors
diff --git a/src/tools/clippy/tests/ui/mem_replace_macro.rs b/src/tools/clippy/tests/ui/mem_replace_macro.rs
index 0c09344b8..3932e7d00 100644
--- a/src/tools/clippy/tests/ui/mem_replace_macro.rs
+++ b/src/tools/clippy/tests/ui/mem_replace_macro.rs
@@ -1,21 +1,12 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::mem_replace_with_default)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! take {
- ($s:expr) => {
- std::mem::replace($s, Default::default())
- };
-}
-
-fn replace_with_default() {
- let s = &mut String::from("foo");
- take!(s);
- take_external!(s);
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
- replace_with_default();
+ let s = &mut String::from("foo");
+ inline!(std::mem::replace($s, Default::default()));
+ external!(std::mem::replace($s, Default::default()));
}
diff --git a/src/tools/clippy/tests/ui/mem_replace_macro.stderr b/src/tools/clippy/tests/ui/mem_replace_macro.stderr
index dd69ab8b5..35dda93da 100644
--- a/src/tools/clippy/tests/ui/mem_replace_macro.stderr
+++ b/src/tools/clippy/tests/ui/mem_replace_macro.stderr
@@ -1,14 +1,11 @@
error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
- --> $DIR/mem_replace_macro.rs:9:9
+ --> $DIR/mem_replace_macro.rs:10:13
|
-LL | std::mem::replace($s, Default::default())
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-...
-LL | take!(s);
- | -------- in this macro invocation
+LL | inline!(std::mem::replace($s, Default::default()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `-D clippy::mem-replace-with-default` implied by `-D warnings`
- = note: this error originates in the macro `take` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to previous error
diff --git a/src/tools/clippy/tests/ui/missing_assert_message.rs b/src/tools/clippy/tests/ui/missing_assert_message.rs
new file mode 100644
index 000000000..89404ca88
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_assert_message.rs
@@ -0,0 +1,84 @@
+#![allow(unused)]
+#![warn(clippy::missing_assert_message)]
+
+macro_rules! bar {
+ ($( $x:expr ),*) => {
+ foo()
+ };
+}
+
+fn main() {}
+
+// Should trigger warning
+fn asserts_without_message() {
+ assert!(foo());
+ assert_eq!(foo(), foo());
+ assert_ne!(foo(), foo());
+ debug_assert!(foo());
+ debug_assert_eq!(foo(), foo());
+ debug_assert_ne!(foo(), foo());
+}
+
+// Should trigger warning
+fn asserts_without_message_but_with_macro_calls() {
+ assert!(bar!(true));
+ assert!(bar!(true, false));
+ assert_eq!(bar!(true), foo());
+ assert_ne!(bar!(true, true), bar!(true));
+}
+
+// Should trigger warning
+fn asserts_with_trailing_commas() {
+ assert!(foo(),);
+ assert_eq!(foo(), foo(),);
+ assert_ne!(foo(), foo(),);
+ debug_assert!(foo(),);
+ debug_assert_eq!(foo(), foo(),);
+ debug_assert_ne!(foo(), foo(),);
+}
+
+// Should not trigger warning
+fn asserts_with_message_and_with_macro_calls() {
+ assert!(bar!(true), "msg");
+ assert!(bar!(true, false), "msg");
+ assert_eq!(bar!(true), foo(), "msg");
+ assert_ne!(bar!(true, true), bar!(true), "msg");
+}
+
+// Should not trigger warning
+fn asserts_with_message() {
+ assert!(foo(), "msg");
+ assert_eq!(foo(), foo(), "msg");
+ assert_ne!(foo(), foo(), "msg");
+ debug_assert!(foo(), "msg");
+ debug_assert_eq!(foo(), foo(), "msg");
+ debug_assert_ne!(foo(), foo(), "msg");
+}
+
+// Should not trigger warning
+#[test]
+fn asserts_without_message_but_inside_a_test_function() {
+ assert!(foo());
+ assert_eq!(foo(), foo());
+ assert_ne!(foo(), foo());
+ debug_assert!(foo());
+ debug_assert_eq!(foo(), foo());
+ debug_assert_ne!(foo(), foo());
+}
+
+// Should not trigger warning
+#[cfg(test)]
+mod tests {
+ fn asserts_without_message_but_inside_a_test_module() {
+ assert!(foo());
+ assert_eq!(foo(), foo());
+ assert_ne!(foo(), foo());
+ debug_assert!(foo());
+ debug_assert_eq!(foo(), foo());
+ debug_assert_ne!(foo(), foo());
+ }
+}
+
+fn foo() -> bool {
+ true
+}
diff --git a/src/tools/clippy/tests/ui/missing_assert_message.stderr b/src/tools/clippy/tests/ui/missing_assert_message.stderr
new file mode 100644
index 000000000..ecd038012
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_assert_message.stderr
@@ -0,0 +1,131 @@
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:14:5
+ |
+LL | assert!(foo());
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+ = note: `-D clippy::missing-assert-message` implied by `-D warnings`
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:15:5
+ |
+LL | assert_eq!(foo(), foo());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:16:5
+ |
+LL | assert_ne!(foo(), foo());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:17:5
+ |
+LL | debug_assert!(foo());
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:18:5
+ |
+LL | debug_assert_eq!(foo(), foo());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:19:5
+ |
+LL | debug_assert_ne!(foo(), foo());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:24:5
+ |
+LL | assert!(bar!(true));
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:25:5
+ |
+LL | assert!(bar!(true, false));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:26:5
+ |
+LL | assert_eq!(bar!(true), foo());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:27:5
+ |
+LL | assert_ne!(bar!(true, true), bar!(true));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:32:5
+ |
+LL | assert!(foo(),);
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:33:5
+ |
+LL | assert_eq!(foo(), foo(),);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:34:5
+ |
+LL | assert_ne!(foo(), foo(),);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:35:5
+ |
+LL | debug_assert!(foo(),);
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:36:5
+ |
+LL | debug_assert_eq!(foo(), foo(),);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: assert without any message
+ --> $DIR/missing_assert_message.rs:37:5
+ |
+LL | debug_assert_ne!(foo(), foo(),);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider describing why the failing assert is problematic
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs
index 75cace181..e6f88c6e6 100644
--- a/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs
@@ -3,15 +3,15 @@
//! The .stderr output of this test should be empty. Otherwise it's a bug somewhere.
// aux-build:helper.rs
-// aux-build:../../auxiliary/proc_macro_with_span.rs
+// aux-build:../../auxiliary/proc_macros.rs
#![warn(clippy::missing_const_for_fn)]
#![feature(start)]
extern crate helper;
-extern crate proc_macro_with_span;
+extern crate proc_macros;
-use proc_macro_with_span::with_span;
+use proc_macros::with_span;
struct Game;
diff --git a/src/tools/clippy/tests/ui/missing_doc.rs b/src/tools/clippy/tests/ui/missing_doc.rs
index 590ad63c9..575204894 100644
--- a/src/tools/clippy/tests/ui/missing_doc.rs
+++ b/src/tools/clippy/tests/ui/missing_doc.rs
@@ -1,5 +1,5 @@
// needs-asm-support
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![warn(clippy::missing_docs_in_private_items)]
// When denying at the crate level, be sure to not get random warnings from the
@@ -8,9 +8,9 @@
//! Some garbage docs for the crate here
#![doc = "More garbage"]
-extern crate proc_macro_with_span;
+extern crate proc_macros;
-use proc_macro_with_span::with_span;
+use proc_macros::with_span;
use std::arch::global_asm;
type Typedef = String;
diff --git a/src/tools/clippy/tests/ui/missing_doc.stderr b/src/tools/clippy/tests/ui/missing_doc.stderr
index d3bef28bf..4e8a49bf1 100644
--- a/src/tools/clippy/tests/ui/missing_doc.stderr
+++ b/src/tools/clippy/tests/ui/missing_doc.stderr
@@ -6,30 +6,12 @@ LL | type Typedef = String;
|
= note: `-D clippy::missing-docs-in-private-items` implied by `-D warnings`
-error: missing documentation for a type alias
- --> $DIR/missing_doc.rs:17:1
- |
-LL | pub type PubTypedef = String;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
error: missing documentation for a module
--> $DIR/missing_doc.rs:19:1
|
LL | mod module_no_dox {}
| ^^^^^^^^^^^^^^^^^^^^
-error: missing documentation for a module
- --> $DIR/missing_doc.rs:20:1
- |
-LL | pub mod pub_module_no_dox {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for a function
- --> $DIR/missing_doc.rs:24:1
- |
-LL | pub fn foo2() {}
- | ^^^^^^^^^^^^^^^^
-
error: missing documentation for a function
--> $DIR/missing_doc.rs:25:1
|
@@ -69,50 +51,18 @@ error: missing documentation for a variant
LL | BarB,
| ^^^^
-error: missing documentation for an enum
- --> $DIR/missing_doc.rs:44:1
- |
-LL | / pub enum PubBaz {
-LL | | PubBazA { a: isize },
-LL | | }
- | |_^
-
-error: missing documentation for a variant
- --> $DIR/missing_doc.rs:45:5
- |
-LL | PubBazA { a: isize },
- | ^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for a struct field
- --> $DIR/missing_doc.rs:45:15
- |
-LL | PubBazA { a: isize },
- | ^^^^^^^^
-
error: missing documentation for a constant
--> $DIR/missing_doc.rs:65:1
|
LL | const FOO: u32 = 0;
| ^^^^^^^^^^^^^^^^^^^
-error: missing documentation for a constant
- --> $DIR/missing_doc.rs:72:1
- |
-LL | pub const FOO4: u32 = 0;
- | ^^^^^^^^^^^^^^^^^^^^^^^^
-
error: missing documentation for a static
--> $DIR/missing_doc.rs:74:1
|
LL | static BAR: u32 = 0;
| ^^^^^^^^^^^^^^^^^^^^
-error: missing documentation for a static
- --> $DIR/missing_doc.rs:81:1
- |
-LL | pub static BAR4: u32 = 0;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
-
error: missing documentation for a module
--> $DIR/missing_doc.rs:83:1
|
@@ -126,34 +76,16 @@ LL | | }
| |_^
error: missing documentation for a function
- --> $DIR/missing_doc.rs:86:5
- |
-LL | pub fn undocumented1() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for a function
- --> $DIR/missing_doc.rs:87:5
- |
-LL | pub fn undocumented2() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for a function
--> $DIR/missing_doc.rs:88:5
|
LL | fn undocumented3() {}
| ^^^^^^^^^^^^^^^^^^^^^
error: missing documentation for a function
- --> $DIR/missing_doc.rs:93:9
- |
-LL | pub fn also_undocumented1() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for a function
--> $DIR/missing_doc.rs:94:9
|
LL | fn also_undocumented2() {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to 24 previous errors
+error: aborting due to 13 previous errors
diff --git a/src/tools/clippy/tests/ui/missing_doc_impl.rs b/src/tools/clippy/tests/ui/missing_doc_impl.rs
index 0396d1193..e2d49b090 100644
--- a/src/tools/clippy/tests/ui/missing_doc_impl.rs
+++ b/src/tools/clippy/tests/ui/missing_doc_impl.rs
@@ -1,4 +1,4 @@
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![warn(clippy::missing_docs_in_private_items)]
#![allow(dead_code)]
@@ -7,8 +7,8 @@
//! Some garbage docs for the crate here
#![doc = "More garbage"]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
struct Foo {
a: isize,
diff --git a/src/tools/clippy/tests/ui/missing_doc_impl.stderr b/src/tools/clippy/tests/ui/missing_doc_impl.stderr
index b410f56e1..111d65469 100644
--- a/src/tools/clippy/tests/ui/missing_doc_impl.stderr
+++ b/src/tools/clippy/tests/ui/missing_doc_impl.stderr
@@ -21,60 +21,12 @@ error: missing documentation for a struct field
LL | b: isize,
| ^^^^^^^^
-error: missing documentation for a struct
- --> $DIR/missing_doc_impl.rs:18:1
- |
-LL | / pub struct PubFoo {
-LL | | pub a: isize,
-LL | | b: isize,
-LL | | }
- | |_^
-
-error: missing documentation for a struct field
- --> $DIR/missing_doc_impl.rs:19:5
- |
-LL | pub a: isize,
- | ^^^^^^^^^^^^
-
error: missing documentation for a struct field
--> $DIR/missing_doc_impl.rs:20:5
|
LL | b: isize,
| ^^^^^^^^
-error: missing documentation for a trait
- --> $DIR/missing_doc_impl.rs:43:1
- |
-LL | / pub trait C {
-LL | | fn foo(&self);
-LL | | fn foo_with_impl(&self) {}
-LL | | }
- | |_^
-
-error: missing documentation for a method
- --> $DIR/missing_doc_impl.rs:44:5
- |
-LL | fn foo(&self);
- | ^^^^^^^^^^^^^^
-
-error: missing documentation for a method
- --> $DIR/missing_doc_impl.rs:45:5
- |
-LL | fn foo_with_impl(&self) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for an associated type
- --> $DIR/missing_doc_impl.rs:55:5
- |
-LL | type AssociatedType;
- | ^^^^^^^^^^^^^^^^^^^^
-
-error: missing documentation for an associated type
- --> $DIR/missing_doc_impl.rs:56:5
- |
-LL | type AssociatedTypeDef = Self;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
error: missing documentation for an associated function
--> $DIR/missing_doc_impl.rs:67:5
|
@@ -90,12 +42,6 @@ LL | fn bar() {}
| ^^^^^^^^^^^
error: missing documentation for an associated function
- --> $DIR/missing_doc_impl.rs:74:5
- |
-LL | pub fn foo() {}
- | ^^^^^^^^^^^^^^^
-
-error: missing documentation for an associated function
--> $DIR/missing_doc_impl.rs:78:5
|
LL | / fn foo2() -> u32 {
@@ -103,5 +49,5 @@ LL | | 1
LL | | }
| |_____^
-error: aborting due to 15 previous errors
+error: aborting due to 7 previous errors
diff --git a/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed b/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed
index becb9562a..9a47d7c56 100644
--- a/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed
+++ b/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![allow(
dead_code,
@@ -10,8 +10,8 @@
clippy::unusual_byte_groupings
)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
fn main() {
let fail14 = 2_i32;
diff --git a/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs b/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs
index ee841bcd7..04261cba5 100644
--- a/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs
+++ b/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![allow(
dead_code,
@@ -10,8 +10,8 @@
clippy::unusual_byte_groupings
)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
fn main() {
let fail14 = 2_32;
diff --git a/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.rs
index 4511bc99c..9082f1675 100644
--- a/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.rs
+++ b/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.rs
@@ -1,12 +1,12 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![allow(unused)]
#![allow(deref_nullptr)]
#![allow(clippy::unnecessary_operation)]
#![allow(clippy::drop_copy)]
#![warn(clippy::multiple_unsafe_ops_per_block)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::external;
use core::arch::asm;
@@ -113,7 +113,38 @@ unsafe fn read_char_good(ptr: *const u8) -> char {
// no lint
fn issue10259() {
- unsafe_macro!();
+ external!(unsafe {
+ *core::ptr::null::<()>();
+ *core::ptr::null::<()>();
+ });
+}
+
+fn _fn_ptr(x: unsafe fn()) {
+ unsafe {
+ x();
+ x();
+ }
+}
+
+fn _assoc_const() {
+ trait X {
+ const X: unsafe fn();
+ }
+ fn _f<T: X>() {
+ unsafe {
+ T::X();
+ T::X();
+ }
+ }
+}
+
+fn _field_fn_ptr(x: unsafe fn()) {
+ struct X(unsafe fn());
+ let x = X(x);
+ unsafe {
+ x.0();
+ x.0();
+ }
}
fn main() {}
diff --git a/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.stderr b/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.stderr
index 303aeb7ae..badc284ec 100644
--- a/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.stderr
+++ b/src/tools/clippy/tests/ui/multiple_unsafe_ops_per_block.stderr
@@ -125,5 +125,65 @@ note: raw pointer dereference occurs here
LL | unsafe { char::from_u32_unchecked(*ptr.cast::<u32>()) }
| ^^^^^^^^^^^^^^^^^^
-error: aborting due to 5 previous errors
+error: this `unsafe` block contains 2 unsafe operations, expected only one
+ --> $DIR/multiple_unsafe_ops_per_block.rs:123:5
+ |
+LL | / unsafe {
+LL | | x();
+LL | | x();
+LL | | }
+ | |_____^
+ |
+note: unsafe function call occurs here
+ --> $DIR/multiple_unsafe_ops_per_block.rs:124:9
+ |
+LL | x();
+ | ^^^
+note: unsafe function call occurs here
+ --> $DIR/multiple_unsafe_ops_per_block.rs:125:9
+ |
+LL | x();
+ | ^^^
+
+error: this `unsafe` block contains 2 unsafe operations, expected only one
+ --> $DIR/multiple_unsafe_ops_per_block.rs:134:9
+ |
+LL | / unsafe {
+LL | | T::X();
+LL | | T::X();
+LL | | }
+ | |_________^
+ |
+note: unsafe function call occurs here
+ --> $DIR/multiple_unsafe_ops_per_block.rs:135:13
+ |
+LL | T::X();
+ | ^^^^^^
+note: unsafe function call occurs here
+ --> $DIR/multiple_unsafe_ops_per_block.rs:136:13
+ |
+LL | T::X();
+ | ^^^^^^
+
+error: this `unsafe` block contains 2 unsafe operations, expected only one
+ --> $DIR/multiple_unsafe_ops_per_block.rs:144:5
+ |
+LL | / unsafe {
+LL | | x.0();
+LL | | x.0();
+LL | | }
+ | |_____^
+ |
+note: unsafe function call occurs here
+ --> $DIR/multiple_unsafe_ops_per_block.rs:145:9
+ |
+LL | x.0();
+ | ^^^^^
+note: unsafe function call occurs here
+ --> $DIR/multiple_unsafe_ops_per_block.rs:146:9
+ |
+LL | x.0();
+ | ^^^^^
+
+error: aborting due to 8 previous errors
diff --git a/src/tools/clippy/tests/ui/must_use_unit.fixed b/src/tools/clippy/tests/ui/must_use_unit.fixed
index 6c9aa434a..b7d375ff8 100644
--- a/src/tools/clippy/tests/ui/must_use_unit.fixed
+++ b/src/tools/clippy/tests/ui/must_use_unit.fixed
@@ -1,11 +1,11 @@
//run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::must_use_unit)]
#![allow(clippy::unused_unit)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::external;
pub fn must_use_default() {}
@@ -22,5 +22,8 @@ fn main() {
must_use_with_note();
// We should not lint in external macros
- must_use_unit!();
+ external!(
+ #[must_use]
+ fn foo() {}
+ );
}
diff --git a/src/tools/clippy/tests/ui/must_use_unit.rs b/src/tools/clippy/tests/ui/must_use_unit.rs
index 8a395dc28..74d6b4ca8 100644
--- a/src/tools/clippy/tests/ui/must_use_unit.rs
+++ b/src/tools/clippy/tests/ui/must_use_unit.rs
@@ -1,11 +1,11 @@
//run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::must_use_unit)]
#![allow(clippy::unused_unit)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::external;
#[must_use]
pub fn must_use_default() {}
@@ -22,5 +22,8 @@ fn main() {
must_use_with_note();
// We should not lint in external macros
- must_use_unit!();
+ external!(
+ #[must_use]
+ fn foo() {}
+ );
}
diff --git a/src/tools/clippy/tests/ui/mut_mut.rs b/src/tools/clippy/tests/ui/mut_mut.rs
index ee3a85656..06bb08544 100644
--- a/src/tools/clippy/tests/ui/mut_mut.rs
+++ b/src/tools/clippy/tests/ui/mut_mut.rs
@@ -1,10 +1,10 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::mut_mut)]
#![allow(unused)]
#![allow(clippy::no_effect, clippy::uninlined_format_args, clippy::unnecessary_operation)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
fn fun(x: &mut &mut u32) -> bool {
**x > 0
@@ -21,6 +21,7 @@ macro_rules! mut_ptr {
}
#[allow(unused_mut, unused_variables)]
+#[inline_macros]
fn main() {
let mut x = &mut &mut 1u32;
{
@@ -37,7 +38,7 @@ fn main() {
***y + **x;
}
- let mut z = mut_ptr!(&mut 3u32);
+ let mut z = inline!(&mut $(&mut 3u32));
}
fn issue939() {
@@ -55,7 +56,7 @@ fn issue939() {
fn issue6922() {
// do not lint from an external macro
- mut_mut!();
+ external!(let mut_mut_ty: &mut &mut u32 = &mut &mut 1u32;);
}
mod issue9035 {
diff --git a/src/tools/clippy/tests/ui/mut_mut.stderr b/src/tools/clippy/tests/ui/mut_mut.stderr
index 6820a85aa..93b857eb2 100644
--- a/src/tools/clippy/tests/ui/mut_mut.stderr
+++ b/src/tools/clippy/tests/ui/mut_mut.stderr
@@ -7,54 +7,51 @@ LL | fn fun(x: &mut &mut u32) -> bool {
= note: `-D clippy::mut-mut` implied by `-D warnings`
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:25:17
+ --> $DIR/mut_mut.rs:26:17
|
LL | let mut x = &mut &mut 1u32;
| ^^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:19:9
+ --> $DIR/mut_mut.rs:41:25
|
-LL | &mut $p
- | ^^^^^^^
-...
-LL | let mut z = mut_ptr!(&mut 3u32);
- | ------------------- in this macro invocation
+LL | let mut z = inline!(&mut $(&mut 3u32));
+ | ^
|
- = note: this error originates in the macro `mut_ptr` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this expression mutably borrows a mutable reference. Consider reborrowing
- --> $DIR/mut_mut.rs:27:21
+ --> $DIR/mut_mut.rs:28:21
|
LL | let mut y = &mut x;
| ^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:31:32
+ --> $DIR/mut_mut.rs:32:32
|
LL | let y: &mut &mut u32 = &mut &mut 2;
| ^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:31:16
+ --> $DIR/mut_mut.rs:32:16
|
LL | let y: &mut &mut u32 = &mut &mut 2;
| ^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:36:37
+ --> $DIR/mut_mut.rs:37:37
|
LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
| ^^^^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:36:16
+ --> $DIR/mut_mut.rs:37:16
|
LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
| ^^^^^^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:36:21
+ --> $DIR/mut_mut.rs:37:21
|
LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
| ^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/needless_late_init.fixed b/src/tools/clippy/tests/ui/needless_late_init.fixed
index 17f2227ba..86d899bb4 100644
--- a/src/tools/clippy/tests/ui/needless_late_init.fixed
+++ b/src/tools/clippy/tests/ui/needless_late_init.fixed
@@ -1,4 +1,5 @@
// run-rustfix
+// aux-build:proc_macros.rs
#![feature(let_chains)]
#![allow(unused)]
#![allow(
@@ -10,6 +11,8 @@
clippy::uninlined_format_args
)]
+extern crate proc_macros;
+
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::rc::Rc;
@@ -138,6 +141,7 @@ const fn in_const() -> &'static str {
a
}
+#[proc_macros::inline_macros]
fn does_not_lint() {
let z;
if false {
@@ -195,35 +199,27 @@ fn does_not_lint() {
}
y = 3;
- macro_rules! assign {
- ($i:ident) => {
- $i = 1;
- };
- }
let x;
- assign!(x);
+ inline!($x = 1;);
let x;
if true {
- assign!(x);
+ inline!($x = 1;);
} else {
x = 2;
}
- macro_rules! in_macro {
- () => {
- let x;
- x = 1;
+ inline!({
+ let x;
+ x = 1;
- let x;
- if true {
- x = 1;
- } else {
- x = 2;
- }
- };
- }
- in_macro!();
+ let x;
+ if true {
+ x = 1;
+ } else {
+ x = 2;
+ }
+ });
// ignore if-lets - https://github.com/rust-lang/rust-clippy/issues/8613
let x;
diff --git a/src/tools/clippy/tests/ui/needless_late_init.rs b/src/tools/clippy/tests/ui/needless_late_init.rs
index d84457a29..969afb38e 100644
--- a/src/tools/clippy/tests/ui/needless_late_init.rs
+++ b/src/tools/clippy/tests/ui/needless_late_init.rs
@@ -1,4 +1,5 @@
// run-rustfix
+// aux-build:proc_macros.rs
#![feature(let_chains)]
#![allow(unused)]
#![allow(
@@ -10,6 +11,8 @@
clippy::uninlined_format_args
)]
+extern crate proc_macros;
+
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::rc::Rc;
@@ -138,6 +141,7 @@ const fn in_const() -> &'static str {
a
}
+#[proc_macros::inline_macros]
fn does_not_lint() {
let z;
if false {
@@ -195,35 +199,27 @@ fn does_not_lint() {
}
y = 3;
- macro_rules! assign {
- ($i:ident) => {
- $i = 1;
- };
- }
let x;
- assign!(x);
+ inline!($x = 1;);
let x;
if true {
- assign!(x);
+ inline!($x = 1;);
} else {
x = 2;
}
- macro_rules! in_macro {
- () => {
- let x;
- x = 1;
+ inline!({
+ let x;
+ x = 1;
- let x;
- if true {
- x = 1;
- } else {
- x = 2;
- }
- };
- }
- in_macro!();
+ let x;
+ if true {
+ x = 1;
+ } else {
+ x = 2;
+ }
+ });
// ignore if-lets - https://github.com/rust-lang/rust-clippy/issues/8613
let x;
diff --git a/src/tools/clippy/tests/ui/needless_late_init.stderr b/src/tools/clippy/tests/ui/needless_late_init.stderr
index 0a256fb4a..eff782f8b 100644
--- a/src/tools/clippy/tests/ui/needless_late_init.stderr
+++ b/src/tools/clippy/tests/ui/needless_late_init.stderr
@@ -1,5 +1,5 @@
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:24:5
+ --> $DIR/needless_late_init.rs:27:5
|
LL | let a;
| ^^^^^^ created here
@@ -13,7 +13,7 @@ LL | let a = "zero";
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:27:5
+ --> $DIR/needless_late_init.rs:30:5
|
LL | let b;
| ^^^^^^ created here
@@ -27,7 +27,7 @@ LL | let b = 1;
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:28:5
+ --> $DIR/needless_late_init.rs:31:5
|
LL | let c;
| ^^^^^^ created here
@@ -41,7 +41,7 @@ LL | let c = 2;
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:32:5
+ --> $DIR/needless_late_init.rs:35:5
|
LL | let d: usize;
| ^^^^^^^^^^^^^ created here
@@ -54,7 +54,7 @@ LL | let d: usize = 1;
| ~~~~~~~~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:35:5
+ --> $DIR/needless_late_init.rs:38:5
|
LL | let e;
| ^^^^^^ created here
@@ -67,7 +67,7 @@ LL | let e = format!("{}", d);
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:40:5
+ --> $DIR/needless_late_init.rs:43:5
|
LL | let a;
| ^^^^^^
@@ -88,7 +88,7 @@ LL | };
| +
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:49:5
+ --> $DIR/needless_late_init.rs:52:5
|
LL | let b;
| ^^^^^^
@@ -109,7 +109,7 @@ LL | };
| +
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:56:5
+ --> $DIR/needless_late_init.rs:59:5
|
LL | let d;
| ^^^^^^
@@ -130,7 +130,7 @@ LL | };
| +
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:64:5
+ --> $DIR/needless_late_init.rs:67:5
|
LL | let e;
| ^^^^^^
@@ -151,7 +151,7 @@ LL | };
| +
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:71:5
+ --> $DIR/needless_late_init.rs:74:5
|
LL | let f;
| ^^^^^^
@@ -167,7 +167,7 @@ LL + 1 => "three",
|
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:77:5
+ --> $DIR/needless_late_init.rs:80:5
|
LL | let g: usize;
| ^^^^^^^^^^^^^
@@ -187,7 +187,7 @@ LL | };
| +
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:85:5
+ --> $DIR/needless_late_init.rs:88:5
|
LL | let x;
| ^^^^^^ created here
@@ -201,7 +201,7 @@ LL | let x = 1;
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:89:5
+ --> $DIR/needless_late_init.rs:92:5
|
LL | let x;
| ^^^^^^ created here
@@ -215,7 +215,7 @@ LL | let x = SignificantDrop;
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:93:5
+ --> $DIR/needless_late_init.rs:96:5
|
LL | let x;
| ^^^^^^ created here
@@ -229,7 +229,7 @@ LL | let x = SignificantDrop;
| ~~~~~
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:112:5
+ --> $DIR/needless_late_init.rs:115:5
|
LL | let a;
| ^^^^^^
@@ -250,7 +250,7 @@ LL | };
| +
error: unneeded late initialization
- --> $DIR/needless_late_init.rs:129:5
+ --> $DIR/needless_late_init.rs:132:5
|
LL | let a;
| ^^^^^^
diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.fixed b/src/tools/clippy/tests/ui/needless_lifetimes.fixed
index f0f1f9298..e6ead69d1 100644
--- a/src/tools/clippy/tests/ui/needless_lifetimes.fixed
+++ b/src/tools/clippy/tests/ui/needless_lifetimes.fixed
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::needless_lifetimes)]
#![allow(
@@ -12,8 +12,8 @@
clippy::get_first
)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::inline_macros;
fn distinct_lifetimes(_x: &u8, _y: &u8, _z: u8) {}
@@ -502,30 +502,29 @@ mod pr_9743_output_lifetime_checks {
}
}
+#[inline_macros]
mod in_macro {
- macro_rules! local_one_input_macro {
- () => {
- fn one_input(x: &u8) -> &u8 {
- unimplemented!()
- }
- };
- }
+ use proc_macros::external;
// lint local macro expands to function with needless lifetimes
- local_one_input_macro!();
+ inline! {
+ fn one_input(x: &u8) -> &u8 {
+ unimplemented!()
+ }
+ }
// no lint on external macro
- macro_rules::needless_lifetime!();
-
- macro_rules! expanded_lifetime {
- ($l:lifetime) => {
- fn f<$l>(arg: &$l str) -> &$l str {
- arg
- }
+ external! {
+ fn needless_lifetime<'a>(x: &'a u8) -> &'a u8 {
+ unimplemented!()
}
}
- expanded_lifetime!('a);
+ inline! {
+ fn f<$'a>(arg: &$'a str) -> &$'a str {
+ arg
+ }
+ }
}
mod issue5787 {
diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.rs b/src/tools/clippy/tests/ui/needless_lifetimes.rs
index ddfd10430..06eb43050 100644
--- a/src/tools/clippy/tests/ui/needless_lifetimes.rs
+++ b/src/tools/clippy/tests/ui/needless_lifetimes.rs
@@ -1,5 +1,5 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::needless_lifetimes)]
#![allow(
@@ -12,8 +12,8 @@
clippy::get_first
)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::inline_macros;
fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {}
@@ -502,30 +502,29 @@ mod pr_9743_output_lifetime_checks {
}
}
+#[inline_macros]
mod in_macro {
- macro_rules! local_one_input_macro {
- () => {
- fn one_input<'a>(x: &'a u8) -> &'a u8 {
- unimplemented!()
- }
- };
- }
+ use proc_macros::external;
// lint local macro expands to function with needless lifetimes
- local_one_input_macro!();
+ inline! {
+ fn one_input<'a>(x: &'a u8) -> &'a u8 {
+ unimplemented!()
+ }
+ }
// no lint on external macro
- macro_rules::needless_lifetime!();
-
- macro_rules! expanded_lifetime {
- ($l:lifetime) => {
- fn f<$l>(arg: &$l str) -> &$l str {
- arg
- }
+ external! {
+ fn needless_lifetime<'a>(x: &'a u8) -> &'a u8 {
+ unimplemented!()
}
}
- expanded_lifetime!('a);
+ inline! {
+ fn f<$'a>(arg: &$'a str) -> &$'a str {
+ arg
+ }
+ }
}
mod issue5787 {
diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.stderr b/src/tools/clippy/tests/ui/needless_lifetimes.stderr
index 4e3c8f20d..86acc4e00 100644
--- a/src/tools/clippy/tests/ui/needless_lifetimes.stderr
+++ b/src/tools/clippy/tests/ui/needless_lifetimes.stderr
@@ -540,19 +540,16 @@ LL + fn multiple_inputs_output_not_elided<'b>(x: &u8, y: &'b u8, z: &'b u8)
|
error: the following explicit lifetimes could be elided: 'a
- --> $DIR/needless_lifetimes.rs:508:13
+ --> $DIR/needless_lifetimes.rs:511:9
|
-LL | fn one_input<'a>(x: &'a u8) -> &'a u8 {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-...
-LL | local_one_input_macro!();
- | ------------------------ in this macro invocation
+LL | fn one_input<'a>(x: &'a u8) -> &'a u8 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = note: this error originates in the macro `local_one_input_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_mod_in_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
help: elide the lifetimes
|
-LL - fn one_input<'a>(x: &'a u8) -> &'a u8 {
-LL + fn one_input(x: &u8) -> &u8 {
+LL - fn one_input<'a>(x: &'a u8) -> &'a u8 {
+LL + fn one_input(x: &u8) -> &u8 {
|
error: aborting due to 46 previous errors
diff --git a/src/tools/clippy/tests/ui/needless_return.fixed b/src/tools/clippy/tests/ui/needless_return.fixed
index 0f525dd29..57c08996c 100644
--- a/src/tools/clippy/tests/ui/needless_return.fixed
+++ b/src/tools/clippy/tests/ui/needless_return.fixed
@@ -307,4 +307,13 @@ mod issue10049 {
}
}
+fn test_match_as_stmt() {
+ let x = 9;
+ match x {
+ 1 => 2,
+ 2 => return,
+ _ => 0,
+ };
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_return.rs b/src/tools/clippy/tests/ui/needless_return.rs
index a1db8375d..7c1feefbe 100644
--- a/src/tools/clippy/tests/ui/needless_return.rs
+++ b/src/tools/clippy/tests/ui/needless_return.rs
@@ -317,4 +317,13 @@ mod issue10049 {
}
}
+fn test_match_as_stmt() {
+ let x = 9;
+ match x {
+ 1 => 2,
+ 2 => return,
+ _ => 0,
+ };
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_update.rs b/src/tools/clippy/tests/ui/needless_update.rs
index b93ff048a..4e8517cad 100644
--- a/src/tools/clippy/tests/ui/needless_update.rs
+++ b/src/tools/clippy/tests/ui/needless_update.rs
@@ -1,5 +1,5 @@
#![warn(clippy::needless_update)]
-#![allow(clippy::no_effect)]
+#![allow(clippy::no_effect, clippy::unnecessary_struct_initialization)]
struct S {
pub a: i32,
diff --git a/src/tools/clippy/tests/ui/new_ret_no_self.rs b/src/tools/clippy/tests/ui/new_ret_no_self.rs
index beec42f08..a2a30c8b9 100644
--- a/src/tools/clippy/tests/ui/new_ret_no_self.rs
+++ b/src/tools/clippy/tests/ui/new_ret_no_self.rs
@@ -406,7 +406,7 @@ mod issue10041 {
struct Bomb;
impl Bomb {
- // Hidden <Rhs = Self> default generic paramter.
+ // Hidden <Rhs = Self> default generic parameter.
pub fn new() -> impl PartialOrd {
0i32
}
diff --git a/src/tools/clippy/tests/ui/no_effect.rs b/src/tools/clippy/tests/ui/no_effect.rs
index f08eb092e..1e42e1fba 100644
--- a/src/tools/clippy/tests/ui/no_effect.rs
+++ b/src/tools/clippy/tests/ui/no_effect.rs
@@ -1,7 +1,12 @@
-#![feature(box_syntax, fn_traits, unboxed_closures)]
+#![feature(fn_traits, unboxed_closures)]
#![warn(clippy::no_effect_underscore_binding)]
#![allow(dead_code, path_statements)]
-#![allow(clippy::deref_addrof, clippy::redundant_field_names, clippy::uninlined_format_args)]
+#![allow(
+ clippy::deref_addrof,
+ clippy::redundant_field_names,
+ clippy::uninlined_format_args,
+ clippy::unnecessary_struct_initialization
+)]
struct Unit;
struct Tuple(i32);
@@ -102,7 +107,6 @@ fn main() {
*&42;
&6;
(5, 6, 7);
- box 42;
..;
5..;
..5;
diff --git a/src/tools/clippy/tests/ui/no_effect.stderr b/src/tools/clippy/tests/ui/no_effect.stderr
index 6a1e636f9..f10f2bcf2 100644
--- a/src/tools/clippy/tests/ui/no_effect.stderr
+++ b/src/tools/clippy/tests/ui/no_effect.stderr
@@ -1,5 +1,5 @@
error: statement with no effect
- --> $DIR/no_effect.rs:92:5
+ --> $DIR/no_effect.rs:97:5
|
LL | 0;
| ^^
@@ -7,157 +7,151 @@ LL | 0;
= note: `-D clippy::no-effect` implied by `-D warnings`
error: statement with no effect
- --> $DIR/no_effect.rs:93:5
+ --> $DIR/no_effect.rs:98:5
|
LL | s2;
| ^^^
error: statement with no effect
- --> $DIR/no_effect.rs:94:5
+ --> $DIR/no_effect.rs:99:5
|
LL | Unit;
| ^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:95:5
+ --> $DIR/no_effect.rs:100:5
|
LL | Tuple(0);
| ^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:96:5
+ --> $DIR/no_effect.rs:101:5
|
LL | Struct { field: 0 };
| ^^^^^^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:97:5
+ --> $DIR/no_effect.rs:102:5
|
LL | Struct { ..s };
| ^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:98:5
+ --> $DIR/no_effect.rs:103:5
|
LL | Union { a: 0 };
| ^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:99:5
+ --> $DIR/no_effect.rs:104:5
|
LL | Enum::Tuple(0);
| ^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:100:5
+ --> $DIR/no_effect.rs:105:5
|
LL | Enum::Struct { field: 0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:101:5
+ --> $DIR/no_effect.rs:106:5
|
LL | 5 + 6;
| ^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:102:5
+ --> $DIR/no_effect.rs:107:5
|
LL | *&42;
| ^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:103:5
+ --> $DIR/no_effect.rs:108:5
|
LL | &6;
| ^^^
error: statement with no effect
- --> $DIR/no_effect.rs:104:5
+ --> $DIR/no_effect.rs:109:5
|
LL | (5, 6, 7);
| ^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:105:5
- |
-LL | box 42;
- | ^^^^^^^
-
-error: statement with no effect
- --> $DIR/no_effect.rs:106:5
+ --> $DIR/no_effect.rs:110:5
|
LL | ..;
| ^^^
error: statement with no effect
- --> $DIR/no_effect.rs:107:5
+ --> $DIR/no_effect.rs:111:5
|
LL | 5..;
| ^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:108:5
+ --> $DIR/no_effect.rs:112:5
|
LL | ..5;
| ^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:109:5
+ --> $DIR/no_effect.rs:113:5
|
LL | 5..6;
| ^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:110:5
+ --> $DIR/no_effect.rs:114:5
|
LL | 5..=6;
| ^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:111:5
+ --> $DIR/no_effect.rs:115:5
|
LL | [42, 55];
| ^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:112:5
+ --> $DIR/no_effect.rs:116:5
|
LL | [42, 55][1];
| ^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:113:5
+ --> $DIR/no_effect.rs:117:5
|
LL | (42, 55).1;
| ^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:114:5
+ --> $DIR/no_effect.rs:118:5
|
LL | [42; 55];
| ^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:115:5
+ --> $DIR/no_effect.rs:119:5
|
LL | [42; 55][13];
| ^^^^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:117:5
+ --> $DIR/no_effect.rs:121:5
|
LL | || x += 5;
| ^^^^^^^^^^
error: statement with no effect
- --> $DIR/no_effect.rs:119:5
+ --> $DIR/no_effect.rs:123:5
|
LL | FooString { s: s };
| ^^^^^^^^^^^^^^^^^^^
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:120:5
+ --> $DIR/no_effect.rs:124:5
|
LL | let _unused = 1;
| ^^^^^^^^^^^^^^^^
@@ -165,22 +159,22 @@ LL | let _unused = 1;
= note: `-D clippy::no-effect-underscore-binding` implied by `-D warnings`
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:121:5
+ --> $DIR/no_effect.rs:125:5
|
LL | let _penguin = || println!("Some helpful closure");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:122:5
+ --> $DIR/no_effect.rs:126:5
|
LL | let _duck = Struct { field: 0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: binding to `_` prefixed variable with no side-effect
- --> $DIR/no_effect.rs:123:5
+ --> $DIR/no_effect.rs:127:5
|
LL | let _cat = [2, 4, 6, 8][2];
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to 30 previous errors
+error: aborting due to 29 previous errors
diff --git a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.fixed b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.fixed
deleted file mode 100644
index d18dec22a..000000000
--- a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.fixed
+++ /dev/null
@@ -1,48 +0,0 @@
-// run-rustfix
-
-#![allow(unused)]
-#![warn(clippy::no_mangle_with_rust_abi)]
-
-#[no_mangle]
-extern "C" fn rust_abi_fn_one(arg_one: u32, arg_two: usize) {}
-
-#[no_mangle]
-pub extern "C" fn rust_abi_fn_two(arg_one: u32, arg_two: usize) {}
-
-/// # Safety
-/// This function shouldn't be called unless the horsemen are ready
-#[no_mangle]
-pub unsafe extern "C" fn rust_abi_fn_three(arg_one: u32, arg_two: usize) {}
-
-/// # Safety
-/// This function shouldn't be called unless the horsemen are ready
-#[no_mangle]
-unsafe extern "C" fn rust_abi_fn_four(arg_one: u32, arg_two: usize) {}
-
-#[no_mangle]
-extern "C" fn rust_abi_multiline_function_really_long_name_to_overflow_args_to_multiple_lines(
- arg_one: u32,
- arg_two: usize,
-) -> u32 {
- 0
-}
-
-// Must not run on functions that explicitly opt in to Rust ABI with `extern "Rust"`
-#[no_mangle]
-#[rustfmt::skip]
-extern "Rust" fn rust_abi_fn_explicit_opt_in(arg_one: u32, arg_two: usize) {}
-
-fn rust_abi_fn_again(arg_one: u32, arg_two: usize) {}
-
-#[no_mangle]
-extern "C" fn c_abi_fn(arg_one: u32, arg_two: usize) {}
-
-extern "C" fn c_abi_fn_again(arg_one: u32, arg_two: usize) {}
-
-extern "C" {
- fn c_abi_in_block(arg_one: u32, arg_two: usize);
-}
-
-fn main() {
- // test code goes here
-}
diff --git a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs
index 481e1b6d9..b32e72111 100644
--- a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs
+++ b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs
@@ -1,5 +1,3 @@
-// run-rustfix
-
#![allow(unused)]
#![warn(clippy::no_mangle_with_rust_abi)]
diff --git a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.stderr b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.stderr
index 71517d318..da5d31d8f 100644
--- a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.stderr
+++ b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.stderr
@@ -1,31 +1,66 @@
-error: attribute #[no_mangle] set on a Rust ABI function
- --> $DIR/no_mangle_with_rust_abi.rs:7:1
+error: `#[no_mangle]` set on a function with the default (`Rust`) ABI
+ --> $DIR/no_mangle_with_rust_abi.rs:5:1
|
LL | fn rust_abi_fn_one(arg_one: u32, arg_two: usize) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `extern "C" fn rust_abi_fn_one(arg_one: u32, arg_two: usize)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `-D clippy::no-mangle-with-rust-abi` implied by `-D warnings`
+help: set an ABI
+ |
+LL | extern "C" fn rust_abi_fn_one(arg_one: u32, arg_two: usize) {}
+ | ++++++++++
+help: or explicitly set the default
+ |
+LL | extern "Rust" fn rust_abi_fn_one(arg_one: u32, arg_two: usize) {}
+ | +++++++++++++
-error: attribute #[no_mangle] set on a Rust ABI function
- --> $DIR/no_mangle_with_rust_abi.rs:10:1
+error: `#[no_mangle]` set on a function with the default (`Rust`) ABI
+ --> $DIR/no_mangle_with_rust_abi.rs:8:1
|
LL | pub fn rust_abi_fn_two(arg_one: u32, arg_two: usize) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `pub extern "C" fn rust_abi_fn_two(arg_one: u32, arg_two: usize)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: set an ABI
+ |
+LL | pub extern "C" fn rust_abi_fn_two(arg_one: u32, arg_two: usize) {}
+ | ++++++++++
+help: or explicitly set the default
+ |
+LL | pub extern "Rust" fn rust_abi_fn_two(arg_one: u32, arg_two: usize) {}
+ | +++++++++++++
-error: attribute #[no_mangle] set on a Rust ABI function
- --> $DIR/no_mangle_with_rust_abi.rs:15:1
+error: `#[no_mangle]` set on a function with the default (`Rust`) ABI
+ --> $DIR/no_mangle_with_rust_abi.rs:13:1
|
LL | pub unsafe fn rust_abi_fn_three(arg_one: u32, arg_two: usize) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `pub unsafe extern "C" fn rust_abi_fn_three(arg_one: u32, arg_two: usize)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: set an ABI
+ |
+LL | pub unsafe extern "C" fn rust_abi_fn_three(arg_one: u32, arg_two: usize) {}
+ | ++++++++++
+help: or explicitly set the default
+ |
+LL | pub unsafe extern "Rust" fn rust_abi_fn_three(arg_one: u32, arg_two: usize) {}
+ | +++++++++++++
-error: attribute #[no_mangle] set on a Rust ABI function
- --> $DIR/no_mangle_with_rust_abi.rs:20:1
+error: `#[no_mangle]` set on a function with the default (`Rust`) ABI
+ --> $DIR/no_mangle_with_rust_abi.rs:18:1
|
LL | unsafe fn rust_abi_fn_four(arg_one: u32, arg_two: usize) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unsafe extern "C" fn rust_abi_fn_four(arg_one: u32, arg_two: usize)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: set an ABI
+ |
+LL | unsafe extern "C" fn rust_abi_fn_four(arg_one: u32, arg_two: usize) {}
+ | ++++++++++
+help: or explicitly set the default
+ |
+LL | unsafe extern "Rust" fn rust_abi_fn_four(arg_one: u32, arg_two: usize) {}
+ | +++++++++++++
-error: attribute #[no_mangle] set on a Rust ABI function
- --> $DIR/no_mangle_with_rust_abi.rs:23:1
+error: `#[no_mangle]` set on a function with the default (`Rust`) ABI
+ --> $DIR/no_mangle_with_rust_abi.rs:21:1
|
LL | / fn rust_abi_multiline_function_really_long_name_to_overflow_args_to_multiple_lines(
LL | | arg_one: u32,
@@ -33,13 +68,14 @@ LL | | arg_two: usize,
LL | | ) -> u32 {
| |________^
|
-help: try
+help: set an ABI
|
-LL + extern "C" fn rust_abi_multiline_function_really_long_name_to_overflow_args_to_multiple_lines(
-LL + arg_one: u32,
-LL + arg_two: usize,
-LL ~ ) -> u32 {
+LL | extern "C" fn rust_abi_multiline_function_really_long_name_to_overflow_args_to_multiple_lines(
+ | ++++++++++
+help: or explicitly set the default
|
+LL | extern "Rust" fn rust_abi_multiline_function_really_long_name_to_overflow_args_to_multiple_lines(
+ | +++++++++++++
error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/nonminimal_bool.rs b/src/tools/clippy/tests/ui/nonminimal_bool.rs
index e9b4367ca..80cc7c60f 100644
--- a/src/tools/clippy/tests/ui/nonminimal_bool.rs
+++ b/src/tools/clippy/tests/ui/nonminimal_bool.rs
@@ -63,3 +63,50 @@ fn issue9428() {
println!("foo");
}
}
+
+fn issue_10523() {
+ macro_rules! a {
+ ($v:expr) => {
+ $v.is_some()
+ };
+ }
+ let x: Option<u32> = None;
+ if !a!(x) {}
+}
+
+fn issue_10523_1() {
+ macro_rules! a {
+ ($v:expr) => {
+ !$v.is_some()
+ };
+ }
+ let x: Option<u32> = None;
+ if a!(x) {}
+}
+
+fn issue_10523_2() {
+ macro_rules! a {
+ () => {
+ !None::<u32>.is_some()
+ };
+ }
+ if a!() {}
+}
+
+fn issue_10435() {
+ let x = vec![0];
+ let y = vec![1];
+ let z = vec![2];
+
+ // vvv Should not lint
+ #[allow(clippy::nonminimal_bool)]
+ if !x.is_empty() && !(y.is_empty() || z.is_empty()) {
+ println!("{}", line!());
+ }
+
+ // vvv Should not lint (#10435 talks about a bug where it lints)
+ #[allow(clippy::nonminimal_bool)]
+ if !(x == [0]) {
+ println!("{}", line!());
+ }
+}
diff --git a/src/tools/clippy/tests/ui/option_env_unwrap.rs b/src/tools/clippy/tests/ui/option_env_unwrap.rs
index 0141fb785..9a56cf40d 100644
--- a/src/tools/clippy/tests/ui/option_env_unwrap.rs
+++ b/src/tools/clippy/tests/ui/option_env_unwrap.rs
@@ -1,24 +1,16 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::option_env_unwrap)]
#![allow(clippy::map_flatten)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! option_env_unwrap {
- ($env: expr) => {
- option_env!($env).unwrap()
- };
- ($env: expr, $message: expr) => {
- option_env!($env).expect($message)
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
let _ = option_env!("PATH").unwrap();
let _ = option_env!("PATH").expect("environment variable PATH isn't set");
- let _ = option_env_unwrap!("PATH");
- let _ = option_env_unwrap!("PATH", "environment variable PATH isn't set");
- let _ = option_env_unwrap_external!("PATH");
- let _ = option_env_unwrap_external!("PATH", "environment variable PATH isn't set");
+ let _ = inline!(option_env!($"PATH").unwrap());
+ let _ = inline!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
+ let _ = external!(option_env!($"PATH").unwrap());
+ let _ = external!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
}
diff --git a/src/tools/clippy/tests/ui/option_env_unwrap.stderr b/src/tools/clippy/tests/ui/option_env_unwrap.stderr
index bc188a07e..7bba62686 100644
--- a/src/tools/clippy/tests/ui/option_env_unwrap.stderr
+++ b/src/tools/clippy/tests/ui/option_env_unwrap.stderr
@@ -1,5 +1,5 @@
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:18:13
+ --> $DIR/option_env_unwrap.rs:10:13
|
LL | let _ = option_env!("PATH").unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -8,7 +8,7 @@ LL | let _ = option_env!("PATH").unwrap();
= note: `-D clippy::option-env-unwrap` implied by `-D warnings`
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:19:13
+ --> $DIR/option_env_unwrap.rs:11:13
|
LL | let _ = option_env!("PATH").expect("environment variable PATH isn't set");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -16,46 +16,40 @@ LL | let _ = option_env!("PATH").expect("environment variable PATH isn't set
= help: consider using the `env!` macro instead
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:10:9
+ --> $DIR/option_env_unwrap.rs:12:21
|
-LL | option_env!($env).unwrap()
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^
-...
-LL | let _ = option_env_unwrap!("PATH");
- | -------------------------- in this macro invocation
+LL | let _ = inline!(option_env!($"PATH").unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider using the `env!` macro instead
- = note: this error originates in the macro `option_env_unwrap` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:13:9
+ --> $DIR/option_env_unwrap.rs:13:21
|
-LL | option_env!($env).expect($message)
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-...
-LL | let _ = option_env_unwrap!("PATH", "environment variable PATH isn't set");
- | ----------------------------------------------------------------- in this macro invocation
+LL | let _ = inline!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider using the `env!` macro instead
- = note: this error originates in the macro `option_env_unwrap` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:22:13
+ --> $DIR/option_env_unwrap.rs:14:13
|
-LL | let _ = option_env_unwrap_external!("PATH");
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let _ = external!(option_env!($"PATH").unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider using the `env!` macro instead
- = note: this error originates in the macro `option_env_unwrap_external` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `external` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:23:13
+ --> $DIR/option_env_unwrap.rs:15:13
|
-LL | let _ = option_env_unwrap_external!("PATH", "environment variable PATH isn't set");
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let _ = external!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider using the `env!` macro instead
- = note: this error originates in the macro `option_env_unwrap_external` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `external` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/overflow_check_conditional.rs b/src/tools/clippy/tests/ui/overflow_check_conditional.rs
index 5db75f529..e1e301140 100644
--- a/src/tools/clippy/tests/ui/overflow_check_conditional.rs
+++ b/src/tools/clippy/tests/ui/overflow_check_conditional.rs
@@ -1,9 +1,6 @@
#![warn(clippy::overflow_check_conditional)]
-fn main() {
- let a: u32 = 1;
- let b: u32 = 2;
- let c: u32 = 3;
+fn test(a: u32, b: u32, c: u32) {
if a + b < a {}
if a > a + b {}
if a + b < b {}
@@ -23,3 +20,7 @@ fn main() {
if i > i + j {}
if i - j < i {}
}
+
+fn main() {
+ test(1, 2, 3)
+}
diff --git a/src/tools/clippy/tests/ui/overflow_check_conditional.stderr b/src/tools/clippy/tests/ui/overflow_check_conditional.stderr
index 1b8b146b6..92d1d8ef9 100644
--- a/src/tools/clippy/tests/ui/overflow_check_conditional.stderr
+++ b/src/tools/clippy/tests/ui/overflow_check_conditional.stderr
@@ -1,5 +1,5 @@
error: you are trying to use classic C overflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:7:8
+ --> $DIR/overflow_check_conditional.rs:4:8
|
LL | if a + b < a {}
| ^^^^^^^^^
@@ -7,43 +7,43 @@ LL | if a + b < a {}
= note: `-D clippy::overflow-check-conditional` implied by `-D warnings`
error: you are trying to use classic C overflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:8:8
+ --> $DIR/overflow_check_conditional.rs:5:8
|
LL | if a > a + b {}
| ^^^^^^^^^
error: you are trying to use classic C overflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:9:8
+ --> $DIR/overflow_check_conditional.rs:6:8
|
LL | if a + b < b {}
| ^^^^^^^^^
error: you are trying to use classic C overflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:10:8
+ --> $DIR/overflow_check_conditional.rs:7:8
|
LL | if b > a + b {}
| ^^^^^^^^^
error: you are trying to use classic C underflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:11:8
+ --> $DIR/overflow_check_conditional.rs:8:8
|
LL | if a - b > b {}
| ^^^^^^^^^
error: you are trying to use classic C underflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:12:8
+ --> $DIR/overflow_check_conditional.rs:9:8
|
LL | if b < a - b {}
| ^^^^^^^^^
error: you are trying to use classic C underflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:13:8
+ --> $DIR/overflow_check_conditional.rs:10:8
|
LL | if a - b > a {}
| ^^^^^^^^^
error: you are trying to use classic C underflow conditions that will fail in Rust
- --> $DIR/overflow_check_conditional.rs:14:8
+ --> $DIR/overflow_check_conditional.rs:11:8
|
LL | if a < a - b {}
| ^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/print_literal.rs b/src/tools/clippy/tests/ui/print_literal.rs
index 86f908f66..538513e91 100644
--- a/src/tools/clippy/tests/ui/print_literal.rs
+++ b/src/tools/clippy/tests/ui/print_literal.rs
@@ -38,4 +38,8 @@ fn main() {
// named args shouldn't change anything either
println!("{foo} {bar}", foo = "hello", bar = "world");
println!("{bar} {foo}", foo = "hello", bar = "world");
+
+ // The string literal from `file!()` has a callsite span that isn't marked as coming from an
+ // expansion
+ println!("file: {}", file!());
}
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
index df36a9b84..ee7b998a0 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
@@ -1,16 +1,12 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::ptr_as_ptr)]
-extern crate macro_rules;
-
-macro_rules! cast_it {
- ($ptr: ident) => {
- $ptr.cast::<i32>()
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
@@ -38,10 +34,10 @@ fn main() {
let _: *mut i32 = mut_ptr.cast();
// Make sure the lint is triggered inside a macro
- let _ = cast_it!(ptr);
+ let _ = inline!($ptr.cast::<i32>());
// Do not lint inside macros from external crates
- let _ = macro_rules::ptr_as_ptr_cast!(ptr);
+ let _ = external!($ptr as *const i32);
}
#[clippy::msrv = "1.37"]
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.rs b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
index 302c66462..c88329ce4 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.rs
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
@@ -1,16 +1,12 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::ptr_as_ptr)]
-extern crate macro_rules;
-
-macro_rules! cast_it {
- ($ptr: ident) => {
- $ptr as *const i32
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
let ptr: *const u32 = &42_u32;
let mut_ptr: *mut u32 = &mut 42_u32;
@@ -38,10 +34,10 @@ fn main() {
let _: *mut i32 = mut_ptr as _;
// Make sure the lint is triggered inside a macro
- let _ = cast_it!(ptr);
+ let _ = inline!($ptr as *const i32);
// Do not lint inside macros from external crates
- let _ = macro_rules::ptr_as_ptr_cast!(ptr);
+ let _ = external!($ptr as *const i32);
}
#[clippy::msrv = "1.37"]
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
index a68e1cab6..78d733994 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
@@ -1,5 +1,5 @@
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:18:13
+ --> $DIR/ptr_as_ptr.rs:14:13
|
LL | let _ = ptr as *const i32;
| ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
@@ -7,48 +7,45 @@ LL | let _ = ptr as *const i32;
= note: `-D clippy::ptr-as-ptr` implied by `-D warnings`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:19:13
+ --> $DIR/ptr_as_ptr.rs:15:13
|
LL | let _ = mut_ptr as *mut i32;
| ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:24:17
+ --> $DIR/ptr_as_ptr.rs:20:17
|
LL | let _ = *ptr_ptr as *const i32;
| ^^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `(*ptr_ptr).cast::<i32>()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:37:25
+ --> $DIR/ptr_as_ptr.rs:33:25
|
LL | let _: *const i32 = ptr as *const _;
| ^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:38:23
+ --> $DIR/ptr_as_ptr.rs:34:23
|
LL | let _: *mut i32 = mut_ptr as _;
| ^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:10:9
+ --> $DIR/ptr_as_ptr.rs:37:21
|
-LL | $ptr as *const i32
- | ^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `$ptr.cast::<i32>()`
-...
-LL | let _ = cast_it!(ptr);
- | ------------- in this macro invocation
+LL | let _ = inline!($ptr as *const i32);
+ | ^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `$ptr.cast::<i32>()`
|
- = note: this error originates in the macro `cast_it` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:62:13
+ --> $DIR/ptr_as_ptr.rs:58:13
|
LL | let _ = ptr as *const i32;
| ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:63:13
+ --> $DIR/ptr_as_ptr.rs:59:13
|
LL | let _ = mut_ptr as *mut i32;
| ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
diff --git a/src/tools/clippy/tests/ui/redundant_async_block.fixed b/src/tools/clippy/tests/ui/redundant_async_block.fixed
new file mode 100644
index 000000000..ad96993c4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_async_block.fixed
@@ -0,0 +1,193 @@
+// run-rustfix
+
+#![allow(unused, clippy::manual_async_fn)]
+#![warn(clippy::redundant_async_block)]
+
+use std::future::Future;
+
+async fn func1(n: usize) -> usize {
+ n + 1
+}
+
+async fn func2() -> String {
+ let s = String::from("some string");
+ let f = async { (*s).to_owned() };
+ let x = f;
+ x.await
+}
+
+fn main() {
+ let fut1 = async { 17 };
+ // Lint
+ let fut2 = fut1;
+
+ let fut1 = async { 25 };
+ // Lint
+ let fut2 = fut1;
+
+ // Lint
+ let fut = async { 42 };
+
+ // Do not lint: not a single expression
+ let fut = async {
+ func1(10).await;
+ func2().await
+ };
+
+ // Do not lint: expression contains `.await`
+ let fut = async { func1(func2().await.len()).await };
+}
+
+#[allow(clippy::let_and_return)]
+fn capture_local() -> impl Future<Output = i32> {
+ let fut = async { 17 };
+ // Lint
+ fut
+}
+
+fn capture_local_closure(s: &str) -> impl Future<Output = &str> {
+ let f = move || std::future::ready(s);
+ // Do not lint: `f` would not live long enough
+ async move { f().await }
+}
+
+#[allow(clippy::let_and_return)]
+fn capture_arg(s: &str) -> impl Future<Output = &str> {
+ let fut = async move { s };
+ // Lint
+ fut
+}
+
+fn capture_future_arg<T>(f: impl Future<Output = T>) -> impl Future<Output = T> {
+ // Lint
+ f
+}
+
+fn capture_func_result<FN, F, T>(f: FN) -> impl Future<Output = T>
+where
+ F: Future<Output = T>,
+ FN: FnOnce() -> F,
+{
+ // Do not lint, as f() would be evaluated prematurely
+ async { f().await }
+}
+
+fn double_future(f: impl Future<Output = impl Future<Output = u32>>) -> impl Future<Output = u32> {
+    // Do not lint: we would get an `.await` outside an `async` block
+ async { f.await.await }
+}
+
+fn await_in_async<F, R>(f: F) -> impl Future<Output = u32>
+where
+ F: FnOnce() -> R,
+ R: Future<Output = u32>,
+{
+ // Lint
+ async { f().await + 1 }
+}
+
+#[derive(Debug, Clone)]
+struct F {}
+
+impl F {
+ async fn run(&self) {}
+}
+
+pub async fn run() {
+ let f = F {};
+ let c = f.clone();
+ // Do not lint: `c` would not live long enough
+ spawn(async move { c.run().await });
+ let _f = f;
+}
+
+fn spawn<F: Future + 'static>(_: F) {}
+
+async fn work(_: &str) {}
+
+fn capture() {
+ let val = "Hello World".to_owned();
+ // Do not lint: `val` would not live long enough
+ spawn(async { work(&{ val }).await });
+}
+
+fn await_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e:expr) => {
+ $e.await
+ };
+ }
+ // Do not lint: the macro may change in the future
+ // or return different things depending on its argument
+ async { mac!(async { 42 }) }
+}
+
+fn async_expr_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ () => {
+ async { 42 }
+ };
+ }
+ // Do not lint: the macro may change in the future
+ async { mac!().await }
+}
+
+fn async_expr_from_macro_deep() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ () => {
+ async { 42 }
+ };
+ }
+ // Do not lint: the macro may change in the future
+ async { ({ mac!() }).await }
+}
+
+fn all_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ () => {
+ // Lint
+ async { 42 }
+ };
+ }
+ mac!()
+}
+
+fn parts_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e: expr) => {
+ // Do not lint: `$e` might not always be side-effect free
+ async { $e.await }
+ };
+ }
+ mac!(async { 42 })
+}
+
+fn safe_parts_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e: expr) => {
+ // Lint
+ async { $e }
+ };
+ }
+ mac!(42)
+}
+
+fn parts_from_macro_deep() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e: expr) => {
+ // Do not lint: `$e` might not always be side-effect free
+ async { ($e,).0.await }
+ };
+ }
+ let f = std::future::ready(42);
+ mac!(f)
+}
+
+fn await_from_macro_deep() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e:expr) => {{ $e }.await};
+ }
+ // Do not lint: the macro may change in the future
+ // or return different things depending on its argument
+ async { mac!(async { 42 }) }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_async_block.rs b/src/tools/clippy/tests/ui/redundant_async_block.rs
new file mode 100644
index 000000000..7ae235583
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_async_block.rs
@@ -0,0 +1,193 @@
+// run-rustfix
+
+#![allow(unused, clippy::manual_async_fn)]
+#![warn(clippy::redundant_async_block)]
+
+use std::future::Future;
+
+async fn func1(n: usize) -> usize {
+ n + 1
+}
+
+async fn func2() -> String {
+ let s = String::from("some string");
+ let f = async { (*s).to_owned() };
+ let x = async { f.await };
+ x.await
+}
+
+fn main() {
+ let fut1 = async { 17 };
+ // Lint
+ let fut2 = async { fut1.await };
+
+ let fut1 = async { 25 };
+ // Lint
+ let fut2 = async move { fut1.await };
+
+ // Lint
+ let fut = async { async { 42 }.await };
+
+ // Do not lint: not a single expression
+ let fut = async {
+ func1(10).await;
+ func2().await
+ };
+
+ // Do not lint: expression contains `.await`
+ let fut = async { func1(func2().await.len()).await };
+}
+
+#[allow(clippy::let_and_return)]
+fn capture_local() -> impl Future<Output = i32> {
+ let fut = async { 17 };
+ // Lint
+ async move { fut.await }
+}
+
+fn capture_local_closure(s: &str) -> impl Future<Output = &str> {
+ let f = move || std::future::ready(s);
+ // Do not lint: `f` would not live long enough
+ async move { f().await }
+}
+
+#[allow(clippy::let_and_return)]
+fn capture_arg(s: &str) -> impl Future<Output = &str> {
+ let fut = async move { s };
+ // Lint
+ async move { fut.await }
+}
+
+fn capture_future_arg<T>(f: impl Future<Output = T>) -> impl Future<Output = T> {
+ // Lint
+ async { f.await }
+}
+
+fn capture_func_result<FN, F, T>(f: FN) -> impl Future<Output = T>
+where
+ F: Future<Output = T>,
+ FN: FnOnce() -> F,
+{
+ // Do not lint, as f() would be evaluated prematurely
+ async { f().await }
+}
+
+fn double_future(f: impl Future<Output = impl Future<Output = u32>>) -> impl Future<Output = u32> {
+    // Do not lint: we would get an `.await` outside an `async` block
+ async { f.await.await }
+}
+
+fn await_in_async<F, R>(f: F) -> impl Future<Output = u32>
+where
+ F: FnOnce() -> R,
+ R: Future<Output = u32>,
+{
+ // Lint
+ async { async { f().await + 1 }.await }
+}
+
+#[derive(Debug, Clone)]
+struct F {}
+
+impl F {
+ async fn run(&self) {}
+}
+
+pub async fn run() {
+ let f = F {};
+ let c = f.clone();
+ // Do not lint: `c` would not live long enough
+ spawn(async move { c.run().await });
+ let _f = f;
+}
+
+fn spawn<F: Future + 'static>(_: F) {}
+
+async fn work(_: &str) {}
+
+fn capture() {
+ let val = "Hello World".to_owned();
+ // Do not lint: `val` would not live long enough
+ spawn(async { work(&{ val }).await });
+}
+
+fn await_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e:expr) => {
+ $e.await
+ };
+ }
+ // Do not lint: the macro may change in the future
+ // or return different things depending on its argument
+ async { mac!(async { 42 }) }
+}
+
+fn async_expr_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ () => {
+ async { 42 }
+ };
+ }
+ // Do not lint: the macro may change in the future
+ async { mac!().await }
+}
+
+fn async_expr_from_macro_deep() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ () => {
+ async { 42 }
+ };
+ }
+ // Do not lint: the macro may change in the future
+ async { ({ mac!() }).await }
+}
+
+fn all_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ () => {
+ // Lint
+ async { async { 42 }.await }
+ };
+ }
+ mac!()
+}
+
+fn parts_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e: expr) => {
+ // Do not lint: `$e` might not always be side-effect free
+ async { $e.await }
+ };
+ }
+ mac!(async { 42 })
+}
+
+fn safe_parts_from_macro() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e: expr) => {
+ // Lint
+ async { async { $e }.await }
+ };
+ }
+ mac!(42)
+}
+
+fn parts_from_macro_deep() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e: expr) => {
+ // Do not lint: `$e` might not always be side-effect free
+ async { ($e,).0.await }
+ };
+ }
+ let f = std::future::ready(42);
+ mac!(f)
+}
+
+fn await_from_macro_deep() -> impl Future<Output = u32> {
+ macro_rules! mac {
+ ($e:expr) => {{ $e }.await};
+ }
+ // Do not lint: the macro may change in the future
+ // or return different things depending on its argument
+ async { mac!(async { 42 }) }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_async_block.stderr b/src/tools/clippy/tests/ui/redundant_async_block.stderr
new file mode 100644
index 000000000..f3dcb09b4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_async_block.stderr
@@ -0,0 +1,74 @@
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:15:13
+ |
+LL | let x = async { f.await };
+ | ^^^^^^^^^^^^^^^^^ help: you can reduce it to: `f`
+ |
+ = note: `-D clippy::redundant-async-block` implied by `-D warnings`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:22:16
+ |
+LL | let fut2 = async { fut1.await };
+ | ^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `fut1`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:26:16
+ |
+LL | let fut2 = async move { fut1.await };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `fut1`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:29:15
+ |
+LL | let fut = async { async { 42 }.await };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `async { 42 }`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:45:5
+ |
+LL | async move { fut.await }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `fut`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:58:5
+ |
+LL | async move { fut.await }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `fut`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:63:5
+ |
+LL | async { f.await }
+ | ^^^^^^^^^^^^^^^^^ help: you can reduce it to: `f`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:86:5
+ |
+LL | async { async { f().await + 1 }.await }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `async { f().await + 1 }`
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:149:13
+ |
+LL | async { async { 42 }.await }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `async { 42 }`
+...
+LL | mac!()
+ | ------ in this macro invocation
+ |
+ = note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this async expression only awaits a single future
+ --> $DIR/redundant_async_block.rs:169:13
+ |
+LL | async { async { $e }.await }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `async { $e }`
+...
+LL | mac!(42)
+ | -------- in this macro invocation
+ |
+ = note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
index c0e49ff4c..b987fd2ce 100644
--- a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
@@ -2,6 +2,7 @@
#![feature(async_closure)]
#![warn(clippy::redundant_closure_call)]
+#![allow(clippy::redundant_async_block)]
#![allow(unused)]
async fn something() -> u32 {
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
index 9e6e54348..633a2979d 100644
--- a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
@@ -2,6 +2,7 @@
#![feature(async_closure)]
#![warn(clippy::redundant_closure_call)]
+#![allow(clippy::redundant_async_block)]
#![allow(unused)]
async fn something() -> u32 {
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
index d71bcba2a..8a1f07716 100644
--- a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
@@ -1,5 +1,5 @@
error: try not to call a closure in the expression where it is declared
- --> $DIR/redundant_closure_call_fixable.rs:16:13
+ --> $DIR/redundant_closure_call_fixable.rs:17:13
|
LL | let a = (|| 42)();
| ^^^^^^^^^ help: try doing something like: `42`
@@ -7,7 +7,7 @@ LL | let a = (|| 42)();
= note: `-D clippy::redundant-closure-call` implied by `-D warnings`
error: try not to call a closure in the expression where it is declared
- --> $DIR/redundant_closure_call_fixable.rs:17:13
+ --> $DIR/redundant_closure_call_fixable.rs:18:13
|
LL | let b = (async || {
| _____________^
@@ -27,7 +27,7 @@ LL ~ };
|
error: try not to call a closure in the expression where it is declared
- --> $DIR/redundant_closure_call_fixable.rs:22:13
+ --> $DIR/redundant_closure_call_fixable.rs:23:13
|
LL | let c = (|| {
| _____________^
@@ -47,13 +47,13 @@ LL ~ };
|
error: try not to call a closure in the expression where it is declared
- --> $DIR/redundant_closure_call_fixable.rs:27:13
+ --> $DIR/redundant_closure_call_fixable.rs:28:13
|
LL | let d = (async || something().await)();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try doing something like: `async { something().await }`
error: try not to call a closure in the expression where it is declared
- --> $DIR/redundant_closure_call_fixable.rs:36:13
+ --> $DIR/redundant_closure_call_fixable.rs:37:13
|
LL | (|| m!())()
| ^^^^^^^^^^^ help: try doing something like: `m!()`
@@ -64,7 +64,7 @@ LL | m2!();
= note: this error originates in the macro `m2` (in Nightly builds, run with -Z macro-backtrace for more info)
error: try not to call a closure in the expression where it is declared
- --> $DIR/redundant_closure_call_fixable.rs:31:13
+ --> $DIR/redundant_closure_call_fixable.rs:32:13
|
LL | (|| 0)()
| ^^^^^^^^ help: try doing something like: `0`
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed
index b88c5d0be..42348df44 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed
@@ -69,8 +69,8 @@ fn issue5504() {
}
fn try_result_opt() -> Result<i32, i32> {
- while (r#try!(result_opt())).is_some() {}
- if (r#try!(result_opt())).is_some() {}
+ while r#try!(result_opt()).is_some() {}
+ if r#try!(result_opt()).is_some() {}
Ok(42)
}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
index e6afe9eb7..d6a46babb 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
@@ -88,13 +88,13 @@ error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:84:19
|
LL | while let Some(_) = r#try!(result_opt()) {}
- | ----------^^^^^^^----------------------- help: try this: `while (r#try!(result_opt())).is_some()`
+ | ----------^^^^^^^----------------------- help: try this: `while r#try!(result_opt()).is_some()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:85:16
|
LL | if let Some(_) = r#try!(result_opt()) {}
- | -------^^^^^^^----------------------- help: try this: `if (r#try!(result_opt())).is_some()`
+ | -------^^^^^^^----------------------- help: try this: `if r#try!(result_opt()).is_some()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:91:12
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.fixed b/src/tools/clippy/tests/ui/single_component_path_imports.fixed
index 4c40739d6..8c96c4715 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports.fixed
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.fixed
@@ -2,9 +2,11 @@
#![warn(clippy::single_component_path_imports)]
#![allow(unused_imports)]
+use core;
use serde as edres;
pub use serde;
+use std;
macro_rules! m {
() => {
@@ -17,6 +19,10 @@ fn main() {
// False positive #5154, shouldn't trigger lint.
m!();
+
+ // False positive #10549
+ let _ = self::std::io::stdout();
+ let _ = 0 as self::core::ffi::c_uint;
}
mod hello_mod {
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.rs b/src/tools/clippy/tests/ui/single_component_path_imports.rs
index 9280bab3c..8434bf7ea 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports.rs
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.rs
@@ -2,9 +2,11 @@
#![warn(clippy::single_component_path_imports)]
#![allow(unused_imports)]
+use core;
use regex;
use serde as edres;
pub use serde;
+use std;
macro_rules! m {
() => {
@@ -17,6 +19,10 @@ fn main() {
// False positive #5154, shouldn't trigger lint.
m!();
+
+ // False positive #10549
+ let _ = self::std::io::stdout();
+ let _ = 0 as self::core::ffi::c_uint;
}
mod hello_mod {
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.stderr b/src/tools/clippy/tests/ui/single_component_path_imports.stderr
index 71dcc25d6..d69a86470 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports.stderr
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.stderr
@@ -1,5 +1,5 @@
error: this import is redundant
- --> $DIR/single_component_path_imports.rs:5:1
+ --> $DIR/single_component_path_imports.rs:6:1
|
LL | use regex;
| ^^^^^^^^^^ help: remove it entirely
@@ -7,7 +7,7 @@ LL | use regex;
= note: `-D clippy::single-component-path-imports` implied by `-D warnings`
error: this import is redundant
- --> $DIR/single_component_path_imports.rs:23:5
+ --> $DIR/single_component_path_imports.rs:29:5
|
LL | use regex;
| ^^^^^^^^^^ help: remove it entirely
diff --git a/src/tools/clippy/tests/ui/single_match_else.rs b/src/tools/clippy/tests/ui/single_match_else.rs
index 5d03f77e9..3c86f41f3 100644
--- a/src/tools/clippy/tests/ui/single_match_else.rs
+++ b/src/tools/clippy/tests/ui/single_match_else.rs
@@ -1,9 +1,9 @@
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![warn(clippy::single_match_else)]
#![allow(clippy::needless_return, clippy::no_effect, clippy::uninlined_format_args)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
enum ExprNode {
ExprAddrOf,
diff --git a/src/tools/clippy/tests/ui/string_add.rs b/src/tools/clippy/tests/ui/string_add.rs
index 16673c01e..20edbe31f 100644
--- a/src/tools/clippy/tests/ui/string_add.rs
+++ b/src/tools/clippy/tests/ui/string_add.rs
@@ -1,7 +1,7 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::external;
#[warn(clippy::string_add)]
#[allow(clippy::string_add_assign, unused)]
@@ -22,5 +22,8 @@ fn main() {
x = x + 1;
assert_eq!(2, x);
- string_add!();
+ external!({
+ let y = "".to_owned();
+ let z = y + "...";
+ });
}
diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed b/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed
new file mode 100644
index 000000000..b404df94d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed
@@ -0,0 +1,81 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::suspicious_doc_comments)]
+
+//! Real module documentation.
+//! Fake module documentation.
+fn baz() {}
+
+pub mod singleline_outer_doc {
+ //! This module contains useful functions.
+
+ pub fn bar() {}
+}
+
+pub mod singleline_inner_doc {
+ //! This module contains useful functions.
+
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc {
+ /*! This module contains useful functions.
+ */
+
+ pub fn bar() {}
+}
+
+pub mod multiline_inner_doc {
+ /*! This module contains useful functions.
+ */
+
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc2 {
+ //! This module
+ //! contains
+ //! useful functions.
+
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc3 {
+ //! a
+ //! b
+
+ /// c
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc4 {
+ //! a
+ /// b
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc_gap {
+ //! a
+
+ //! b
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc_commented {
+ /////! This outer doc comment was commented out.
+ pub fn bar() {}
+}
+
+pub mod outer_doc_macro {
+ //! Very cool macro
+ macro_rules! x {
+ () => {};
+ }
+}
+
+pub mod useless_outer_doc {
+ //! Huh.
+ use std::mem;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments.rs b/src/tools/clippy/tests/ui/suspicious_doc_comments.rs
new file mode 100644
index 000000000..46eff51e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_doc_comments.rs
@@ -0,0 +1,81 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::suspicious_doc_comments)]
+
+//! Real module documentation.
+///! Fake module documentation.
+fn baz() {}
+
+pub mod singleline_outer_doc {
+ ///! This module contains useful functions.
+
+ pub fn bar() {}
+}
+
+pub mod singleline_inner_doc {
+ //! This module contains useful functions.
+
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc {
+ /**! This module contains useful functions.
+ */
+
+ pub fn bar() {}
+}
+
+pub mod multiline_inner_doc {
+ /*! This module contains useful functions.
+ */
+
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc2 {
+ ///! This module
+ ///! contains
+ ///! useful functions.
+
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc3 {
+ ///! a
+ ///! b
+
+ /// c
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc4 {
+ ///! a
+ /// b
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc_gap {
+ ///! a
+
+ ///! b
+ pub fn bar() {}
+}
+
+pub mod multiline_outer_doc_commented {
+ /////! This outer doc comment was commented out.
+ pub fn bar() {}
+}
+
+pub mod outer_doc_macro {
+ ///! Very cool macro
+ macro_rules! x {
+ () => {};
+ }
+}
+
+pub mod useless_outer_doc {
+ ///! Huh.
+ use std::mem;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr b/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr
new file mode 100644
index 000000000..6c167df27
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr
@@ -0,0 +1,114 @@
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:6:1
+ |
+LL | ///! Fake module documentation.
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::suspicious-doc-comments` implied by `-D warnings`
+help: use an inner doc comment to document the parent module or crate
+ |
+LL | //! Fake module documentation.
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:10:5
+ |
+LL | ///! This module contains useful functions.
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL | //! This module contains useful functions.
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:22:5
+ |
+LL | / /**! This module contains useful functions.
+LL | | */
+ | |_______^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL ~ /*! This module contains useful functions.
+LL + */
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:36:5
+ |
+LL | / ///! This module
+LL | | ///! contains
+LL | | ///! useful functions.
+ | |__________________________^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL ~ //! This module
+LL ~ //! contains
+LL ~ //! useful functions.
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:44:5
+ |
+LL | / ///! a
+LL | | ///! b
+ | |__________^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL ~ //! a
+LL ~ //! b
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:52:5
+ |
+LL | ///! a
+ | ^^^^^^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL | //! a
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:58:5
+ |
+LL | / ///! a
+LL | |
+LL | | ///! b
+ | |__________^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL ~ //! a
+LL |
+LL ~ //! b
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:70:5
+ |
+LL | ///! Very cool macro
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL | //! Very cool macro
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments.rs:77:5
+ |
+LL | ///! Huh.
+ | ^^^^^^^^^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL | //! Huh.
+ |
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.rs b/src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.rs
new file mode 100644
index 000000000..ad98c7f49
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.rs
@@ -0,0 +1,16 @@
+#![allow(unused)]
+#![warn(clippy::suspicious_doc_comments)]
+
+///! a
+///! b
+/// c
+///! d
+pub fn foo() {}
+
+///! a
+///! b
+/// c
+///! d
+use std::mem;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.stderr b/src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.stderr
new file mode 100644
index 000000000..f89146dad
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_doc_comments_unfixable.stderr
@@ -0,0 +1,37 @@
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments_unfixable.rs:4:1
+ |
+LL | / ///! a
+LL | | ///! b
+LL | | /// c
+LL | | ///! d
+ | |______^
+ |
+ = note: `-D clippy::suspicious-doc-comments` implied by `-D warnings`
+help: use an inner doc comment to document the parent module or crate
+ |
+LL + //! a
+LL + //! b
+LL | /// c
+LL + //! d
+ |
+
+error: this is an outer doc comment and does not apply to the parent module or crate
+ --> $DIR/suspicious_doc_comments_unfixable.rs:10:1
+ |
+LL | / ///! a
+LL | | ///! b
+LL | | /// c
+LL | | ///! d
+ | |______^
+ |
+help: use an inner doc comment to document the parent module or crate
+ |
+LL + //! a
+LL + //! b
+LL | /// c
+LL + //! d
+ |
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/swap.fixed b/src/tools/clippy/tests/ui/swap.fixed
index fa89706a8..9703674d1 100644
--- a/src/tools/clippy/tests/ui/swap.fixed
+++ b/src/tools/clippy/tests/ui/swap.fixed
@@ -1,4 +1,5 @@
// run-rustfix
+// aux-build: macro_rules.rs
#![warn(clippy::all)]
#![allow(
@@ -8,7 +9,8 @@
redundant_semicolons,
dead_code,
unused_assignments,
- unused_variables
+ unused_variables,
+ clippy::let_and_return
)]
struct Foo(u32);
@@ -65,19 +67,19 @@ fn xor_swap_locals() {
// This is an xor-based swap of local variables.
let mut a = 0;
let mut b = 1;
- std::mem::swap(&mut a, &mut b)
+ std::mem::swap(&mut a, &mut b);
}
fn xor_field_swap() {
// This is an xor-based swap of fields in a struct.
let mut bar = Bar { a: 0, b: 1 };
- std::mem::swap(&mut bar.a, &mut bar.b)
+ std::mem::swap(&mut bar.a, &mut bar.b);
}
fn xor_slice_swap() {
// This is an xor-based swap of a slice
let foo = &mut [1, 2];
- foo.swap(0, 1)
+ foo.swap(0, 1);
}
fn xor_no_swap() {
@@ -186,3 +188,14 @@ const fn issue_9864(mut u: u32) -> u32 {
v = temp;
u + v
}
+
+#[macro_use]
+extern crate macro_rules;
+
+const fn issue_10421(x: u32) -> u32 {
+ issue_10421!();
+ let a = x;
+ let a = a;
+ let a = a;
+ a
+}
diff --git a/src/tools/clippy/tests/ui/swap.rs b/src/tools/clippy/tests/ui/swap.rs
index ef8a81c83..a0228065e 100644
--- a/src/tools/clippy/tests/ui/swap.rs
+++ b/src/tools/clippy/tests/ui/swap.rs
@@ -1,4 +1,5 @@
// run-rustfix
+// aux-build: macro_rules.rs
#![warn(clippy::all)]
#![allow(
@@ -8,7 +9,8 @@
redundant_semicolons,
dead_code,
unused_assignments,
- unused_variables
+ unused_variables,
+ clippy::let_and_return
)]
struct Foo(u32);
@@ -215,3 +217,14 @@ const fn issue_9864(mut u: u32) -> u32 {
v = temp;
u + v
}
+
+#[macro_use]
+extern crate macro_rules;
+
+const fn issue_10421(x: u32) -> u32 {
+ issue_10421!();
+ let a = x;
+ let a = a;
+ let a = a;
+ a
+}
diff --git a/src/tools/clippy/tests/ui/swap.stderr b/src/tools/clippy/tests/ui/swap.stderr
index f0acbfe25..0c2462684 100644
--- a/src/tools/clippy/tests/ui/swap.stderr
+++ b/src/tools/clippy/tests/ui/swap.stderr
@@ -1,106 +1,106 @@
error: this looks like you are swapping `bar.a` and `bar.b` manually
- --> $DIR/swap.rs:25:5
+ --> $DIR/swap.rs:27:5
|
LL | / let temp = bar.a;
LL | | bar.a = bar.b;
LL | | bar.b = temp;
- | |________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b)`
+ | |_________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b);`
|
= note: or maybe you should use `std::mem::replace`?
= note: `-D clippy::manual-swap` implied by `-D warnings`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:37:5
+ --> $DIR/swap.rs:39:5
|
LL | / let temp = foo[0];
LL | | foo[0] = foo[1];
LL | | foo[1] = temp;
- | |_________________^ help: try: `foo.swap(0, 1)`
+ | |__________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:46:5
+ --> $DIR/swap.rs:48:5
|
LL | / let temp = foo[0];
LL | | foo[0] = foo[1];
LL | | foo[1] = temp;
- | |_________________^ help: try: `foo.swap(0, 1)`
+ | |__________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:65:5
+ --> $DIR/swap.rs:67:5
|
LL | / let temp = foo[0];
LL | | foo[0] = foo[1];
LL | | foo[1] = temp;
- | |_________________^ help: try: `foo.swap(0, 1)`
+ | |__________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping `a` and `b` manually
- --> $DIR/swap.rs:76:5
+ --> $DIR/swap.rs:78:5
|
LL | / a ^= b;
LL | | b ^= a;
LL | | a ^= b;
- | |___________^ help: try: `std::mem::swap(&mut a, &mut b)`
+ | |___________^ help: try: `std::mem::swap(&mut a, &mut b);`
error: this looks like you are swapping `bar.a` and `bar.b` manually
- --> $DIR/swap.rs:84:5
+ --> $DIR/swap.rs:86:5
|
LL | / bar.a ^= bar.b;
LL | | bar.b ^= bar.a;
LL | | bar.a ^= bar.b;
- | |___________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b)`
+ | |___________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b);`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:92:5
+ --> $DIR/swap.rs:94:5
|
LL | / foo[0] ^= foo[1];
LL | | foo[1] ^= foo[0];
LL | | foo[0] ^= foo[1];
- | |_____________________^ help: try: `foo.swap(0, 1)`
+ | |_____________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping `foo[0][1]` and `bar[1][0]` manually
- --> $DIR/swap.rs:121:5
+ --> $DIR/swap.rs:123:5
|
LL | / let temp = foo[0][1];
LL | | foo[0][1] = bar[1][0];
LL | | bar[1][0] = temp;
- | |____________________^ help: try: `std::mem::swap(&mut foo[0][1], &mut bar[1][0])`
+ | |_____________________^ help: try: `std::mem::swap(&mut foo[0][1], &mut bar[1][0]);`
|
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `a` and `b` manually
- --> $DIR/swap.rs:135:7
+ --> $DIR/swap.rs:137:7
|
LL | ; let t = a;
| _______^
LL | | a = b;
LL | | b = t;
- | |_________^ help: try: `std::mem::swap(&mut a, &mut b)`
+ | |__________^ help: try: `std::mem::swap(&mut a, &mut b);`
|
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `c.0` and `a` manually
- --> $DIR/swap.rs:144:7
+ --> $DIR/swap.rs:146:7
|
LL | ; let t = c.0;
| _______^
LL | | c.0 = a;
LL | | a = t;
- | |_________^ help: try: `std::mem::swap(&mut c.0, &mut a)`
+ | |__________^ help: try: `std::mem::swap(&mut c.0, &mut a);`
|
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `b` and `a` manually
- --> $DIR/swap.rs:170:5
+ --> $DIR/swap.rs:172:5
|
LL | / let t = b;
LL | | b = a;
LL | | a = t;
- | |_________^ help: try: `std::mem::swap(&mut b, &mut a)`
+ | |__________^ help: try: `std::mem::swap(&mut b, &mut a);`
|
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `a` and `b`
- --> $DIR/swap.rs:132:5
+ --> $DIR/swap.rs:134:5
|
LL | / a = b;
LL | | b = a;
@@ -110,7 +110,7 @@ LL | | b = a;
= note: `-D clippy::almost-swapped` implied by `-D warnings`
error: this looks like you are trying to swap `c.0` and `a`
- --> $DIR/swap.rs:141:5
+ --> $DIR/swap.rs:143:5
|
LL | / c.0 = a;
LL | | a = c.0;
@@ -119,7 +119,7 @@ LL | | a = c.0;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `a` and `b`
- --> $DIR/swap.rs:148:5
+ --> $DIR/swap.rs:150:5
|
LL | / let a = b;
LL | | let b = a;
@@ -128,7 +128,7 @@ LL | | let b = a;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `d` and `c`
- --> $DIR/swap.rs:153:5
+ --> $DIR/swap.rs:155:5
|
LL | / d = c;
LL | | c = d;
@@ -137,7 +137,7 @@ LL | | c = d;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `a` and `b`
- --> $DIR/swap.rs:157:5
+ --> $DIR/swap.rs:159:5
|
LL | / let a = b;
LL | | b = a;
@@ -146,12 +146,12 @@ LL | | b = a;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `s.0.x` and `s.0.y` manually
- --> $DIR/swap.rs:205:5
+ --> $DIR/swap.rs:207:5
|
LL | / let t = s.0.x;
LL | | s.0.x = s.0.y;
LL | | s.0.y = t;
- | |_____________^ help: try: `std::mem::swap(&mut s.0.x, &mut s.0.y)`
+ | |______________^ help: try: `std::mem::swap(&mut s.0.x, &mut s.0.y);`
|
= note: or maybe you should use `std::mem::replace`?
diff --git a/src/tools/clippy/tests/ui/tests_outside_test_module.rs b/src/tools/clippy/tests/ui/tests_outside_test_module.rs
new file mode 100644
index 000000000..1982b1d01
--- /dev/null
+++ b/src/tools/clippy/tests/ui/tests_outside_test_module.rs
@@ -0,0 +1,18 @@
+// compile-flags: --test
+#![allow(unused)]
+#![warn(clippy::tests_outside_test_module)]
+
+fn main() {
+ // test code goes here
+}
+
+// Should lint
+#[test]
+fn my_test() {}
+
+#[cfg(test)]
+mod tests {
+ // Should not lint
+ #[test]
+ fn my_test() {}
+}
diff --git a/src/tools/clippy/tests/ui/tests_outside_test_module.stderr b/src/tools/clippy/tests/ui/tests_outside_test_module.stderr
new file mode 100644
index 000000000..125a79d6e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/tests_outside_test_module.stderr
@@ -0,0 +1,11 @@
+error: this function marked with #[test] is outside a #[cfg(test)] module
+ --> $DIR/tests_outside_test_module.rs:11:1
+ |
+LL | fn my_test() {}
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: move it to a testing module marked with #[cfg(test)]
+ = note: `-D clippy::tests-outside-test-module` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed b/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed
index 09fb66ca3..174c858a4 100644
--- a/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed
@@ -1,17 +1,12 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::toplevel_ref_arg)]
-#![allow(clippy::uninlined_format_args)]
+#![allow(clippy::uninlined_format_args, unused)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! gen_binding {
- () => {
- let _y = &42;
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
// Closures should not warn
let y = |ref x| println!("{:?}", x);
@@ -38,13 +33,8 @@ fn main() {
for ref _x in 0..10 {}
// lint in macro
- #[allow(unused)]
- {
- gen_binding!();
- }
+ inline!(let _y = &42;);
// do not lint in external macro
- {
- ref_arg_binding!();
- }
+ external!(let ref _y = 42;);
}
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg.rs b/src/tools/clippy/tests/ui/toplevel_ref_arg.rs
index 9d1f2f810..4b81a0611 100644
--- a/src/tools/clippy/tests/ui/toplevel_ref_arg.rs
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg.rs
@@ -1,17 +1,12 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::toplevel_ref_arg)]
-#![allow(clippy::uninlined_format_args)]
+#![allow(clippy::uninlined_format_args, unused)]
-#[macro_use]
-extern crate macro_rules;
-
-macro_rules! gen_binding {
- () => {
- let ref _y = 42;
- };
-}
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
+#[inline_macros]
fn main() {
// Closures should not warn
let y = |ref x| println!("{:?}", x);
@@ -38,13 +33,8 @@ fn main() {
for ref _x in 0..10 {}
// lint in macro
- #[allow(unused)]
- {
- gen_binding!();
- }
+ inline!(let ref _y = 42;);
// do not lint in external macro
- {
- ref_arg_binding!();
- }
+ external!(let ref _y = 42;);
}
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr b/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr
index 9c853020a..407c2d9fc 100644
--- a/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr
@@ -1,5 +1,5 @@
error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
- --> $DIR/toplevel_ref_arg.rs:20:9
+ --> $DIR/toplevel_ref_arg.rs:15:9
|
LL | let ref _x = 1;
| ----^^^^^^----- help: try: `let _x = &1;`
@@ -7,39 +7,36 @@ LL | let ref _x = 1;
= note: `-D clippy::toplevel-ref-arg` implied by `-D warnings`
error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
- --> $DIR/toplevel_ref_arg.rs:22:9
+ --> $DIR/toplevel_ref_arg.rs:17:9
|
LL | let ref _y: (&_, u8) = (&1, 2);
| ----^^^^^^--------------------- help: try: `let _y: &(&_, u8) = &(&1, 2);`
error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
- --> $DIR/toplevel_ref_arg.rs:24:9
+ --> $DIR/toplevel_ref_arg.rs:19:9
|
LL | let ref _z = 1 + 2;
| ----^^^^^^--------- help: try: `let _z = &(1 + 2);`
error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
- --> $DIR/toplevel_ref_arg.rs:26:9
+ --> $DIR/toplevel_ref_arg.rs:21:9
|
LL | let ref mut _z = 1 + 2;
| ----^^^^^^^^^^--------- help: try: `let _z = &mut (1 + 2);`
error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
- --> $DIR/toplevel_ref_arg.rs:31:9
+ --> $DIR/toplevel_ref_arg.rs:26:9
|
LL | let ref _x = vec![1, 2, 3];
| ----^^^^^^----------------- help: try: `let _x = &vec![1, 2, 3];`
error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
- --> $DIR/toplevel_ref_arg.rs:11:13
+ --> $DIR/toplevel_ref_arg.rs:36:17
|
-LL | let ref _y = 42;
- | ----^^^^^^------ help: try: `let _y = &42;`
-...
-LL | gen_binding!();
- | -------------- in this macro invocation
+LL | inline!(let ref _y = 42;);
+ | ----^^^^^^------ help: try: `let _y = &42;`
|
- = note: this error originates in the macro `gen_binding` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 6 previous errors
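
For reference, a minimal standalone sketch (not part of the diff) of what `clippy::toplevel_ref_arg` flags on `let` bindings and the shape of the suggestion the rewritten tests still expect:

    #![warn(clippy::toplevel_ref_arg)]

    fn main() {
        // Flagged: `ref` on the entire `let` pattern is discouraged.
        let ref _x = 1;
        // Suggested form: take a reference on the right-hand side instead.
        let _y = &1;
    }
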
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs
index 1a493fbce..2047593e7 100644
--- a/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs
@@ -1,33 +1,27 @@
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![warn(clippy::toplevel_ref_arg)]
#![allow(unused)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
fn the_answer(ref mut x: u8) {
*x = 42;
}
-macro_rules! gen_function {
- () => {
- fn fun_example(ref _x: usize) {}
- };
-}
-
+#[inline_macros]
fn main() {
let mut x = 0;
the_answer(x);
// lint in macro
- #[allow(unused)]
- {
- gen_function!();
+ inline! {
+ fn fun_example(ref _x: usize) {}
}
// do not lint in external macro
- {
- ref_arg_function!();
+ external! {
+ fn fun_example2(ref _x: usize) {}
}
}
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr
index e97011c7f..7307bd599 100644
--- a/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr
@@ -7,15 +7,12 @@ LL | fn the_answer(ref mut x: u8) {
= note: `-D clippy::toplevel-ref-arg` implied by `-D warnings`
error: `ref` directly on a function argument is ignored. Consider using a reference type instead
- --> $DIR/toplevel_ref_arg_non_rustfix.rs:15:24
+ --> $DIR/toplevel_ref_arg_non_rustfix.rs:20:24
|
LL | fn fun_example(ref _x: usize) {}
| ^^^^^^
-...
-LL | gen_function!();
- | --------------- in this macro invocation
|
- = note: this error originates in the macro `gen_function` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/trailing_empty_array.rs b/src/tools/clippy/tests/ui/trailing_empty_array.rs
index c39b0bcaf..8e3749eef 100644
--- a/src/tools/clippy/tests/ui/trailing_empty_array.rs
+++ b/src/tools/clippy/tests/ui/trailing_empty_array.rs
@@ -155,7 +155,6 @@ struct TupleStructReprC(i32, [usize; 0]);
type NamedTuple = (i32, [usize; 0]);
-#[rustfmt::skip] // [rustfmt#4995](https://github.com/rust-lang/rustfmt/issues/4995)
struct ConstParamZeroDefault<const N: usize = 0> {
field: i32,
last: [usize; N],
@@ -166,7 +165,6 @@ struct ConstParamNoDefault<const N: usize> {
last: [usize; N],
}
-#[rustfmt::skip]
struct ConstParamNonZeroDefault<const N: usize = 1> {
field: i32,
last: [usize; N],
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
index 55307506e..cc84ba25b 100644
--- a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
@@ -4,7 +4,7 @@
// would otherwise be responsible for
#![warn(clippy::useless_transmute)]
#![warn(clippy::transmute_ptr_to_ptr)]
-#![allow(dead_code, unused_unsafe, clippy::borrow_as_ptr)]
+#![allow(unused, clippy::borrow_as_ptr)]
use std::mem::{size_of, transmute};
@@ -77,3 +77,9 @@ fn cannot_be_expressed_as_pointer_cast(in_param: Single) -> Pair {
unsafe { transmute::<Single, Pair>(in_param) }
}
+
+fn issue_10449() {
+ fn f() {}
+
+ let _x: u8 = unsafe { *(f as *const u8) };
+}
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
index e7360f3f9..aa65ab4dd 100644
--- a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
@@ -4,7 +4,7 @@
// would otherwise be responsible for
#![warn(clippy::useless_transmute)]
#![warn(clippy::transmute_ptr_to_ptr)]
-#![allow(dead_code, unused_unsafe, clippy::borrow_as_ptr)]
+#![allow(unused, clippy::borrow_as_ptr)]
use std::mem::{size_of, transmute};
@@ -77,3 +77,9 @@ fn cannot_be_expressed_as_pointer_cast(in_param: Single) -> Pair {
unsafe { transmute::<Single, Pair>(in_param) }
}
+
+fn issue_10449() {
+ fn f() {}
+
+ let _x: u8 = unsafe { *std::mem::transmute::<fn(), *const u8>(f) };
+}
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
index e862fcb67..58f5162c7 100644
--- a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
@@ -58,5 +58,11 @@ error: transmute from a reference to a pointer
LL | unsafe { transmute::<&[i32; 1], *const u8>(in_param) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `in_param as *const [i32; 1] as *const u8`
-error: aborting due to 9 previous errors
+error: transmute from `fn()` to `*const u8` which could be expressed as a pointer cast instead
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:84:28
+ |
+LL | let _x: u8 = unsafe { *std::mem::transmute::<fn(), *const u8>(f) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(f as *const u8)`
+
+error: aborting due to 10 previous errors
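
A standalone sketch (not part of the diff) of the issue-10449 case the new test covers: a transmute from `fn()` to `*const u8` that `transmutes_expressible_as_ptr_casts` now suggests writing as a plain cast.

    use std::mem::transmute;

    fn f() {}

    fn main() {
        // Flagged: this transmute is just a pointer cast in disguise.
        let via_transmute = unsafe { transmute::<fn(), *const u8>(f) };
        // Suggested equivalent.
        let via_cast = f as *const u8;
        assert_eq!(via_transmute, via_cast);
    }
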
diff --git a/src/tools/clippy/tests/ui/try_err.fixed b/src/tools/clippy/tests/ui/try_err.fixed
index 264194419..dc497b169 100644
--- a/src/tools/clippy/tests/ui/try_err.fixed
+++ b/src/tools/clippy/tests/ui/try_err.fixed
@@ -1,11 +1,11 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![deny(clippy::try_err)]
#![allow(clippy::unnecessary_wraps, clippy::needless_question_mark)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
use std::io;
use std::task::Poll;
@@ -79,36 +79,22 @@ fn nested_error() -> Result<i32, i32> {
Ok(1)
}
-// Bad suggestion when in macro (see #6242)
-macro_rules! try_validation {
- ($e: expr) => {{
- match $e {
+#[inline_macros]
+fn calling_macro() -> Result<i32, i32> {
+ // macro
+ inline!(
+ match $(Ok::<_, i32>(5)) {
Ok(_) => 0,
Err(_) => return Err(1),
}
- }};
-}
-
-macro_rules! ret_one {
- () => {
- 1
- };
-}
-
-macro_rules! try_validation_in_macro {
- ($e: expr) => {{
- match $e {
+ );
+ // `Err` arg is another macro
+ inline!(
+ match $(Ok::<_, i32>(5)) {
Ok(_) => 0,
- Err(_) => return Err(ret_one!()),
+ Err(_) => return Err(inline!(1)),
}
- }};
-}
-
-fn calling_macro() -> Result<i32, i32> {
- // macro
- try_validation!(Ok::<_, i32>(5));
- // `Err` arg is another macro
- try_validation_in_macro!(Ok::<_, i32>(5));
+ );
Ok(5)
}
@@ -121,24 +107,19 @@ fn main() {
calling_macro().unwrap();
// We don't want to lint in external macros
- try_err!();
-}
-
-macro_rules! bar {
- () => {
- String::from("aasdfasdfasdfa")
- };
-}
-
-macro_rules! foo {
- () => {
- bar!()
- };
+ external! {
+ pub fn try_err_fn() -> Result<i32, i32> {
+ let err: i32 = 1;
+ // To avoid warnings during rustfix
+ if true { Err(err)? } else { Ok(2) }
+ }
+ }
}
+#[inline_macros]
pub fn macro_inside(fail: bool) -> Result<i32, String> {
if fail {
- return Err(foo!());
+ return Err(inline!(inline!(String::from("aasdfasdfasdfa"))));
}
Ok(0)
}
diff --git a/src/tools/clippy/tests/ui/try_err.rs b/src/tools/clippy/tests/ui/try_err.rs
index bc6979bf4..86aeb75cd 100644
--- a/src/tools/clippy/tests/ui/try_err.rs
+++ b/src/tools/clippy/tests/ui/try_err.rs
@@ -1,11 +1,11 @@
// run-rustfix
-// aux-build:macro_rules.rs
+// aux-build:proc_macros.rs
#![deny(clippy::try_err)]
#![allow(clippy::unnecessary_wraps, clippy::needless_question_mark)]
-#[macro_use]
-extern crate macro_rules;
+extern crate proc_macros;
+use proc_macros::{external, inline_macros};
use std::io;
use std::task::Poll;
@@ -79,36 +79,22 @@ fn nested_error() -> Result<i32, i32> {
Ok(1)
}
-// Bad suggestion when in macro (see #6242)
-macro_rules! try_validation {
- ($e: expr) => {{
- match $e {
+#[inline_macros]
+fn calling_macro() -> Result<i32, i32> {
+ // macro
+ inline!(
+ match $(Ok::<_, i32>(5)) {
Ok(_) => 0,
Err(_) => Err(1)?,
}
- }};
-}
-
-macro_rules! ret_one {
- () => {
- 1
- };
-}
-
-macro_rules! try_validation_in_macro {
- ($e: expr) => {{
- match $e {
+ );
+ // `Err` arg is another macro
+ inline!(
+ match $(Ok::<_, i32>(5)) {
Ok(_) => 0,
- Err(_) => Err(ret_one!())?,
+ Err(_) => Err(inline!(1))?,
}
- }};
-}
-
-fn calling_macro() -> Result<i32, i32> {
- // macro
- try_validation!(Ok::<_, i32>(5));
- // `Err` arg is another macro
- try_validation_in_macro!(Ok::<_, i32>(5));
+ );
Ok(5)
}
@@ -121,24 +107,19 @@ fn main() {
calling_macro().unwrap();
// We don't want to lint in external macros
- try_err!();
-}
-
-macro_rules! bar {
- () => {
- String::from("aasdfasdfasdfa")
- };
-}
-
-macro_rules! foo {
- () => {
- bar!()
- };
+ external! {
+ pub fn try_err_fn() -> Result<i32, i32> {
+ let err: i32 = 1;
+ // To avoid warnings during rustfix
+ if true { Err(err)? } else { Ok(2) }
+ }
+ }
}
+#[inline_macros]
pub fn macro_inside(fail: bool) -> Result<i32, String> {
if fail {
- Err(foo!())?;
+ Err(inline!(inline!(String::from("aasdfasdfasdfa"))))?;
}
Ok(0)
}
diff --git a/src/tools/clippy/tests/ui/try_err.stderr b/src/tools/clippy/tests/ui/try_err.stderr
index 0cb1328fb..4ad0e2e56 100644
--- a/src/tools/clippy/tests/ui/try_err.stderr
+++ b/src/tools/clippy/tests/ui/try_err.stderr
@@ -29,53 +29,47 @@ LL | Err(err)?;
| ^^^^^^^^^ help: try this: `return Err(err.into())`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:87:23
+ --> $DIR/try_err.rs:88:23
|
LL | Err(_) => Err(1)?,
| ^^^^^^^ help: try this: `return Err(1)`
-...
-LL | try_validation!(Ok::<_, i32>(5));
- | -------------------------------- in this macro invocation
|
- = note: this error originates in the macro `try_validation` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_calling_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:102:23
+ --> $DIR/try_err.rs:95:23
|
-LL | Err(_) => Err(ret_one!())?,
- | ^^^^^^^^^^^^^^^^ help: try this: `return Err(ret_one!())`
-...
-LL | try_validation_in_macro!(Ok::<_, i32>(5));
- | ----------------------------------------- in this macro invocation
+LL | Err(_) => Err(inline!(1))?,
+ | ^^^^^^^^^^^^^^^^ help: try this: `return Err(inline!(1))`
|
- = note: this error originates in the macro `try_validation_in_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: this error originates in the macro `__inline_mac_fn_calling_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:141:9
+ --> $DIR/try_err.rs:122:9
|
-LL | Err(foo!())?;
- | ^^^^^^^^^^^^ help: try this: `return Err(foo!())`
+LL | Err(inline!(inline!(String::from("aasdfasdfasdfa"))))?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Err(inline!(inline!(String::from("aasdfasdfasdfa"))))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:148:9
+ --> $DIR/try_err.rs:129:9
|
LL | Err(io::ErrorKind::WriteZero)?
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:150:9
+ --> $DIR/try_err.rs:131:9
|
LL | Err(io::Error::new(io::ErrorKind::InvalidInput, "error"))?
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:158:9
+ --> $DIR/try_err.rs:139:9
|
LL | Err(io::ErrorKind::NotFound)?
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:167:16
+ --> $DIR/try_err.rs:148:16
|
LL | return Err(42)?;
| ^^^^^^^^ help: try this: `Err(42)`
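
A minimal sketch (not from the diff) of the pattern `clippy::try_err` targets, which the rewritten test now exercises through the `inline!` proc macro instead of local `macro_rules!` definitions:

    #![warn(clippy::try_err)]

    fn checked(value: Result<i32, i32>) -> Result<i32, i32> {
        let n = match value {
            Ok(n) => n,
            // Flagged: `Err(1)?` always returns, so `return Err(1)` is clearer.
            Err(_) => Err(1)?,
        };
        Ok(n + 1)
    }

    fn main() {
        assert_eq!(checked(Ok(4)), Ok(5));
        assert_eq!(checked(Err(0)), Err(1));
    }
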
diff --git a/src/tools/clippy/tests/ui/uninit.rs b/src/tools/clippy/tests/ui/uninit.rs
index 211317317..c996de894 100644
--- a/src/tools/clippy/tests/ui/uninit.rs
+++ b/src/tools/clippy/tests/ui/uninit.rs
@@ -1,15 +1,17 @@
#![feature(stmt_expr_attributes)]
#![allow(clippy::let_unit_value, invalid_value)]
-use std::mem::{self, MaybeUninit};
+use std::mem::MaybeUninit;
+
+union MyOwnMaybeUninit {
+ value: u8,
+ uninit: (),
+}
fn main() {
let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
- // edge case: For now we lint on empty arrays
- let _: [u8; 0] = unsafe { MaybeUninit::uninit().assume_init() };
-
- // edge case: For now we accept unit tuples
+ // This is OK, because ZSTs do not contain data.
let _: () = unsafe { MaybeUninit::uninit().assume_init() };
// This is OK, because `MaybeUninit` allows uninitialized data.
@@ -21,6 +23,31 @@ fn main() {
// This is OK, because all constituent types are uninit-compatible.
let _: (MaybeUninit<usize>, [MaybeUninit<bool>; 2]) = unsafe { MaybeUninit::uninit().assume_init() };
+ // This is OK, because our own MaybeUninit is just as fine as the one from core.
+ let _: MyOwnMaybeUninit = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // This is OK, because empty arrays don't contain data.
+ let _: [u8; 0] = unsafe { MaybeUninit::uninit().assume_init() };
+
// Was a false negative.
- let _: usize = unsafe { mem::MaybeUninit::uninit().assume_init() };
+ let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
+
+ polymorphic::<()>();
+ polymorphic_maybe_uninit_array::<10>();
+ polymorphic_maybe_uninit::<u8>();
+
+ fn polymorphic<T>() {
+ // We are conservative around polymorphic types.
+ let _: T = unsafe { MaybeUninit::uninit().assume_init() };
+ }
+
+ fn polymorphic_maybe_uninit_array<const N: usize>() {
+ // While the type is polymorphic, MaybeUninit<u8> is not.
+ let _: [MaybeUninit<u8>; N] = unsafe { MaybeUninit::uninit().assume_init() };
+ }
+
+ fn polymorphic_maybe_uninit<T>() {
+ // The entire type is polymorphic, but it's wrapped in a MaybeUninit.
+ let _: MaybeUninit<T> = unsafe { MaybeUninit::uninit().assume_init() };
+ }
}
diff --git a/src/tools/clippy/tests/ui/uninit.stderr b/src/tools/clippy/tests/ui/uninit.stderr
index 15ef23494..248de56da 100644
--- a/src/tools/clippy/tests/ui/uninit.stderr
+++ b/src/tools/clippy/tests/ui/uninit.stderr
@@ -1,5 +1,5 @@
error: this call for this type may be undefined behavior
- --> $DIR/uninit.rs:7:29
+ --> $DIR/uninit.rs:12:29
|
LL | let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -7,16 +7,16 @@ LL | let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
= note: `#[deny(clippy::uninit_assumed_init)]` on by default
error: this call for this type may be undefined behavior
- --> $DIR/uninit.rs:10:31
+ --> $DIR/uninit.rs:33:29
|
-LL | let _: [u8; 0] = unsafe { MaybeUninit::uninit().assume_init() };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: this call for this type may be undefined behavior
- --> $DIR/uninit.rs:25:29
+ --> $DIR/uninit.rs:41:29
|
-LL | let _: usize = unsafe { mem::MaybeUninit::uninit().assume_init() };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let _: T = unsafe { MaybeUninit::uninit().assume_init() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 3 previous errors
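
As a standalone illustration (assumptions mine, not from the diff), the distinction `uninit_assumed_init` draws between types that may hold uninitialized bytes and types that may not:

    use std::mem::MaybeUninit;

    fn main() {
        // Fine: `MaybeUninit<T>` is allowed to contain uninitialized bytes.
        let mut slot: MaybeUninit<usize> = MaybeUninit::uninit();
        // Write before `assume_init`; this is the sound usage pattern.
        slot.write(42);
        let value = unsafe { slot.assume_init() };
        assert_eq!(value, 42);

        // Flagged (and undefined behavior): an uninitialized `usize`.
        // let _bad: usize = unsafe { MaybeUninit::<usize>::uninit().assume_init() };
    }
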
diff --git a/src/tools/clippy/tests/ui/uninit_vec.rs b/src/tools/clippy/tests/ui/uninit_vec.rs
index 194e4fc15..79effc82f 100644
--- a/src/tools/clippy/tests/ui/uninit_vec.rs
+++ b/src/tools/clippy/tests/ui/uninit_vec.rs
@@ -7,6 +7,11 @@ struct MyVec {
vec: Vec<u8>,
}
+union MyOwnMaybeUninit {
+ value: u8,
+ uninit: (),
+}
+
fn main() {
// with_capacity() -> set_len() should be detected
let mut vec: Vec<u8> = Vec::with_capacity(1000);
@@ -97,4 +102,34 @@ fn main() {
unsafe {
vec.set_len(0);
}
+
+ // ZSTs should not be detected
+ let mut vec: Vec<()> = Vec::with_capacity(1000);
+ unsafe {
+ vec.set_len(10);
+ }
+
+ // unions should not be detected
+ let mut vec: Vec<MyOwnMaybeUninit> = Vec::with_capacity(1000);
+ unsafe {
+ vec.set_len(10);
+ }
+
+ polymorphic::<()>();
+
+ fn polymorphic<T>() {
+ // We are conservative around polymorphic types.
+ let mut vec: Vec<T> = Vec::with_capacity(1000);
+ unsafe {
+ vec.set_len(10);
+ }
+ }
+
+ fn poly_maybe_uninit<T>() {
+ // We are conservative around polymorphic types.
+ let mut vec: Vec<MaybeUninit<T>> = Vec::with_capacity(1000);
+ unsafe {
+ vec.set_len(10);
+ }
+ }
}
diff --git a/src/tools/clippy/tests/ui/uninit_vec.stderr b/src/tools/clippy/tests/ui/uninit_vec.stderr
index 77fc689f0..9cdf0c95a 100644
--- a/src/tools/clippy/tests/ui/uninit_vec.stderr
+++ b/src/tools/clippy/tests/ui/uninit_vec.stderr
@@ -1,5 +1,5 @@
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:12:5
+ --> $DIR/uninit_vec.rs:17:5
|
LL | let mut vec: Vec<u8> = Vec::with_capacity(1000);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -11,7 +11,7 @@ LL | vec.set_len(200);
= note: `-D clippy::uninit-vec` implied by `-D warnings`
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:18:5
+ --> $DIR/uninit_vec.rs:23:5
|
LL | vec.reserve(1000);
| ^^^^^^^^^^^^^^^^^^
@@ -22,7 +22,7 @@ LL | vec.set_len(200);
= help: initialize the buffer or wrap the content in `MaybeUninit`
error: calling `set_len()` on empty `Vec` creates out-of-bound values
- --> $DIR/uninit_vec.rs:24:5
+ --> $DIR/uninit_vec.rs:29:5
|
LL | let mut vec: Vec<u8> = Vec::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -31,7 +31,7 @@ LL | vec.set_len(200);
| ^^^^^^^^^^^^^^^^
error: calling `set_len()` on empty `Vec` creates out-of-bound values
- --> $DIR/uninit_vec.rs:30:5
+ --> $DIR/uninit_vec.rs:35:5
|
LL | let mut vec: Vec<u8> = Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -40,7 +40,7 @@ LL | vec.set_len(200);
| ^^^^^^^^^^^^^^^^
error: calling `set_len()` on empty `Vec` creates out-of-bound values
- --> $DIR/uninit_vec.rs:35:5
+ --> $DIR/uninit_vec.rs:40:5
|
LL | let mut vec: Vec<u8> = Vec::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -49,7 +49,7 @@ LL | vec.set_len(200);
| ^^^^^^^^^^^^^^^^
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:49:5
+ --> $DIR/uninit_vec.rs:54:5
|
LL | let mut vec: Vec<u8> = Vec::with_capacity(1000);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -60,7 +60,7 @@ LL | vec.set_len(200);
= help: initialize the buffer or wrap the content in `MaybeUninit`
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:58:5
+ --> $DIR/uninit_vec.rs:63:5
|
LL | my_vec.vec.reserve(1000);
| ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -71,7 +71,7 @@ LL | my_vec.vec.set_len(200);
= help: initialize the buffer or wrap the content in `MaybeUninit`
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:63:5
+ --> $DIR/uninit_vec.rs:68:5
|
LL | my_vec.vec = Vec::with_capacity(1000);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -82,7 +82,7 @@ LL | my_vec.vec.set_len(200);
= help: initialize the buffer or wrap the content in `MaybeUninit`
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:42:9
+ --> $DIR/uninit_vec.rs:47:9
|
LL | let mut vec: Vec<u8> = Vec::with_capacity(1000);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -92,7 +92,7 @@ LL | vec.set_len(200);
= help: initialize the buffer or wrap the content in `MaybeUninit`
error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
- --> $DIR/uninit_vec.rs:45:9
+ --> $DIR/uninit_vec.rs:50:9
|
LL | vec.reserve(1000);
| ^^^^^^^^^^^^^^^^^^
@@ -101,5 +101,16 @@ LL | vec.set_len(200);
|
= help: initialize the buffer or wrap the content in `MaybeUninit`
-error: aborting due to 10 previous errors
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:122:9
+ |
+LL | let mut vec: Vec<T> = Vec::with_capacity(1000);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | vec.set_len(10);
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: aborting due to 11 previous errors
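
For context, a sketch (not from the diff) of the pattern `uninit_vec` rejects and two sound alternatives, one of which matches the `Vec<MaybeUninit<T>>` shape the new polymorphic test cases probe:

    use std::mem::MaybeUninit;

    fn main() {
        // Flagged: `set_len` would expose 10 uninitialized `u8` values.
        // let mut bad: Vec<u8> = Vec::with_capacity(10);
        // unsafe { bad.set_len(10) };

        // Sound alternative 1: initialize the buffer up front.
        let zeroed = vec![0u8; 10];
        assert_eq!(zeroed.len(), 10);

        // Sound alternative 2: keep elements wrapped in `MaybeUninit` until written.
        let mut staged: Vec<MaybeUninit<u8>> = Vec::with_capacity(10);
        staged.resize_with(10, MaybeUninit::uninit);
        assert_eq!(staged.len(), 10);
    }
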
diff --git a/src/tools/clippy/tests/ui/uninlined_format_args.fixed b/src/tools/clippy/tests/ui/uninlined_format_args.fixed
index cbd5cc5fc..3122081a4 100644
--- a/src/tools/clippy/tests/ui/uninlined_format_args.fixed
+++ b/src/tools/clippy/tests/ui/uninlined_format_args.fixed
@@ -1,11 +1,11 @@
-// aux-build:proc_macro_with_span.rs
+// aux-build:proc_macros.rs
// run-rustfix
#![warn(clippy::uninlined_format_args)]
-#![allow(named_arguments_used_positionally, unused_imports, unused_macros, unused_variables)]
+#![allow(named_arguments_used_positionally, unused)]
#![allow(clippy::eq_op, clippy::format_in_format_args, clippy::print_literal)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
macro_rules! no_param_str {
() => {
@@ -119,7 +119,7 @@ fn tester(fn_arg: i32) {
println!("Width = {local_i32}, value with width = {local_f64:local_i32$}");
println!("{local_i32:width$.prec$}");
println!("{width:width$.prec$}");
- println!("{}", format!("{local_i32}"));
+ println!("{}", format!("{}", local_i32));
my_println!("{}", local_i32);
my_println_args!("{}", local_i32);
@@ -178,3 +178,87 @@ fn _meets_msrv() {
fn _do_not_fire() {
println!("{:?}", None::<()>);
}
+
+macro_rules! _internal {
+ ($($args:tt)*) => {
+ println!("{}", format_args!($($args)*))
+ };
+}
+
+macro_rules! my_println2 {
+ ($target:expr, $($args:tt)+) => {{
+ if $target {
+ _internal!($($args)+)
+ }
+ }};
+}
+
+macro_rules! my_println2_args {
+ ($target:expr, $($args:tt)+) => {{
+ if $target {
+ _internal!("foo: {}", format_args!($($args)+))
+ }
+ }};
+}
+
+macro_rules! my_concat {
+ ($fmt:literal $(, $e:expr)*) => {
+ println!(concat!("ERROR: ", $fmt), $($e,)*)
+ }
+}
+
+macro_rules! my_good_macro {
+ ($fmt:literal $(, $e:expr)* $(,)?) => {
+ println!($fmt $(, $e)*)
+ }
+}
+
+macro_rules! my_bad_macro {
+ ($fmt:literal, $($e:expr),*) => {
+ println!($fmt, $($e,)*)
+ }
+}
+
+macro_rules! my_bad_macro2 {
+ ($fmt:literal) => {
+ let s = $fmt.clone();
+ println!("{}", s);
+ };
+ ($fmt:literal, $($e:expr)+) => {
+ println!($fmt, $($e,)*)
+ };
+}
+
+// This abomination was suggested by @Alexendoo, may the Rust gods have mercy on their soul...
+// https://github.com/rust-lang/rust-clippy/pull/9948#issuecomment-1327965962
+macro_rules! used_twice {
+ (
+ large = $large:literal,
+ small = $small:literal,
+ $val:expr,
+ ) => {
+ if $val < 5 {
+ println!($small, $val);
+ } else {
+ println!($large, $val);
+ }
+ };
+}
+
+fn tester2() {
+ let local_i32 = 1;
+ my_println2_args!(true, "{}", local_i32);
+ my_println2!(true, "{}", local_i32);
+ my_concat!("{}", local_i32);
+ my_good_macro!("{}", local_i32);
+ my_good_macro!("{}", local_i32,);
+
+ // FIXME: Broken false positives, currently unhandled
+ my_bad_macro!("{}", local_i32);
+ my_bad_macro2!("{}", local_i32);
+ used_twice! {
+ large = "large value: {}",
+ small = "small value: {}",
+ local_i32,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/uninlined_format_args.rs b/src/tools/clippy/tests/ui/uninlined_format_args.rs
index cf0ea5be4..b153ef256 100644
--- a/src/tools/clippy/tests/ui/uninlined_format_args.rs
+++ b/src/tools/clippy/tests/ui/uninlined_format_args.rs
@@ -1,11 +1,11 @@
-// aux-build:proc_macro_with_span.rs
+// aux-build:proc_macros.rs
// run-rustfix
#![warn(clippy::uninlined_format_args)]
-#![allow(named_arguments_used_positionally, unused_imports, unused_macros, unused_variables)]
+#![allow(named_arguments_used_positionally, unused)]
#![allow(clippy::eq_op, clippy::format_in_format_args, clippy::print_literal)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
macro_rules! no_param_str {
() => {
@@ -183,3 +183,87 @@ fn _meets_msrv() {
fn _do_not_fire() {
println!("{:?}", None::<()>);
}
+
+macro_rules! _internal {
+ ($($args:tt)*) => {
+ println!("{}", format_args!($($args)*))
+ };
+}
+
+macro_rules! my_println2 {
+ ($target:expr, $($args:tt)+) => {{
+ if $target {
+ _internal!($($args)+)
+ }
+ }};
+}
+
+macro_rules! my_println2_args {
+ ($target:expr, $($args:tt)+) => {{
+ if $target {
+ _internal!("foo: {}", format_args!($($args)+))
+ }
+ }};
+}
+
+macro_rules! my_concat {
+ ($fmt:literal $(, $e:expr)*) => {
+ println!(concat!("ERROR: ", $fmt), $($e,)*)
+ }
+}
+
+macro_rules! my_good_macro {
+ ($fmt:literal $(, $e:expr)* $(,)?) => {
+ println!($fmt $(, $e)*)
+ }
+}
+
+macro_rules! my_bad_macro {
+ ($fmt:literal, $($e:expr),*) => {
+ println!($fmt, $($e,)*)
+ }
+}
+
+macro_rules! my_bad_macro2 {
+ ($fmt:literal) => {
+ let s = $fmt.clone();
+ println!("{}", s);
+ };
+ ($fmt:literal, $($e:expr)+) => {
+ println!($fmt, $($e,)*)
+ };
+}
+
+// This abomination was suggested by @Alexendoo, may the Rust gods have mercy on their soul...
+// https://github.com/rust-lang/rust-clippy/pull/9948#issuecomment-1327965962
+macro_rules! used_twice {
+ (
+ large = $large:literal,
+ small = $small:literal,
+ $val:expr,
+ ) => {
+ if $val < 5 {
+ println!($small, $val);
+ } else {
+ println!($large, $val);
+ }
+ };
+}
+
+fn tester2() {
+ let local_i32 = 1;
+ my_println2_args!(true, "{}", local_i32);
+ my_println2!(true, "{}", local_i32);
+ my_concat!("{}", local_i32);
+ my_good_macro!("{}", local_i32);
+ my_good_macro!("{}", local_i32,);
+
+ // FIXME: Broken false positives, currently unhandled
+ my_bad_macro!("{}", local_i32);
+ my_bad_macro2!("{}", local_i32);
+ used_twice! {
+ large = "large value: {}",
+ small = "small value: {}",
+ local_i32,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/uninlined_format_args.stderr b/src/tools/clippy/tests/ui/uninlined_format_args.stderr
index a12abf8be..dc4af6ef4 100644
--- a/src/tools/clippy/tests/ui/uninlined_format_args.stderr
+++ b/src/tools/clippy/tests/ui/uninlined_format_args.stderr
@@ -775,18 +775,6 @@ LL + println!("{width:width$.prec$}");
|
error: variables can be used directly in the `format!` string
- --> $DIR/uninlined_format_args.rs:125:20
- |
-LL | println!("{}", format!("{}", local_i32));
- | ^^^^^^^^^^^^^^^^^^^^^^^^
- |
-help: change this to
- |
-LL - println!("{}", format!("{}", local_i32));
-LL + println!("{}", format!("{local_i32}"));
- |
-
-error: variables can be used directly in the `format!` string
--> $DIR/uninlined_format_args.rs:143:5
|
LL | / println!(
@@ -856,5 +844,5 @@ LL - println!("expand='{}'", local_i32);
LL + println!("expand='{local_i32}'");
|
-error: aborting due to 72 previous errors
+error: aborting due to 71 previous errors
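
A minimal sketch (not from the diff) of what `clippy::uninlined_format_args` asks for; the nested `format!` case dropped from the expectations above is one the lint no longer rewrites:

    #![warn(clippy::uninlined_format_args)]

    fn main() {
        let local_i32 = 1;
        // Flagged: the argument can be inlined into the format string.
        println!("value = {}", local_i32);
        // Suggested form.
        println!("value = {local_i32}");
    }
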
diff --git a/src/tools/clippy/tests/ui/unit_arg.rs b/src/tools/clippy/tests/ui/unit_arg.rs
index 07e70873a..674ae4f1d 100644
--- a/src/tools/clippy/tests/ui/unit_arg.rs
+++ b/src/tools/clippy/tests/ui/unit_arg.rs
@@ -1,4 +1,4 @@
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![warn(clippy::unit_arg)]
#![allow(unused_must_use, unused_variables)]
#![allow(
@@ -13,9 +13,9 @@
clippy::unused_unit
)]
-extern crate proc_macro_with_span;
+extern crate proc_macros;
-use proc_macro_with_span::with_span;
+use proc_macros::with_span;
use std::fmt::Debug;
fn foo<T: Debug>(t: T) {
diff --git a/src/tools/clippy/tests/ui/unnecessary_box_returns.rs b/src/tools/clippy/tests/ui/unnecessary_box_returns.rs
new file mode 100644
index 000000000..fe60d9297
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_box_returns.rs
@@ -0,0 +1,60 @@
+#![warn(clippy::unnecessary_box_returns)]
+
+trait Bar {
+ // lint
+ fn baz(&self) -> Box<usize>;
+}
+
+pub struct Foo {}
+
+impl Bar for Foo {
+ // don't lint: this is a problem with the trait, not the implementation
+ fn baz(&self) -> Box<usize> {
+ Box::new(42)
+ }
+}
+
+impl Foo {
+ fn baz(&self) -> Box<usize> {
+ // lint
+ Box::new(13)
+ }
+}
+
+// lint
+fn bxed_usize() -> Box<usize> {
+ Box::new(5)
+}
+
+// lint
+fn _bxed_foo() -> Box<Foo> {
+ Box::new(Foo {})
+}
+
+// don't lint: this is exported
+pub fn bxed_foo() -> Box<Foo> {
+ Box::new(Foo {})
+}
+
+// don't lint: str is unsized
+fn bxed_str() -> Box<str> {
+ "Hello, world!".to_string().into_boxed_str()
+}
+
+// don't lint: function contains the word, "box"
+fn boxed_usize() -> Box<usize> {
+ Box::new(7)
+}
+
+// don't lint: this has an unspecified return type
+fn default() {}
+
+// don't lint: this doesn't return a Box
+fn string() -> String {
+ String::from("Hello, world")
+}
+
+fn main() {
+ // don't lint: this is a closure
+ let a = || -> Box<usize> { Box::new(5) };
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_box_returns.stderr b/src/tools/clippy/tests/ui/unnecessary_box_returns.stderr
new file mode 100644
index 000000000..b17512c10
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_box_returns.stderr
@@ -0,0 +1,35 @@
+error: boxed return of the sized type `usize`
+ --> $DIR/unnecessary_box_returns.rs:5:22
+ |
+LL | fn baz(&self) -> Box<usize>;
+ | ^^^^^^^^^^ help: try: `usize`
+ |
+ = help: changing this also requires a change to the return expressions in this function
+ = note: `-D clippy::unnecessary-box-returns` implied by `-D warnings`
+
+error: boxed return of the sized type `usize`
+ --> $DIR/unnecessary_box_returns.rs:18:22
+ |
+LL | fn baz(&self) -> Box<usize> {
+ | ^^^^^^^^^^ help: try: `usize`
+ |
+ = help: changing this also requires a change to the return expressions in this function
+
+error: boxed return of the sized type `usize`
+ --> $DIR/unnecessary_box_returns.rs:25:20
+ |
+LL | fn bxed_usize() -> Box<usize> {
+ | ^^^^^^^^^^ help: try: `usize`
+ |
+ = help: changing this also requires a change to the return expressions in this function
+
+error: boxed return of the sized type `Foo`
+ --> $DIR/unnecessary_box_returns.rs:30:19
+ |
+LL | fn _bxed_foo() -> Box<Foo> {
+ | ^^^^^^^^ help: try: `Foo`
+ |
+ = help: changing this also requires a change to the return expressions in this function
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
index 22e9bd8bd..3b93800f8 100644
--- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
@@ -1,12 +1,12 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![warn(clippy::unnecessary_lazy_evaluations)]
#![allow(clippy::redundant_closure)]
#![allow(clippy::bind_instead_of_map)]
#![allow(clippy::map_identity)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
struct Deep(Option<usize>);
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
index 8726d84a2..2851c0c51 100644
--- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
@@ -1,12 +1,12 @@
// run-rustfix
-// aux-build: proc_macro_with_span.rs
+// aux-build: proc_macros.rs
#![warn(clippy::unnecessary_lazy_evaluations)]
#![allow(clippy::redundant_closure)]
#![allow(clippy::bind_instead_of_map)]
#![allow(clippy::map_identity)]
-extern crate proc_macro_with_span;
-use proc_macro_with_span::with_span;
+extern crate proc_macros;
+use proc_macros::with_span;
struct Deep(Option<usize>);
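
A minimal sketch (not from the diff) of the rewrite suggested by `clippy::unnecessary_lazy_evaluations`, the lint this test covers:

    #![warn(clippy::unnecessary_lazy_evaluations)]

    fn main() {
        let opt: Option<u32> = None;
        // Flagged: the closure does no work that needs to be deferred.
        let lazy = opt.unwrap_or_else(|| 7);
        // Suggested eager form.
        let eager = opt.unwrap_or(7);
        assert_eq!(lazy, eager);
    }
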
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.fixed b/src/tools/clippy/tests/ui/unnecessary_operation.fixed
index d37163570..b046694f8 100644
--- a/src/tools/clippy/tests/ui/unnecessary_operation.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.fixed
@@ -1,7 +1,12 @@
// run-rustfix
-#![feature(box_syntax)]
-#![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)]
+#![allow(
+ clippy::deref_addrof,
+ dead_code,
+ unused,
+ clippy::no_effect,
+ clippy::unnecessary_struct_initialization
+)]
#![warn(clippy::unnecessary_operation)]
struct Tuple(i32);
@@ -59,7 +64,6 @@ fn main() {
5;6;get_number();
get_number();
get_number();
- get_number();
5;get_number();
42;get_number();
assert!([42, 55].len() > get_usize());
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.rs b/src/tools/clippy/tests/ui/unnecessary_operation.rs
index a14fd4bca..9ed9679e9 100644
--- a/src/tools/clippy/tests/ui/unnecessary_operation.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.rs
@@ -1,7 +1,12 @@
// run-rustfix
-#![feature(box_syntax)]
-#![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)]
+#![allow(
+ clippy::deref_addrof,
+ dead_code,
+ unused,
+ clippy::no_effect,
+ clippy::unnecessary_struct_initialization
+)]
#![warn(clippy::unnecessary_operation)]
struct Tuple(i32);
@@ -57,7 +62,6 @@ fn main() {
*&get_number();
&get_number();
(5, 6, get_number());
- box get_number();
get_number()..;
..get_number();
5..get_number();
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.stderr b/src/tools/clippy/tests/ui/unnecessary_operation.stderr
index f66d08ecb..a1d0d9399 100644
--- a/src/tools/clippy/tests/ui/unnecessary_operation.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.stderr
@@ -1,5 +1,5 @@
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:51:5
+ --> $DIR/unnecessary_operation.rs:56:5
|
LL | Tuple(get_number());
| ^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
@@ -7,109 +7,103 @@ LL | Tuple(get_number());
= note: `-D clippy::unnecessary-operation` implied by `-D warnings`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:52:5
+ --> $DIR/unnecessary_operation.rs:57:5
|
LL | Struct { field: get_number() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:53:5
+ --> $DIR/unnecessary_operation.rs:58:5
|
LL | Struct { ..get_struct() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_struct();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:54:5
+ --> $DIR/unnecessary_operation.rs:59:5
|
LL | Enum::Tuple(get_number());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:55:5
+ --> $DIR/unnecessary_operation.rs:60:5
|
LL | Enum::Struct { field: get_number() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:56:5
+ --> $DIR/unnecessary_operation.rs:61:5
|
LL | 5 + get_number();
| ^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:57:5
+ --> $DIR/unnecessary_operation.rs:62:5
|
LL | *&get_number();
| ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:58:5
+ --> $DIR/unnecessary_operation.rs:63:5
|
LL | &get_number();
| ^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:59:5
+ --> $DIR/unnecessary_operation.rs:64:5
|
LL | (5, 6, get_number());
| ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;6;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:60:5
- |
-LL | box get_number();
- | ^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
-
-error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:61:5
+ --> $DIR/unnecessary_operation.rs:65:5
|
LL | get_number()..;
| ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:62:5
+ --> $DIR/unnecessary_operation.rs:66:5
|
LL | ..get_number();
| ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:63:5
+ --> $DIR/unnecessary_operation.rs:67:5
|
LL | 5..get_number();
| ^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:64:5
+ --> $DIR/unnecessary_operation.rs:68:5
|
LL | [42, get_number()];
| ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:65:5
+ --> $DIR/unnecessary_operation.rs:69:5
|
LL | [42, 55][get_usize()];
| ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42, 55].len() > get_usize());`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:66:5
+ --> $DIR/unnecessary_operation.rs:70:5
|
LL | (42, get_number()).1;
| ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:67:5
+ --> $DIR/unnecessary_operation.rs:71:5
|
LL | [get_number(); 55];
| ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:68:5
+ --> $DIR/unnecessary_operation.rs:72:5
|
LL | [42; 55][get_usize()];
| ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42; 55].len() > get_usize());`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:69:5
+ --> $DIR/unnecessary_operation.rs:73:5
|
LL | / {
LL | | get_number()
@@ -117,12 +111,12 @@ LL | | };
| |______^ help: statement can be reduced to: `get_number();`
error: unnecessary operation
- --> $DIR/unnecessary_operation.rs:72:5
+ --> $DIR/unnecessary_operation.rs:76:5
|
LL | / FooString {
LL | | s: String::from("blah"),
LL | | };
| |______^ help: statement can be reduced to: `String::from("blah");`
-error: aborting due to 20 previous errors
+error: aborting due to 19 previous errors
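
A standalone sketch (not from the diff) of the reductions `clippy::unnecessary_operation` suggests; the `box get_number()` case disappears above because the `box_syntax` feature was removed from the language:

    #![warn(clippy::unnecessary_operation)]
    #![allow(clippy::no_effect)]

    fn get_number() -> i32 {
        42
    }

    fn main() {
        // Flagged: the tuple is built only to be thrown away.
        (5, 6, get_number());
        // Suggested reduction: keep just the side effects.
        5;
        6;
        get_number();
    }
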
diff --git a/src/tools/clippy/tests/ui/unnecessary_struct_initialization.fixed b/src/tools/clippy/tests/ui/unnecessary_struct_initialization.fixed
new file mode 100644
index 000000000..b47129e4a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_struct_initialization.fixed
@@ -0,0 +1,73 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::unnecessary_struct_initialization)]
+
+struct S {
+ f: String,
+}
+
+#[derive(Clone, Copy)]
+struct T {
+ f: u32,
+}
+
+struct U {
+ f: u32,
+}
+
+impl Clone for U {
+ fn clone(&self) -> Self {
+ // Do not lint: `Self` does not implement `Copy`
+ Self { ..*self }
+ }
+}
+
+#[derive(Copy)]
+struct V {
+ f: u32,
+}
+
+impl Clone for V {
+ fn clone(&self) -> Self {
+ // Lint: `Self` implements `Copy`
+ *self
+ }
+}
+
+fn main() {
+ // Should lint: `a` would be consumed anyway
+ let a = S { f: String::from("foo") };
+ let mut b = a;
+
+ // Should lint: `b` would be consumed, and is mutable
+ let c = &mut b;
+
+ // Should not lint as `d` is not mutable
+ let d = S { f: String::from("foo") };
+ let e = &mut S { ..d };
+
+ // Should lint as `f` would be consumed anyway
+ let f = S { f: String::from("foo") };
+ let g = &f;
+
+ // Should lint: the result of an expression is mutable
+ let h = &mut *Box::new(S { f: String::from("foo") });
+
+ // Should not lint: `m` would be both alive and borrowed
+ let m = T { f: 17 };
+ let n = &T { ..m };
+
+ // Should not lint: `m` should not be modified
+ let o = &mut T { ..m };
+ o.f = 32;
+ assert_eq!(m.f, 17);
+
+ // Should not lint: `m` should not be modified
+ let o = &mut T { ..m } as *mut T;
+ unsafe { &mut *o }.f = 32;
+ assert_eq!(m.f, 17);
+
+ // Should lint: the result of an expression is mutable and temporary
+ let p = &mut *Box::new(T { f: 5 });
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_struct_initialization.rs b/src/tools/clippy/tests/ui/unnecessary_struct_initialization.rs
new file mode 100644
index 000000000..63b11c626
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_struct_initialization.rs
@@ -0,0 +1,77 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::unnecessary_struct_initialization)]
+
+struct S {
+ f: String,
+}
+
+#[derive(Clone, Copy)]
+struct T {
+ f: u32,
+}
+
+struct U {
+ f: u32,
+}
+
+impl Clone for U {
+ fn clone(&self) -> Self {
+ // Do not lint: `Self` does not implement `Copy`
+ Self { ..*self }
+ }
+}
+
+#[derive(Copy)]
+struct V {
+ f: u32,
+}
+
+impl Clone for V {
+ fn clone(&self) -> Self {
+ // Lint: `Self` implements `Copy`
+ Self { ..*self }
+ }
+}
+
+fn main() {
+ // Should lint: `a` would be consumed anyway
+ let a = S { f: String::from("foo") };
+ let mut b = S { ..a };
+
+ // Should lint: `b` would be consumed, and is mutable
+ let c = &mut S { ..b };
+
+ // Should not lint as `d` is not mutable
+ let d = S { f: String::from("foo") };
+ let e = &mut S { ..d };
+
+ // Should lint as `f` would be consumed anyway
+ let f = S { f: String::from("foo") };
+ let g = &S { ..f };
+
+ // Should lint: the result of an expression is mutable
+ let h = &mut S {
+ ..*Box::new(S { f: String::from("foo") })
+ };
+
+ // Should not lint: `m` would be both alive and borrowed
+ let m = T { f: 17 };
+ let n = &T { ..m };
+
+ // Should not lint: `m` should not be modified
+ let o = &mut T { ..m };
+ o.f = 32;
+ assert_eq!(m.f, 17);
+
+ // Should not lint: `m` should not be modified
+ let o = &mut T { ..m } as *mut T;
+ unsafe { &mut *o }.f = 32;
+ assert_eq!(m.f, 17);
+
+ // Should lint: the result of an expression is mutable and temporary
+ let p = &mut T {
+ ..*Box::new(T { f: 5 })
+ };
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_struct_initialization.stderr b/src/tools/clippy/tests/ui/unnecessary_struct_initialization.stderr
new file mode 100644
index 000000000..ca4970577
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_struct_initialization.stderr
@@ -0,0 +1,46 @@
+error: unnecessary struct building
+ --> $DIR/unnecessary_struct_initialization.rs:34:9
+ |
+LL | Self { ..*self }
+ | ^^^^^^^^^^^^^^^^ help: replace with: `*self`
+ |
+ = note: `-D clippy::unnecessary-struct-initialization` implied by `-D warnings`
+
+error: unnecessary struct building
+ --> $DIR/unnecessary_struct_initialization.rs:41:17
+ |
+LL | let mut b = S { ..a };
+ | ^^^^^^^^^ help: replace with: `a`
+
+error: unnecessary struct building
+ --> $DIR/unnecessary_struct_initialization.rs:44:18
+ |
+LL | let c = &mut S { ..b };
+ | ^^^^^^^^^ help: replace with: `b`
+
+error: unnecessary struct building
+ --> $DIR/unnecessary_struct_initialization.rs:52:14
+ |
+LL | let g = &S { ..f };
+ | ^^^^^^^^^ help: replace with: `f`
+
+error: unnecessary struct building
+ --> $DIR/unnecessary_struct_initialization.rs:55:18
+ |
+LL | let h = &mut S {
+ | __________________^
+LL | | ..*Box::new(S { f: String::from("foo") })
+LL | | };
+ | |_____^ help: replace with: `*Box::new(S { f: String::from("foo") })`
+
+error: unnecessary struct building
+ --> $DIR/unnecessary_struct_initialization.rs:74:18
+ |
+LL | let p = &mut T {
+ | __________________^
+LL | | ..*Box::new(T { f: 5 })
+LL | | };
+ | |_____^ help: replace with: `*Box::new(T { f: 5 })`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.rs b/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.rs
index c160e31af..431093ab3 100644
--- a/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.rs
@@ -1,10 +1,10 @@
-// aux-build:doc_unsafe_macros.rs
+// aux-build:proc_macros.rs
#![allow(clippy::let_unit_value)]
#![warn(clippy::unnecessary_safety_doc)]
-#[macro_use]
-extern crate doc_unsafe_macros;
+extern crate proc_macros;
+use proc_macros::external;
/// This has no safety section, and does not need one either
pub fn destroy_the_planet() {
@@ -129,7 +129,11 @@ macro_rules! very_safe {
very_safe!();
// we don't lint code from external macros
-undocd_safe!();
+external!(
+ pub fn vey_oy() {
+ unimplemented!();
+ }
+);
fn main() {}
diff --git a/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.stderr b/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.stderr
index 72898c93f..b0f20fdac 100644
--- a/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_unsafety_doc.stderr
@@ -42,7 +42,7 @@ LL | very_safe!();
= note: this error originates in the macro `very_safe` (in Nightly builds, run with -Z macro-backtrace for more info)
error: docs for safe trait have unnecessary `# Safety` section
- --> $DIR/unnecessary_unsafety_doc.rs:147:1
+ --> $DIR/unnecessary_unsafety_doc.rs:151:1
|
LL | pub trait DocumentedSafeTraitWithImplementationHeader {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
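
For reference, a minimal sketch (not from the diff) of what `clippy::unnecessary_safety_doc` flags:

    #![warn(clippy::unnecessary_safety_doc)]

    /// Flagged: a safe function does not need a `# Safety` section.
    ///
    /// # Safety
    ///
    /// Nothing here can cause undefined behavior.
    pub fn totally_safe() {}

    fn main() {
        totally_safe();
    }
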
diff --git a/src/tools/clippy/tests/ui/unused_format_specs.fixed b/src/tools/clippy/tests/ui/unused_format_specs.fixed
deleted file mode 100644
index 2930722b4..000000000
--- a/src/tools/clippy/tests/ui/unused_format_specs.fixed
+++ /dev/null
@@ -1,18 +0,0 @@
-// run-rustfix
-
-#![warn(clippy::unused_format_specs)]
-#![allow(unused)]
-
-fn main() {
- let f = 1.0f64;
- println!("{}", 1.0);
- println!("{f} {f:?}");
-
- println!("{}", 1);
-}
-
-fn should_not_lint() {
- let f = 1.0f64;
- println!("{:.1}", 1.0);
- println!("{f:.w$} {f:.*?}", 3, w = 2);
-}
diff --git a/src/tools/clippy/tests/ui/unused_format_specs.rs b/src/tools/clippy/tests/ui/unused_format_specs.rs
deleted file mode 100644
index ee192a000..000000000
--- a/src/tools/clippy/tests/ui/unused_format_specs.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-// run-rustfix
-
-#![warn(clippy::unused_format_specs)]
-#![allow(unused)]
-
-fn main() {
- let f = 1.0f64;
- println!("{:.}", 1.0);
- println!("{f:.} {f:.?}");
-
- println!("{:.}", 1);
-}
-
-fn should_not_lint() {
- let f = 1.0f64;
- println!("{:.1}", 1.0);
- println!("{f:.w$} {f:.*?}", 3, w = 2);
-}
diff --git a/src/tools/clippy/tests/ui/unused_format_specs.stderr b/src/tools/clippy/tests/ui/unused_format_specs.stderr
deleted file mode 100644
index 7231c17e7..000000000
--- a/src/tools/clippy/tests/ui/unused_format_specs.stderr
+++ /dev/null
@@ -1,54 +0,0 @@
-error: empty precision specifier has no effect
- --> $DIR/unused_format_specs.rs:8:17
- |
-LL | println!("{:.}", 1.0);
- | ^
- |
- = note: a precision specifier is not required to format floats
- = note: `-D clippy::unused-format-specs` implied by `-D warnings`
-help: remove the `.`
- |
-LL - println!("{:.}", 1.0);
-LL + println!("{}", 1.0);
- |
-
-error: empty precision specifier has no effect
- --> $DIR/unused_format_specs.rs:9:18
- |
-LL | println!("{f:.} {f:.?}");
- | ^
- |
- = note: a precision specifier is not required to format floats
-help: remove the `.`
- |
-LL - println!("{f:.} {f:.?}");
-LL + println!("{f} {f:.?}");
- |
-
-error: empty precision specifier has no effect
- --> $DIR/unused_format_specs.rs:9:24
- |
-LL | println!("{f:.} {f:.?}");
- | ^
- |
- = note: a precision specifier is not required to format floats
-help: remove the `.`
- |
-LL - println!("{f:.} {f:.?}");
-LL + println!("{f:.} {f:?}");
- |
-
-error: empty precision specifier has no effect
- --> $DIR/unused_format_specs.rs:11:17
- |
-LL | println!("{:.}", 1);
- | ^
- |
-help: remove the `.`
- |
-LL - println!("{:.}", 1);
-LL + println!("{}", 1);
- |
-
-error: aborting due to 4 previous errors
-
diff --git a/src/tools/clippy/tests/ui/unused_format_specs_unfixable.stderr b/src/tools/clippy/tests/ui/unused_format_specs_unfixable.stderr
index 9f1890282..cb7156b6b 100644
--- a/src/tools/clippy/tests/ui/unused_format_specs_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/unused_format_specs_unfixable.stderr
@@ -37,11 +37,7 @@ error: format specifiers have no effect on `format_args!()`
LL | println!("{:5}.", format_args_from_macro!());
| ^^^^
|
-help: for the width to apply consider using `format!()`
- --> $DIR/unused_format_specs_unfixable.rs:16:17
- |
-LL | println!("{:5}.", format_args_from_macro!());
- | ^
+ = help: for the width to apply consider using `format!()`
help: if the current behavior is intentional, remove the format specifiers
|
LL - println!("{:5}.", format_args_from_macro!());
@@ -54,11 +50,7 @@ error: format specifiers have no effect on `format_args!()`
LL | println!("{args:5}");
| ^^^^^^^^
|
-help: for the width to apply consider using `format!()`
- --> $DIR/unused_format_specs_unfixable.rs:19:21
- |
-LL | println!("{args:5}");
- | ^
+ = help: for the width to apply consider using `format!()`
help: if the current behavior is intentional, remove the format specifiers
|
LL - println!("{args:5}");
diff --git a/src/tools/clippy/tests/ui/use_self.fixed b/src/tools/clippy/tests/ui/use_self.fixed
index 0a6166571..3ac621731 100644
--- a/src/tools/clippy/tests/ui/use_self.fixed
+++ b/src/tools/clippy/tests/ui/use_self.fixed
@@ -647,3 +647,13 @@ fn msrv_1_37() {
}
}
}
+
+mod issue_10371 {
+ struct Val<const V: i32> {}
+
+ impl<const V: i32> From<Val<V>> for i32 {
+ fn from(_: Val<V>) -> Self {
+ todo!()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/use_self.rs b/src/tools/clippy/tests/ui/use_self.rs
index 39c2b431f..9dc5d1e3f 100644
--- a/src/tools/clippy/tests/ui/use_self.rs
+++ b/src/tools/clippy/tests/ui/use_self.rs
@@ -647,3 +647,13 @@ fn msrv_1_37() {
}
}
}
+
+mod issue_10371 {
+ struct Val<const V: i32> {}
+
+ impl<const V: i32> From<Val<V>> for i32 {
+ fn from(_: Val<V>) -> Self {
+ todo!()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/workspace.rs b/src/tools/clippy/tests/workspace.rs
index 95325e060..c9cbc5054 100644
--- a/src/tools/clippy/tests/workspace.rs
+++ b/src/tools/clippy/tests/workspace.rs
@@ -1,4 +1,4 @@
-#![feature(once_cell)]
+#![feature(lazy_cell)]
use std::path::PathBuf;
use std::process::Command;
diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml
index 6f50ef932..3f8f6a7b9 100644
--- a/src/tools/clippy/triagebot.toml
+++ b/src/tools/clippy/triagebot.toml
@@ -17,6 +17,7 @@ contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIB
[assign.owners]
"/.github" = ["@flip1995"]
+"/util/gh-pages" = ["@xFrednet"]
"*" = [
"@flip1995",
"@Manishearth",
diff --git a/src/tools/collect-license-metadata/src/licenses.rs b/src/tools/collect-license-metadata/src/licenses.rs
index 1c95b1bc8..2855069db 100644
--- a/src/tools/collect-license-metadata/src/licenses.rs
+++ b/src/tools/collect-license-metadata/src/licenses.rs
@@ -42,6 +42,7 @@ pub(crate) struct License {
impl License {
fn simplify(&mut self) {
self.remove_copyright_prefixes();
+ self.remove_trailing_dots();
self.copyright.sort();
self.copyright.dedup();
}
@@ -62,4 +63,12 @@ impl License {
*copyright = stripped.into();
}
}
+
+ fn remove_trailing_dots(&mut self) {
+ for copyright in &mut self.copyright {
+ if copyright.ends_with('.') {
+ *copyright = copyright.trim_end_matches('.').to_string();
+ }
+ }
+ }
}
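
A standalone sketch (assumed shape, not the tool's actual types) of the normalization the new `remove_trailing_dots` pass adds ahead of the sort/dedup in `License::simplify`: trailing periods are stripped so otherwise-identical copyright lines collapse into one entry. The sample strings are illustrative only.

    fn remove_trailing_dots(copyrights: &mut Vec<String>) {
        for copyright in copyrights.iter_mut() {
            if copyright.ends_with('.') {
                *copyright = copyright.trim_end_matches('.').to_string();
            }
        }
    }

    fn main() {
        let mut lines = vec![
            "Copyright 2020 Example Org.".to_string(),
            "Copyright 2020 Example Org".to_string(),
        ];
        remove_trailing_dots(&mut lines);
        lines.sort();
        lines.dedup();
        assert_eq!(lines, vec!["Copyright 2020 Example Org".to_string()]);
    }
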
diff --git a/src/tools/collect-license-metadata/src/path_tree.rs b/src/tools/collect-license-metadata/src/path_tree.rs
index 133ff6837..68b6cef64 100644
--- a/src/tools/collect-license-metadata/src/path_tree.rs
+++ b/src/tools/collect-license-metadata/src/path_tree.rs
@@ -10,10 +10,10 @@ use std::path::{Path, PathBuf};
#[derive(serde::Serialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node<L> {
- Root { childs: Vec<Node<L>> },
- Directory { name: PathBuf, childs: Vec<Node<L>>, license: Option<L> },
+ Root { children: Vec<Node<L>> },
+ Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
File { name: PathBuf, license: L },
- FileGroup { names: Vec<PathBuf>, license: L },
+ Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
Empty,
}
@@ -22,7 +22,7 @@ impl Node<LicenseId> {
self.merge_directories();
self.collapse_in_licensed_directories();
self.merge_directory_licenses();
- self.merge_file_groups();
+ self.merge_groups();
self.remove_empty();
}
@@ -48,14 +48,14 @@ impl Node<LicenseId> {
/// ```
fn merge_directories(&mut self) {
match self {
- Node::Root { childs } | Node::Directory { childs, license: None, .. } => {
+ Node::Root { children } | Node::Directory { children, license: None, .. } => {
let mut directories = BTreeMap::new();
let mut files = Vec::new();
- for child in childs.drain(..) {
+ for child in children.drain(..) {
match child {
- Node::Directory { name, mut childs, license: None } => {
- directories.entry(name).or_insert_with(Vec::new).append(&mut childs);
+ Node::Directory { name, mut children, license: None } => {
+ directories.entry(name).or_insert_with(Vec::new).append(&mut children);
}
file @ Node::File { .. } => {
files.push(file);
@@ -64,8 +64,8 @@ impl Node<LicenseId> {
Node::Root { .. } => {
panic!("can't have a root inside another element");
}
- Node::FileGroup { .. } => {
- panic!("FileGroup should not be present at this stage");
+ Node::Group { .. } => {
+ panic!("Group should not be present at this stage");
}
Node::Directory { license: Some(_), .. } => {
panic!("license should not be set at this stage");
@@ -73,21 +73,21 @@ impl Node<LicenseId> {
}
}
- childs.extend(directories.into_iter().map(|(name, childs)| Node::Directory {
+ children.extend(directories.into_iter().map(|(name, children)| Node::Directory {
name,
- childs,
+ children,
license: None,
}));
- childs.append(&mut files);
+ children.append(&mut files);
- for child in &mut *childs {
+ for child in &mut *children {
child.merge_directories();
}
}
Node::Empty => {}
Node::File { .. } => {}
- Node::FileGroup { .. } => {
- panic!("FileGroup should not be present at this stage");
+ Node::Group { .. } => {
+ panic!("Group should not be present at this stage");
}
Node::Directory { license: Some(_), .. } => {
panic!("license should not be set at this stage");
@@ -105,13 +105,13 @@ impl Node<LicenseId> {
/// our inclusion of LLVM.
fn collapse_in_licensed_directories(&mut self) {
match self {
- Node::Directory { childs, license, .. } => {
- for child in &mut *childs {
+ Node::Directory { children, license, .. } => {
+ for child in &mut *children {
child.collapse_in_licensed_directories();
}
let mut licenses_count = BTreeMap::new();
- for child in &*childs {
+ for child in &*children {
let Some(license) = child.license() else { continue };
*licenses_count.entry(license).or_insert(0) += 1;
}
@@ -122,51 +122,51 @@ impl Node<LicenseId> {
.map(|(license, _)| license);
if let Some(most_popular_license) = most_popular_license {
- childs.retain(|child| child.license() != Some(most_popular_license));
+ children.retain(|child| child.license() != Some(most_popular_license));
*license = Some(most_popular_license);
}
}
- Node::Root { childs } => {
- for child in &mut *childs {
+ Node::Root { children } => {
+ for child in &mut *children {
child.collapse_in_licensed_directories();
}
}
Node::File { .. } => {}
- Node::FileGroup { .. } => {}
+ Node::Group { .. } => panic!("group should not be present at this stage"),
Node::Empty => {}
}
}
/// Reduce the depth of the tree by merging subdirectories with the same license as their
- /// parent directory into their parent, and adjusting the paths of the childs accordingly.
+ /// parent directory into their parent, and adjusting the paths of the children accordingly.
fn merge_directory_licenses(&mut self) {
match self {
- Node::Root { childs } => {
- for child in &mut *childs {
+ Node::Root { children } => {
+ for child in &mut *children {
child.merge_directory_licenses();
}
}
- Node::Directory { childs, license, .. } => {
+ Node::Directory { children, license, .. } => {
let mut to_add = Vec::new();
- for child in &mut *childs {
+ for child in &mut *children {
child.merge_directory_licenses();
let Node::Directory {
name: child_name,
- childs: child_childs,
+ children: child_children,
license: child_license,
} = child else { continue };
if child_license != license {
continue;
}
- for mut child_child in child_childs.drain(..) {
+ for mut child_child in child_children.drain(..) {
match &mut child_child {
Node::Root { .. } => {
panic!("can't have a root inside another element");
}
- Node::FileGroup { .. } => {
- panic!("FileGroup should not be present at this stage");
+ Node::Group { .. } => {
+ panic!("Group should not be present at this stage");
}
Node::Directory { name: child_child_name, .. } => {
*child_child_name = child_name.join(&child_child_name);
@@ -181,42 +181,78 @@ impl Node<LicenseId> {
*child = Node::Empty;
}
- childs.append(&mut to_add);
+ children.append(&mut to_add);
}
Node::Empty => {}
Node::File { .. } => {}
- Node::FileGroup { .. } => {}
+ Node::Group { .. } => panic!("Group should not be present at this stage"),
}
}
/// This pass groups multiple files in a directory with the same license into a single
- /// "FileGroup", so that the license of all those files can be reported as a group.
+ /// "Group", so that the license of all those files can be reported as a group.
+ ///
+ /// This also merges directories *without exceptions*.
///
/// Crucially this pass runs after collapse_in_licensed_directories, so the most common license
/// will already be marked as the directory's license and won't be turned into a group.
- fn merge_file_groups(&mut self) {
+ fn merge_groups(&mut self) {
+ #[derive(Default)]
+ struct Grouped {
+ files: Vec<PathBuf>,
+ directories: Vec<PathBuf>,
+ }
match self {
- Node::Root { childs } | Node::Directory { childs, .. } => {
- let mut grouped = BTreeMap::new();
+ Node::Root { children } | Node::Directory { children, .. } => {
+ let mut grouped: BTreeMap<LicenseId, Grouped> = BTreeMap::new();
- for child in &mut *childs {
- child.merge_file_groups();
- if let Node::File { name, license } = child {
- grouped.entry(*license).or_insert_with(Vec::new).push(name.clone());
- *child = Node::Empty;
+ for child in &mut *children {
+ child.merge_groups();
+ match child {
+ Node::Directory { name, children, license: Some(license) } => {
+ if children.is_empty() {
+ grouped
+ .entry(*license)
+ .or_insert_with(Grouped::default)
+ .directories
+ .push(name.clone());
+ *child = Node::Empty;
+ }
+ }
+ Node::File { name, license } => {
+ grouped
+ .entry(*license)
+ .or_insert_with(Grouped::default)
+ .files
+ .push(name.clone());
+ *child = Node::Empty;
+ }
+ _ => {}
}
}
- for (license, mut names) in grouped.into_iter() {
- if names.len() == 1 {
- childs.push(Node::File { license, name: names.pop().unwrap() });
+ for (license, mut grouped) in grouped.into_iter() {
+ if grouped.files.len() + grouped.directories.len() <= 1 {
+ if let Some(name) = grouped.files.pop() {
+ children.push(Node::File { license, name });
+ } else if let Some(name) = grouped.directories.pop() {
+ children.push(Node::Directory {
+ name,
+ children: Vec::new(),
+ license: Some(license),
+ });
+ }
} else {
- childs.push(Node::FileGroup { license, names });
+ children.push(Node::Group {
+ license,
+ files: grouped.files,
+ directories: grouped.directories,
+ });
}
}
}
Node::File { .. } => {}
- Node::FileGroup { .. } => panic!("FileGroup should not be present at this stage"),
+ Node::Group { .. } => panic!("FileGroup should not be present at this stage"),
Node::Empty => {}
}
}
@@ -225,13 +261,13 @@ impl Node<LicenseId> {
/// sure to remove them from the tree.
fn remove_empty(&mut self) {
match self {
- Node::Root { childs } | Node::Directory { childs, .. } => {
- for child in &mut *childs {
+ Node::Root { children } | Node::Directory { children, .. } => {
+ for child in &mut *children {
child.remove_empty();
}
- childs.retain(|child| !matches!(child, Node::Empty));
+ children.retain(|child| !matches!(child, Node::Empty));
}
- Node::FileGroup { .. } => {}
+ Node::Group { .. } => {}
Node::File { .. } => {}
Node::Empty => {}
}
@@ -239,7 +275,7 @@ impl Node<LicenseId> {
fn license(&self) -> Option<LicenseId> {
match self {
- Node::Directory { childs, license: Some(license), .. } if childs.is_empty() => {
+ Node::Directory { children, license: Some(license), .. } if children.is_empty() => {
Some(*license)
}
Node::File { license, .. } => Some(*license),
@@ -249,7 +285,7 @@ impl Node<LicenseId> {
}
pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
- let mut childs = Vec::new();
+ let mut children = Vec::new();
// Ensure reproducibility of all future steps.
input.sort();
@@ -259,15 +295,15 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
node = Node::Directory {
name: component.as_os_str().into(),
- childs: vec![node],
+ children: vec![node],
license: None,
};
}
- childs.push(node);
+ children.push(node);
}
- Node::Root { childs }
+ Node::Root { children }
}
/// Convert a `Node<LicenseId>` into a `Node<&License>`, expanding all interned license IDs with a
@@ -277,17 +313,23 @@ pub(crate) fn expand_interned_licenses(
interner: &LicensesInterner,
) -> Node<&License> {
match node {
- Node::Root { childs } => Node::Root {
- childs: childs.into_iter().map(|child| strip_interning(child, interner)).collect(),
+ Node::Root { children } => Node::Root {
+ children: children
+ .into_iter()
+ .map(|child| expand_interned_licenses(child, interner))
+ .collect(),
},
- Node::Directory { name, childs, license } => Node::Directory {
- childs: childs.into_iter().map(|child| strip_interning(child, interner)).collect(),
+ Node::Directory { name, children, license } => Node::Directory {
+ children: children
+ .into_iter()
+ .map(|child| expand_interned_licenses(child, interner))
+ .collect(),
license: license.map(|license| interner.resolve(license)),
name,
},
Node::File { name, license } => Node::File { name, license: interner.resolve(license) },
- Node::FileGroup { names, license } => {
- Node::FileGroup { names, license: interner.resolve(license) }
+ Node::Group { files, directories, license } => {
+ Node::Group { files, directories, license: interner.resolve(license) }
}
Node::Empty => Node::Empty,
}
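For reference, a minimal standalone sketch (simplified: plain string licenses instead of interned ids, and a hypothetical free function) of how the renamed Group variant absorbs same-licensed files and empty licensed directories, in the spirit of the merge_groups pass above:

use std::collections::BTreeMap;
use std::path::PathBuf;

// Hypothetical, simplified tree: plain string licenses instead of interned ids.
#[derive(Debug)]
enum Node {
    Directory { name: PathBuf, children: Vec<Node>, license: Option<String> },
    File { name: PathBuf, license: String },
    Group { license: String, files: Vec<PathBuf>, directories: Vec<PathBuf> },
    Empty,
}

// Fold same-licensed files and empty licensed directories into one Group per license.
fn merge_groups(children: &mut Vec<Node>) {
    let mut grouped: BTreeMap<String, (Vec<PathBuf>, Vec<PathBuf>)> = BTreeMap::new();
    for child in children.iter_mut() {
        match child {
            Node::File { name, license } => {
                grouped.entry(license.clone()).or_default().0.push(name.clone());
                *child = Node::Empty;
            }
            Node::Directory { name, children: dir_children, license: Some(license) }
                if dir_children.is_empty() =>
            {
                grouped.entry(license.clone()).or_default().1.push(name.clone());
                *child = Node::Empty;
            }
            _ => {}
        }
    }
    children.retain(|child| !matches!(child, Node::Empty));
    for (license, (files, directories)) in grouped {
        children.push(Node::Group { license, files, directories });
    }
}

fn main() {
    let mut children = vec![
        Node::File { name: "a.rs".into(), license: "MIT".into() },
        Node::File { name: "b.rs".into(), license: "MIT".into() },
        Node::Directory { name: "vendor".into(), children: Vec::new(), license: Some("MIT".into()) },
    ];
    merge_groups(&mut children);
    println!("{children:?}"); // a single Group listing both files and the directory
}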
diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml
index 0db043a4f..85fd6523c 100644
--- a/src/tools/compiletest/Cargo.toml
+++ b/src/tools/compiletest/Cargo.toml
@@ -26,4 +26,10 @@ libc = "0.2"
[target.'cfg(windows)'.dependencies]
miow = "0.5"
-winapi = { version = "0.3", features = ["winerror"] }
+
+[target.'cfg(windows)'.dependencies.windows]
+version = "0.46.0"
+features = [
+ "Win32_Foundation",
+ "Win32_System_Diagnostics_Debug",
+]
diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs
index 7fe2e6257..d2f494942 100644
--- a/src/tools/compiletest/src/common.rs
+++ b/src/tools/compiletest/src/common.rs
@@ -8,107 +8,84 @@ use std::process::Command;
use std::str::FromStr;
use crate::util::{add_dylib_path, PathBufExt};
-use lazycell::LazyCell;
-use test::ColorConfig;
-
-#[derive(Clone, Copy, PartialEq, Debug)]
-pub enum Mode {
- RunPassValgrind,
- Pretty,
- DebugInfo,
- Codegen,
- Rustdoc,
- RustdocJson,
- CodegenUnits,
- Incremental,
- RunMake,
- Ui,
- JsDocTest,
- MirOpt,
- Assembly,
-}
+use lazycell::AtomicLazyCell;
+use serde::de::{Deserialize, Deserializer, Error as _};
+use std::collections::{HashMap, HashSet};
+use test::{ColorConfig, OutputFormat};
+
+macro_rules! string_enum {
+ ($(#[$meta:meta])* $vis:vis enum $name:ident { $($variant:ident => $repr:expr,)* }) => {
+ $(#[$meta])*
+ $vis enum $name {
+ $($variant,)*
+ }
-impl Mode {
- pub fn disambiguator(self) -> &'static str {
- // Pretty-printing tests could run concurrently, and if they do,
- // they need to keep their output segregated.
- match self {
- Pretty => ".pretty",
- _ => "",
+ impl $name {
+ $vis const VARIANTS: &'static [Self] = &[$(Self::$variant,)*];
+ $vis const STR_VARIANTS: &'static [&'static str] = &[$(Self::$variant.to_str(),)*];
+
+ $vis const fn to_str(&self) -> &'static str {
+ match self {
+ $(Self::$variant => $repr,)*
+ }
+ }
}
- }
-}
-impl FromStr for Mode {
- type Err = ();
- fn from_str(s: &str) -> Result<Mode, ()> {
- match s {
- "run-pass-valgrind" => Ok(RunPassValgrind),
- "pretty" => Ok(Pretty),
- "debuginfo" => Ok(DebugInfo),
- "codegen" => Ok(Codegen),
- "rustdoc" => Ok(Rustdoc),
- "rustdoc-json" => Ok(RustdocJson),
- "codegen-units" => Ok(CodegenUnits),
- "incremental" => Ok(Incremental),
- "run-make" => Ok(RunMake),
- "ui" => Ok(Ui),
- "js-doc-test" => Ok(JsDocTest),
- "mir-opt" => Ok(MirOpt),
- "assembly" => Ok(Assembly),
- _ => Err(()),
+ impl fmt::Display for $name {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.to_str(), f)
+ }
}
- }
-}
-impl fmt::Display for Mode {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let s = match *self {
- RunPassValgrind => "run-pass-valgrind",
- Pretty => "pretty",
- DebugInfo => "debuginfo",
- Codegen => "codegen",
- Rustdoc => "rustdoc",
- RustdocJson => "rustdoc-json",
- CodegenUnits => "codegen-units",
- Incremental => "incremental",
- RunMake => "run-make",
- Ui => "ui",
- JsDocTest => "js-doc-test",
- MirOpt => "mir-opt",
- Assembly => "assembly",
- };
- fmt::Display::fmt(s, f)
+ impl FromStr for $name {
+ type Err = ();
+
+ fn from_str(s: &str) -> Result<Self, ()> {
+ match s {
+ $($repr => Ok(Self::$variant),)*
+ _ => Err(()),
+ }
+ }
+ }
}
}
-#[derive(Clone, Copy, PartialEq, Debug, Hash)]
-pub enum PassMode {
- Check,
- Build,
- Run,
+string_enum! {
+ #[derive(Clone, Copy, PartialEq, Debug)]
+ pub enum Mode {
+ RunPassValgrind => "run-pass-valgrind",
+ Pretty => "pretty",
+ DebugInfo => "debuginfo",
+ Codegen => "codegen",
+ Rustdoc => "rustdoc",
+ RustdocJson => "rustdoc-json",
+ CodegenUnits => "codegen-units",
+ Incremental => "incremental",
+ RunMake => "run-make",
+ Ui => "ui",
+ JsDocTest => "js-doc-test",
+ MirOpt => "mir-opt",
+ Assembly => "assembly",
+ }
}
-impl FromStr for PassMode {
- type Err = ();
- fn from_str(s: &str) -> Result<Self, ()> {
- match s {
- "check" => Ok(PassMode::Check),
- "build" => Ok(PassMode::Build),
- "run" => Ok(PassMode::Run),
- _ => Err(()),
+impl Mode {
+ pub fn disambiguator(self) -> &'static str {
+ // Pretty-printing tests could run concurrently, and if they do,
+ // they need to keep their output segregated.
+ match self {
+ Pretty => ".pretty",
+ _ => "",
}
}
}
-impl fmt::Display for PassMode {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let s = match *self {
- PassMode::Check => "check",
- PassMode::Build => "build",
- PassMode::Run => "run",
- };
- fmt::Display::fmt(s, f)
+string_enum! {
+ #[derive(Clone, Copy, PartialEq, Debug, Hash)]
+ pub enum PassMode {
+ Check => "check",
+ Build => "build",
+ Run => "run",
}
}
@@ -119,63 +96,30 @@ pub enum FailMode {
Run,
}
-#[derive(Clone, Debug, PartialEq)]
-pub enum CompareMode {
- Polonius,
- Chalk,
- NextSolver,
- SplitDwarf,
- SplitDwarfSingle,
-}
-
-impl CompareMode {
- pub(crate) fn to_str(&self) -> &'static str {
- match *self {
- CompareMode::Polonius => "polonius",
- CompareMode::Chalk => "chalk",
- CompareMode::NextSolver => "next-solver",
- CompareMode::SplitDwarf => "split-dwarf",
- CompareMode::SplitDwarfSingle => "split-dwarf-single",
- }
- }
-
- pub fn parse(s: String) -> CompareMode {
- match s.as_str() {
- "polonius" => CompareMode::Polonius,
- "chalk" => CompareMode::Chalk,
- "next-solver" => CompareMode::NextSolver,
- "split-dwarf" => CompareMode::SplitDwarf,
- "split-dwarf-single" => CompareMode::SplitDwarfSingle,
- x => panic!("unknown --compare-mode option: {}", x),
- }
- }
-}
-
-#[derive(Clone, Copy, Debug, PartialEq)]
-pub enum Debugger {
- Cdb,
- Gdb,
- Lldb,
-}
-
-impl Debugger {
- fn to_str(&self) -> &'static str {
- match self {
- Debugger::Cdb => "cdb",
- Debugger::Gdb => "gdb",
- Debugger::Lldb => "lldb",
- }
+string_enum! {
+ #[derive(Clone, Debug, PartialEq)]
+ pub enum CompareMode {
+ Polonius => "polonius",
+ Chalk => "chalk",
+ NextSolver => "next-solver",
+ SplitDwarf => "split-dwarf",
+ SplitDwarfSingle => "split-dwarf-single",
}
}
-impl fmt::Display for Debugger {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(self.to_str(), f)
+string_enum! {
+ #[derive(Clone, Copy, Debug, PartialEq)]
+ pub enum Debugger {
+ Cdb => "cdb",
+ Gdb => "gdb",
+ Lldb => "lldb",
}
}
-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq, Default, serde::Deserialize)]
+#[serde(rename_all = "kebab-case")]
pub enum PanicStrategy {
+ #[default]
Unwind,
Abort,
}
@@ -337,7 +281,7 @@ pub struct Config {
pub verbose: bool,
/// Print one character per test instead of one line
- pub quiet: bool,
+ pub format: OutputFormat,
/// Whether to use colors in test.
pub color: ColorConfig,
@@ -369,7 +313,8 @@ pub struct Config {
pub cflags: String,
pub cxxflags: String,
pub ar: String,
- pub linker: Option<String>,
+ pub target_linker: Option<String>,
+ pub host_linker: Option<String>,
pub llvm_components: String,
/// Path to a NodeJS executable. Used for JS doctests, emscripten and WASM tests
@@ -383,7 +328,9 @@ pub struct Config {
/// Only rerun the tests whose results have been modified according to Git status
pub only_modified: bool,
- pub target_cfg: LazyCell<TargetCfg>,
+ pub target_cfgs: AtomicLazyCell<TargetCfgs>,
+
+ pub nocapture: bool,
}
impl Config {
@@ -394,8 +341,18 @@ impl Config {
})
}
- fn target_cfg(&self) -> &TargetCfg {
- self.target_cfg.borrow_with(|| TargetCfg::new(self))
+ pub fn target_cfgs(&self) -> &TargetCfgs {
+ match self.target_cfgs.borrow() {
+ Some(cfgs) => cfgs,
+ None => {
+ let _ = self.target_cfgs.fill(TargetCfgs::new(self));
+ self.target_cfgs.borrow().unwrap()
+ }
+ }
+ }
+
+ pub fn target_cfg(&self) -> &TargetCfg {
+ &self.target_cfgs().current
}
pub fn matches_arch(&self, arch: &str) -> bool {
@@ -447,94 +404,154 @@ impl Config {
}
}
-#[derive(Clone, Debug)]
+#[derive(Debug, Clone)]
+pub struct TargetCfgs {
+ pub current: TargetCfg,
+ pub all_targets: HashSet<String>,
+ pub all_archs: HashSet<String>,
+ pub all_oses: HashSet<String>,
+ pub all_oses_and_envs: HashSet<String>,
+ pub all_envs: HashSet<String>,
+ pub all_abis: HashSet<String>,
+ pub all_families: HashSet<String>,
+ pub all_pointer_widths: HashSet<String>,
+}
+
+impl TargetCfgs {
+ fn new(config: &Config) -> TargetCfgs {
+ let targets: HashMap<String, TargetCfg> = if config.stage_id.starts_with("stage0-") {
+ // #[cfg(bootstrap)]
+ // Needed only for one cycle, remove during the bootstrap bump.
+ Self::collect_all_slow(config)
+ } else {
+ serde_json::from_str(&rustc_output(
+ config,
+ &["--print=all-target-specs-json", "-Zunstable-options"],
+ ))
+ .unwrap()
+ };
+
+ let mut current = None;
+ let mut all_targets = HashSet::new();
+ let mut all_archs = HashSet::new();
+ let mut all_oses = HashSet::new();
+ let mut all_oses_and_envs = HashSet::new();
+ let mut all_envs = HashSet::new();
+ let mut all_abis = HashSet::new();
+ let mut all_families = HashSet::new();
+ let mut all_pointer_widths = HashSet::new();
+
+ for (target, cfg) in targets.into_iter() {
+ all_archs.insert(cfg.arch.clone());
+ all_oses.insert(cfg.os.clone());
+ all_oses_and_envs.insert(cfg.os_and_env());
+ all_envs.insert(cfg.env.clone());
+ all_abis.insert(cfg.abi.clone());
+ for family in &cfg.families {
+ all_families.insert(family.clone());
+ }
+ all_pointer_widths.insert(format!("{}bit", cfg.pointer_width));
+
+ if target == config.target {
+ current = Some(cfg);
+ }
+ all_targets.insert(target.into());
+ }
+
+ Self {
+ current: current.expect("current target not found"),
+ all_targets,
+ all_archs,
+ all_oses,
+ all_oses_and_envs,
+ all_envs,
+ all_abis,
+ all_families,
+ all_pointer_widths,
+ }
+ }
+
+ // #[cfg(bootstrap)]
+ // Needed only for one cycle, remove during the bootstrap bump.
+ fn collect_all_slow(config: &Config) -> HashMap<String, TargetCfg> {
+ let mut result = HashMap::new();
+ for target in rustc_output(config, &["--print=target-list"]).trim().lines() {
+ let json = rustc_output(
+ config,
+ &["--print=target-spec-json", "-Zunstable-options", "--target", target],
+ );
+ match serde_json::from_str(&json) {
+ Ok(res) => {
+ result.insert(target.into(), res);
+ }
+ Err(err) => panic!("failed to parse target spec for {target}: {err}"),
+ }
+ }
+ result
+ }
+}
+
+#[derive(Clone, Debug, serde::Deserialize)]
+#[serde(rename_all = "kebab-case")]
pub struct TargetCfg {
- arch: String,
- os: String,
- env: String,
- abi: String,
- families: Vec<String>,
- pointer_width: u32,
+ pub(crate) arch: String,
+ #[serde(default = "default_os")]
+ pub(crate) os: String,
+ #[serde(default)]
+ pub(crate) env: String,
+ #[serde(default)]
+ pub(crate) abi: String,
+ #[serde(rename = "target-family", default)]
+ pub(crate) families: Vec<String>,
+ #[serde(rename = "target-pointer-width", deserialize_with = "serde_parse_u32")]
+ pub(crate) pointer_width: u32,
+ #[serde(rename = "target-endian", default)]
endian: Endian,
+ #[serde(rename = "panic-strategy", default)]
panic: PanicStrategy,
}
-#[derive(Eq, PartialEq, Clone, Debug)]
+impl TargetCfg {
+ pub(crate) fn os_and_env(&self) -> String {
+ format!("{}-{}", self.os, self.env)
+ }
+}
+
+fn default_os() -> String {
+ "none".into()
+}
+
+#[derive(Eq, PartialEq, Clone, Debug, Default, serde::Deserialize)]
+#[serde(rename_all = "kebab-case")]
pub enum Endian {
+ #[default]
Little,
Big,
}
-impl TargetCfg {
- fn new(config: &Config) -> TargetCfg {
- let mut command = Command::new(&config.rustc_path);
- add_dylib_path(&mut command, iter::once(&config.compile_lib_path));
- let output = match command
- .arg("--print=cfg")
- .arg("--target")
- .arg(&config.target)
- .args(&config.target_rustcflags)
- .output()
- {
- Ok(output) => output,
- Err(e) => panic!("error: failed to get cfg info from {:?}: {e}", config.rustc_path),
- };
- if !output.status.success() {
- panic!(
- "error: failed to get cfg info from {:?}\n--- stdout\n{}\n--- stderr\n{}",
- config.rustc_path,
- String::from_utf8(output.stdout).unwrap(),
- String::from_utf8(output.stderr).unwrap(),
- );
- }
- let print_cfg = String::from_utf8(output.stdout).unwrap();
- let mut arch = None;
- let mut os = None;
- let mut env = None;
- let mut abi = None;
- let mut families = Vec::new();
- let mut pointer_width = None;
- let mut endian = None;
- let mut panic = None;
- for line in print_cfg.lines() {
- if let Some((name, value)) = line.split_once('=') {
- let value = value.trim_matches('"');
- match name {
- "target_arch" => arch = Some(value),
- "target_os" => os = Some(value),
- "target_env" => env = Some(value),
- "target_abi" => abi = Some(value),
- "target_family" => families.push(value.to_string()),
- "target_pointer_width" => pointer_width = Some(value.parse().unwrap()),
- "target_endian" => {
- endian = Some(match value {
- "little" => Endian::Little,
- "big" => Endian::Big,
- s => panic!("unexpected {s}"),
- })
- }
- "panic" => {
- panic = match value {
- "abort" => Some(PanicStrategy::Abort),
- "unwind" => Some(PanicStrategy::Unwind),
- s => panic!("unexpected {s}"),
- }
- }
- _ => {}
- }
- }
- }
- TargetCfg {
- arch: arch.unwrap().to_string(),
- os: os.unwrap().to_string(),
- env: env.unwrap().to_string(),
- abi: abi.unwrap().to_string(),
- families,
- pointer_width: pointer_width.unwrap(),
- endian: endian.unwrap(),
- panic: panic.unwrap(),
- }
+fn rustc_output(config: &Config, args: &[&str]) -> String {
+ let mut command = Command::new(&config.rustc_path);
+ add_dylib_path(&mut command, iter::once(&config.compile_lib_path));
+ command.args(&config.target_rustcflags).args(args);
+ command.env("RUSTC_BOOTSTRAP", "1");
+
+ let output = match command.output() {
+ Ok(output) => output,
+ Err(e) => panic!("error: failed to run {command:?}: {e}"),
+ };
+ if !output.status.success() {
+ panic!(
+ "error: failed to run {command:?}\n--- stdout\n{}\n--- stderr\n{}",
+ String::from_utf8(output.stdout).unwrap(),
+ String::from_utf8(output.stderr).unwrap(),
+ );
}
+ String::from_utf8(output.stdout).unwrap()
+}
+
+fn serde_parse_u32<'de, D: Deserializer<'de>>(deserializer: D) -> Result<u32, D::Error> {
+ let string = String::deserialize(deserializer)?;
+ string.parse().map_err(D::Error::custom)
}
#[derive(Debug, Clone)]
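As a rough, self-contained sketch of the string_enum! idea introduced above (cut down: literal fragments instead of expr, and a standalone Debugger mirror; not the exact macro), one variant-to-string table drives to_str, Display and FromStr:

use std::fmt;
use std::str::FromStr;

macro_rules! string_enum {
    ($vis:vis enum $name:ident { $($variant:ident => $repr:literal,)* }) => {
        #[derive(Clone, Copy, PartialEq, Debug)]
        $vis enum $name { $($variant,)* }

        impl $name {
            $vis const fn to_str(&self) -> &'static str {
                match self { $(Self::$variant => $repr,)* }
            }
        }

        impl fmt::Display for $name {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                fmt::Display::fmt(self.to_str(), f)
            }
        }

        impl FromStr for $name {
            type Err = ();
            fn from_str(s: &str) -> Result<Self, ()> {
                match s { $($repr => Ok(Self::$variant),)* _ => Err(()) }
            }
        }
    };
}

string_enum! {
    pub enum Debugger {
        Cdb => "cdb",
        Gdb => "gdb",
        Lldb => "lldb",
    }
}

fn main() {
    assert_eq!("gdb".parse::<Debugger>(), Ok(Debugger::Gdb));
    assert_eq!(Debugger::Lldb.to_str(), "lldb");
    println!("{}", Debugger::Cdb); // prints "cdb"
}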
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
index d9b39927c..bc65ec932 100644
--- a/src/tools/compiletest/src/header.rs
+++ b/src/tools/compiletest/src/header.rs
@@ -6,24 +6,19 @@ use std::io::BufReader;
use std::path::{Path, PathBuf};
use std::process::Command;
+use build_helper::ci::CiEnv;
use tracing::*;
-use crate::common::{CompareMode, Config, Debugger, FailMode, Mode, PassMode};
+use crate::common::{Config, Debugger, FailMode, Mode, PassMode};
+use crate::header::cfg::parse_cfg_name_directive;
+use crate::header::cfg::MatchOutcome;
use crate::util;
use crate::{extract_cdb_version, extract_gdb_version};
+mod cfg;
#[cfg(test)]
mod tests;
-/// The result of parse_cfg_name_directive.
-#[derive(Clone, Copy, PartialEq, Debug)]
-enum ParsedNameDirective {
- /// No match.
- NoMatch,
- /// Match.
- Match,
-}
-
/// Properties which must be known very early, before actually running
/// the test.
#[derive(Default)]
@@ -84,6 +79,9 @@ pub struct TestProps {
pub unset_rustc_env: Vec<String>,
// Environment settings to use during execution
pub exec_env: Vec<(String, String)>,
+ // Environment variables to unset prior to execution.
+ // Variables are unset before applying 'exec_env'
+ pub unset_exec_env: Vec<String>,
// Build documentation for all specified aux-builds as well
pub build_aux_docs: bool,
// Flag to force a crate to be built with the host architecture
@@ -150,6 +148,8 @@ pub struct TestProps {
pub normalize_stdout: Vec<(String, String)>,
pub normalize_stderr: Vec<(String, String)>,
pub failure_status: i32,
+ // For UI tests, allows the compiler to exit with an arbitrary failure status
+ pub dont_check_failure_status: bool,
// Whether or not `rustfix` should apply the `CodeSuggestion`s of this test and compile the
// resulting Rust code.
pub run_rustfix: bool,
@@ -187,11 +187,13 @@ mod directives {
pub const AUX_CRATE: &'static str = "aux-crate";
pub const EXEC_ENV: &'static str = "exec-env";
pub const RUSTC_ENV: &'static str = "rustc-env";
+ pub const UNSET_EXEC_ENV: &'static str = "unset-exec-env";
pub const UNSET_RUSTC_ENV: &'static str = "unset-rustc-env";
pub const FORBID_OUTPUT: &'static str = "forbid-output";
pub const CHECK_TEST_LINE_NUMBERS_MATCH: &'static str = "check-test-line-numbers-match";
pub const IGNORE_PASS: &'static str = "ignore-pass";
pub const FAILURE_STATUS: &'static str = "failure-status";
+ pub const DONT_CHECK_FAILURE_STATUS: &'static str = "dont-check-failure-status";
pub const RUN_RUSTFIX: &'static str = "run-rustfix";
pub const RUSTFIX_ONLY_MACHINE_APPLICABLE: &'static str = "rustfix-only-machine-applicable";
pub const ASSEMBLY_OUTPUT: &'static str = "assembly-output";
@@ -218,6 +220,7 @@ impl TestProps {
rustc_env: vec![],
unset_rustc_env: vec![],
exec_env: vec![],
+ unset_exec_env: vec![],
build_aux_docs: false,
force_host: false,
check_stdout: false,
@@ -239,6 +242,7 @@ impl TestProps {
normalize_stdout: vec![],
normalize_stderr: vec![],
failure_status: -1,
+ dont_check_failure_status: false,
run_rustfix: false,
rustfix_only_machine_applicable: false,
assembly_output: None,
@@ -278,8 +282,12 @@ impl TestProps {
/// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`.
fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
- // Mode-dependent defaults.
- self.remap_src_base = config.mode == Mode::Ui && !config.suite.contains("rustdoc");
+ // In CI, we've sometimes encountered non-determinism related to truncating very long paths.
+ // Set a consistent (short) prefix to avoid issues, but only in CI to avoid regressing the
+ // contributor experience.
+ if CiEnv::is_ci() {
+ self.remap_src_base = config.mode == Mode::Ui && !config.suite.contains("rustdoc");
+ }
let mut has_edition = false;
if !testfile.is_dir() {
@@ -365,6 +373,12 @@ impl TestProps {
);
config.push_name_value_directive(
ln,
+ UNSET_EXEC_ENV,
+ &mut self.unset_exec_env,
+ |r| r,
+ );
+ config.push_name_value_directive(
+ ln,
RUSTC_ENV,
&mut self.rustc_env,
Config::parse_env,
@@ -401,6 +415,12 @@ impl TestProps {
self.failure_status = code;
}
+ config.set_name_directive(
+ ln,
+ DONT_CHECK_FAILURE_STATUS,
+ &mut self.dont_check_failure_status,
+ );
+
config.set_name_directive(ln, RUN_RUSTFIX, &mut self.run_rustfix);
config.set_name_directive(
ln,
@@ -647,7 +667,7 @@ impl Config {
}
fn parse_custom_normalization(&self, mut line: &str, prefix: &str) -> Option<(String, String)> {
- if self.parse_cfg_name_directive(line, prefix) == ParsedNameDirective::Match {
+ if parse_cfg_name_directive(self, line, prefix).outcome == MatchOutcome::Match {
let from = parse_normalization_string(&mut line)?;
let to = parse_normalization_string(&mut line)?;
Some((from, to))
@@ -664,68 +684,6 @@ impl Config {
self.parse_name_directive(line, "needs-profiler-support")
}
- /// Parses a name-value directive which contains config-specific information, e.g., `ignore-x86`
- /// or `normalize-stderr-32bit`.
- fn parse_cfg_name_directive(&self, line: &str, prefix: &str) -> ParsedNameDirective {
- if !line.as_bytes().starts_with(prefix.as_bytes()) {
- return ParsedNameDirective::NoMatch;
- }
- if line.as_bytes().get(prefix.len()) != Some(&b'-') {
- return ParsedNameDirective::NoMatch;
- }
-
- let name = line[prefix.len() + 1..].split(&[':', ' '][..]).next().unwrap();
-
- let matches_pointer_width = || {
- name.strip_suffix("bit")
- .and_then(|width| width.parse::<u32>().ok())
- .map(|width| self.get_pointer_width() == width)
- .unwrap_or(false)
- };
-
- // If something is ignored for emscripten, it likely also needs to be
- // ignored for wasm32-unknown-unknown.
- // `wasm32-bare` is an alias to refer to just wasm32-unknown-unknown
- // (in contrast to `wasm32` which also matches non-bare targets like
- // asmjs-unknown-emscripten).
- let matches_wasm32_alias = || {
- self.target == "wasm32-unknown-unknown" && matches!(name, "emscripten" | "wasm32-bare")
- };
-
- let is_match = name == "test" ||
- self.target == name || // triple
- self.matches_os(name) ||
- self.matches_env(name) ||
- self.matches_abi(name) ||
- self.matches_family(name) ||
- self.target.ends_with(name) || // target and env
- self.matches_arch(name) ||
- matches_wasm32_alias() ||
- matches_pointer_width() ||
- name == self.stage_id.split('-').next().unwrap() || // stage
- name == self.channel || // channel
- (self.target != self.host && name == "cross-compile") ||
- (name == "endian-big" && self.is_big_endian()) ||
- (self.remote_test_client.is_some() && name == "remote") ||
- match self.compare_mode {
- Some(CompareMode::Polonius) => name == "compare-mode-polonius",
- Some(CompareMode::Chalk) => name == "compare-mode-chalk",
- Some(CompareMode::NextSolver) => name == "compare-mode-next-solver",
- Some(CompareMode::SplitDwarf) => name == "compare-mode-split-dwarf",
- Some(CompareMode::SplitDwarfSingle) => name == "compare-mode-split-dwarf-single",
- None => false,
- } ||
- (cfg!(debug_assertions) && name == "debug") ||
- match self.debugger {
- Some(Debugger::Cdb) => name == "cdb",
- Some(Debugger::Gdb) => name == "gdb",
- Some(Debugger::Lldb) => name == "lldb",
- None => false,
- };
-
- if is_match { ParsedNameDirective::Match } else { ParsedNameDirective::NoMatch }
- }
-
fn has_cfg_prefix(&self, line: &str, prefix: &str) -> bool {
// returns whether this line contains this prefix or not. For prefix
// "ignore", returns true if line says "ignore-x86_64", "ignore-arch",
@@ -964,6 +922,19 @@ pub fn make_test_description<R: Read>(
.join(if config.host.contains("windows") { "rust-lld.exe" } else { "rust-lld" })
.exists();
+ fn is_on_path(file: &'static str) -> impl Fn() -> bool {
+ move || env::split_paths(&env::var_os("PATH").unwrap()).any(|dir| dir.join(file).is_file())
+ }
+
+ // On Windows, dlltool.exe is used for all architectures.
+ #[cfg(windows)]
+ let (has_i686_dlltool, has_x86_64_dlltool) =
+ (is_on_path("dlltool.exe"), is_on_path("dlltool.exe"));
+ // For non-Windows, there are architecture specific dlltool binaries.
+ #[cfg(not(windows))]
+ let (has_i686_dlltool, has_x86_64_dlltool) =
+ (is_on_path("i686-w64-mingw32-dlltool"), is_on_path("x86_64-w64-mingw32-dlltool"));
+
iter_header(path, src, &mut |revision, ln| {
if revision.is_some() && revision != cfg {
return;
@@ -979,21 +950,44 @@ pub fn make_test_description<R: Read>(
}
};
}
- ignore = match config.parse_cfg_name_directive(ln, "ignore") {
- ParsedNameDirective::Match => {
- ignore_message = Some("cfg -> ignore => Match");
- true
- }
- ParsedNameDirective::NoMatch => ignore,
- };
+
+ {
+ let parsed = parse_cfg_name_directive(config, ln, "ignore");
+ ignore = match parsed.outcome {
+ MatchOutcome::Match => {
+ let reason = parsed.pretty_reason.unwrap();
+ // The ignore reason must be a &'static str, so we have to leak memory to
+ // create it. This is fine, as the header is parsed only at the start of
+ // compiletest so it won't grow indefinitely.
+ ignore_message = Some(Box::leak(Box::<str>::from(match parsed.comment {
+ Some(comment) => format!("ignored {reason} ({comment})"),
+ None => format!("ignored {reason}"),
+ })) as &str);
+ true
+ }
+ MatchOutcome::NoMatch => ignore,
+ MatchOutcome::External => ignore,
+ MatchOutcome::Invalid => panic!("invalid line in {}: {ln}", path.display()),
+ };
+ }
if config.has_cfg_prefix(ln, "only") {
- ignore = match config.parse_cfg_name_directive(ln, "only") {
- ParsedNameDirective::Match => ignore,
- ParsedNameDirective::NoMatch => {
- ignore_message = Some("cfg -> only => NoMatch");
+ let parsed = parse_cfg_name_directive(config, ln, "only");
+ ignore = match parsed.outcome {
+ MatchOutcome::Match => ignore,
+ MatchOutcome::NoMatch => {
+ let reason = parsed.pretty_reason.unwrap();
+ // The ignore reason must be a &'static str, so we have to leak memory to
+ // create it. This is fine, as the header is parsed only at the start of
+ // compiletest so it won't grow indefinitely.
+ ignore_message = Some(Box::leak(Box::<str>::from(match parsed.comment {
+ Some(comment) => format!("only executed {reason} ({comment})"),
+ None => format!("only executed {reason}"),
+ })) as &str);
true
}
+ MatchOutcome::External => ignore,
+ MatchOutcome::Invalid => panic!("invalid line in {}: {ln}", path.display()),
};
}
@@ -1031,6 +1025,8 @@ pub fn make_test_description<R: Read>(
reason!(config.debugger == Some(Debugger::Gdb) && ignore_gdb(config, ln));
reason!(config.debugger == Some(Debugger::Lldb) && ignore_lldb(config, ln));
reason!(!has_rust_lld && config.parse_name_directive(ln, "needs-rust-lld"));
+ reason!(config.parse_name_directive(ln, "needs-i686-dlltool") && !has_i686_dlltool());
+ reason!(config.parse_name_directive(ln, "needs-x86_64-dlltool") && !has_x86_64_dlltool());
should_fail |= config.parse_name_directive(ln, "should-fail");
});
@@ -1047,6 +1043,16 @@ pub fn make_test_description<R: Read>(
name,
ignore,
ignore_message,
+ #[cfg(not(bootstrap))]
+ source_file: "",
+ #[cfg(not(bootstrap))]
+ start_line: 0,
+ #[cfg(not(bootstrap))]
+ start_col: 0,
+ #[cfg(not(bootstrap))]
+ end_line: 0,
+ #[cfg(not(bootstrap))]
+ end_col: 0,
should_panic,
compile_fail: false,
no_run: false,
diff --git a/src/tools/compiletest/src/header/cfg.rs b/src/tools/compiletest/src/header/cfg.rs
new file mode 100644
index 000000000..aa36fd708
--- /dev/null
+++ b/src/tools/compiletest/src/header/cfg.rs
@@ -0,0 +1,324 @@
+use crate::common::{CompareMode, Config, Debugger};
+use std::collections::HashSet;
+
+const EXTRA_ARCHS: &[&str] = &["spirv"];
+
+/// Parses a name-value directive which contains config-specific information, e.g., `ignore-x86`
+/// or `normalize-stderr-32bit`.
+pub(super) fn parse_cfg_name_directive<'a>(
+ config: &Config,
+ line: &'a str,
+ prefix: &str,
+) -> ParsedNameDirective<'a> {
+ if !line.as_bytes().starts_with(prefix.as_bytes()) {
+ return ParsedNameDirective::invalid();
+ }
+ if line.as_bytes().get(prefix.len()) != Some(&b'-') {
+ return ParsedNameDirective::invalid();
+ }
+ let line = &line[prefix.len() + 1..];
+
+ let (name, comment) =
+ line.split_once(&[':', ' ']).map(|(l, c)| (l, Some(c))).unwrap_or((line, None));
+
+ // Some of the matchers might be "" depending on what the target information is. To avoid
+ // problems we outright reject empty directives.
+ if name == "" {
+ return ParsedNameDirective::invalid();
+ }
+
+ let mut outcome = MatchOutcome::Invalid;
+ let mut message = None;
+
+ macro_rules! condition {
+ (
+ name: $name:expr,
+ $(allowed_names: $allowed_names:expr,)?
+ $(condition: $condition:expr,)?
+ message: $($message:tt)*
+ ) => {{
+ // This is not inlined to avoid problems with macro repetitions.
+ let format_message = || format!($($message)*);
+
+ if outcome != MatchOutcome::Invalid {
+ // Ignore all other matches if we already found one
+ } else if $name.custom_matches(name) {
+ message = Some(format_message());
+ if true $(&& $condition)? {
+ outcome = MatchOutcome::Match;
+ } else {
+ outcome = MatchOutcome::NoMatch;
+ }
+ }
+ $(else if $allowed_names.custom_contains(name) {
+ message = Some(format_message());
+ outcome = MatchOutcome::NoMatch;
+ })?
+ }};
+ }
+
+ let target_cfgs = config.target_cfgs();
+ let target_cfg = config.target_cfg();
+
+ condition! {
+ name: "test",
+ message: "always"
+ }
+ condition! {
+ name: &config.target,
+ allowed_names: &target_cfgs.all_targets,
+ message: "when the target is {name}"
+ }
+ condition! {
+ name: &[
+ Some(&*target_cfg.os),
+ // If something is ignored for emscripten, it likely also needs to be
+ // ignored for wasm32-unknown-unknown.
+ (config.target == "wasm32-unknown-unknown").then_some("emscripten"),
+ ],
+ allowed_names: &target_cfgs.all_oses,
+ message: "when the operative system is {name}"
+ }
+ condition! {
+ name: &target_cfg.env,
+ allowed_names: &target_cfgs.all_envs,
+ message: "when the target environment is {name}"
+ }
+ condition! {
+ name: &target_cfg.os_and_env(),
+ allowed_names: &target_cfgs.all_oses_and_envs,
+ message: "when the operative system and target environment are {name}"
+ }
+ condition! {
+ name: &target_cfg.abi,
+ allowed_names: &target_cfgs.all_abis,
+ message: "when the ABI is {name}"
+ }
+ condition! {
+ name: &target_cfg.arch,
+ allowed_names: ContainsEither { a: &target_cfgs.all_archs, b: &EXTRA_ARCHS },
+ message: "when the architecture is {name}"
+ }
+ condition! {
+ name: format!("{}bit", target_cfg.pointer_width),
+ allowed_names: &target_cfgs.all_pointer_widths,
+ message: "when the pointer width is {name}"
+ }
+ condition! {
+ name: &*target_cfg.families,
+ allowed_names: &target_cfgs.all_families,
+ message: "when the target family is {name}"
+ }
+
+ // `wasm32-bare` is an alias to refer to just wasm32-unknown-unknown
+ // (in contrast to `wasm32` which also matches non-bare targets like
+ // asmjs-unknown-emscripten).
+ condition! {
+ name: "wasm32-bare",
+ condition: config.target == "wasm32-unknown-unknown",
+ message: "when the target is WASM"
+ }
+
+ condition! {
+ name: "asmjs",
+ condition: config.target.starts_with("asmjs"),
+ message: "when the architecture is asm.js",
+ }
+ condition! {
+ name: "thumb",
+ condition: config.target.starts_with("thumb"),
+ message: "when the architecture is part of the Thumb family"
+ }
+
+ // Technically the locally built compiler uses the "dev" channel rather than the "nightly"
+ // channel, even though most people don't know or won't care about it. To avoid confusion, we
+ // treat the "dev" channel as the "nightly" channel when processing the directive.
+ condition! {
+ name: if config.channel == "dev" { "nightly" } else { &config.channel },
+ allowed_names: &["stable", "beta", "nightly"],
+ message: "when the release channel is {name}",
+ }
+
+ condition! {
+ name: "cross-compile",
+ condition: config.target != config.host,
+ message: "when cross-compiling"
+ }
+ condition! {
+ name: "endian-big",
+ condition: config.is_big_endian(),
+ message: "on big-endian targets",
+ }
+ condition! {
+ name: config.stage_id.split('-').next().unwrap(),
+ allowed_names: &["stage0", "stage1", "stage2"],
+ message: "when the bootstrapping stage is {name}",
+ }
+ condition! {
+ name: "remote",
+ condition: config.remote_test_client.is_some(),
+ message: "when running tests remotely",
+ }
+ condition! {
+ name: "debug",
+ condition: cfg!(debug_assertions),
+ message: "when building with debug assertions",
+ }
+ condition! {
+ name: config.debugger.as_ref().map(|d| d.to_str()),
+ allowed_names: &Debugger::STR_VARIANTS,
+ message: "when the debugger is {name}",
+ }
+ condition! {
+ name: config.compare_mode
+ .as_ref()
+ .map(|d| format!("compare-mode-{}", d.to_str())),
+ allowed_names: ContainsPrefixed {
+ prefix: "compare-mode-",
+ inner: CompareMode::STR_VARIANTS,
+ },
+ message: "when comparing with {name}",
+ }
+
+ if prefix == "ignore" && outcome == MatchOutcome::Invalid {
+ // Don't error out for ignore-tidy-* directives, as those are not handled by compiletest.
+ if name.starts_with("tidy-") {
+ outcome = MatchOutcome::External;
+ }
+
+ // Don't error out for ignore-pass, as that is handled elsewhere.
+ if name == "pass" {
+ outcome = MatchOutcome::External;
+ }
+
+ // Don't error out for ignore-llvm-version, which has a custom syntax and is handled
+ // elsewhere.
+ if name == "llvm-version" {
+ outcome = MatchOutcome::External;
+ }
+
+ // Don't error out for ignore-gdb-version, which has a custom syntax and is handled
+ // elsewhere.
+ if name == "gdb-version" {
+ outcome = MatchOutcome::External;
+ }
+ }
+
+ ParsedNameDirective {
+ name: Some(name),
+ comment: comment.map(|c| c.trim().trim_start_matches('-').trim()),
+ outcome,
+ pretty_reason: message,
+ }
+}
+
+/// The result of parse_cfg_name_directive.
+#[derive(Clone, PartialEq, Debug)]
+pub(super) struct ParsedNameDirective<'a> {
+ pub(super) name: Option<&'a str>,
+ pub(super) pretty_reason: Option<String>,
+ pub(super) comment: Option<&'a str>,
+ pub(super) outcome: MatchOutcome,
+}
+
+impl ParsedNameDirective<'_> {
+ fn invalid() -> Self {
+ Self { name: None, pretty_reason: None, comment: None, outcome: MatchOutcome::NoMatch }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub(super) enum MatchOutcome {
+ /// No match.
+ NoMatch,
+ /// Match.
+ Match,
+ /// The directive was invalid.
+ Invalid,
+ /// The directive is handled by other parts of our tooling.
+ External,
+}
+
+trait CustomContains {
+ fn custom_contains(&self, item: &str) -> bool;
+}
+
+impl CustomContains for HashSet<String> {
+ fn custom_contains(&self, item: &str) -> bool {
+ self.contains(item)
+ }
+}
+
+impl CustomContains for &[&str] {
+ fn custom_contains(&self, item: &str) -> bool {
+ self.contains(&item)
+ }
+}
+
+impl<const N: usize> CustomContains for [&str; N] {
+ fn custom_contains(&self, item: &str) -> bool {
+ self.contains(&item)
+ }
+}
+
+struct ContainsPrefixed<T: CustomContains> {
+ prefix: &'static str,
+ inner: T,
+}
+
+impl<T: CustomContains> CustomContains for ContainsPrefixed<T> {
+ fn custom_contains(&self, item: &str) -> bool {
+ match item.strip_prefix(self.prefix) {
+ Some(stripped) => self.inner.custom_contains(stripped),
+ None => false,
+ }
+ }
+}
+
+struct ContainsEither<'a, A: CustomContains, B: CustomContains> {
+ a: &'a A,
+ b: &'a B,
+}
+
+impl<A: CustomContains, B: CustomContains> CustomContains for ContainsEither<'_, A, B> {
+ fn custom_contains(&self, item: &str) -> bool {
+ self.a.custom_contains(item) || self.b.custom_contains(item)
+ }
+}
+
+trait CustomMatches {
+ fn custom_matches(&self, name: &str) -> bool;
+}
+
+impl CustomMatches for &str {
+ fn custom_matches(&self, name: &str) -> bool {
+ name == *self
+ }
+}
+
+impl CustomMatches for String {
+ fn custom_matches(&self, name: &str) -> bool {
+ name == self
+ }
+}
+
+impl<T: CustomMatches> CustomMatches for &[T] {
+ fn custom_matches(&self, name: &str) -> bool {
+ self.iter().any(|m| m.custom_matches(name))
+ }
+}
+
+impl<const N: usize, T: CustomMatches> CustomMatches for [T; N] {
+ fn custom_matches(&self, name: &str) -> bool {
+ self.iter().any(|m| m.custom_matches(name))
+ }
+}
+
+impl<T: CustomMatches> CustomMatches for Option<T> {
+ fn custom_matches(&self, name: &str) -> bool {
+ match self {
+ Some(inner) => inner.custom_matches(name),
+ None => false,
+ }
+ }
+}
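A rough standalone illustration (not the real condition! macro; made-up resolve helper and data) of the first-match-wins outcome model used above, where a directive name either matches the current target, is a known name that simply does not apply, or is rejected as invalid:

use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Debug)]
enum MatchOutcome {
    Match,
    NoMatch,
    Invalid,
}

// Hypothetical, simplified resolver: `current` describes the target under test,
// `known` lists every value a directive is allowed to name.
fn resolve(name: &str, current: &[&str], known: &HashSet<&str>) -> MatchOutcome {
    if current.contains(&name) {
        MatchOutcome::Match
    } else if known.contains(name) {
        // A recognized name that doesn't apply to this target: valid, but it doesn't fire.
        MatchOutcome::NoMatch
    } else {
        // Unknown names are surfaced as errors instead of being silently ignored.
        MatchOutcome::Invalid
    }
}

fn main() {
    let known: HashSet<&str> = ["linux", "windows", "macos"].into_iter().collect();
    let current = ["linux"];
    assert_eq!(resolve("linux", &current, &known), MatchOutcome::Match);
    assert_eq!(resolve("windows", &current, &known), MatchOutcome::NoMatch);
    assert_eq!(resolve("linxu", &current, &known), MatchOutcome::Invalid);
}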
diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs
index e42b8c524..acd588d7f 100644
--- a/src/tools/compiletest/src/header/tests.rs
+++ b/src/tools/compiletest/src/header/tests.rs
@@ -47,7 +47,7 @@ fn config() -> Config {
"--src-base=",
"--build-base=",
"--sysroot-base=",
- "--stage-id=stage2",
+ "--stage-id=stage2-x86_64-unknown-linux-gnu",
"--cc=c",
"--cxx=c++",
"--cflags=",
@@ -174,7 +174,7 @@ fn ignore_target() {
assert!(check_ignore(&config, "// ignore-gnu"));
assert!(check_ignore(&config, "// ignore-64bit"));
- assert!(!check_ignore(&config, "// ignore-i686"));
+ assert!(!check_ignore(&config, "// ignore-x86"));
assert!(!check_ignore(&config, "// ignore-windows"));
assert!(!check_ignore(&config, "// ignore-msvc"));
assert!(!check_ignore(&config, "// ignore-32bit"));
@@ -200,7 +200,7 @@ fn only_target() {
#[test]
fn stage() {
let mut config = config();
- config.stage_id = "stage1".to_owned();
+ config.stage_id = "stage1-x86_64-unknown-linux-gnu".to_owned();
assert!(check_ignore(&config, "// ignore-stage1"));
assert!(!check_ignore(&config, "// ignore-stage2"));
diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs
index 1760c29ec..6a91d25a8 100644
--- a/src/tools/compiletest/src/main.rs
+++ b/src/tools/compiletest/src/main.rs
@@ -6,12 +6,13 @@
extern crate test;
use crate::common::{expected_output_path, output_base_dir, output_relative_path, UI_EXTENSIONS};
-use crate::common::{CompareMode, Config, Debugger, Mode, PassMode, TestPaths};
+use crate::common::{Config, Debugger, Mode, PassMode, TestPaths};
use crate::util::logv;
use build_helper::git::{get_git_modified_files, get_git_untracked_files};
use core::panic;
use getopts::Options;
-use lazycell::LazyCell;
+use lazycell::AtomicLazyCell;
+use std::collections::BTreeSet;
use std::ffi::OsString;
use std::fs;
use std::io::{self, ErrorKind};
@@ -24,6 +25,7 @@ use tracing::*;
use walkdir::WalkDir;
use self::header::{make_test_description, EarlyProps};
+use std::sync::Arc;
#[cfg(test)]
mod tests;
@@ -41,7 +43,7 @@ pub mod util;
fn main() {
tracing_subscriber::fmt::init();
- let config = parse_config(env::args().collect());
+ let config = Arc::new(parse_config(env::args().collect()));
if config.valgrind_path.is_none() && config.force_valgrind {
panic!("Can't find Valgrind to run Valgrind tests");
@@ -114,6 +116,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
)
.optflag("", "quiet", "print one character per test instead of one line")
.optopt("", "color", "coloring: auto, always, never", "WHEN")
+ .optflag("", "json", "emit json output instead of plaintext output")
.optopt("", "logfile", "file to log test execution to", "FILE")
.optopt("", "target", "the target to build for", "TARGET")
.optopt("", "host", "the host to build for", "HOST")
@@ -131,7 +134,8 @@ pub fn parse_config(args: Vec<String>) -> Config {
.reqopt("", "cflags", "flags for the C compiler", "FLAGS")
.reqopt("", "cxxflags", "flags for the CXX compiler", "FLAGS")
.optopt("", "ar", "path to an archiver", "PATH")
- .optopt("", "linker", "path to a linker", "PATH")
+ .optopt("", "target-linker", "path to a linker for the target", "PATH")
+ .optopt("", "host-linker", "path to a linker for the host", "PATH")
.reqopt("", "llvm-components", "list of LLVM components built in", "LIST")
.optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH")
.optopt("", "nodejs", "the name of nodejs", "PATH")
@@ -151,6 +155,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
)
.optflag("", "force-rerun", "rerun tests even if the inputs are unchanged")
.optflag("", "only-modified", "only run tests that result been modified")
+ .optflag("", "nocapture", "")
.optflag("h", "help", "show this message")
.reqopt("", "channel", "current Rust channel", "CHANNEL")
.optopt("", "edition", "default Rust edition", "EDITION");
@@ -281,11 +286,18 @@ pub fn parse_config(args: Vec<String>) -> Config {
&& !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
lldb_python_dir: matches.opt_str("lldb-python-dir"),
verbose: matches.opt_present("verbose"),
- quiet: matches.opt_present("quiet"),
+ format: match (matches.opt_present("quiet"), matches.opt_present("json")) {
+ (true, true) => panic!("--quiet and --json are incompatible"),
+ (true, false) => test::OutputFormat::Terse,
+ (false, true) => test::OutputFormat::Json,
+ (false, false) => test::OutputFormat::Pretty,
+ },
only_modified: matches.opt_present("only-modified"),
color,
remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
- compare_mode: matches.opt_str("compare-mode").map(CompareMode::parse),
+ compare_mode: matches
+ .opt_str("compare-mode")
+ .map(|s| s.parse().expect("invalid --compare-mode provided")),
rustfix_coverage: matches.opt_present("rustfix-coverage"),
has_tidy,
channel: matches.opt_str("channel").unwrap(),
@@ -296,14 +308,17 @@ pub fn parse_config(args: Vec<String>) -> Config {
cflags: matches.opt_str("cflags").unwrap(),
cxxflags: matches.opt_str("cxxflags").unwrap(),
ar: matches.opt_str("ar").unwrap_or_else(|| String::from("ar")),
- linker: matches.opt_str("linker"),
+ target_linker: matches.opt_str("target-linker"),
+ host_linker: matches.opt_str("host-linker"),
llvm_components: matches.opt_str("llvm-components").unwrap(),
nodejs: matches.opt_str("nodejs"),
npm: matches.opt_str("npm"),
force_rerun: matches.opt_present("force-rerun"),
- target_cfg: LazyCell::new(),
+ target_cfgs: AtomicLazyCell::new(),
+
+ nocapture: matches.opt_present("nocapture"),
}
}
@@ -337,9 +352,10 @@ pub fn log_config(config: &Config) {
logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}", config.adb_device_status));
logv(c, format!("ar: {}", config.ar));
- logv(c, format!("linker: {:?}", config.linker));
+ logv(c, format!("target-linker: {:?}", config.target_linker));
+ logv(c, format!("host-linker: {:?}", config.host_linker));
logv(c, format!("verbose: {}", config.verbose));
- logv(c, format!("quiet: {}", config.quiet));
+ logv(c, format!("format: {:?}", config.format));
logv(c, "\n".to_string());
}
@@ -357,7 +373,7 @@ pub fn opt_str2(maybestr: Option<String>) -> String {
}
}
-pub fn run_tests(config: Config) {
+pub fn run_tests(config: Arc<Config>) {
// If we want to collect rustfix coverage information,
// we first make sure that the coverage file does not exist.
// It will be created later on.
@@ -399,8 +415,10 @@ pub fn run_tests(config: Config) {
};
let mut tests = Vec::new();
- for c in &configs {
- make_tests(c, &mut tests);
+ for c in configs {
+ let mut found_paths = BTreeSet::new();
+ make_tests(c, &mut tests, &mut found_paths);
+ check_overlapping_tests(&found_paths);
}
tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice()));
@@ -416,10 +434,14 @@ pub fn run_tests(config: Config) {
// easy to miss which tests failed, and as such fail to reproduce
// the failure locally.
- eprintln!(
+ println!(
"Some tests failed in compiletest suite={}{} mode={} host={} target={}",
config.suite,
- config.compare_mode.map(|c| format!(" compare_mode={:?}", c)).unwrap_or_default(),
+ config
+ .compare_mode
+ .as_ref()
+ .map(|c| format!(" compare_mode={:?}", c))
+ .unwrap_or_default(),
config.mode,
config.host,
config.target
@@ -439,13 +461,13 @@ pub fn run_tests(config: Config) {
}
}
-fn configure_cdb(config: &Config) -> Option<Config> {
+fn configure_cdb(config: &Config) -> Option<Arc<Config>> {
config.cdb.as_ref()?;
- Some(Config { debugger: Some(Debugger::Cdb), ..config.clone() })
+ Some(Arc::new(Config { debugger: Some(Debugger::Cdb), ..config.clone() }))
}
-fn configure_gdb(config: &Config) -> Option<Config> {
+fn configure_gdb(config: &Config) -> Option<Arc<Config>> {
config.gdb_version?;
if config.matches_env("msvc") {
@@ -476,10 +498,10 @@ fn configure_gdb(config: &Config) -> Option<Config> {
env::set_var("RUST_TEST_THREADS", "1");
}
- Some(Config { debugger: Some(Debugger::Gdb), ..config.clone() })
+ Some(Arc::new(Config { debugger: Some(Debugger::Gdb), ..config.clone() }))
}
-fn configure_lldb(config: &Config) -> Option<Config> {
+fn configure_lldb(config: &Config) -> Option<Arc<Config>> {
config.lldb_python_dir.as_ref()?;
if let Some(350) = config.lldb_version {
@@ -492,23 +514,27 @@ fn configure_lldb(config: &Config) -> Option<Config> {
return None;
}
- Some(Config { debugger: Some(Debugger::Lldb), ..config.clone() })
+ Some(Arc::new(Config { debugger: Some(Debugger::Lldb), ..config.clone() }))
}
pub fn test_opts(config: &Config) -> test::TestOpts {
+ if env::var("RUST_TEST_NOCAPTURE").is_ok() {
+ eprintln!(
+ "WARNING: RUST_TEST_NOCAPTURE is no longer used. \
+ Use the `--nocapture` flag instead."
+ );
+ }
+
test::TestOpts {
exclude_should_panic: false,
filters: config.filters.clone(),
filter_exact: config.filter_exact,
run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No },
- format: if config.quiet { test::OutputFormat::Terse } else { test::OutputFormat::Pretty },
+ format: config.format,
logfile: config.logfile.clone(),
run_tests: true,
bench_benchmarks: true,
- nocapture: match env::var("RUST_TEST_NOCAPTURE") {
- Ok(val) => &val != "0",
- Err(_) => false,
- },
+ nocapture: config.nocapture,
color: config.color,
shuffle: false,
shuffle_seed: None,
@@ -522,18 +548,23 @@ pub fn test_opts(config: &Config) -> test::TestOpts {
}
}
-pub fn make_tests(config: &Config, tests: &mut Vec<test::TestDescAndFn>) {
+pub fn make_tests(
+ config: Arc<Config>,
+ tests: &mut Vec<test::TestDescAndFn>,
+ found_paths: &mut BTreeSet<PathBuf>,
+) {
debug!("making tests from {:?}", config.src_base.display());
- let inputs = common_inputs_stamp(config);
- let modified_tests = modified_tests(config, &config.src_base).unwrap_or_else(|err| {
+ let inputs = common_inputs_stamp(&config);
+ let modified_tests = modified_tests(&config, &config.src_base).unwrap_or_else(|err| {
panic!("modified_tests got error from dir: {}, error: {}", config.src_base.display(), err)
});
collect_tests_from_dir(
- config,
+ config.clone(),
&config.src_base,
&PathBuf::new(),
&inputs,
tests,
+ found_paths,
&modified_tests,
)
.unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display()));
@@ -599,11 +630,12 @@ fn modified_tests(config: &Config, dir: &Path) -> Result<Vec<PathBuf>, String> {
}
fn collect_tests_from_dir(
- config: &Config,
+ config: Arc<Config>,
dir: &Path,
relative_dir_path: &Path,
inputs: &Stamp,
tests: &mut Vec<test::TestDescAndFn>,
+ found_paths: &mut BTreeSet<PathBuf>,
modified_tests: &Vec<PathBuf>,
) -> io::Result<()> {
// Ignore directories that contain a file named `compiletest-ignore-dir`.
@@ -626,7 +658,7 @@ fn collect_tests_from_dir(
// sequential loop because otherwise, if we do it in the
// tests themselves, they race for the privilege of
// creating the directories and sometimes fail randomly.
- let build_dir = output_relative_path(config, relative_dir_path);
+ let build_dir = output_relative_path(&config, relative_dir_path);
fs::create_dir_all(&build_dir).unwrap();
// Add each `.rs` file as a test, and recurse further on any
@@ -637,20 +669,23 @@ fn collect_tests_from_dir(
let file_name = file.file_name();
if is_test(&file_name) && (!config.only_modified || modified_tests.contains(&file_path)) {
debug!("found test file: {:?}", file_path.display());
+ let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap());
+ found_paths.insert(rel_test_path);
let paths =
TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() };
- tests.extend(make_test(config, &paths, inputs))
+ tests.extend(make_test(config.clone(), &paths, inputs))
} else if file_path.is_dir() {
let relative_file_path = relative_dir_path.join(file.file_name());
if &file_name != "auxiliary" {
debug!("found directory: {:?}", file_path.display());
collect_tests_from_dir(
- config,
+ config.clone(),
&file_path,
&relative_file_path,
inputs,
tests,
+ found_paths,
modified_tests,
)?;
}
@@ -674,14 +709,18 @@ pub fn is_test(file_name: &OsString) -> bool {
!invalid_prefixes.iter().any(|p| file_name.starts_with(p))
}
-fn make_test(config: &Config, testpaths: &TestPaths, inputs: &Stamp) -> Vec<test::TestDescAndFn> {
+fn make_test(
+ config: Arc<Config>,
+ testpaths: &TestPaths,
+ inputs: &Stamp,
+) -> Vec<test::TestDescAndFn> {
let test_path = if config.mode == Mode::RunMake {
// Parse directives in the Makefile
testpaths.file.join("Makefile")
} else {
PathBuf::from(&testpaths.file)
};
- let early_props = EarlyProps::from_file(config, &test_path);
+ let early_props = EarlyProps::from_file(&config, &test_path);
// Incremental tests are special, they inherently cannot be run in parallel.
// `runtest::run` will be responsible for iterating over revisions.
@@ -696,19 +735,22 @@ fn make_test(config: &Config, testpaths: &TestPaths, inputs: &Stamp) -> Vec<test
let src_file =
std::fs::File::open(&test_path).expect("open test file to parse ignores");
let cfg = revision.map(|v| &**v);
- let test_name = crate::make_test_name(config, testpaths, revision);
- let mut desc = make_test_description(config, test_name, &test_path, src_file, cfg);
+ let test_name = crate::make_test_name(&config, testpaths, revision);
+ let mut desc = make_test_description(&config, test_name, &test_path, src_file, cfg);
// Ignore tests that already run and are up to date with respect to inputs.
if !config.force_rerun {
desc.ignore |= is_up_to_date(
- config,
+ &config,
testpaths,
&early_props,
revision.map(|s| s.as_str()),
inputs,
);
}
- test::TestDescAndFn { desc, testfn: make_test_closure(config, testpaths, revision) }
+ test::TestDescAndFn {
+ desc,
+ testfn: make_test_closure(config.clone(), testpaths, revision),
+ }
})
.collect()
}
@@ -842,7 +884,7 @@ fn make_test_name(
}
fn make_test_closure(
- config: &Config,
+ config: Arc<Config>,
testpaths: &TestPaths,
revision: Option<&String>,
) -> test::TestFn {
@@ -1066,3 +1108,24 @@ fn extract_lldb_version(full_version_line: &str) -> Option<(u32, bool)> {
fn not_a_digit(c: char) -> bool {
!c.is_digit(10)
}
+
+fn check_overlapping_tests(found_paths: &BTreeSet<PathBuf>) {
+ let mut collisions = Vec::new();
+ for path in found_paths {
+ for ancestor in path.ancestors().skip(1) {
+ if found_paths.contains(ancestor) {
+ collisions.push((path, ancestor.clone()));
+ }
+ }
+ }
+ if !collisions.is_empty() {
+ let collisions: String = collisions
+ .into_iter()
+ .map(|(path, check_parent)| format!("test {path:?} clashes with {check_parent:?}\n"))
+ .collect();
+ panic!(
+ "{collisions}\n\
+ Tests cannot have overlapping names. Make sure they use unique prefixes."
+ );
+ }
+}
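A small standalone sketch (hypothetical paths) of the overlap check added above: any found path whose ancestor is also a found path is reported as a clash:

use std::collections::BTreeSet;
use std::path::{Path, PathBuf};

// A test path that is a prefix of another test path would produce overlapping test names,
// so report every found path whose ancestor is also a found path.
fn overlapping(found: &BTreeSet<PathBuf>) -> Vec<(&Path, &Path)> {
    let mut collisions = Vec::new();
    for path in found {
        for ancestor in path.ancestors().skip(1) {
            if found.contains(ancestor) {
                collisions.push((path.as_path(), ancestor));
            }
        }
    }
    collisions
}

fn main() {
    let found: BTreeSet<PathBuf> =
        ["ui/issue-100", "ui/issue-100/nested", "ui/other"].into_iter().map(PathBuf::from).collect();
    // "ui/issue-100/nested" clashes with "ui/issue-100".
    assert_eq!(overlapping(&found).len(), 1);
}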
diff --git a/src/tools/compiletest/src/read2.rs b/src/tools/compiletest/src/read2.rs
index a5dc68597..725f7a151 100644
--- a/src/tools/compiletest/src/read2.rs
+++ b/src/tools/compiletest/src/read2.rs
@@ -232,7 +232,7 @@ mod imp {
use miow::iocp::{CompletionPort, CompletionStatus};
use miow::pipe::NamedPipe;
use miow::Overlapped;
- use winapi::shared::winerror::ERROR_BROKEN_PIPE;
+ use windows::Win32::Foundation::ERROR_BROKEN_PIPE;
struct Pipe<'a> {
dst: &'a mut Vec<u8>,
@@ -295,7 +295,7 @@ mod imp {
match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
Ok(_) => Ok(()),
Err(e) => {
- if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+ if e.raw_os_error() == Some(ERROR_BROKEN_PIPE.0 as i32) {
self.done = true;
Ok(())
} else {
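The winapi-to-windows migration above matters because constants such as ERROR_BROKEN_PIPE are now typed newtypes rather than bare integers, hence the `.0` access; a rough sketch of that pattern with hypothetical stand-in types (not the actual windows crate definitions):

// Hypothetical stand-in for the generated newtype wrappers in the windows crate.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Win32Error(u32);

// Assumed value: ERROR_BROKEN_PIPE is 109 on Windows.
const ERROR_BROKEN_PIPE: Win32Error = Win32Error(109);

fn is_broken_pipe(raw_os_error: Option<i32>) -> bool {
    // `.0` exposes the underlying u32, mirroring `ERROR_BROKEN_PIPE.0 as i32` above.
    raw_os_error == Some(ERROR_BROKEN_PIPE.0 as i32)
}

fn main() {
    assert!(is_broken_pipe(Some(109)));
    assert!(!is_broken_pipe(Some(2)));
}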
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
index 41c23ff86..7f0b894f5 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
@@ -30,6 +30,7 @@ use std::iter;
use std::path::{Path, PathBuf};
use std::process::{Child, Command, ExitStatus, Output, Stdio};
use std::str;
+use std::sync::Arc;
use glob::glob;
use once_cell::sync::Lazy;
@@ -49,8 +50,10 @@ const FAKE_SRC_BASE: &str = "fake-test-src-base";
#[cfg(windows)]
fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
use std::sync::Mutex;
- use winapi::um::errhandlingapi::SetErrorMode;
- use winapi::um::winbase::SEM_NOGPFAULTERRORBOX;
+
+ use windows::Win32::System::Diagnostics::Debug::{
+ SetErrorMode, SEM_NOGPFAULTERRORBOX, THREAD_ERROR_MODE,
+ };
static LOCK: Mutex<()> = Mutex::new(());
@@ -62,6 +65,7 @@ fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
// termination by design. This mode is inherited by all child processes.
unsafe {
let old_mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
+ let old_mode = THREAD_ERROR_MODE(old_mode);
SetErrorMode(old_mode | SEM_NOGPFAULTERRORBOX);
let r = f();
SetErrorMode(old_mode);
@@ -93,7 +97,7 @@ pub fn get_lib_name(lib: &str, dylib: bool) -> String {
}
}
-pub fn run(config: Config, testpaths: &TestPaths, revision: Option<&str>) {
+pub fn run(config: Arc<Config>, testpaths: &TestPaths, revision: Option<&str>) {
match &*config.target {
"arm-linux-androideabi"
| "armv7-linux-androideabi"
@@ -278,13 +282,15 @@ impl<'test> TestCx<'test> {
Incremental => {
let revision =
self.revision.expect("incremental tests require a list of revisions");
- if revision.starts_with("rpass") || revision.starts_with("rfail") {
+ if revision.starts_with("cpass")
+ || revision.starts_with("rpass")
+ || revision.starts_with("rfail")
+ {
true
} else if revision.starts_with("cfail") {
- // FIXME: would be nice if incremental revs could start with "cpass"
pm.is_some()
} else {
- panic!("revision name must begin with rpass, rfail, or cfail");
+ panic!("revision name must begin with cpass, rpass, rfail, or cfail");
}
}
mode => panic!("unimplemented for mode {:?}", mode),
@@ -304,7 +310,9 @@ impl<'test> TestCx<'test> {
);
}
- self.check_correct_failure_status(proc_res);
+ if !self.props.dont_check_failure_status {
+ self.check_correct_failure_status(proc_res);
+ }
}
}
@@ -384,6 +392,20 @@ impl<'test> TestCx<'test> {
}
}
+ fn run_cpass_test(&self) {
+ let emit_metadata = self.should_emit_metadata(self.pass_mode());
+ let proc_res = self.compile_test(WillExecute::No, emit_metadata);
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ // FIXME(#41968): Move this check to tidy?
+ if !errors::load_errors(&self.testpaths.file, self.revision).is_empty() {
+ self.fatal("compile-pass tests with expected warnings should be moved to ui/");
+ }
+ }
+
fn run_rpass_test(&self) {
let emit_metadata = self.should_emit_metadata(self.pass_mode());
let should_run = self.run_if_enabled();
@@ -393,17 +415,15 @@ impl<'test> TestCx<'test> {
self.fatal_proc_rec("compilation failed!", &proc_res);
}
+ // FIXME(#41968): Move this check to tidy?
+ if !errors::load_errors(&self.testpaths.file, self.revision).is_empty() {
+ self.fatal("run-pass tests with expected warnings should be moved to ui/");
+ }
+
if let WillExecute::Disabled = should_run {
return;
}
- // FIXME(#41968): Move this check to tidy?
- let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
- assert!(
- expected_errors.is_empty(),
- "run-pass tests with expected warnings should be moved to ui/"
- );
-
let proc_res = self.exec_compiled_test();
if !proc_res.status.success() {
self.fatal_proc_rec("test run failed!", &proc_res);
@@ -983,7 +1003,12 @@ impl<'test> TestCx<'test> {
&["-quiet".as_ref(), "-batch".as_ref(), "-nx".as_ref(), &debugger_script];
let mut gdb = Command::new(self.config.gdb.as_ref().unwrap());
- gdb.args(debugger_opts).env("PYTHONPATH", rust_pp_module_abs_path);
+ let pythonpath = if let Ok(pp) = std::env::var("PYTHONPATH") {
+ format!("{pp}:{rust_pp_module_abs_path}")
+ } else {
+ rust_pp_module_abs_path
+ };
+ gdb.args(debugger_opts).env("PYTHONPATH", pythonpath);
debugger_run_result =
self.compose_and_run(gdb, self.config.run_lib_path.to_str().unwrap(), None, None);
@@ -1149,13 +1174,18 @@ impl<'test> TestCx<'test> {
) -> ProcRes {
// Prepare the lldb_batchmode which executes the debugger script
let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py");
+ let pythonpath = if let Ok(pp) = std::env::var("PYTHONPATH") {
+ format!("{pp}:{}", self.config.lldb_python_dir.as_ref().unwrap())
+ } else {
+ self.config.lldb_python_dir.as_ref().unwrap().to_string()
+ };
self.cmd2procres(
Command::new(&self.config.python)
.arg(&lldb_script_path)
.arg(test_executable)
.arg(debugger_script)
.env("PYTHONUNBUFFERED", "1") // Help debugging #78665
- .env("PYTHONPATH", self.config.lldb_python_dir.as_ref().unwrap()),
+ .env("PYTHONPATH", pythonpath),
)
}
@@ -1540,7 +1570,7 @@ impl<'test> TestCx<'test> {
rustdoc.arg("--output-format").arg("json").arg("-Zunstable-options");
}
- if let Some(ref linker) = self.config.linker {
+ if let Some(ref linker) = self.config.target_linker {
rustdoc.arg(format!("-Clinker={}", linker));
}
@@ -1583,8 +1613,13 @@ impl<'test> TestCx<'test> {
test_client
.args(&["run", &support_libs.len().to_string(), &prog])
.args(support_libs)
- .args(args)
- .envs(env.clone());
+ .args(args);
+
+ for key in &self.props.unset_exec_env {
+ test_client.env_remove(key);
+ }
+ test_client.envs(env.clone());
+
self.compose_and_run(
test_client,
self.config.run_lib_path.to_str().unwrap(),
@@ -1596,7 +1631,13 @@ impl<'test> TestCx<'test> {
let aux_dir = self.aux_output_dir_name();
let ProcArgs { prog, args } = self.make_run_args();
let mut wr_run = Command::new("wr-run");
- wr_run.args(&[&prog]).args(args).envs(env.clone());
+ wr_run.args(&[&prog]).args(args);
+
+ for key in &self.props.unset_exec_env {
+ wr_run.env_remove(key);
+ }
+ wr_run.envs(env.clone());
+
self.compose_and_run(
wr_run,
self.config.run_lib_path.to_str().unwrap(),
@@ -1608,7 +1649,13 @@ impl<'test> TestCx<'test> {
let aux_dir = self.aux_output_dir_name();
let ProcArgs { prog, args } = self.make_run_args();
let mut program = Command::new(&prog);
- program.args(args).current_dir(&self.output_base_dir()).envs(env.clone());
+ program.args(args).current_dir(&self.output_base_dir());
+
+ for key in &self.props.unset_exec_env {
+ program.env_remove(key);
+ }
+ program.envs(env.clone());
+
self.compose_and_run(
program,
self.config.run_lib_path.to_str().unwrap(),
@@ -2053,10 +2100,15 @@ impl<'test> TestCx<'test> {
if self.props.force_host {
self.maybe_add_external_args(&mut rustc, &self.config.host_rustcflags);
+ if !is_rustdoc {
+ if let Some(ref linker) = self.config.host_linker {
+ rustc.arg(format!("-Clinker={}", linker));
+ }
+ }
} else {
self.maybe_add_external_args(&mut rustc, &self.config.target_rustcflags);
if !is_rustdoc {
- if let Some(ref linker) = self.config.linker {
+ if let Some(ref linker) = self.config.target_linker {
rustc.arg(format!("-Clinker={}", linker));
}
}
@@ -2105,7 +2157,7 @@ impl<'test> TestCx<'test> {
if let Some(ref p) = self.config.nodejs {
args.push(p.clone());
} else {
- self.fatal("no NodeJS binary found (--nodejs)");
+ self.fatal("emscripten target requested and no NodeJS binary found (--nodejs)");
}
// If this is otherwise wasm, then run tests under nodejs with our
// shim
@@ -2113,7 +2165,7 @@ impl<'test> TestCx<'test> {
if let Some(ref p) = self.config.nodejs {
args.push(p.clone());
} else {
- self.fatal("no NodeJS binary found (--nodejs)");
+ self.fatal("wasm32 target requested and no NodeJS binary found (--nodejs)");
}
let src = self
@@ -2903,10 +2955,11 @@ impl<'test> TestCx<'test> {
fn run_incremental_test(&self) {
// Basic plan for a test incremental/foo/bar.rs:
// - load list of revisions rpass1, cfail2, rpass3
- // - each should begin with `rpass`, `cfail`, or `rfail`
- // - if `rpass`, expect compile and execution to succeed
+ // - each should begin with `cpass`, `rpass`, `cfail`, or `rfail`
+ // - if `cpass`, expect compilation to succeed, don't execute
+ // - if `rpass`, expect compilation and execution to succeed
// - if `cfail`, expect compilation to fail
- // - if `rfail`, expect execution to fail
+ // - if `rfail`, expect compilation to succeed and execution to fail
// - create a directory build/foo/bar.incremental
// - compile foo/bar.rs with -C incremental=.../foo/bar.incremental and -C rpass1
// - because name of revision starts with "rpass", expect success
@@ -2930,7 +2983,12 @@ impl<'test> TestCx<'test> {
print!("revision={:?} props={:#?}", revision, self.props);
}
- if revision.starts_with("rpass") {
+ if revision.starts_with("cpass") {
+ if self.props.should_ice {
+ self.fatal("can only use should-ice in cfail tests");
+ }
+ self.run_cpass_test();
+ } else if revision.starts_with("rpass") {
if self.props.should_ice {
self.fatal("can only use should-ice in cfail tests");
}
@@ -2943,7 +3001,7 @@ impl<'test> TestCx<'test> {
} else if revision.starts_with("cfail") {
self.run_cfail_test();
} else {
- self.fatal("revision name must begin with rpass, rfail, or cfail");
+ self.fatal("revision name must begin with cpass, rpass, rfail, or cfail");
}
}
@@ -2963,6 +3021,7 @@ impl<'test> TestCx<'test> {
|| host.contains("freebsd")
|| host.contains("netbsd")
|| host.contains("openbsd")
+ || host.contains("aix")
{
"gmake"
} else {
@@ -3002,7 +3061,7 @@ impl<'test> TestCx<'test> {
cmd.env("NODE", node);
}
- if let Some(ref linker) = self.config.linker {
+ if let Some(ref linker) = self.config.target_linker {
cmd.env("RUSTC_LINKER", linker);
}
diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs
index 5f6a27e53..748240cc9 100644
--- a/src/tools/compiletest/src/util.rs
+++ b/src/tools/compiletest/src/util.rs
@@ -156,6 +156,8 @@ pub fn dylib_env_var() -> &'static str {
"DYLD_LIBRARY_PATH"
} else if cfg!(target_os = "haiku") {
"LIBRARY_PATH"
+ } else if cfg!(target_os = "aix") {
+ "LIBPATH"
} else {
"LD_LIBRARY_PATH"
}
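With LIBPATH added for AIX, callers that extend the dynamic-library search path stay platform-agnostic by going through dylib_env_var. A hedged sketch of such a caller (with_lib_dir is an illustrative helper, not compiletest code):

    use std::env;
    use std::process::Command;

    // Prepend `lib_dir` to the platform's dynamic-library search variable
    // (e.g. LIBPATH on AIX, LD_LIBRARY_PATH on most other Unixes) before
    // spawning a test binary.
    fn with_lib_dir(mut cmd: Command, var: &'static str, lib_dir: &str) -> Command {
        let mut paths: Vec<std::path::PathBuf> = env::var_os(var)
            .map(|v| env::split_paths(&v).collect())
            .unwrap_or_default();
        paths.insert(0, lib_dir.into());
        cmd.env(var, env::join_paths(paths).expect("library search paths should not contain the separator"));
        cmd
    }

    fn main() {
        let cmd = with_lib_dir(Command::new("true"), "LD_LIBRARY_PATH", "/tmp/test-libs");
        println!("{cmd:?}");
    }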
diff --git a/src/tools/generate-copyright/src/main.rs b/src/tools/generate-copyright/src/main.rs
index d172c9e15..60c771676 100644
--- a/src/tools/generate-copyright/src/main.rs
+++ b/src/tools/generate-copyright/src/main.rs
@@ -20,24 +20,24 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
let prefix = std::iter::repeat("> ").take(depth + 1).collect::<String>();
match node {
- Node::Root { childs } => {
- for child in childs {
+ Node::Root { children } => {
+ for child in children {
render_recursive(child, buffer, depth)?;
}
}
- Node::Directory { name, childs, license } => {
+ Node::Directory { name, children, license } => {
render_license(&prefix, std::iter::once(name), license, buffer)?;
- if !childs.is_empty() {
+ if !children.is_empty() {
writeln!(buffer, "{prefix}")?;
writeln!(buffer, "{prefix}*Exceptions:*")?;
- for child in childs {
+ for child in children {
writeln!(buffer, "{prefix}")?;
render_recursive(child, buffer, depth + 1)?;
}
}
}
- Node::FileGroup { names, license } => {
- render_license(&prefix, names.iter(), license, buffer)?;
+ Node::Group { files, directories, license } => {
+ render_license(&prefix, directories.iter().chain(files.iter()), license, buffer)?;
}
Node::File { name, license } => {
render_license(&prefix, std::iter::once(name), license, buffer)?;
@@ -73,10 +73,10 @@ struct Metadata {
#[derive(serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node {
- Root { childs: Vec<Node> },
- Directory { name: String, childs: Vec<Node>, license: License },
+ Root { children: Vec<Node> },
+ Directory { name: String, children: Vec<Node>, license: License },
File { name: String, license: License },
- FileGroup { names: Vec<String>, license: License },
+ Group { files: Vec<String>, directories: Vec<String>, license: License },
}
#[derive(serde::Deserialize)]
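Renaming FileGroup to Group (with separate files and directories lists) changes the JSON shape this tool accepts. A minimal sketch of how the kebab-case, internally tagged enum deserializes; License is simplified to a plain string here, the JSON sample is made up, and serde with the derive feature plus serde_json are assumed as dependencies:

    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    #[serde(rename_all = "kebab-case", tag = "type")]
    enum Node {
        Group { files: Vec<String>, directories: Vec<String>, license: String },
        File { name: String, license: String },
    }

    fn main() -> Result<(), serde_json::Error> {
        let json = r#"{
            "type": "group",
            "files": ["foo.rs"],
            "directories": ["vendor"],
            "license": "MIT"
        }"#;
        let node: Node = serde_json::from_str(json)?;
        println!("{node:?}");
        Ok(())
    }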
diff --git a/src/tools/jsondocck/src/main.rs b/src/tools/jsondocck/src/main.rs
index 76770fe36..e3d05ec83 100644
--- a/src/tools/jsondocck/src/main.rs
+++ b/src/tools/jsondocck/src/main.rs
@@ -237,7 +237,7 @@ fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
// Serde json doesn't implement Ord or Hash for Value, so we must
// use a Vec here. While in theory that makes setwise equality
- // O(n^2), in practice n will never be large enought to matter.
+ // O(n^2), in practice n will never be large enough to matter.
let expected_values =
values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
if expected_values.len() != got_values.len() {
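The corrected comment describes a deliberately quadratic set-wise comparison, since serde_json::Value implements neither Ord nor Hash. A standalone sketch of that comparison (not jsondocck's exact function):

    use serde_json::{json, Value};

    // Set-wise equality over slices of Value: every expected value must match a
    // distinct got value. O(n^2), but n stays small in practice.
    fn set_eq(expected: &[Value], got: &[Value]) -> bool {
        if expected.len() != got.len() {
            return false;
        }
        let mut used = vec![false; got.len()];
        for e in expected {
            let found = got
                .iter()
                .enumerate()
                .find(|(i, g)| !used[*i] && *g == e)
                .map(|(i, _)| i);
            match found {
                Some(i) => used[i] = true,
                None => return false,
            }
        }
        true
    }

    fn main() {
        assert!(set_eq(&[json!(1), json!("a")], &[json!("a"), json!(1)]));
        assert!(!set_eq(&[json!(1), json!(1)], &[json!(1), json!(2)]));
    }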
diff --git a/src/tools/jsondoclint/Cargo.toml b/src/tools/jsondoclint/Cargo.toml
index 8990310a4..1318a1f44 100644
--- a/src/tools/jsondoclint/Cargo.toml
+++ b/src/tools/jsondoclint/Cargo.toml
@@ -9,6 +9,7 @@ edition = "2021"
anyhow = "1.0.62"
clap = { version = "4.0.15", features = ["derive"] }
fs-err = "2.8.1"
+rustc-hash = "1.1.0"
rustdoc-json-types = { version = "0.1.0", path = "../../rustdoc-json-types" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.85"
diff --git a/src/tools/jsondoclint/src/item_kind.rs b/src/tools/jsondoclint/src/item_kind.rs
index b395c6e7d..45a9c93ee 100644
--- a/src/tools/jsondoclint/src/item_kind.rs
+++ b/src/tools/jsondoclint/src/item_kind.rs
@@ -1,6 +1,6 @@
use rustdoc_json_types::{Item, ItemEnum, ItemKind, ItemSummary};
-/// A univeral way to represent an [`ItemEnum`] or [`ItemKind`]
+/// A universal way to represent an [`ItemEnum`] or [`ItemKind`]
#[derive(Debug, Clone, Copy)]
pub(crate) enum Kind {
Module,
@@ -53,7 +53,7 @@ impl Kind {
Primitive => true,
ForeignType => true,
- // FIXME(adotinthevoid): I'm not sure if these are corrent
+ // FIXME(adotinthevoid): I'm not sure if these are correct
Keyword => false,
OpaqueTy => false,
ProcAttribute => false,
diff --git a/src/tools/jsondoclint/src/main.rs b/src/tools/jsondoclint/src/main.rs
index 05e938f4f..ee163ddfd 100644
--- a/src/tools/jsondoclint/src/main.rs
+++ b/src/tools/jsondoclint/src/main.rs
@@ -72,7 +72,7 @@ fn main() -> Result<()> {
)
}
[sel] => eprintln!(
- "{} not in index or paths, but refered to at '{}'",
+ "{} not in index or paths, but referred to at '{}'",
err.id.0,
json_find::to_jsonpath(&sel)
),
@@ -85,12 +85,12 @@ fn main() -> Result<()> {
.collect::<Vec<_>>()
.join(", ");
eprintln!(
- "{} not in index or paths, but refered to at {sels}",
+ "{} not in index or paths, but referred to at {sels}",
err.id.0
);
} else {
eprintln!(
- "{} not in index or paths, but refered to at '{}' and {} more",
+ "{} not in index or paths, but referred to at '{}' and {} more",
err.id.0,
json_find::to_jsonpath(&sel),
sels.len() - 1,
diff --git a/src/tools/jsondoclint/src/validator/tests.rs b/src/tools/jsondoclint/src/validator/tests.rs
index 1ef41ff12..95a56a9df 100644
--- a/src/tools/jsondoclint/src/validator/tests.rs
+++ b/src/tools/jsondoclint/src/validator/tests.rs
@@ -1,5 +1,4 @@
-use std::collections::HashMap;
-
+use rustc_hash::FxHashMap;
use rustdoc_json_types::{Crate, Item, ItemKind, ItemSummary, Visibility, FORMAT_VERSION};
use crate::json_find::SelectorPart;
@@ -27,7 +26,7 @@ fn errors_on_missing_links() {
root: id("0"),
crate_version: None,
includes_private: false,
- index: HashMap::from_iter([(
+ index: FxHashMap::from_iter([(
id("0"),
Item {
name: Some("root".to_owned()),
@@ -36,7 +35,7 @@ fn errors_on_missing_links() {
span: None,
visibility: Visibility::Public,
docs: None,
- links: HashMap::from_iter([("Not Found".to_owned(), id("1"))]),
+ links: FxHashMap::from_iter([("Not Found".to_owned(), id("1"))]),
attrs: vec![],
deprecation: None,
inner: ItemEnum::Module(Module {
@@ -46,8 +45,8 @@ fn errors_on_missing_links() {
}),
},
)]),
- paths: HashMap::new(),
- external_crates: HashMap::new(),
+ paths: FxHashMap::default(),
+ external_crates: FxHashMap::default(),
format_version: rustdoc_json_types::FORMAT_VERSION,
};
@@ -73,7 +72,7 @@ fn errors_on_local_in_paths_and_not_index() {
root: id("0:0:1572"),
crate_version: None,
includes_private: false,
- index: HashMap::from_iter([
+ index: FxHashMap::from_iter([
(
id("0:0:1572"),
Item {
@@ -83,7 +82,7 @@ fn errors_on_local_in_paths_and_not_index() {
span: None,
visibility: Visibility::Public,
docs: None,
- links: HashMap::from_iter([(("prim@i32".to_owned(), id("0:1:1571")))]),
+ links: FxHashMap::from_iter([(("prim@i32".to_owned(), id("0:1:1571")))]),
attrs: Vec::new(),
deprecation: None,
inner: ItemEnum::Module(Module {
@@ -102,14 +101,14 @@ fn errors_on_local_in_paths_and_not_index() {
span: None,
visibility: Visibility::Public,
docs: None,
- links: HashMap::default(),
+ links: FxHashMap::default(),
attrs: Vec::new(),
deprecation: None,
inner: ItemEnum::Primitive(Primitive { name: "i32".to_owned(), impls: vec![] }),
},
),
]),
- paths: HashMap::from_iter([(
+ paths: FxHashMap::from_iter([(
id("0:1:1571"),
ItemSummary {
crate_id: 0,
@@ -117,7 +116,7 @@ fn errors_on_local_in_paths_and_not_index() {
kind: ItemKind::Primitive,
},
)]),
- external_crates: HashMap::default(),
+ external_crates: FxHashMap::default(),
format_version: rustdoc_json_types::FORMAT_VERSION,
};
@@ -137,7 +136,7 @@ fn checks_local_crate_id_is_correct() {
root: id("root"),
crate_version: None,
includes_private: false,
- index: HashMap::from_iter([(
+ index: FxHashMap::from_iter([(
id("root"),
Item {
id: id("root"),
@@ -146,7 +145,7 @@ fn checks_local_crate_id_is_correct() {
span: None,
visibility: Visibility::Public,
docs: None,
- links: HashMap::default(),
+ links: FxHashMap::default(),
attrs: Vec::new(),
deprecation: None,
inner: ItemEnum::Module(Module {
@@ -156,8 +155,8 @@ fn checks_local_crate_id_is_correct() {
}),
},
)]),
- paths: HashMap::default(),
- external_crates: HashMap::default(),
+ paths: FxHashMap::default(),
+ external_crates: FxHashMap::default(),
format_version: FORMAT_VERSION,
};
check(&krate, &[]);
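Because FxHashMap is just HashMap with a non-default hasher, these test fixtures port almost mechanically; the visible difference is that `HashMap::new` is only defined for the default hasher, so `default()` takes its place, while `from_iter` keeps working. A small sketch (assumes the rustc-hash dependency added above):

    use rustc_hash::FxHashMap;

    fn main() {
        // `FxHashMap::new()` does not compile for the custom hasher; use `default()`.
        let mut links: FxHashMap<String, u32> = FxHashMap::default();
        links.insert("Not Found".to_owned(), 1);

        // `from_iter` works unchanged because it is generic over the hasher.
        let paths: FxHashMap<&str, u32> = FxHashMap::from_iter([("prim@i32", 2)]);
        assert_eq!(paths["prim@i32"], 2);
    }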
diff --git a/src/tools/lint-docs/src/lib.rs b/src/tools/lint-docs/src/lib.rs
index 3842a649c..034c6aa07 100644
--- a/src/tools/lint-docs/src/lib.rs
+++ b/src/tools/lint-docs/src/lib.rs
@@ -45,6 +45,36 @@ impl Lint {
fn check_style(&self) -> Result<(), Box<dyn Error>> {
for &expected in &["### Example", "### Explanation", "{{produces}}"] {
if expected == "{{produces}}" && self.is_ignored() {
+ if self.doc_contains("{{produces}}") {
+ return Err(format!(
+ "the lint example has `ignore`, but also contains the {{{{produces}}}} marker\n\
+ \n\
+ The documentation generator cannot generate the example output when the \
+ example is ignored.\n\
+ Manually include the sample output below the example. For example:\n\
+ \n\
+ /// ```rust,ignore (needs command line option)\n\
+ /// #[cfg(widnows)]\n\
+ /// fn foo() {{}}\n\
+ /// ```\n\
+ ///\n\
+ /// This will produce:\n\
+ /// \n\
+ /// ```text\n\
+ /// warning: unknown condition name used\n\
+ /// --> lint_example.rs:1:7\n\
+ /// |\n\
+ /// 1 | #[cfg(widnows)]\n\
+ /// | ^^^^^^^\n\
+ /// |\n\
+ /// = note: `#[warn(unexpected_cfgs)]` on by default\n\
+ /// ```\n\
+ \n\
+ Replace the output with the text of the example you \
+ compiled manually yourself.\n\
+ "
+ ).into());
+ }
continue;
}
if !self.doc_contains(expected) {
@@ -317,10 +347,10 @@ impl<'a> LintExtractor<'a> {
..,
&format!(
"This will produce:\n\
- \n\
- ```text\n\
- {}\
- ```",
+ \n\
+ ```text\n\
+ {}\
+ ```",
output
),
);
@@ -392,37 +422,36 @@ impl<'a> LintExtractor<'a> {
.filter(|line| line.starts_with('{'))
.map(serde_json::from_str)
.collect::<Result<Vec<serde_json::Value>, _>>()?;
- match msgs
+ // First try to find the messages with the `code` field set to our lint.
+ let matches: Vec<_> = msgs
.iter()
- .find(|msg| matches!(&msg["code"]["code"], serde_json::Value::String(s) if s==name))
- {
- Some(msg) => {
- let rendered = msg["rendered"].as_str().expect("rendered field should exist");
- Ok(rendered.to_string())
- }
- None => {
- match msgs.iter().find(
- |msg| matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name)),
- ) {
- Some(msg) => {
- let rendered = msg["rendered"].as_str().expect("rendered field should exist");
- Ok(rendered.to_string())
- }
- None => {
- let rendered: Vec<&str> =
- msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect();
- let non_json: Vec<&str> =
- stderr.lines().filter(|line| !line.starts_with('{')).collect();
- Err(format!(
- "did not find lint `{}` in output of example, got:\n{}\n{}",
- name,
- non_json.join("\n"),
- rendered.join("\n")
- )
- .into())
- }
- }
+ .filter(|msg| matches!(&msg["code"]["code"], serde_json::Value::String(s) if s==name))
+ .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string())
+ .collect();
+ if matches.is_empty() {
+ // Some lints override their code to something else (E0566).
+ // Try to find something that looks like it could be our lint.
+ let matches: Vec<_> = msgs.iter().filter(|msg|
+ matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name)))
+ .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string())
+ .collect();
+ if matches.is_empty() {
+ let rendered: Vec<&str> =
+ msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect();
+ let non_json: Vec<&str> =
+ stderr.lines().filter(|line| !line.starts_with('{')).collect();
+ Err(format!(
+ "did not find lint `{}` in output of example, got:\n{}\n{}",
+ name,
+ non_json.join("\n"),
+ rendered.join("\n")
+ )
+ .into())
+ } else {
+ Ok(matches.join("\n"))
}
+ } else {
+ Ok(matches.join("\n"))
}
}
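The rewritten lookup collects every diagnostic whose `code.code` equals the lint name and joins their rendered text, falling back to a substring match on `rendered` for lints that override their code (such as E0566). A hedged, standalone sketch of that selection over serde_json values (find_lint_output is an illustrative name, and the sample message is made up):

    use serde_json::{json, Value};

    // Collect the `rendered` text of every message whose code matches `name`,
    // falling back to a substring search when lints override their code.
    fn find_lint_output(msgs: &[Value], name: &str) -> Option<String> {
        let by_code: Vec<String> = msgs
            .iter()
            .filter(|m| matches!(&m["code"]["code"], Value::String(s) if s == name))
            .filter_map(|m| m["rendered"].as_str().map(|s| s.to_string()))
            .collect();
        if !by_code.is_empty() {
            return Some(by_code.join("\n"));
        }
        let by_text: Vec<String> = msgs
            .iter()
            .filter(|m| matches!(&m["rendered"], Value::String(s) if s.contains(name)))
            .filter_map(|m| m["rendered"].as_str().map(|s| s.to_string()))
            .collect();
        (!by_text.is_empty()).then(|| by_text.join("\n"))
    }

    fn main() {
        let msgs = vec![json!({
            "code": { "code": "unused_variables" },
            "rendered": "warning: unused variable: `x`\n"
        })];
        assert!(find_lint_output(&msgs, "unused_variables").is_some());
    }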
diff --git a/src/tools/publish_toolstate.py b/src/tools/publish_toolstate.py
index 395bcc745..2018c239b 100755
--- a/src/tools/publish_toolstate.py
+++ b/src/tools/publish_toolstate.py
@@ -86,7 +86,7 @@ def gh_url():
return os.environ['TOOLSTATE_ISSUES_API_URL']
-def maybe_delink(message):
+def maybe_remove_mention(message):
# type: (str) -> str
if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
return message.replace("@", "")
@@ -109,7 +109,7 @@ def issue(
else:
status_description = 'no longer builds'
request = json.dumps({
- 'body': maybe_delink(textwrap.dedent('''\
+ 'body': maybe_remove_mention(textwrap.dedent('''\
Hello, this is your friendly neighborhood mergebot.
After merging PR {}, I observed that the tool {} {}.
A follow-up PR to the repository {} is needed to fix the fallout.
@@ -285,7 +285,7 @@ try:
issue_url = gh_url() + '/{}/comments'.format(number)
response = urllib2.urlopen(urllib2.Request(
issue_url,
- json.dumps({'body': maybe_delink(message)}).encode(),
+ json.dumps({'body': maybe_remove_mention(message)}).encode(),
{
'Authorization': 'token ' + github_token,
'Content-Type': 'application/json',
diff --git a/src/tools/replace-version-placeholder/src/main.rs b/src/tools/replace-version-placeholder/src/main.rs
index 33b35d054..0aebfc4aa 100644
--- a/src/tools/replace-version-placeholder/src/main.rs
+++ b/src/tools/replace-version-placeholder/src/main.rs
@@ -10,7 +10,7 @@ fn main() {
let version_str = version_str.trim();
walk::walk(
&root_path,
- &mut |path| {
+ |path, _is_dir| {
walk::filter_dirs(path)
// We exempt these as they require the placeholder
// for their operation
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index ec1977672..25242c602 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -169,9 +169,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
-version = "0.88.0"
+version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4df80a3fbc1f0e59f560eeeebca94bf655566a8ad3023c210a109deb6056455a"
+checksum = "ea176c50987dc4765961aa165001e8eb5a722a26308c5797a47303ea91686aab"
dependencies = [
"proc-macro2",
"quote",
@@ -181,9 +181,9 @@ dependencies = [
[[package]]
name = "chalk-ir"
-version = "0.88.0"
+version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f39e5272016916956298cceea5147006f897972c274a768ed4d6e074efe5d3fb"
+checksum = "473b480241695428c14e8f84f1c9a47ef232450a50faf3a4041e5c9dc11e0a3b"
dependencies = [
"bitflags",
"chalk-derive",
@@ -192,9 +192,9 @@ dependencies = [
[[package]]
name = "chalk-recursive"
-version = "0.88.0"
+version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9d60b42ad7478d3e027e2f9ea4e99fbbb8fdee0c8c3cf068be269f57e603618"
+checksum = "6764b4fe67cac3a3758185084efbfbd39bf0352795824ba849ddd2b64cd4bb28"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -205,9 +205,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.88.0"
+version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab30620ea5b36819525eaab2204f4b8e1842fc7ee36826424a28bef59ae7fecf"
+checksum = "55a7e6160966eceb6e7dcc2f479a2af4c477aaf5bccbc640d82515995ab1a6cc"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -572,6 +572,7 @@ dependencies = [
"chalk-recursive",
"chalk-solve",
"cov-mark",
+ "either",
"ena",
"expect-test",
"hir-def",
@@ -1714,6 +1715,7 @@ name = "syntax"
version = "0.0.0"
dependencies = [
"cov-mark",
+ "either",
"expect-test",
"indexmap",
"itertools",
diff --git a/src/tools/rust-analyzer/bench_data/numerous_macro_rules b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
index bf89ed594..7610a3ae1 100644
--- a/src/tools/rust-analyzer/bench_data/numerous_macro_rules
+++ b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
@@ -341,8 +341,8 @@ macro_rules! __ra_macro_fixture339 {($name : ident )=>{ impl Clone for $name
macro_rules! __ra_macro_fixture340 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]{$($tail : tt )* })=>{$($stack )* { remove_sections_inner ! ([]$($tail )*); }}; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections ! ([$($stack )* $t ]$($tail )*); }; }
macro_rules! __ra_macro_fixture341 {($t : ty ,$z : expr )=>{ impl Zero for $t { fn zero ()-> Self {$z as $t } fn is_zero (& self )-> bool { self == & Self :: zero ()}}}; }
macro_rules! __ra_macro_fixture342 {($($ident : ident ),* $(,)?)=>{$(# [ allow ( bad_style )] pub const $ident : super :: Name = super :: Name :: new_inline ( stringify ! ($ident )); )* }; }
-macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; }
-macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn AstDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; }
+macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn ExpandDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; }
+macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn ExpandDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn ExpandDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; }
macro_rules! __ra_macro_fixture345 {($($ty : ty =>$this : ident $im : block );*)=>{$(impl ToTokenTree for $ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . into (); leaf . into ()}} impl ToTokenTree for &$ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . clone (). into (); leaf . into ()}})* }}
macro_rules! __ra_macro_fixture346 {($name : ident )=>{ impl $crate :: salsa :: InternKey for $name { fn from_intern_id ( v : $crate :: salsa :: InternId )-> Self {$name ( v )} fn as_intern_id (& self )-> $crate :: salsa :: InternId { self . 0 }}}; }
macro_rules! __ra_macro_fixture347 {($($var : ident ($t : ty )),+ )=>{$(impl From <$t > for AttrOwner { fn from ( t : $t )-> AttrOwner { AttrOwner ::$var ( t )}})+ }; }
diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
index 11f7b068e..accb14a51 100644
--- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
@@ -76,7 +76,7 @@ impl fmt::Display for FlycheckConfig {
#[derive(Debug)]
pub struct FlycheckHandle {
// XXX: drop order is significant
- sender: Sender<Restart>,
+ sender: Sender<StateChange>,
_thread: jod_thread::JoinHandle,
id: usize,
}
@@ -89,7 +89,7 @@ impl FlycheckHandle {
workspace_root: AbsPathBuf,
) -> FlycheckHandle {
let actor = FlycheckActor::new(id, sender, config, workspace_root);
- let (sender, receiver) = unbounded::<Restart>();
+ let (sender, receiver) = unbounded::<StateChange>();
let thread = jod_thread::Builder::new()
.name("Flycheck".to_owned())
.spawn(move || actor.run(receiver))
@@ -99,12 +99,12 @@ impl FlycheckHandle {
/// Schedule a re-start of the cargo check worker.
pub fn restart(&self) {
- self.sender.send(Restart::Yes).unwrap();
+ self.sender.send(StateChange::Restart).unwrap();
}
/// Stop this cargo check worker.
pub fn cancel(&self) {
- self.sender.send(Restart::No).unwrap();
+ self.sender.send(StateChange::Cancel).unwrap();
}
pub fn id(&self) -> usize {
@@ -149,9 +149,9 @@ pub enum Progress {
DidFailToRestart(String),
}
-enum Restart {
- Yes,
- No,
+enum StateChange {
+ Restart,
+ Cancel,
}
/// A [`FlycheckActor`] is a single check instance of a workspace.
@@ -172,7 +172,7 @@ struct FlycheckActor {
}
enum Event {
- Restart(Restart),
+ RequestStateChange(StateChange),
CheckEvent(Option<CargoMessage>),
}
@@ -191,30 +191,31 @@ impl FlycheckActor {
self.send(Message::Progress { id: self.id, progress });
}
- fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
+ fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> {
let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
if let Ok(msg) = inbox.try_recv() {
// give restarts a preference so check outputs don't block a restart or stop
- return Some(Event::Restart(msg));
+ return Some(Event::RequestStateChange(msg));
}
select! {
- recv(inbox) -> msg => msg.ok().map(Event::Restart),
+ recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange),
recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
}
}
- fn run(mut self, inbox: Receiver<Restart>) {
+ fn run(mut self, inbox: Receiver<StateChange>) {
'event: while let Some(event) = self.next_event(&inbox) {
match event {
- Event::Restart(Restart::No) => {
+ Event::RequestStateChange(StateChange::Cancel) => {
+ tracing::debug!(flycheck_id = self.id, "flycheck cancelled");
self.cancel_check_process();
}
- Event::Restart(Restart::Yes) => {
+ Event::RequestStateChange(StateChange::Restart) => {
// Cancel the previously spawned process
self.cancel_check_process();
while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) {
// restart chained with a stop, so just cancel
- if let Restart::No = restart {
+ if let StateChange::Cancel = restart {
continue 'event;
}
}
@@ -255,10 +256,20 @@ impl FlycheckActor {
}
Event::CheckEvent(Some(message)) => match message {
CargoMessage::CompilerArtifact(msg) => {
+ tracing::trace!(
+ flycheck_id = self.id,
+ artifact = msg.target.name,
+ "artifact received"
+ );
self.report_progress(Progress::DidCheckCrate(msg.target.name));
}
CargoMessage::Diagnostic(msg) => {
+ tracing::trace!(
+ flycheck_id = self.id,
+ message = msg.message,
+ "diagnostic received"
+ );
self.send(Message::AddDiagnostic {
id: self.id,
workspace_root: self.root.clone(),
@@ -445,42 +456,56 @@ impl CargoActor {
// simply skip a line if it doesn't parse, which just ignores any
// erroneous output.
- let mut error = String::new();
- let mut read_at_least_one_message = false;
+ let mut stdout_errors = String::new();
+ let mut stderr_errors = String::new();
+ let mut read_at_least_one_stdout_message = false;
+ let mut read_at_least_one_stderr_message = false;
+ let process_line = |line: &str, error: &mut String| {
+ // Try to deserialize a message from Cargo or Rustc.
+ let mut deserializer = serde_json::Deserializer::from_str(line);
+ deserializer.disable_recursion_limit();
+ if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
+ match message {
+ // Skip certain kinds of messages to only spend time on what's useful
+ JsonMessage::Cargo(message) => match message {
+ cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
+ self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+ }
+ cargo_metadata::Message::CompilerMessage(msg) => {
+ self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
+ }
+ _ => (),
+ },
+ JsonMessage::Rustc(message) => {
+ self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
+ }
+ }
+ return true;
+ }
+
+ error.push_str(line);
+ error.push('\n');
+ return false;
+ };
let output = streaming_output(
self.stdout,
self.stderr,
&mut |line| {
- read_at_least_one_message = true;
-
- // Try to deserialize a message from Cargo or Rustc.
- let mut deserializer = serde_json::Deserializer::from_str(line);
- deserializer.disable_recursion_limit();
- if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
- match message {
- // Skip certain kinds of messages to only spend time on what's useful
- JsonMessage::Cargo(message) => match message {
- cargo_metadata::Message::CompilerArtifact(artifact)
- if !artifact.fresh =>
- {
- self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
- }
- cargo_metadata::Message::CompilerMessage(msg) => {
- self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
- }
- _ => (),
- },
- JsonMessage::Rustc(message) => {
- self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
- }
- }
+ if process_line(line, &mut stdout_errors) {
+ read_at_least_one_stdout_message = true;
}
},
&mut |line| {
- error.push_str(line);
- error.push('\n');
+ if process_line(line, &mut stderr_errors) {
+ read_at_least_one_stderr_message = true;
+ }
},
);
+
+ let read_at_least_one_message =
+ read_at_least_one_stdout_message || read_at_least_one_stderr_message;
+ let mut error = stdout_errors;
+ error.push_str(&stderr_errors);
match output {
Ok(_) => Ok((read_at_least_one_message, error)),
Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))),
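The refactor keeps separate error buffers and `read_at_least_one_*` flags for stdout and stderr but routes both streams through one line handler. A standalone sketch of that handler's contract, returning whether the line parsed as a JSON message (forwarding over the channel is elided, and serde_json::Value stands in for the cargo_metadata/rustc message types):

    // Try to parse each line as JSON; if that fails, accumulate it into the
    // caller's error buffer so stdout and stderr keep separate diagnostics.
    fn process_line(line: &str, errors: &mut String) -> bool {
        if serde_json::from_str::<serde_json::Value>(line).is_ok() {
            // In flycheck this is where CompilerArtifact / Diagnostic messages
            // are forwarded over the channel; elided here.
            return true;
        }
        errors.push_str(line);
        errors.push('\n');
        false
    }

    fn main() {
        let mut stdout_errors = String::new();
        let mut stderr_errors = String::new();
        let saw_stdout_msg = process_line(r#"{"reason":"compiler-artifact"}"#, &mut stdout_errors);
        let saw_stderr_msg = process_line("error: linker `cc` not found", &mut stderr_errors);
        assert!(saw_stdout_msg && !saw_stderr_msg);
        assert_eq!(stderr_errors, "error: linker `cc` not found\n");
    }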
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
index 9bc1c54a3..b336f59ff 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
@@ -40,6 +40,7 @@ pub struct StructData {
pub repr: Option<ReprOptions>,
pub visibility: RawVisibility,
pub rustc_has_incoherent_inherent_impls: bool,
+ pub fundamental: bool,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -173,10 +174,10 @@ impl StructData {
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
- let rustc_has_incoherent_inherent_impls = item_tree
- .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into())
- .by_key("rustc_has_incoherent_inherent_impls")
- .exists();
+ let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+ let rustc_has_incoherent_inherent_impls =
+ attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
+ let fundamental = attrs.by_key("fundamental").exists();
let strukt = &item_tree[loc.id.value];
let (variant_data, diagnostics) = lower_fields(
@@ -196,6 +197,7 @@ impl StructData {
repr,
visibility: item_tree[strukt.visibility].clone(),
rustc_has_incoherent_inherent_impls,
+ fundamental,
}),
diagnostics.into(),
)
@@ -215,10 +217,10 @@ impl StructData {
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
- let rustc_has_incoherent_inherent_impls = item_tree
- .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into())
- .by_key("rustc_has_incoherent_inherent_impls")
- .exists();
+ let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+ let rustc_has_incoherent_inherent_impls =
+ attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
+ let fundamental = attrs.by_key("fundamental").exists();
let union = &item_tree[loc.id.value];
let (variant_data, diagnostics) = lower_fields(
@@ -238,6 +240,7 @@ impl StructData {
repr,
visibility: item_tree[union.visibility].clone(),
rustc_has_incoherent_inherent_impls,
+ fundamental,
}),
diagnostics.into(),
)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index fcd92ad33..200072c17 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -300,6 +300,7 @@ impl AttrsWithOwner {
AdtId::UnionId(it) => attrs_from_item_tree(it.lookup(db).id, db),
},
AttrDefId::TraitId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::TraitAliasId(it) => attrs_from_item_tree(it.lookup(db).id, db),
AttrDefId::MacroId(it) => match it {
MacroId::Macro2Id(it) => attrs_from_item_tree(it.lookup(db).id, db),
MacroId::MacroRulesId(it) => attrs_from_item_tree(it.lookup(db).id, db),
@@ -315,26 +316,14 @@ impl AttrsWithOwner {
let src = it.parent().child_source(db);
RawAttrs::from_attrs_owner(
db.upcast(),
- src.with_value(src.value[it.local_id()].as_ref().either(
- |it| match it {
- ast::TypeOrConstParam::Type(it) => it as _,
- ast::TypeOrConstParam::Const(it) => it as _,
- },
- |it| it as _,
- )),
+ src.with_value(&src.value[it.local_id()]),
)
}
GenericParamId::TypeParamId(it) => {
let src = it.parent().child_source(db);
RawAttrs::from_attrs_owner(
db.upcast(),
- src.with_value(src.value[it.local_id()].as_ref().either(
- |it| match it {
- ast::TypeOrConstParam::Type(it) => it as _,
- ast::TypeOrConstParam::Const(it) => it as _,
- },
- |it| it as _,
- )),
+ src.with_value(&src.value[it.local_id()]),
)
}
GenericParamId::LifetimeParamId(it) => {
@@ -404,6 +393,7 @@ impl AttrsWithOwner {
AttrDefId::StaticId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
AttrDefId::ConstId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
AttrDefId::TraitId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::TraitAliasId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
AttrDefId::TypeAliasId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
AttrDefId::MacroId(id) => match id {
MacroId::Macro2Id(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
@@ -412,28 +402,14 @@ impl AttrsWithOwner {
},
AttrDefId::ImplId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
AttrDefId::GenericParamId(id) => match id {
- GenericParamId::ConstParamId(id) => {
- id.parent().child_source(db).map(|source| match &source[id.local_id()] {
- Either::Left(ast::TypeOrConstParam::Type(id)) => {
- ast::AnyHasAttrs::new(id.clone())
- }
- Either::Left(ast::TypeOrConstParam::Const(id)) => {
- ast::AnyHasAttrs::new(id.clone())
- }
- Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
- })
- }
- GenericParamId::TypeParamId(id) => {
- id.parent().child_source(db).map(|source| match &source[id.local_id()] {
- Either::Left(ast::TypeOrConstParam::Type(id)) => {
- ast::AnyHasAttrs::new(id.clone())
- }
- Either::Left(ast::TypeOrConstParam::Const(id)) => {
- ast::AnyHasAttrs::new(id.clone())
- }
- Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
- })
- }
+ GenericParamId::ConstParamId(id) => id
+ .parent()
+ .child_source(db)
+ .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
+ GenericParamId::TypeParamId(id) => id
+ .parent()
+ .child_source(db)
+ .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
GenericParamId::LifetimeParamId(id) => id
.parent
.child_source(db)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
index 8fd9255b8..b70e658ef 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -24,7 +24,9 @@ use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::{
attr::Attrs,
db::DefDatabase,
- expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId},
+ expr::{
+ dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId, RecordFieldPat,
+ },
item_scope::BuiltinShadowMode,
macro_id_to_def_id,
nameres::DefMap,
@@ -270,7 +272,7 @@ pub struct Mark {
pub struct Body {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
- pub or_pats: FxHashMap<PatId, Arc<[PatId]>>,
+ pub bindings: Arena<Binding>,
pub labels: Arena<Label>,
/// The patterns for the function's parameters. While the parameter types are
/// part of the function signature, the patterns are not (they don't change
@@ -409,18 +411,6 @@ impl Body {
.map(move |&block| (block, db.block_def_map(block).expect("block ID without DefMap")))
}
- pub fn pattern_representative(&self, pat: PatId) -> PatId {
- self.or_pats.get(&pat).and_then(|pats| pats.first().copied()).unwrap_or(pat)
- }
-
- /// Retrieves all ident patterns this pattern shares the ident with.
- pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] {
- match self.or_pats.get(pat) {
- Some(pats) => pats,
- None => std::slice::from_ref(pat),
- }
- }
-
pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String {
pretty::print_body_hir(db, self, owner)
}
@@ -435,13 +425,52 @@ impl Body {
}
fn shrink_to_fit(&mut self) {
- let Self { _c: _, body_expr: _, block_scopes, or_pats, exprs, labels, params, pats } = self;
+ let Self { _c: _, body_expr: _, block_scopes, exprs, labels, params, pats, bindings } =
+ self;
block_scopes.shrink_to_fit();
- or_pats.shrink_to_fit();
exprs.shrink_to_fit();
labels.shrink_to_fit();
params.shrink_to_fit();
pats.shrink_to_fit();
+ bindings.shrink_to_fit();
+ }
+
+ pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
+ self.walk_pats(pat_id, &mut |pat| {
+ if let Pat::Bind { id, .. } = pat {
+ f(*id);
+ }
+ });
+ }
+
+ pub fn walk_pats(&self, pat_id: PatId, f: &mut impl FnMut(&Pat)) {
+ let pat = &self[pat_id];
+ f(pat);
+ match pat {
+ Pat::Range { .. }
+ | Pat::Lit(..)
+ | Pat::Path(..)
+ | Pat::ConstBlock(..)
+ | Pat::Wild
+ | Pat::Missing => {}
+ &Pat::Bind { subpat, .. } => {
+ if let Some(subpat) = subpat {
+ self.walk_pats(subpat, f);
+ }
+ }
+ Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
+ args.iter().copied().for_each(|p| self.walk_pats(p, f));
+ }
+ Pat::Ref { pat, .. } => self.walk_pats(*pat, f),
+ Pat::Slice { prefix, slice, suffix } => {
+ let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
+ total_iter.copied().for_each(|p| self.walk_pats(p, f));
+ }
+ Pat::Record { args, .. } => {
+ args.iter().for_each(|RecordFieldPat { pat, .. }| self.walk_pats(*pat, f));
+ }
+ Pat::Box { inner } => self.walk_pats(*inner, f),
+ }
}
}
@@ -451,7 +480,7 @@ impl Default for Body {
body_expr: dummy_expr_id(),
exprs: Default::default(),
pats: Default::default(),
- or_pats: Default::default(),
+ bindings: Default::default(),
labels: Default::default(),
params: Default::default(),
block_scopes: Default::default(),
@@ -484,6 +513,14 @@ impl Index<LabelId> for Body {
}
}
+impl Index<BindingId> for Body {
+ type Output = Binding;
+
+ fn index(&self, b: BindingId) -> &Binding {
+ &self.bindings[b]
+ }
+}
+
// FIXME: Change `node_` prefix to something more reasonable.
// Perhaps `expr_syntax` and `expr_id`?
impl BodySourceMap {
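Bindings now live in their own arena on Body, addressed by BindingId and exposed through an Index impl. A simplified, Vec-backed stand-in for that arrangement (the real code uses la_arena and stores PatIds in `definitions`; the names here mirror the diff but the implementation is a sketch):

    use std::ops::Index;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct BindingId(u32);

    #[derive(Debug)]
    struct Binding {
        name: String,
        definitions: Vec<u32>, // PatIds in the real code
    }

    #[derive(Default, Debug)]
    struct Body {
        bindings: Vec<Binding>,
    }

    impl Body {
        fn alloc_binding(&mut self, name: &str) -> BindingId {
            let id = BindingId(self.bindings.len() as u32);
            self.bindings.push(Binding { name: name.to_owned(), definitions: Vec::new() });
            id
        }
    }

    impl Index<BindingId> for Body {
        type Output = Binding;
        fn index(&self, id: BindingId) -> &Binding {
            &self.bindings[id.0 as usize]
        }
    }

    fn main() {
        let mut body = Body::default();
        let x = body.alloc_binding("x");
        body.bindings[x.0 as usize].definitions.push(0); // add_definition_to_binding
        assert_eq!(body[x].name, "x");
    }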
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index 04b1c4f01..fedaf3955 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -15,6 +15,7 @@ use la_arena::Arena;
use once_cell::unsync::OnceCell;
use profile::Count;
use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
use syntax::{
ast::{
self, ArrayExprKind, AstChildren, HasArgList, HasLoopBody, HasName, LiteralKind,
@@ -30,14 +31,14 @@ use crate::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
db::DefDatabase,
expr::{
- dummy_expr_id, Array, BindingAnnotation, ClosureKind, Expr, ExprId, FloatTypeWrapper,
- Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField,
- Statement,
+ dummy_expr_id, Array, Binding, BindingAnnotation, BindingId, ClosureKind, Expr, ExprId,
+ FloatTypeWrapper, Label, LabelId, Literal, MatchArm, Movability, Pat, PatId,
+ RecordFieldPat, RecordLitField, Statement,
},
item_scope::BuiltinShadowMode,
path::{GenericArgs, Path},
type_ref::{Mutability, Rawness, TypeRef},
- AdtId, BlockLoc, ModuleDefId, UnresolvedMacro,
+ AdtId, BlockId, BlockLoc, ModuleDefId, UnresolvedMacro,
};
pub struct LowerCtx<'a> {
@@ -87,16 +88,14 @@ pub(super) fn lower(
body: Body {
exprs: Arena::default(),
pats: Arena::default(),
+ bindings: Arena::default(),
labels: Arena::default(),
params: Vec::new(),
body_expr: dummy_expr_id(),
block_scopes: Vec::new(),
_c: Count::new(),
- or_pats: Default::default(),
},
expander,
- name_to_pat_grouping: Default::default(),
- is_lowering_inside_or_pat: false,
is_lowering_assignee_expr: false,
is_lowering_generator: false,
}
@@ -109,13 +108,26 @@ struct ExprCollector<'a> {
ast_id_map: Arc<AstIdMap>,
body: Body,
source_map: BodySourceMap,
- // a poor-mans union-find?
- name_to_pat_grouping: FxHashMap<Name, Vec<PatId>>,
- is_lowering_inside_or_pat: bool,
is_lowering_assignee_expr: bool,
is_lowering_generator: bool,
}
+#[derive(Debug, Default)]
+struct BindingList {
+ map: FxHashMap<Name, BindingId>,
+}
+
+impl BindingList {
+ fn find(
+ &mut self,
+ ec: &mut ExprCollector<'_>,
+ name: Name,
+ mode: BindingAnnotation,
+ ) -> BindingId {
+ *self.map.entry(name).or_insert_with_key(|n| ec.alloc_binding(n.clone(), mode))
+ }
+}
+
impl ExprCollector<'_> {
fn collect(
mut self,
@@ -127,17 +139,16 @@ impl ExprCollector<'_> {
param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
{
let ptr = AstPtr::new(&self_param);
- let param_pat = self.alloc_pat(
- Pat::Bind {
- name: name![self],
- mode: BindingAnnotation::new(
- self_param.mut_token().is_some() && self_param.amp_token().is_none(),
- false,
- ),
- subpat: None,
- },
- Either::Right(ptr),
+ let binding_id = self.alloc_binding(
+ name![self],
+ BindingAnnotation::new(
+ self_param.mut_token().is_some() && self_param.amp_token().is_none(),
+ false,
+ ),
);
+ let param_pat =
+ self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, Either::Right(ptr));
+ self.add_definition_to_binding(binding_id, param_pat);
self.body.params.push(param_pat);
}
@@ -179,6 +190,9 @@ impl ExprCollector<'_> {
id
}
+ fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId {
+ self.body.bindings.alloc(Binding { name, mode, definitions: SmallVec::new() })
+ }
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.to_source(ptr);
let id = self.make_pat(pat, src.clone());
@@ -238,33 +252,32 @@ impl ExprCollector<'_> {
}
ast::Expr::BlockExpr(e) => match e.modifier() {
Some(ast::BlockModifier::Try(_)) => {
- let body = self.collect_block(e);
- self.alloc_expr(Expr::TryBlock { body }, syntax_ptr)
+ self.collect_block_(e, |id, statements, tail| Expr::TryBlock {
+ id,
+ statements,
+ tail,
+ })
}
Some(ast::BlockModifier::Unsafe(_)) => {
- let body = self.collect_block(e);
- self.alloc_expr(Expr::Unsafe { body }, syntax_ptr)
+ self.collect_block_(e, |id, statements, tail| Expr::Unsafe {
+ id,
+ statements,
+ tail,
+ })
}
- // FIXME: we need to record these effects somewhere...
Some(ast::BlockModifier::Label(label)) => {
let label = self.collect_label(label);
- let res = self.collect_block(e);
- match &mut self.body.exprs[res] {
- Expr::Block { label: block_label, .. } => {
- *block_label = Some(label);
- }
- _ => unreachable!(),
- }
- res
- }
- Some(ast::BlockModifier::Async(_)) => {
- let body = self.collect_block(e);
- self.alloc_expr(Expr::Async { body }, syntax_ptr)
- }
- Some(ast::BlockModifier::Const(_)) => {
- let body = self.collect_block(e);
- self.alloc_expr(Expr::Const { body }, syntax_ptr)
+ self.collect_block_(e, |id, statements, tail| Expr::Block {
+ id,
+ statements,
+ tail,
+ label: Some(label),
+ })
}
+ Some(ast::BlockModifier::Async(_)) => self
+ .collect_block_(e, |id, statements, tail| Expr::Async { id, statements, tail }),
+ Some(ast::BlockModifier::Const(_)) => self
+ .collect_block_(e, |id, statements, tail| Expr::Const { id, statements, tail }),
None => self.collect_block(e),
},
ast::Expr::LoopExpr(e) => {
@@ -486,6 +499,8 @@ impl ExprCollector<'_> {
Movability::Movable
};
ClosureKind::Generator(movability)
+ } else if e.async_token().is_some() {
+ ClosureKind::Async
} else {
ClosureKind::Closure
};
@@ -737,6 +752,19 @@ impl ExprCollector<'_> {
}
fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
+ self.collect_block_(block, |id, statements, tail| Expr::Block {
+ id,
+ statements,
+ tail,
+ label: None,
+ })
+ }
+
+ fn collect_block_(
+ &mut self,
+ block: ast::BlockExpr,
+ mk_block: impl FnOnce(BlockId, Box<[Statement]>, Option<ExprId>) -> Expr,
+ ) -> ExprId {
let file_local_id = self.ast_id_map.ast_id(&block);
let ast_id = AstId::new(self.expander.current_file_id, file_local_id);
let block_loc =
@@ -769,15 +797,8 @@ impl ExprCollector<'_> {
});
let syntax_node_ptr = AstPtr::new(&block.into());
- let expr_id = self.alloc_expr(
- Expr::Block {
- id: block_id,
- statements: statements.into_boxed_slice(),
- tail,
- label: None,
- },
- syntax_node_ptr,
- );
+ let expr_id = self
+ .alloc_expr(mk_block(block_id, statements.into_boxed_slice(), tail), syntax_node_ptr);
self.expander.def_map = prev_def_map;
self.expander.module = prev_local_module;
@@ -799,13 +820,7 @@ impl ExprCollector<'_> {
}
fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
- let pat_id = self.collect_pat_(pat);
- for (_, pats) in self.name_to_pat_grouping.drain() {
- let pats = Arc::<[_]>::from(pats);
- self.body.or_pats.extend(pats.iter().map(|&pat| (pat, pats.clone())));
- }
- self.is_lowering_inside_or_pat = false;
- pat_id
+ self.collect_pat_(pat, &mut BindingList::default())
}
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
@@ -815,16 +830,18 @@ impl ExprCollector<'_> {
}
}
- fn collect_pat_(&mut self, pat: ast::Pat) -> PatId {
+ fn collect_pat_(&mut self, pat: ast::Pat, binding_list: &mut BindingList) -> PatId {
let pattern = match &pat {
ast::Pat::IdentPat(bp) => {
let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
- let key = self.is_lowering_inside_or_pat.then(|| name.clone());
let annotation =
BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some());
- let subpat = bp.pat().map(|subpat| self.collect_pat_(subpat));
- let pattern = if annotation == BindingAnnotation::Unannotated && subpat.is_none() {
+ let subpat = bp.pat().map(|subpat| self.collect_pat_(subpat, binding_list));
+
+ let is_simple_ident_pat =
+ annotation == BindingAnnotation::Unannotated && subpat.is_none();
+ let (binding, pattern) = if is_simple_ident_pat {
// This could also be a single-segment path pattern. To
// decide that, we need to try resolving the name.
let (resolved, _) = self.expander.def_map.resolve_path(
@@ -834,12 +851,12 @@ impl ExprCollector<'_> {
BuiltinShadowMode::Other,
);
match resolved.take_values() {
- Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()),
+ Some(ModuleDefId::ConstId(_)) => (None, Pat::Path(name.into())),
Some(ModuleDefId::EnumVariantId(_)) => {
// this is only really valid for unit variants, but
// shadowing other enum variants with a pattern is
// an error anyway
- Pat::Path(name.into())
+ (None, Pat::Path(name.into()))
}
Some(ModuleDefId::AdtId(AdtId::StructId(s)))
if self.db.struct_data(s).variant_data.kind() != StructKind::Record =>
@@ -847,30 +864,34 @@ impl ExprCollector<'_> {
// Funnily enough, record structs *can* be shadowed
// by pattern bindings (but unit or tuple structs
// can't).
- Pat::Path(name.into())
+ (None, Pat::Path(name.into()))
}
// shadowing statics is an error as well, so we just ignore that case here
- _ => Pat::Bind { name, mode: annotation, subpat },
+ _ => {
+ let id = binding_list.find(self, name, annotation);
+ (Some(id), Pat::Bind { id, subpat })
+ }
}
} else {
- Pat::Bind { name, mode: annotation, subpat }
+ let id = binding_list.find(self, name, annotation);
+ (Some(id), Pat::Bind { id, subpat })
};
let ptr = AstPtr::new(&pat);
let pat = self.alloc_pat(pattern, Either::Left(ptr));
- if let Some(key) = key {
- self.name_to_pat_grouping.entry(key).or_default().push(pat);
+ if let Some(binding_id) = binding {
+ self.add_definition_to_binding(binding_id, pat);
}
return pat;
}
ast::Pat::TupleStructPat(p) => {
let path =
p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
- let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+ let (args, ellipsis) = self.collect_tuple_pat(p.fields(), binding_list);
Pat::TupleStruct { path, args, ellipsis }
}
ast::Pat::RefPat(p) => {
- let pat = self.collect_pat_opt(p.pat());
+ let pat = self.collect_pat_opt_(p.pat(), binding_list);
let mutability = Mutability::from_mutable(p.mut_token().is_some());
Pat::Ref { pat, mutability }
}
@@ -880,13 +901,12 @@ impl ExprCollector<'_> {
path.map(Pat::Path).unwrap_or(Pat::Missing)
}
ast::Pat::OrPat(p) => {
- self.is_lowering_inside_or_pat = true;
- let pats = p.pats().map(|p| self.collect_pat_(p)).collect();
+ let pats = p.pats().map(|p| self.collect_pat_(p, binding_list)).collect();
Pat::Or(pats)
}
- ast::Pat::ParenPat(p) => return self.collect_pat_opt_(p.pat()),
+ ast::Pat::ParenPat(p) => return self.collect_pat_opt_(p.pat(), binding_list),
ast::Pat::TuplePat(p) => {
- let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+ let (args, ellipsis) = self.collect_tuple_pat(p.fields(), binding_list);
Pat::Tuple { args, ellipsis }
}
ast::Pat::WildcardPat(_) => Pat::Wild,
@@ -899,7 +919,7 @@ impl ExprCollector<'_> {
.fields()
.filter_map(|f| {
let ast_pat = f.pat()?;
- let pat = self.collect_pat_(ast_pat);
+ let pat = self.collect_pat_(ast_pat, binding_list);
let name = f.field_name()?.as_name();
Some(RecordFieldPat { name, pat })
})
@@ -918,9 +938,15 @@ impl ExprCollector<'_> {
// FIXME properly handle `RestPat`
Pat::Slice {
- prefix: prefix.into_iter().map(|p| self.collect_pat_(p)).collect(),
- slice: slice.map(|p| self.collect_pat_(p)),
- suffix: suffix.into_iter().map(|p| self.collect_pat_(p)).collect(),
+ prefix: prefix
+ .into_iter()
+ .map(|p| self.collect_pat_(p, binding_list))
+ .collect(),
+ slice: slice.map(|p| self.collect_pat_(p, binding_list)),
+ suffix: suffix
+ .into_iter()
+ .map(|p| self.collect_pat_(p, binding_list))
+ .collect(),
}
}
ast::Pat::LiteralPat(lit) => {
@@ -943,7 +969,7 @@ impl ExprCollector<'_> {
Pat::Missing
}
ast::Pat::BoxPat(boxpat) => {
- let inner = self.collect_pat_opt_(boxpat.pat());
+ let inner = self.collect_pat_opt_(boxpat.pat(), binding_list);
Pat::Box { inner }
}
ast::Pat::ConstBlockPat(const_block_pat) => {
@@ -960,7 +986,7 @@ impl ExprCollector<'_> {
let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
let pat =
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
- this.collect_pat_opt_(expanded_pat)
+ this.collect_pat_opt_(expanded_pat, binding_list)
});
self.source_map.pat_map.insert(src, pat);
return pat;
@@ -974,21 +1000,25 @@ impl ExprCollector<'_> {
self.alloc_pat(pattern, Either::Left(ptr))
}
- fn collect_pat_opt_(&mut self, pat: Option<ast::Pat>) -> PatId {
+ fn collect_pat_opt_(&mut self, pat: Option<ast::Pat>, binding_list: &mut BindingList) -> PatId {
match pat {
- Some(pat) => self.collect_pat_(pat),
+ Some(pat) => self.collect_pat_(pat, binding_list),
None => self.missing_pat(),
}
}
- fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Box<[PatId]>, Option<usize>) {
+ fn collect_tuple_pat(
+ &mut self,
+ args: AstChildren<ast::Pat>,
+ binding_list: &mut BindingList,
+ ) -> (Box<[PatId]>, Option<usize>) {
// Find the location of the `..`, if there is one. Note that we do not
// consider the possibility of there being multiple `..` here.
let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_)));
// We want to skip the `..` pattern here, since we account for it above.
let args = args
.filter(|p| !matches!(p, ast::Pat::RestPat(_)))
- .map(|p| self.collect_pat_(p))
+ .map(|p| self.collect_pat_(p, binding_list))
.collect();
(args, ellipsis)
@@ -1017,6 +1047,10 @@ impl ExprCollector<'_> {
None => Some(()),
}
}
+
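+ /// Records `pat_id` as one of the definition sites of `binding_id`.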
+ fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) {
+ self.body.bindings[binding_id].definitions.push(pat_id);
+ }
}
impl From<ast::LiteralKind> for Literal {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
index 4b4664a1c..5a9b825a2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
@@ -5,7 +5,7 @@ use std::fmt::{self, Write};
use syntax::ast::HasName;
use crate::{
- expr::{Array, BindingAnnotation, ClosureKind, Literal, Movability, Statement},
+ expr::{Array, BindingAnnotation, BindingId, ClosureKind, Literal, Movability, Statement},
pretty::{print_generic_args, print_path, print_type_ref},
type_ref::TypeRef,
};
@@ -292,18 +292,6 @@ impl<'a> Printer<'a> {
self.print_expr(*expr);
w!(self, "?");
}
- Expr::TryBlock { body } => {
- w!(self, "try ");
- self.print_expr(*body);
- }
- Expr::Async { body } => {
- w!(self, "async ");
- self.print_expr(*body);
- }
- Expr::Const { body } => {
- w!(self, "const ");
- self.print_expr(*body);
- }
Expr::Cast { expr, type_ref } => {
self.print_expr(*expr);
w!(self, " as ");
@@ -372,8 +360,14 @@ impl<'a> Printer<'a> {
w!(self, "]");
}
Expr::Closure { args, arg_types, ret_type, body, closure_kind } => {
- if let ClosureKind::Generator(Movability::Static) = closure_kind {
- w!(self, "static ");
+ match closure_kind {
+ ClosureKind::Generator(Movability::Static) => {
+ w!(self, "static ");
+ }
+ ClosureKind::Async => {
+ w!(self, "async ");
+ }
+ _ => (),
}
w!(self, "|");
for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() {
@@ -402,10 +396,6 @@ impl<'a> Printer<'a> {
}
w!(self, ")");
}
- Expr::Unsafe { body } => {
- w!(self, "unsafe ");
- self.print_expr(*body);
- }
Expr::Array(arr) => {
w!(self, "[");
if !matches!(arr, Array::ElementList { elements, .. } if elements.is_empty()) {
@@ -428,25 +418,47 @@ impl<'a> Printer<'a> {
}
Expr::Literal(lit) => self.print_literal(lit),
Expr::Block { id: _, statements, tail, label } => {
- self.whitespace();
- if let Some(lbl) = label {
- w!(self, "{}: ", self.body[*lbl].name);
+ let label = label.map(|lbl| format!("{}: ", self.body[lbl].name));
+ self.print_block(label.as_deref(), statements, tail);
+ }
+ Expr::Unsafe { id: _, statements, tail } => {
+ self.print_block(Some("unsafe "), statements, tail);
+ }
+ Expr::TryBlock { id: _, statements, tail } => {
+ self.print_block(Some("try "), statements, tail);
+ }
+ Expr::Async { id: _, statements, tail } => {
+ self.print_block(Some("async "), statements, tail);
+ }
+ Expr::Const { id: _, statements, tail } => {
+ self.print_block(Some("const "), statements, tail);
+ }
+ }
+ }
+
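+ /// Prints a braced block: an optional prefix (label or keyword), then the statements and tail expression.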
+ fn print_block(
+ &mut self,
+ label: Option<&str>,
+ statements: &Box<[Statement]>,
+ tail: &Option<la_arena::Idx<Expr>>,
+ ) {
+ self.whitespace();
+ if let Some(lbl) = label {
+ w!(self, "{}", lbl);
+ }
+ w!(self, "{{");
+ if !statements.is_empty() || tail.is_some() {
+ self.indented(|p| {
+ for stmt in &**statements {
+ p.print_stmt(stmt);
}
- w!(self, "{{");
- if !statements.is_empty() || tail.is_some() {
- self.indented(|p| {
- for stmt in &**statements {
- p.print_stmt(stmt);
- }
- if let Some(tail) = tail {
- p.print_expr(*tail);
- }
- p.newline();
- });
+ if let Some(tail) = tail {
+ p.print_expr(*tail);
}
- w!(self, "}}");
- }
+ p.newline();
+ });
}
+ w!(self, "}}");
}
fn print_pat(&mut self, pat: PatId) {
@@ -518,14 +530,8 @@ impl<'a> Printer<'a> {
}
Pat::Path(path) => self.print_path(path),
Pat::Lit(expr) => self.print_expr(*expr),
- Pat::Bind { mode, name, subpat } => {
- let mode = match mode {
- BindingAnnotation::Unannotated => "",
- BindingAnnotation::Mutable => "mut ",
- BindingAnnotation::Ref => "ref ",
- BindingAnnotation::RefMut => "ref mut ",
- };
- w!(self, "{}{}", mode, name);
+ Pat::Bind { id, subpat } => {
+ self.print_binding(*id);
if let Some(pat) = subpat {
self.whitespace();
self.print_pat(*pat);
@@ -629,4 +635,15 @@ impl<'a> Printer<'a> {
fn print_path(&mut self, path: &Path) {
print_path(path, self).unwrap();
}
+
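+ /// Prints the binding mode (`ref`, `mut`, ...) followed by the binding's name.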
+ fn print_binding(&mut self, id: BindingId) {
+ let Binding { name, mode, .. } = &self.body.bindings[id];
+ let mode = match mode {
+ BindingAnnotation::Unannotated => "",
+ BindingAnnotation::Mutable => "mut ",
+ BindingAnnotation::Ref => "ref ",
+ BindingAnnotation::RefMut => "ref mut ",
+ };
+ w!(self, "{}{}", mode, name);
+ }
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
index 2617d4288..12fc1f116 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -8,7 +8,7 @@ use rustc_hash::FxHashMap;
use crate::{
body::Body,
db::DefDatabase,
- expr::{Expr, ExprId, LabelId, Pat, PatId, Statement},
+ expr::{Binding, BindingId, Expr, ExprId, LabelId, Pat, PatId, Statement},
BlockId, DefWithBodyId,
};
@@ -23,7 +23,7 @@ pub struct ExprScopes {
#[derive(Debug, PartialEq, Eq)]
pub struct ScopeEntry {
name: Name,
- pat: PatId,
+ binding: BindingId,
}
impl ScopeEntry {
@@ -31,8 +31,8 @@ impl ScopeEntry {
&self.name
}
- pub fn pat(&self) -> PatId {
- self.pat
+ pub fn binding(&self) -> BindingId {
+ self.binding
}
}
@@ -66,6 +66,7 @@ impl ExprScopes {
self.scopes[scope].label.clone()
}
+ /// Returns the scopes in ascending order.
pub fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
}
@@ -125,18 +126,23 @@ impl ExprScopes {
})
}
- fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
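+ /// Adds a `ScopeEntry` for `binding` to `scope`, making it resolvable by name.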
+ fn add_bindings(&mut self, body: &Body, scope: ScopeId, binding: BindingId) {
+ let Binding { name, .. } = &body.bindings[binding];
+ let entry = ScopeEntry { name: name.clone(), binding };
+ self.scopes[scope].entries.push(entry);
+ }
+
+ fn add_pat_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
let pattern = &body[pat];
- if let Pat::Bind { name, .. } = pattern {
- let entry = ScopeEntry { name: name.clone(), pat };
- self.scopes[scope].entries.push(entry);
+ if let Pat::Bind { id, .. } = pattern {
+ self.add_bindings(body, scope, *id);
}
- pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat));
+ pattern.walk_child_pats(|pat| self.add_pat_bindings(body, scope, pat));
}
fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) {
- params.iter().for_each(|pat| self.add_bindings(body, scope, *pat));
+ params.iter().for_each(|pat| self.add_pat_bindings(body, scope, *pat));
}
fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
@@ -169,7 +175,7 @@ fn compute_block_scopes(
}
*scope = scopes.new_scope(*scope);
- scopes.add_bindings(body, *scope, *pat);
+ scopes.add_pat_bindings(body, *scope, *pat);
}
Statement::Expr { expr, .. } => {
compute_expr_scopes(*expr, body, scopes, scope);
@@ -194,10 +200,20 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
scopes.set_scope(expr, scope);
compute_block_scopes(statements, *tail, body, scopes, &mut scope);
}
+ Expr::Unsafe { id, statements, tail }
+ | Expr::Async { id, statements, tail }
+ | Expr::Const { id, statements, tail }
+ | Expr::TryBlock { id, statements, tail } => {
+ let mut scope = scopes.new_block_scope(*scope, *id, None);
+ // Overwrite the old scope for the block expr, so that every block scope can be found
+ // via the block itself (important for blocks that only contain items, no expressions).
+ scopes.set_scope(expr, scope);
+ compute_block_scopes(statements, *tail, body, scopes, &mut scope);
+ }
Expr::For { iterable, pat, body: body_expr, label } => {
compute_expr_scopes(*iterable, body, scopes, scope);
let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
- scopes.add_bindings(body, scope, *pat);
+ scopes.add_pat_bindings(body, scope, *pat);
compute_expr_scopes(*body_expr, body, scopes, &mut scope);
}
Expr::While { condition, body: body_expr, label } => {
@@ -218,7 +234,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
compute_expr_scopes(*expr, body, scopes, scope);
for arm in arms.iter() {
let mut scope = scopes.new_scope(*scope);
- scopes.add_bindings(body, scope, arm.pat);
+ scopes.add_pat_bindings(body, scope, arm.pat);
if let Some(guard) = arm.guard {
scope = scopes.new_scope(scope);
compute_expr_scopes(guard, body, scopes, &mut scope);
@@ -237,7 +253,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
&Expr::Let { pat, expr } => {
compute_expr_scopes(expr, body, scopes, scope);
*scope = scopes.new_scope(*scope);
- scopes.add_bindings(body, *scope, pat);
+ scopes.add_pat_bindings(body, *scope, pat);
}
e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
};
@@ -439,7 +455,7 @@ fn foo() {
let function = find_function(&db, file_id);
let scopes = db.expr_scopes(function.into());
- let (_body, source_map) = db.body_with_source_map(function.into());
+ let (body, source_map) = db.body_with_source_map(function.into());
let expr_scope = {
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
@@ -449,7 +465,9 @@ fn foo() {
};
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
- let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
+ let pat_src = source_map
+ .pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap())
+ .unwrap();
let local_name = pat_src.value.either(
|it| it.syntax_node_ptr().to_node(file.syntax()),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
index 3bba08cfc..77ac221e5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
@@ -395,3 +395,25 @@ fn foo() {
"#]],
)
}
+
+#[test]
+fn trailing_expr_macro_expands_stmts() {
+ check_at(
+ r#"
+macro_rules! foo {
+ () => { const FOO: u32 = 0;const BAR: u32 = 0; };
+}
+fn f() {$0
+ foo!{}
+};
+ "#,
+ expect![[r#"
+ block scope
+ BAR: v
+ FOO: v
+
+ crate
+ f: v
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
index 19d2fe956..68b57acca 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -1,7 +1,7 @@
//! When *constructing* `hir`, we start at some parent syntax node and recursively
//! lower the children.
//!
-//! This modules allows one to go in the opposite direction: start with a syntax
+//! This module allows one to go in the opposite direction: start with a syntax
//! node for a *child*, and get its hir.
use either::Either;
@@ -145,6 +145,7 @@ impl ChildBySource for ItemScope {
ModuleDefId::StaticId(id) => insert!(map[keys::STATIC].insert(id)),
ModuleDefId::TypeAliasId(id) => insert!(map[keys::TYPE_ALIAS].insert(id)),
ModuleDefId::TraitId(id) => insert!(map[keys::TRAIT].insert(id)),
+ ModuleDefId::TraitAliasId(id) => insert!(map[keys::TRAIT_ALIAS].insert(id)),
ModuleDefId::AdtId(adt) => match adt {
AdtId::StructId(id) => insert!(map[keys::STRUCT].insert(id)),
AdtId::UnionId(id) => insert!(map[keys::UNION].insert(id)),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
index c3c1dfd39..1633a33be 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -22,7 +22,7 @@ use crate::{
visibility::RawVisibility,
AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
- StaticId, TraitId, TypeAliasId, TypeAliasLoc,
+ StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -35,6 +35,7 @@ pub struct FunctionData {
pub visibility: RawVisibility,
pub abi: Option<Interned<str>>,
pub legacy_const_generics_indices: Box<[u32]>,
+ pub rustc_allow_incoherent_impl: bool,
flags: FnFlags,
}
@@ -84,13 +85,14 @@ impl FunctionData {
}
}
- let legacy_const_generics_indices = item_tree
- .attrs(db, krate, ModItem::from(loc.id.value).into())
+ let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
+ let legacy_const_generics_indices = attrs
.by_key("rustc_legacy_const_generics")
.tt_values()
.next()
.map(parse_rustc_legacy_const_generics)
.unwrap_or_default();
+ let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists();
Arc::new(FunctionData {
name: func.name.clone(),
@@ -108,6 +110,7 @@ impl FunctionData {
abi: func.abi.clone(),
legacy_const_generics_indices,
flags,
+ rustc_allow_incoherent_impl,
})
}
@@ -171,6 +174,7 @@ pub struct TypeAliasData {
pub visibility: RawVisibility,
pub is_extern: bool,
pub rustc_has_incoherent_inherent_impls: bool,
+ pub rustc_allow_incoherent_impl: bool,
/// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
pub bounds: Vec<Interned<TypeBound>>,
}
@@ -189,10 +193,14 @@ impl TypeAliasData {
item_tree[typ.visibility].clone()
};
- let rustc_has_incoherent_inherent_impls = item_tree
- .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into())
- .by_key("rustc_has_incoherent_inherent_impls")
- .exists();
+ let attrs = item_tree.attrs(
+ db,
+ loc.container.module(db).krate(),
+ ModItem::from(loc.id.value).into(),
+ );
+ let rustc_has_incoherent_inherent_impls =
+ attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
+ let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists();
Arc::new(TypeAliasData {
name: typ.name.clone(),
@@ -200,6 +208,7 @@ impl TypeAliasData {
visibility,
is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
rustc_has_incoherent_inherent_impls,
+ rustc_allow_incoherent_impl,
bounds: typ.bounds.to_vec(),
})
}
@@ -212,11 +221,12 @@ pub struct TraitData {
pub is_auto: bool,
pub is_unsafe: bool,
pub rustc_has_incoherent_inherent_impls: bool,
+ pub skip_array_during_method_dispatch: bool,
+ pub fundamental: bool,
pub visibility: RawVisibility,
/// Whether the trait has `#[rustc_skip_array_during_method_dispatch]`. `hir_ty` will ignore
/// method calls to this trait's methods when the receiver is an array and the crate edition is
/// 2015 or 2018.
- pub skip_array_during_method_dispatch: bool,
// box it as the vec is usually empty anyways
pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
}
@@ -245,19 +255,12 @@ impl TraitData {
attrs.by_key("rustc_skip_array_during_method_dispatch").exists();
let rustc_has_incoherent_inherent_impls =
attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
- let (items, attribute_calls, diagnostics) = match &tr_def.items {
- Some(items) => {
- let mut collector = AssocItemCollector::new(
- db,
- module_id,
- tree_id.file_id(),
- ItemContainerId::TraitId(tr),
- );
- collector.collect(&item_tree, tree_id.tree_id(), items);
- collector.finish()
- }
- None => Default::default(),
- };
+ let fundamental = attrs.by_key("fundamental").exists();
+ let mut collector =
+ AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
+ collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
+ let (items, attribute_calls, diagnostics) = collector.finish();
+
(
Arc::new(TraitData {
name,
@@ -268,6 +271,7 @@ impl TraitData {
visibility,
skip_array_during_method_dispatch,
rustc_has_incoherent_inherent_impls,
+ fundamental,
}),
diagnostics.into(),
)
@@ -300,6 +304,23 @@ impl TraitData {
}
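+/// Data of a trait alias (`trait A = B;`): its name and visibility.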
#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TraitAliasData {
+ pub name: Name,
+ pub visibility: RawVisibility,
+}
+
+impl TraitAliasData {
+ pub(crate) fn trait_alias_query(db: &dyn DefDatabase, id: TraitAliasId) -> Arc<TraitAliasData> {
+ let loc = id.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let alias = &item_tree[loc.id.value];
+ let visibility = item_tree[alias.visibility].clone();
+
+ Arc::new(TraitAliasData { name: alias.name.clone(), visibility })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImplData {
pub target_trait: Option<Interned<TraitRef>>,
pub self_ty: Interned<TypeRef>,
@@ -441,6 +462,7 @@ pub struct ConstData {
pub name: Option<Name>,
pub type_ref: Interned<TypeRef>,
pub visibility: RawVisibility,
+ pub rustc_allow_incoherent_impl: bool,
}
impl ConstData {
@@ -454,10 +476,16 @@ impl ConstData {
item_tree[konst.visibility].clone()
};
+ let rustc_allow_incoherent_impl = item_tree
+ .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into())
+ .by_key("rustc_allow_incoherent_impl")
+ .exists();
+
Arc::new(ConstData {
name: konst.name.clone(),
type_ref: konst.type_ref.clone(),
visibility,
+ rustc_allow_incoherent_impl,
})
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index b23427a73..9371fc14d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -3,7 +3,7 @@ use std::sync::Arc;
use base_db::{salsa, CrateId, SourceDatabase, Upcast};
use either::Either;
-use hir_expand::{db::AstDatabase, HirFileId};
+use hir_expand::{db::ExpandDatabase, HirFileId};
use intern::Interned;
use la_arena::ArenaMap;
use syntax::{ast, AstPtr};
@@ -14,7 +14,7 @@ use crate::{
body::{scope::ExprScopes, Body, BodySourceMap},
data::{
ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
- TraitData, TypeAliasData,
+ TraitAliasData, TraitData, TypeAliasData,
},
generics::GenericParams,
import_map::ImportMap,
@@ -25,8 +25,8 @@ use crate::{
AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
- StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc,
- UnionId, UnionLoc, VariantId,
+ StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc,
+ TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
};
#[salsa::query_group(InternDatabaseStorage)]
@@ -46,6 +46,8 @@ pub trait InternDatabase: SourceDatabase {
#[salsa::interned]
fn intern_trait(&self, loc: TraitLoc) -> TraitId;
#[salsa::interned]
+ fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId;
+ #[salsa::interned]
fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
#[salsa::interned]
fn intern_impl(&self, loc: ImplLoc) -> ImplId;
@@ -62,7 +64,7 @@ pub trait InternDatabase: SourceDatabase {
}
#[salsa::query_group(DefDatabaseStorage)]
-pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
+pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
#[salsa::input]
fn enable_proc_attr_macros(&self) -> bool;
@@ -125,6 +127,9 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
#[salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitData>, Arc<[DefDiagnostic]>);
+ #[salsa::invoke(TraitAliasData::trait_alias_query)]
+ fn trait_alias_data(&self, e: TraitAliasId) -> Arc<TraitAliasData>;
+
#[salsa::invoke(TypeAliasData::type_alias_data_query)]
fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
index 48028b7c6..19fa6b254 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
@@ -17,6 +17,7 @@ use std::fmt;
use hir_expand::name::Name;
use intern::Interned;
use la_arena::{Idx, RawIdx};
+use smallvec::SmallVec;
use crate::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
@@ -29,6 +30,8 @@ pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, Unar
pub type ExprId = Idx<Expr>;
+pub type BindingId = Idx<Binding>;
+
/// FIXME: this is a hacky function which should be removed
pub(crate) fn dummy_expr_id() -> ExprId {
ExprId::from_raw(RawIdx::from(u32::MAX))
@@ -52,13 +55,21 @@ pub type LabelId = Idx<Label>;
// We convert float values into bits so that we don't need to deal with f32 and f64 separately.
// For PartialEq, comparing the bits should work, as ordering is not important
// https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360
-#[derive(Default, Debug, Clone, Eq, PartialEq)]
+#[derive(Default, Debug, Clone, Copy, Eq, PartialEq)]
pub struct FloatTypeWrapper(u64);
impl FloatTypeWrapper {
pub fn new(value: f64) -> Self {
Self(value.to_bits())
}
+
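+ /// Reinterprets the stored bits as an `f64`.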
+ pub fn into_f64(self) -> f64 {
+ f64::from_bits(self.0)
+ }
+
+ pub fn into_f32(self) -> f32 {
+ f64::from_bits(self.0) as f32
+ }
}
impl fmt::Display for FloatTypeWrapper {
@@ -101,6 +112,26 @@ pub enum Expr {
tail: Option<ExprId>,
label: Option<LabelId>,
},
+ TryBlock {
+ id: BlockId,
+ statements: Box<[Statement]>,
+ tail: Option<ExprId>,
+ },
+ Async {
+ id: BlockId,
+ statements: Box<[Statement]>,
+ tail: Option<ExprId>,
+ },
+ Const {
+ id: BlockId,
+ statements: Box<[Statement]>,
+ tail: Option<ExprId>,
+ },
+ Unsafe {
+ id: BlockId,
+ statements: Box<[Statement]>,
+ tail: Option<ExprId>,
+ },
Loop {
body: ExprId,
label: Option<LabelId>,
@@ -164,15 +195,6 @@ pub enum Expr {
Try {
expr: ExprId,
},
- TryBlock {
- body: ExprId,
- },
- Async {
- body: ExprId,
- },
- Const {
- body: ExprId,
- },
Cast {
expr: ExprId,
type_ref: Interned<TypeRef>,
@@ -214,9 +236,6 @@ pub enum Expr {
exprs: Box<[ExprId]>,
is_assignee_expr: bool,
},
- Unsafe {
- body: ExprId,
- },
Array(Array),
Literal(Literal),
Underscore,
@@ -226,6 +245,7 @@ pub enum Expr {
pub enum ClosureKind {
Closure,
Generator(Movability),
+ Async,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -282,13 +302,20 @@ impl Expr {
Expr::Let { expr, .. } => {
f(*expr);
}
- Expr::Block { statements, tail, .. } => {
+ Expr::Block { statements, tail, .. }
+ | Expr::TryBlock { statements, tail, .. }
+ | Expr::Unsafe { statements, tail, .. }
+ | Expr::Async { statements, tail, .. }
+ | Expr::Const { statements, tail, .. } => {
for stmt in statements.iter() {
match stmt {
- Statement::Let { initializer, .. } => {
+ Statement::Let { initializer, else_branch, .. } => {
if let &Some(expr) = initializer {
f(expr);
}
+ if let &Some(expr) = else_branch {
+ f(expr);
+ }
}
Statement::Expr { expr: expression, .. } => f(*expression),
}
@@ -297,10 +324,6 @@ impl Expr {
f(expr);
}
}
- Expr::TryBlock { body }
- | Expr::Unsafe { body }
- | Expr::Async { body }
- | Expr::Const { body } => f(*body),
Expr::Loop { body, .. } => f(*body),
Expr::While { condition, body, .. } => {
f(*condition);
@@ -415,6 +438,13 @@ impl BindingAnnotation {
}
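+/// A local binding introduced by a pattern; `definitions` lists the `Bind` patterns that define it.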
#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Binding {
+ pub name: Name,
+ pub mode: BindingAnnotation,
+ pub definitions: SmallVec<[PatId; 1]>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordFieldPat {
pub name: Name,
pub pat: PatId,
@@ -432,7 +462,7 @@ pub enum Pat {
Slice { prefix: Box<[PatId]>, slice: Option<PatId>, suffix: Box<[PatId]> },
Path(Box<Path>),
Lit(ExprId),
- Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> },
+ Bind { id: BindingId, subpat: Option<PatId> },
TupleStruct { path: Option<Box<Path>>, args: Box<[PatId]>, ellipsis: Option<usize> },
Ref { pat: PatId, mutability: Mutability },
Box { inner: PatId },
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index b2ab0c30e..e4912fa8a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -187,6 +187,7 @@ impl GenericParams {
GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics!(id),
GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics!(id),
GenericDefId::TraitId(id) => id_to_generics!(id),
+ GenericDefId::TraitAliasId(id) => id_to_generics!(id),
GenericDefId::TypeAliasId(id) => id_to_generics!(id),
GenericDefId::ImplId(id) => id_to_generics!(id),
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {
@@ -207,12 +208,10 @@ impl GenericParams {
pub(crate) fn fill_bounds(
&mut self,
lower_ctx: &LowerCtx<'_>,
- node: &dyn ast::HasTypeBounds,
+ type_bounds: Option<ast::TypeBoundList>,
target: Either<TypeRef, LifetimeRef>,
) {
- for bound in
- node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
- {
+ for bound in type_bounds.iter().flat_map(|type_bound_list| type_bound_list.bounds()) {
self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone());
}
}
@@ -233,7 +232,11 @@ impl GenericParams {
};
self.type_or_consts.alloc(param.into());
let type_ref = TypeRef::Path(name.into());
- self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
+ self.fill_bounds(
+ lower_ctx,
+ type_param.type_bound_list(),
+ Either::Left(type_ref),
+ );
}
ast::TypeOrConstParam::Const(const_param) => {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -255,7 +258,11 @@ impl GenericParams {
let param = LifetimeParamData { name: name.clone() };
self.lifetimes.alloc(param);
let lifetime_ref = LifetimeRef::new_name(name);
- self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+ self.fill_bounds(
+ lower_ctx,
+ lifetime_param.type_bound_list(),
+ Either::Right(lifetime_ref),
+ );
}
}
@@ -421,6 +428,10 @@ fn file_id_and_params_of(
let src = it.lookup(db).source(db);
(src.file_id, src.value.generic_param_list())
}
+ GenericDefId::TraitAliasId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
GenericDefId::TypeAliasId(it) => {
let src = it.lookup(db).source(db);
(src.file_id, src.value.generic_param_list())
@@ -435,7 +446,7 @@ fn file_id_and_params_of(
}
impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
- type Value = Either<ast::TypeOrConstParam, ast::Trait>;
+ type Value = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
fn child_source(
&self,
db: &dyn DefDatabase,
@@ -447,11 +458,20 @@ impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
let mut params = ArenaMap::default();
- // For traits the first type index is `Self`, we need to add it before the other params.
- if let GenericDefId::TraitId(id) = *self {
- let trait_ref = id.lookup(db).source(db).value;
- let idx = idx_iter.next().unwrap();
- params.insert(idx, Either::Right(trait_ref));
+ // For traits and trait aliases the first type index is `Self`, we need to add it before
+ // the other params.
+ match *self {
+ GenericDefId::TraitId(id) => {
+ let trait_ref = id.lookup(db).source(db).value;
+ let idx = idx_iter.next().unwrap();
+ params.insert(idx, Either::Right(ast::TraitOrAlias::Trait(trait_ref)));
+ }
+ GenericDefId::TraitAliasId(id) => {
+ let alias = id.lookup(db).source(db).value;
+ let idx = idx_iter.next().unwrap();
+ params.insert(idx, Either::Right(ast::TraitOrAlias::TraitAlias(alias)));
+ }
+ _ => {}
}
if let Some(generic_params_list) = generic_params_list {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index 1ce191942..4f1f6000d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -264,6 +264,7 @@ pub enum ImportKind {
Const,
Static,
Trait,
+ TraitAlias,
TypeAlias,
BuiltinType,
AssociatedItem,
@@ -459,6 +460,7 @@ fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
ModuleDefId::ConstId(_) => ImportKind::Const,
ModuleDefId::StaticId(_) => ImportKind::Static,
ModuleDefId::TraitId(_) => ImportKind::Trait,
+ ModuleDefId::TraitAliasId(_) => ImportKind::TraitAlias,
ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
ModuleDefId::MacroId(_) => ImportKind::Macro,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index 53a4173ff..991e44703 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -431,6 +431,7 @@ impl PerNs {
ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v),
ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v),
ModuleDefId::TraitId(_) => PerNs::types(def, v),
+ ModuleDefId::TraitAliasId(_) => PerNs::types(def, v),
ModuleDefId::TypeAliasId(_) => PerNs::types(def, v),
ModuleDefId::BuiltinType(_) => PerNs::types(def, v),
ModuleDefId::MacroId(mac) => PerNs::macros(mac, v),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index 19d01630e..9da5b2d47 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -204,6 +204,7 @@ impl ItemTree {
consts,
statics,
traits,
+ trait_aliases,
impls,
type_aliases,
mods,
@@ -226,6 +227,7 @@ impl ItemTree {
consts.shrink_to_fit();
statics.shrink_to_fit();
traits.shrink_to_fit();
+ trait_aliases.shrink_to_fit();
impls.shrink_to_fit();
type_aliases.shrink_to_fit();
mods.shrink_to_fit();
@@ -276,6 +278,7 @@ struct ItemTreeData {
consts: Arena<Const>,
statics: Arena<Static>,
traits: Arena<Trait>,
+ trait_aliases: Arena<TraitAlias>,
impls: Arena<Impl>,
type_aliases: Arena<TypeAlias>,
mods: Arena<Mod>,
@@ -496,6 +499,7 @@ mod_items! {
Const in consts -> ast::Const,
Static in statics -> ast::Static,
Trait in traits -> ast::Trait,
+ TraitAlias in trait_aliases -> ast::TraitAlias,
Impl in impls -> ast::Impl,
TypeAlias in type_aliases -> ast::TypeAlias,
Mod in mods -> ast::Module,
@@ -672,12 +676,19 @@ pub struct Trait {
pub generic_params: Interned<GenericParams>,
pub is_auto: bool,
pub is_unsafe: bool,
- /// This is [`None`] if this Trait is a trait alias.
- pub items: Option<Box<[AssocItem]>>,
+ pub items: Box<[AssocItem]>,
pub ast_id: FileAstId<ast::Trait>,
}
#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TraitAlias {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub ast_id: FileAstId<ast::TraitAlias>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Impl {
pub generic_params: Interned<GenericParams>,
pub target_trait: Option<Interned<TraitRef>>,
@@ -872,6 +883,7 @@ impl ModItem {
| ModItem::Enum(_)
| ModItem::Static(_)
| ModItem::Trait(_)
+ | ModItem::TraitAlias(_)
| ModItem::Impl(_)
| ModItem::Mod(_)
| ModItem::MacroRules(_)
@@ -899,6 +911,7 @@ impl ModItem {
ModItem::Const(it) => tree[it.index].ast_id().upcast(),
ModItem::Static(it) => tree[it.index].ast_id().upcast(),
ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
+ ModItem::TraitAlias(it) => tree[it.index].ast_id().upcast(),
ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index d4d3c5ef1..77b186f8e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -3,7 +3,7 @@
use std::{collections::hash_map::Entry, sync::Arc};
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
-use syntax::ast::{self, HasModuleItem};
+use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance},
@@ -90,6 +90,13 @@ impl<'a> Ctx<'a> {
_ => None,
})
.collect();
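+ // Also lower a trailing macro call in tail-expression position as a mod item, so that
+ // items it expands to are collected (cf. the `trailing_expr_macro_expands_stmts` test).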
+ if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() {
+ if let Some(call) = expr.macro_call() {
+ if let Some(mod_item) = self.lower_mod_item(&call.into()) {
+ self.tree.top_level.push(mod_item);
+ }
+ }
+ }
self.tree
}
@@ -110,6 +117,7 @@ impl<'a> Ctx<'a> {
ast::Item::Const(ast) => self.lower_const(ast).into(),
ast::Item::Module(ast) => self.lower_module(ast)?.into(),
ast::Item::Trait(ast) => self.lower_trait(ast)?.into(),
+ ast::Item::TraitAlias(ast) => self.lower_trait_alias(ast)?.into(),
ast::Item::Impl(ast) => self.lower_impl(ast)?.into(),
ast::Item::Use(ast) => self.lower_use(ast)?.into(),
ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(),
@@ -147,7 +155,7 @@ impl<'a> Ctx<'a> {
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
- let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt);
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind());
let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id };
@@ -211,7 +219,7 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
- let generic_params = self.lower_generic_params(GenericsOwner::Union, union);
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
@@ -224,7 +232,7 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
- let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_);
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
@@ -372,8 +380,7 @@ impl<'a> Ctx<'a> {
ast_id,
flags,
};
- res.explicit_generic_params =
- self.lower_generic_params(GenericsOwner::Function(&res), func);
+ res.explicit_generic_params = self.lower_generic_params(HasImplicitSelf::No, func);
Some(id(self.data().functions.alloc(res)))
}
@@ -386,7 +393,7 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias);
- let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias);
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
let res = TypeAlias {
name,
@@ -442,27 +449,49 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
- let generic_params = self.lower_generic_params(GenericsOwner::Trait(trait_def), trait_def);
+ let generic_params =
+ self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some();
- let items = trait_def.assoc_item_list().map(|list| {
- list.assoc_items()
- .filter_map(|item| {
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- self.lower_assoc_item(&item).map(|item| {
- self.add_attrs(ModItem::from(item).into(), attrs);
- item
- })
- })
- .collect()
- });
let ast_id = self.source_ast_id_map.ast_id(trait_def);
- let res = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
- Some(id(self.data().traits.alloc(res)))
+
+ let items = trait_def
+ .assoc_item_list()
+ .into_iter()
+ .flat_map(|list| list.assoc_items())
+ .filter_map(|item| {
+ let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
+ self.lower_assoc_item(&item).map(|item| {
+ self.add_attrs(ModItem::from(item).into(), attrs);
+ item
+ })
+ })
+ .collect();
+
+ let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
+ Some(id(self.data().traits.alloc(def)))
+ }
+
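+ /// Lowers `trait A = B;` into a `TraitAlias` item-tree node.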
+ fn lower_trait_alias(
+ &mut self,
+ trait_alias_def: &ast::TraitAlias,
+ ) -> Option<FileItemTreeId<TraitAlias>> {
+ let name = trait_alias_def.name()?.as_name();
+ let visibility = self.lower_visibility(trait_alias_def);
+ let generic_params = self.lower_generic_params(
+ HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
+ trait_alias_def,
+ );
+ let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
+
+ let alias = TraitAlias { name, visibility, generic_params, ast_id };
+ Some(id(self.data().trait_aliases.alloc(alias)))
}
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
- let generic_params = self.lower_generic_params(GenericsOwner::Impl, impl_def);
+ // Note that impls don't get an implicit `Self` type parameter, unlike traits: in an impl, `Self` is
+ // a type alias for the impl's target type rather than a type parameter, so it is handled by the resolver.
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
// FIXME: If trait lowering fails, e.g. due to a non-`PathType`, we treat this impl
// as if it were a non-trait impl. Ideally we want to create a unique missing ref that only
// equals itself.
@@ -566,42 +595,29 @@ impl<'a> Ctx<'a> {
fn lower_generic_params(
&mut self,
- owner: GenericsOwner<'_>,
+ has_implicit_self: HasImplicitSelf,
node: &dyn ast::HasGenericParams,
) -> Interned<GenericParams> {
let mut generics = GenericParams::default();
- match owner {
- GenericsOwner::Function(_)
- | GenericsOwner::Struct
- | GenericsOwner::Enum
- | GenericsOwner::Union
- | GenericsOwner::TypeAlias => {
- generics.fill(&self.body_ctx, node);
- }
- GenericsOwner::Trait(trait_def) => {
- // traits get the Self type as an implicit first type parameter
- generics.type_or_consts.alloc(
- TypeParamData {
- name: Some(name![Self]),
- default: None,
- provenance: TypeParamProvenance::TraitSelf,
- }
- .into(),
- );
- // add super traits as bounds on Self
- // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
- let self_param = TypeRef::Path(name![Self].into());
- generics.fill_bounds(&self.body_ctx, trait_def, Either::Left(self_param));
- generics.fill(&self.body_ctx, node);
- }
- GenericsOwner::Impl => {
- // Note that we don't add `Self` here: in `impl`s, `Self` is not a
- // type-parameter, but rather is a type-alias for impl's target
- // type, so this is handled by the resolver.
- generics.fill(&self.body_ctx, node);
- }
+
+ if let HasImplicitSelf::Yes(bounds) = has_implicit_self {
+ // Traits and trait aliases get the Self type as an implicit first type parameter.
+ generics.type_or_consts.alloc(
+ TypeParamData {
+ name: Some(name![Self]),
+ default: None,
+ provenance: TypeParamProvenance::TraitSelf,
+ }
+ .into(),
+ );
+ // add super traits as bounds on Self
+ // i.e., `trait Foo: Bar` is equivalent to `trait Foo where Self: Bar`
+ let self_param = TypeRef::Path(name![Self].into());
+ generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
}
+ generics.fill(&self.body_ctx, node);
+
generics.shrink_to_fit();
Interned::new(generics)
}
@@ -673,17 +689,10 @@ fn desugar_future_path(orig: TypeRef) -> Path {
Path::from_known_path(path, generic_args)
}
-enum GenericsOwner<'a> {
- /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument
- /// position.
- Function(&'a Function),
- Struct,
- Enum,
- Union,
- /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter.
- Trait(&'a ast::Trait),
- TypeAlias,
- Impl,
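+/// Whether the item gets an implicit `Self` type parameter when its generics are lowered
+/// (traits and trait aliases do, other items do not).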
+enum HasImplicitSelf {
+ /// Inner list is a type bound list for the implicit `Self`.
+ Yes(Option<ast::TypeBoundList>),
+ No,
}
fn lower_abi(abi: ast::Abi) -> Interned<str> {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 8f230b87d..5f2999796 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -374,23 +374,24 @@ impl<'a> Printer<'a> {
}
w!(self, "trait {}", name);
self.print_generic_params(generic_params);
- match items {
- Some(items) => {
- self.print_where_clause_and_opening_brace(generic_params);
- self.indented(|this| {
- for item in &**items {
- this.print_mod_item((*item).into());
- }
- });
- }
- None => {
- w!(self, " = ");
- // FIXME: Print the aliased traits
- self.print_where_clause_and_opening_brace(generic_params);
+ self.print_where_clause_and_opening_brace(generic_params);
+ self.indented(|this| {
+ for item in &**items {
+ this.print_mod_item((*item).into());
}
- }
+ });
wln!(self, "}}");
}
+ ModItem::TraitAlias(it) => {
+ let TraitAlias { name, visibility, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "trait {}", name);
+ self.print_generic_params(generic_params);
+ w!(self, " = ");
+ self.print_where_clause(generic_params);
+ w!(self, ";");
+ wln!(self);
+ }
ModItem::Impl(it) => {
let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
&self.tree[it];
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
index 72beec818..f30be6b64 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
@@ -9,8 +9,8 @@ use syntax::{ast, AstNode, AstPtr};
use crate::{
dyn_map::{DynMap, Policy},
ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
- MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
- UnionId,
+ MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+ TypeOrConstParamId, UnionId,
};
pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
@@ -21,6 +21,7 @@ pub const STATIC: Key<ast::Static, StaticId> = Key::new();
pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
+pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
pub const UNION: Key<ast::Union, UnionId> = Key::new();
pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index ab9bc615d..4096e0a38 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -181,15 +181,15 @@ impl LangItems {
T: Into<AttrDefId> + Copy,
{
let _p = profile::span("collect_lang_item");
- if let Some(lang_item) = lang_attr(db, item).and_then(|it| LangItem::from_str(&it)) {
+ if let Some(lang_item) = lang_attr(db, item) {
self.items.entry(lang_item).or_insert_with(|| constructor(item));
}
}
}
-pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<SmolStr> {
+pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<LangItem> {
let attrs = db.attrs(item.into());
- attrs.by_key("lang").string_value().cloned()
+ attrs.by_key("lang").string_value().cloned().and_then(|it| LangItem::from_str(&it))
}
pub enum GenericRequirement {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index d07c5fb67..8c2e93f09 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -86,7 +86,7 @@ use crate::{
builtin_type::BuiltinType,
item_tree::{
Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem,
- Static, Struct, Trait, TypeAlias, Union,
+ Static, Struct, Trait, TraitAlias, TypeAlias, Union,
},
};
@@ -128,7 +128,7 @@ impl ModuleId {
}
}
-/// An ID of a module, **local** to a specific crate
+/// An ID of a module, **local** to a `DefMap`.
pub type LocalModuleId = Idx<nameres::ModuleData>;
#[derive(Debug)]
@@ -262,6 +262,11 @@ pub type TraitLoc = ItemLoc<Trait>;
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TraitAliasId(salsa::InternId);
+pub type TraitAliasLoc = ItemLoc<TraitAlias>;
+impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAliasId(salsa::InternId);
type TypeAliasLoc = AssocItemLoc<TypeAlias>;
impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
@@ -453,6 +458,7 @@ pub enum ModuleDefId {
ConstId(ConstId),
StaticId(StaticId),
TraitId(TraitId),
+ TraitAliasId(TraitAliasId),
TypeAliasId(TypeAliasId),
BuiltinType(BuiltinType),
MacroId(MacroId),
@@ -466,6 +472,7 @@ impl_from!(
ConstId,
StaticId,
TraitId,
+ TraitAliasId,
TypeAliasId,
BuiltinType
for ModuleDefId
@@ -516,6 +523,7 @@ pub enum GenericDefId {
FunctionId(FunctionId),
AdtId(AdtId),
TraitId(TraitId),
+ TraitAliasId(TraitAliasId),
TypeAliasId(TypeAliasId),
ImplId(ImplId),
// enum variants cannot have generics themselves, but their parent enums
@@ -528,6 +536,7 @@ impl_from!(
FunctionId,
AdtId(StructId, EnumId, UnionId),
TraitId,
+ TraitAliasId,
TypeAliasId,
ImplId,
EnumVariantId,
@@ -555,6 +564,7 @@ pub enum AttrDefId {
StaticId(StaticId),
ConstId(ConstId),
TraitId(TraitId),
+ TraitAliasId(TraitAliasId),
TypeAliasId(TypeAliasId),
MacroId(MacroId),
ImplId(ImplId),
@@ -714,6 +724,7 @@ impl HasModule for GenericDefId {
GenericDefId::FunctionId(it) => it.lookup(db).module(db),
GenericDefId::AdtId(it) => it.module(db),
GenericDefId::TraitId(it) => it.lookup(db).container,
+ GenericDefId::TraitAliasId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).module(db),
GenericDefId::ImplId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container,
@@ -747,6 +758,7 @@ impl ModuleDefId {
ModuleDefId::ConstId(id) => id.lookup(db).container.module(db),
ModuleDefId::StaticId(id) => id.lookup(db).module(db),
ModuleDefId::TraitId(id) => id.lookup(db).container,
+ ModuleDefId::TraitAliasId(id) => id.lookup(db).container,
ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db),
ModuleDefId::MacroId(id) => id.module(db),
ModuleDefId::BuiltinType(_) => return None,
@@ -765,6 +777,7 @@ impl AttrDefId {
AttrDefId::StaticId(it) => it.lookup(db).module(db).krate,
AttrDefId::ConstId(it) => it.lookup(db).module(db).krate,
AttrDefId::TraitId(it) => it.lookup(db).container.krate,
+ AttrDefId::TraitAliasId(it) => it.lookup(db).container.krate,
AttrDefId::TypeAliasId(it) => it.lookup(db).module(db).krate,
AttrDefId::ImplId(it) => it.lookup(db).container.krate,
AttrDefId::ExternBlockId(it) => it.lookup(db).container.krate,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
index 5ab90d92d..314bf22b9 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
@@ -20,7 +20,7 @@ use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
- db::{AstDatabase, TokenExpander},
+ db::{ExpandDatabase, TokenExpander},
AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
};
use stdx::format_to;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index bb4526672..0b72ca1ee 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -143,7 +143,7 @@ macro_rules! assert {
fn main() {
{
- if !true {
+ if !(true ) {
$crate::panic!("{} {:?}", arg1(a, b, c), arg2);
}
};
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 8358a46f0..b663a2917 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -827,6 +827,7 @@ macro_rules! rgb_color {
/* parse error: expected type */
/* parse error: expected R_PAREN */
/* parse error: expected R_ANGLE */
+/* parse error: expected `::` */
/* parse error: expected COMMA */
/* parse error: expected R_ANGLE */
/* parse error: expected SEMICOLON */
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index f42b0079d..4efe8c58a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -120,6 +120,8 @@ pub struct DefMap {
registered_tools: Vec<SmolStr>,
/// Unstable features of Rust enabled with `#![feature(A, B)]`.
unstable_features: FxHashSet<SmolStr>,
+ /// #[rustc_coherence_is_core]
+ rustc_coherence_is_core: bool,
edition: Edition,
recursion_limit: Option<u32>,
@@ -215,7 +217,7 @@ pub struct ModuleData {
pub origin: ModuleOrigin,
/// Declared visibility of this module.
pub visibility: Visibility,
-
+ /// Always [`None`] for block modules.
pub parent: Option<LocalModuleId>,
pub children: FxHashMap<Name, LocalModuleId>,
pub scope: ItemScope,
@@ -292,6 +294,7 @@ impl DefMap {
registered_tools: Vec::new(),
unstable_features: FxHashSet::default(),
diagnostics: Vec::new(),
+ rustc_coherence_is_core: false,
}
}
@@ -325,6 +328,10 @@ impl DefMap {
self.unstable_features.contains(feature)
}
+ pub fn is_rustc_coherence_is_core(&self) -> bool {
+ self.rustc_coherence_is_core
+ }
+
pub fn root(&self) -> LocalModuleId {
self.root
}
@@ -337,12 +344,12 @@ impl DefMap {
self.proc_macro_loading_error.as_deref()
}
- pub(crate) fn krate(&self) -> CrateId {
+ pub fn krate(&self) -> CrateId {
self.krate
}
pub(crate) fn block_id(&self) -> Option<BlockId> {
- self.block.as_ref().map(|block| block.block)
+ self.block.map(|block| block.block)
}
pub(crate) fn prelude(&self) -> Option<ModuleId> {
@@ -354,7 +361,7 @@ impl DefMap {
}
pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
- let block = self.block.as_ref().map(|b| b.block);
+ let block = self.block.map(|b| b.block);
ModuleId { krate: self.krate, local_id, block }
}
@@ -425,12 +432,12 @@ impl DefMap {
Some(self.block?.parent)
}
- /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing
+ /// Returns the module containing `local_mod`, either the parent `mod`, or the module (or block) containing
/// the block, if `self` corresponds to a block expression.
pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
- match &self[local_mod].parent {
- Some(parent) => Some(self.module_id(*parent)),
- None => self.block.as_ref().map(|block| block.parent),
+ match self[local_mod].parent {
+ Some(parent) => Some(self.module_id(parent)),
+ None => self.block.map(|block| block.parent),
}
}
@@ -440,11 +447,11 @@ impl DefMap {
let mut buf = String::new();
let mut arc;
let mut current_map = self;
- while let Some(block) = &current_map.block {
+ while let Some(block) = current_map.block {
go(&mut buf, current_map, "block scope", current_map.root);
buf.push('\n');
arc = block.parent.def_map(db);
- current_map = &*arc;
+ current_map = &arc;
}
go(&mut buf, current_map, "crate", current_map.root);
return buf;
@@ -468,10 +475,10 @@ impl DefMap {
let mut buf = String::new();
let mut arc;
let mut current_map = self;
- while let Some(block) = &current_map.block {
+ while let Some(block) = current_map.block {
format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
arc = block.parent.def_map(db);
- current_map = &*arc;
+ current_map = &arc;
}
format_to!(buf, "crate scope\n");
@@ -498,6 +505,7 @@ impl DefMap {
krate: _,
prelude: _,
root: _,
+ rustc_coherence_is_core: _,
} = self;
extern_prelude.shrink_to_fit();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 4b39a20d8..ddcee77ec 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -51,7 +51,8 @@ use crate::{
AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId,
- ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
+ ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc,
+ UnresolvedMacro,
};
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@@ -86,10 +87,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
// FIXME: a hacky way to create a Name from string.
let name =
tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
- (
- name.as_name(),
- ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)),
- )
+ (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect()
}
@@ -298,6 +296,11 @@ impl DefCollector<'_> {
continue;
}
+ if attr_name.as_text().as_deref() == Some("rustc_coherence_is_core") {
+ self.def_map.rustc_coherence_is_core = true;
+ continue;
+ }
+
if *attr_name == hir_expand::name![feature] {
let features =
attr.parse_path_comma_token_tree().into_iter().flatten().filter_map(
@@ -580,7 +583,7 @@ impl DefCollector<'_> {
let kind = def.kind.to_basedb_kind();
let (expander, kind) = match self.proc_macros.iter().find(|(n, _)| n == &def.name) {
Some(&(_, expander)) => (expander, kind),
- None => (ProcMacroExpander::dummy(self.def_map.krate), kind),
+ None => (ProcMacroExpander::dummy(), kind),
};
let proc_macro_id =
@@ -666,8 +669,10 @@ impl DefCollector<'_> {
macro_: Macro2Id,
vis: &RawVisibility,
) {
- let vis =
- self.def_map.resolve_visibility(self.db, module_id, vis).unwrap_or(Visibility::Public);
+ let vis = self
+ .def_map
+ .resolve_visibility(self.db, module_id, vis, false)
+ .unwrap_or(Visibility::Public);
self.def_map.modules[module_id].scope.declare(macro_.into());
self.update(
module_id,
@@ -831,7 +836,7 @@ impl DefCollector<'_> {
let mut def = directive.status.namespaces();
let vis = self
.def_map
- .resolve_visibility(self.db, module_id, &directive.import.visibility)
+ .resolve_visibility(self.db, module_id, &directive.import.visibility, false)
.unwrap_or(Visibility::Public);
match import.kind {
@@ -1547,7 +1552,7 @@ impl ModCollector<'_, '_> {
};
let resolve_vis = |def_map: &DefMap, visibility| {
def_map
- .resolve_visibility(db, self.module_id, visibility)
+ .resolve_visibility(db, self.module_id, visibility, false)
.unwrap_or(Visibility::Public)
};
@@ -1707,6 +1712,20 @@ impl ModCollector<'_, '_> {
false,
);
}
+ ModItem::TraitAlias(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ TraitAliasLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
ModItem::TypeAlias(id) => {
let it = &self.item_tree[id];
@@ -1823,7 +1842,7 @@ impl ModCollector<'_, '_> {
) -> LocalModuleId {
let def_map = &mut self.def_collector.def_map;
let vis = def_map
- .resolve_visibility(self.def_collector.db, self.module_id, visibility)
+ .resolve_visibility(self.def_collector.db, self.module_id, visibility, false)
.unwrap_or(Visibility::Public);
let modules = &mut def_map.modules;
let origin = match definition {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
index 1d9d5cccd..25478481d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -78,6 +78,7 @@ impl DefMap {
// pub(path)
// ^^^^ this
visibility: &RawVisibility,
+ within_impl: bool,
) -> Option<Visibility> {
let mut vis = match visibility {
RawVisibility::Module(path) => {
@@ -102,7 +103,8 @@ impl DefMap {
// `super` to its parent (etc.). However, visibilities must only refer to a module in the
// DefMap they're written in, so we restrict them when that happens.
if let Visibility::Module(m) = vis {
- if self.block_id() != m.block {
+ // ...unless we're resolving visibility for an associated item in an impl.
+ if self.block_id() != m.block && !within_impl {
cov_mark::hit!(adjust_vis_in_block_def_map);
vis = Visibility::Module(self.module_id(self.root()));
tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index f5190b76d..13e6825f8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -223,6 +223,7 @@ pub type Ty = ();
ModuleDefId::ConstId(it) => drop(db.const_data(it)),
ModuleDefId::StaticId(it) => drop(db.static_data(it)),
ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
+ ModuleDefId::TraitAliasId(it) => drop(db.trait_alias_data(it)),
ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
ModuleDefId::EnumVariantId(_)
| ModuleDefId::ModuleId(_)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
index 36d4c36a2..f3197d180 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -8,7 +8,7 @@ use std::{
use crate::{
body::LowerCtx,
- type_ref::{ConstScalarOrPath, LifetimeRef},
+ type_ref::{ConstRefOrPath, LifetimeRef},
};
use hir_expand::name::Name;
use intern::Interned;
@@ -85,7 +85,7 @@ pub struct AssociatedTypeBinding {
pub enum GenericArg {
Type(TypeRef),
Lifetime(LifetimeRef),
- Const(ConstScalarOrPath),
+ Const(ConstRefOrPath),
}
impl Path {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index c85a11db6..b7542bd77 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -2,7 +2,7 @@
use std::iter;
-use crate::type_ref::ConstScalarOrPath;
+use crate::type_ref::ConstRefOrPath;
use either::Either;
use hir_expand::name::{name, AsName};
@@ -212,7 +212,7 @@ pub(super) fn lower_generic_args(
}
}
ast::GenericArg::ConstArg(arg) => {
- let arg = ConstScalarOrPath::from_expr_opt(arg.expr());
+ let arg = ConstRefOrPath::from_expr_opt(arg.expr());
args.push(GenericArg::Const(arg))
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 86958e3da..61e64fc10 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -1,5 +1,5 @@
//! Name resolution façade.
-use std::{hash::BuildHasherDefault, sync::Arc};
+use std::{fmt, hash::BuildHasherDefault, sync::Arc};
use base_db::CrateId;
use hir_expand::name::{name, Name};
@@ -12,7 +12,7 @@ use crate::{
body::scope::{ExprScopes, ScopeId},
builtin_type::BuiltinType,
db::DefDatabase,
- expr::{ExprId, LabelId, PatId},
+ expr::{BindingId, ExprId, LabelId},
generics::{GenericParams, TypeOrConstParamData},
item_scope::{BuiltinShadowMode, BUILTIN_SCOPE},
nameres::DefMap,
@@ -22,7 +22,8 @@ use crate::{
AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId,
- StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, VariantId,
+ StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
+ VariantId,
};
#[derive(Debug, Clone)]
@@ -35,19 +36,34 @@ pub struct Resolver {
module_scope: ModuleItemMap,
}
-#[derive(Debug, Clone)]
+#[derive(Clone)]
struct ModuleItemMap {
def_map: Arc<DefMap>,
module_id: LocalModuleId,
}
-#[derive(Debug, Clone)]
+impl fmt::Debug for ModuleItemMap {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("ModuleItemMap").field("module_id", &self.module_id).finish()
+ }
+}
+
+#[derive(Clone)]
struct ExprScope {
owner: DefWithBodyId,
expr_scopes: Arc<ExprScopes>,
scope_id: ScopeId,
}
+impl fmt::Debug for ExprScope {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("ExprScope")
+ .field("owner", &self.owner)
+ .field("scope_id", &self.scope_id)
+ .finish()
+ }
+}
+
#[derive(Debug, Clone)]
enum Scope {
/// All the items and imported names of a module
@@ -74,6 +90,7 @@ pub enum TypeNs {
TypeAliasId(TypeAliasId),
BuiltinType(BuiltinType),
TraitId(TraitId),
+ TraitAliasId(TraitAliasId),
// Module belong to type ns, but the resolver is used when all module paths
// are fully resolved.
// ModuleId(ModuleId)
@@ -85,10 +102,10 @@ pub enum ResolveValueResult {
Partial(TypeNs, usize),
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ValueNs {
ImplSelf(ImplId),
- LocalBinding(PatId),
+ LocalBinding(BindingId),
FunctionId(FunctionId),
ConstId(ConstId),
StaticId(StaticId),
@@ -214,10 +231,12 @@ impl Resolver {
db: &dyn DefDatabase,
visibility: &RawVisibility,
) -> Option<Visibility> {
+ let within_impl =
+ self.scopes().find(|scope| matches!(scope, Scope::ImplDefScope(_))).is_some();
match visibility {
RawVisibility::Module(_) => {
let (item_map, module) = self.item_scope();
- item_map.resolve_visibility(db, module, visibility)
+ item_map.resolve_visibility(db, module, visibility, within_impl)
}
RawVisibility::Public => Some(Visibility::Public),
}
@@ -236,69 +255,81 @@ impl Resolver {
return self.module_scope.resolve_path_in_value_ns(db, path);
}
- for scope in self.scopes() {
- match scope {
- Scope::ExprScope(_) if n_segments > 1 => continue,
- Scope::ExprScope(scope) => {
- let entry = scope
- .expr_scopes
- .entries(scope.scope_id)
- .iter()
- .find(|entry| entry.name() == first_name);
-
- if let Some(e) = entry {
- return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat())));
+ if n_segments <= 1 {
+ for scope in self.scopes() {
+ match scope {
+ Scope::ExprScope(scope) => {
+ let entry = scope
+ .expr_scopes
+ .entries(scope.scope_id)
+ .iter()
+ .find(|entry| entry.name() == first_name);
+
+ if let Some(e) = entry {
+ return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(
+ e.binding(),
+ )));
+ }
}
- }
- Scope::GenericParams { params, def } if n_segments > 1 => {
- if let Some(id) = params.find_type_by_name(first_name, *def) {
- let ty = TypeNs::GenericParam(id);
- return Some(ResolveValueResult::Partial(ty, 1));
+ Scope::GenericParams { params, def } => {
+ if let Some(id) = params.find_const_by_name(first_name, *def) {
+ let val = ValueNs::GenericParam(id);
+ return Some(ResolveValueResult::ValueNs(val));
+ }
}
- }
- Scope::GenericParams { .. } if n_segments != 1 => continue,
- Scope::GenericParams { params, def } => {
- if let Some(id) = params.find_const_by_name(first_name, *def) {
- let val = ValueNs::GenericParam(id);
- return Some(ResolveValueResult::ValueNs(val));
+ &Scope::ImplDefScope(impl_) => {
+ if first_name == &name![Self] {
+ return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_)));
+ }
}
- }
-
- &Scope::ImplDefScope(impl_) => {
- if first_name == &name![Self] {
- return Some(if n_segments > 1 {
- ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1)
- } else {
- ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_))
- });
+ // bare `Self` doesn't work in the value namespace in a struct/enum definition
+ Scope::AdtScope(_) => continue,
+ Scope::BlockScope(m) => {
+ if let Some(def) = m.resolve_path_in_value_ns(db, path) {
+ return Some(def);
+ }
}
}
- // bare `Self` doesn't work in the value namespace in a struct/enum definition
- Scope::AdtScope(_) if n_segments == 1 => continue,
- Scope::AdtScope(adt) => {
- if first_name == &name![Self] {
- let ty = TypeNs::AdtSelfType(*adt);
- return Some(ResolveValueResult::Partial(ty, 1));
+ }
+ } else {
+ for scope in self.scopes() {
+ match scope {
+ Scope::ExprScope(_) => continue,
+ Scope::GenericParams { params, def } => {
+ if let Some(id) = params.find_type_by_name(first_name, *def) {
+ let ty = TypeNs::GenericParam(id);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ }
}
- }
-
- Scope::BlockScope(m) => {
- if let Some(def) = m.resolve_path_in_value_ns(db, path) {
- return Some(def);
+ &Scope::ImplDefScope(impl_) => {
+ if first_name == &name![Self] {
+ return Some(ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1));
+ }
+ }
+ Scope::AdtScope(adt) => {
+ if first_name == &name![Self] {
+ let ty = TypeNs::AdtSelfType(*adt);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ }
+ }
+ Scope::BlockScope(m) => {
+ if let Some(def) = m.resolve_path_in_value_ns(db, path) {
+ return Some(def);
+ }
}
}
}
}
- if let res @ Some(_) = self.module_scope.resolve_path_in_value_ns(db, path) {
- return res;
+ if let Some(res) = self.module_scope.resolve_path_in_value_ns(db, path) {
+ return Some(res);
}
// If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back
// to resolving to the primitive type, to allow this to still work in the presence of
// `use core::u16;`.
- if path.kind == PathKind::Plain && path.segments().len() > 1 {
- if let Some(builtin) = BuiltinType::by_name(&path.segments()[0]) {
+ if path.kind == PathKind::Plain && n_segments > 1 {
+ if let Some(builtin) = BuiltinType::by_name(first_name) {
return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1));
}
}
@@ -400,6 +431,8 @@ impl Resolver {
}
pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> {
+ // FIXME(trait_alias): Trait alias brings aliased traits in scope! Note that supertraits of
+ // aliased traits are NOT brought in scope (unless also aliased).
let mut traits = FxHashSet::default();
for scope in self.scopes() {
@@ -428,6 +461,15 @@ impl Resolver {
traits
}
+ pub fn traits_in_scope_from_block_scopes(&self) -> impl Iterator<Item = TraitId> + '_ {
+ self.scopes()
+ .filter_map(|scope| match scope {
+ Scope::BlockScope(m) => Some(m.def_map[m.module_id].scope.traits()),
+ _ => None,
+ })
+ .flatten()
+ }
+
pub fn module(&self) -> ModuleId {
let (def_map, local_id) = self.item_scope();
def_map.module_id(local_id)
@@ -459,14 +501,85 @@ impl Resolver {
})
}
+ pub fn generic_params(&self) -> Option<&Interned<GenericParams>> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::GenericParams { params, .. } => Some(params),
+ _ => None,
+ })
+ }
+
pub fn body_owner(&self) -> Option<DefWithBodyId> {
self.scopes().find_map(|scope| match scope {
Scope::ExprScope(it) => Some(it.owner),
_ => None,
})
}
+ /// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
+ #[must_use]
+ pub fn update_to_inner_scope(
+ &mut self,
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ expr_id: ExprId,
+ ) -> UpdateGuard {
+ #[inline(always)]
+ fn append_expr_scope(
+ db: &dyn DefDatabase,
+ resolver: &mut Resolver,
+ owner: DefWithBodyId,
+ expr_scopes: &Arc<ExprScopes>,
+ scope_id: ScopeId,
+ ) {
+ resolver.scopes.push(Scope::ExprScope(ExprScope {
+ owner,
+ expr_scopes: expr_scopes.clone(),
+ scope_id,
+ }));
+ if let Some(block) = expr_scopes.block(scope_id) {
+ if let Some(def_map) = db.block_def_map(block) {
+ let root = def_map.root();
+ resolver
+ .scopes
+ .push(Scope::BlockScope(ModuleItemMap { def_map, module_id: root }));
+ // FIXME: This adds as many module scopes as there are blocks, but resolving in each
+ // already traverses all parents, so this is O(n²). I think we could only store the
+ // innermost module scope instead?
+ }
+ }
+ }
+
+ let start = self.scopes.len();
+ let innermost_scope = self.scopes().next();
+ match innermost_scope {
+ Some(&Scope::ExprScope(ExprScope { scope_id, ref expr_scopes, owner })) => {
+ let expr_scopes = expr_scopes.clone();
+ let scope_chain = expr_scopes
+ .scope_chain(expr_scopes.scope_for(expr_id))
+ .take_while(|&it| it != scope_id);
+ for scope_id in scope_chain {
+ append_expr_scope(db, self, owner, &expr_scopes, scope_id);
+ }
+ }
+ _ => {
+ let expr_scopes = db.expr_scopes(owner);
+ let scope_chain = expr_scopes.scope_chain(expr_scopes.scope_for(expr_id));
+
+ for scope_id in scope_chain {
+ append_expr_scope(db, self, owner, &expr_scopes, scope_id);
+ }
+ }
+ }
+ self.scopes[start..].reverse();
+ UpdateGuard(start)
+ }
+
+ pub fn reset_to_guard(&mut self, UpdateGuard(start): UpdateGuard) {
+ self.scopes.truncate(start);
+ }
}
+pub struct UpdateGuard(usize);
+
impl Resolver {
fn scopes(&self) -> impl Iterator<Item = &Scope> {
self.scopes.iter().rev()
@@ -504,7 +617,7 @@ pub enum ScopeDef {
ImplSelfType(ImplId),
AdtSelfType(AdtId),
GenericParam(GenericParamId),
- Local(PatId),
+ Local(BindingId),
Label(LabelId),
}
@@ -556,17 +669,18 @@ impl Scope {
acc.add(&name, ScopeDef::Label(label))
}
scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
- acc.add_local(e.name(), e.pat());
+ acc.add_local(e.name(), e.binding());
});
}
}
}
}
-// needs arbitrary_self_types to be a method... or maybe move to the def?
pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
+ let r = owner.resolver(db);
let scopes = db.expr_scopes(owner);
- resolver_for_scope(db, owner, scopes.scope_for(expr_id))
+ let scope_id = scopes.scope_for(expr_id);
+ resolver_for_scope_(db, scopes, scope_id, r, owner)
}
pub fn resolver_for_scope(
@@ -574,8 +688,18 @@ pub fn resolver_for_scope(
owner: DefWithBodyId,
scope_id: Option<ScopeId>,
) -> Resolver {
- let mut r = owner.resolver(db);
+ let r = owner.resolver(db);
let scopes = db.expr_scopes(owner);
+ resolver_for_scope_(db, scopes, scope_id, r, owner)
+}
+
+fn resolver_for_scope_(
+ db: &dyn DefDatabase,
+ scopes: Arc<ExprScopes>,
+ scope_id: Option<ScopeId>,
+ mut r: Resolver,
+ owner: DefWithBodyId,
+) -> Resolver {
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
r.scopes.reserve(scope_chain.len());
@@ -641,6 +765,7 @@ impl ModuleItemMap {
let ty = match module_def.take_types()? {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+ ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
@@ -678,6 +803,7 @@ fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> {
ModuleDefId::AdtId(AdtId::EnumId(_) | AdtId::UnionId(_))
| ModuleDefId::TraitId(_)
+ | ModuleDefId::TraitAliasId(_)
| ModuleDefId::TypeAliasId(_)
| ModuleDefId::BuiltinType(_)
| ModuleDefId::MacroId(_)
@@ -695,6 +821,7 @@ fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> {
ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+ ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
ModuleDefId::FunctionId(_)
| ModuleDefId::ConstId(_)
@@ -732,7 +859,7 @@ impl ScopeNames {
self.add(name, ScopeDef::Unknown)
}
}
- fn add_local(&mut self, name: &Name, pat: PatId) {
+ fn add_local(&mut self, name: &Name, binding: BindingId) {
let set = self.map.entry(name.clone()).or_default();
// XXX: hack, account for local (and only local) shadowing.
//
@@ -743,7 +870,7 @@ impl ScopeNames {
cov_mark::hit!(shadowing_shows_single_completion);
return;
}
- set.push(ScopeDef::Local(pat))
+ set.push(ScopeDef::Local(binding))
}
}
@@ -779,6 +906,12 @@ impl HasResolver for TraitId {
}
}
+impl HasResolver for TraitAliasId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
impl<T: Into<AdtId> + Copy> HasResolver for T {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
let def = self.into();
@@ -858,6 +991,7 @@ impl HasResolver for GenericDefId {
GenericDefId::FunctionId(inner) => inner.resolver(db),
GenericDefId::AdtId(adt) => adt.resolver(db),
GenericDefId::TraitId(inner) => inner.resolver(db),
+ GenericDefId::TraitAliasId(inner) => inner.resolver(db),
GenericDefId::TypeAliasId(inner) => inner.resolver(db),
GenericDefId::ImplId(inner) => inner.resolver(db),
GenericDefId::EnumVariantId(inner) => inner.parent.resolver(db),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index b7908bdda..ee143b19a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -9,7 +9,7 @@ use base_db::{
salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition,
SourceDatabase, Upcast,
};
-use hir_expand::{db::AstDatabase, InFile};
+use hir_expand::{db::ExpandDatabase, InFile};
use stdx::hash::NoHashHashSet;
use syntax::{algo, ast, AstNode};
@@ -23,7 +23,7 @@ use crate::{
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
- hir_expand::db::AstDatabaseStorage,
+ hir_expand::db::ExpandDatabaseStorage,
crate::db::InternDatabaseStorage,
crate::db::DefDatabaseStorage
)]
@@ -40,8 +40,8 @@ impl Default for TestDB {
}
}
-impl Upcast<dyn AstDatabase> for TestDB {
- fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+impl Upcast<dyn ExpandDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
index 9652b01b9..8e30f429a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
@@ -116,7 +116,7 @@ pub enum TypeRef {
Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
// FIXME: for full const generics, the latter element (length) here is going to have to be an
// expression that is further lowered later in hir_ty.
- Array(Box<TypeRef>, ConstScalarOrPath),
+ Array(Box<TypeRef>, ConstRefOrPath),
Slice(Box<TypeRef>),
/// A fn pointer. Last element of the vector is the return type.
Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/, bool /*is_unsafe*/),
@@ -188,7 +188,7 @@ impl TypeRef {
// `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
// `hir_ty` level, which would allow knowing the type of:
// let v: [u8; 2 + 2] = [0u8; 4];
- let len = ConstScalarOrPath::from_expr_opt(inner.expr());
+ let len = ConstRefOrPath::from_expr_opt(inner.expr());
TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
}
ast::Type::SliceType(inner) => {
@@ -378,25 +378,25 @@ impl TypeBound {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ConstScalarOrPath {
- Scalar(ConstScalar),
+pub enum ConstRefOrPath {
+ Scalar(ConstRef),
Path(Name),
}
-impl std::fmt::Display for ConstScalarOrPath {
+impl std::fmt::Display for ConstRefOrPath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
- ConstScalarOrPath::Scalar(s) => s.fmt(f),
- ConstScalarOrPath::Path(n) => n.fmt(f),
+ ConstRefOrPath::Scalar(s) => s.fmt(f),
+ ConstRefOrPath::Path(n) => n.fmt(f),
}
}
}
-impl ConstScalarOrPath {
+impl ConstRefOrPath {
pub(crate) fn from_expr_opt(expr: Option<ast::Expr>) -> Self {
match expr {
Some(x) => Self::from_expr(x),
- None => Self::Scalar(ConstScalar::Unknown),
+ None => Self::Scalar(ConstRef::Unknown),
}
}
@@ -407,7 +407,7 @@ impl ConstScalarOrPath {
ast::Expr::PathExpr(p) => {
match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
Some(x) => Self::Path(x.as_name()),
- None => Self::Scalar(ConstScalar::Unknown),
+ None => Self::Scalar(ConstRef::Unknown),
}
}
ast::Expr::PrefixExpr(prefix_expr) => match prefix_expr.op_kind() {
@@ -415,8 +415,8 @@ impl ConstScalarOrPath {
let unsigned = Self::from_expr_opt(prefix_expr.expr());
// Add sign
match unsigned {
- Self::Scalar(ConstScalar::UInt(num)) => {
- Self::Scalar(ConstScalar::Int(-(num as i128)))
+ Self::Scalar(ConstRef::UInt(num)) => {
+ Self::Scalar(ConstRef::Int(-(num as i128)))
}
other => other,
}
@@ -425,22 +425,22 @@ impl ConstScalarOrPath {
},
ast::Expr::Literal(literal) => Self::Scalar(match literal.kind() {
ast::LiteralKind::IntNumber(num) => {
- num.value().map(ConstScalar::UInt).unwrap_or(ConstScalar::Unknown)
+ num.value().map(ConstRef::UInt).unwrap_or(ConstRef::Unknown)
}
ast::LiteralKind::Char(c) => {
- c.value().map(ConstScalar::Char).unwrap_or(ConstScalar::Unknown)
+ c.value().map(ConstRef::Char).unwrap_or(ConstRef::Unknown)
}
- ast::LiteralKind::Bool(f) => ConstScalar::Bool(f),
- _ => ConstScalar::Unknown,
+ ast::LiteralKind::Bool(f) => ConstRef::Bool(f),
+ _ => ConstRef::Unknown,
}),
- _ => Self::Scalar(ConstScalar::Unknown),
+ _ => Self::Scalar(ConstRef::Unknown),
}
}
}
/// A concrete constant value
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum ConstScalar {
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ConstRef {
Int(i128),
UInt(u128),
Bool(bool),
@@ -454,18 +454,18 @@ pub enum ConstScalar {
Unknown,
}
-impl ConstScalar {
+impl ConstRef {
pub fn builtin_type(&self) -> BuiltinType {
match self {
- ConstScalar::UInt(_) | ConstScalar::Unknown => BuiltinType::Uint(BuiltinUint::U128),
- ConstScalar::Int(_) => BuiltinType::Int(BuiltinInt::I128),
- ConstScalar::Char(_) => BuiltinType::Char,
- ConstScalar::Bool(_) => BuiltinType::Bool,
+ ConstRef::UInt(_) | ConstRef::Unknown => BuiltinType::Uint(BuiltinUint::U128),
+ ConstRef::Int(_) => BuiltinType::Int(BuiltinInt::I128),
+ ConstRef::Char(_) => BuiltinType::Char,
+ ConstRef::Bool(_) => BuiltinType::Bool,
}
}
}
-impl From<Literal> for ConstScalar {
+impl From<Literal> for ConstRef {
fn from(literal: Literal) -> Self {
match literal {
Literal::Char(c) => Self::Char(c),
@@ -477,14 +477,14 @@ impl From<Literal> for ConstScalar {
}
}
-impl std::fmt::Display for ConstScalar {
+impl std::fmt::Display for ConstRef {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match self {
- ConstScalar::Int(num) => num.fmt(f),
- ConstScalar::UInt(num) => num.fmt(f),
- ConstScalar::Bool(flag) => flag.fmt(f),
- ConstScalar::Char(c) => write!(f, "'{c}'"),
- ConstScalar::Unknown => f.write_char('_'),
+ ConstRef::Int(num) => num.fmt(f),
+ ConstRef::UInt(num) => num.fmt(f),
+ ConstRef::Bool(flag) => flag.fmt(f),
+ ConstRef::Char(c) => write!(f, "'{c}'"),
+ ConstRef::Unknown => f.write_char('_'),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
index 087268a9e..ab76ed43d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
@@ -11,7 +11,7 @@ use crate::{
nameres::DefMap,
path::{ModPath, PathKind},
resolver::HasResolver,
- ConstId, FunctionId, HasModule, LocalFieldId, ModuleId, VariantId,
+ ConstId, FunctionId, HasModule, LocalFieldId, LocalModuleId, ModuleId, VariantId,
};
/// Visibility of an item, not yet resolved.
@@ -120,7 +120,7 @@ impl Visibility {
self,
db: &dyn DefDatabase,
def_map: &DefMap,
- mut from_module: crate::LocalModuleId,
+ mut from_module: LocalModuleId,
) -> bool {
let mut to_module = match self {
Visibility::Module(m) => m,
@@ -131,20 +131,23 @@ impl Visibility {
// visibility as the containing module (even though no items are directly nameable from
// there, getting this right is important for method resolution).
// In that case, we adjust the visibility of `to_module` to point to the containing module.
+
// Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're
// currently computing, so we must not call the `def_map` query for it.
- let arc;
- let to_module_def_map =
- if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
- cov_mark::hit!(is_visible_from_same_block_def_map);
- def_map
- } else {
- arc = to_module.def_map(db);
- &arc
- };
- let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none());
- if is_block_root {
- to_module = to_module_def_map.containing_module(to_module.local_id).unwrap();
+ let mut arc;
+ loop {
+ let to_module_def_map =
+ if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
+ cov_mark::hit!(is_visible_from_same_block_def_map);
+ def_map
+ } else {
+ arc = to_module.def_map(db);
+ &arc
+ };
+ match to_module_def_map.parent() {
+ Some(parent) => to_module = parent,
+ None => break,
+ }
}
// from_module needs to be a descendant of to_module
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index 5c04f8e8b..8d1e88725 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -10,7 +10,7 @@ use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
use crate::{
- db::AstDatabase,
+ db::ExpandDatabase,
hygiene::Hygiene,
mod_path::{ModPath, PathKind},
name::AsName,
@@ -38,7 +38,7 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
- pub fn new(db: &dyn AstDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
+ pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
@@ -55,7 +55,7 @@ impl RawAttrs {
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
- pub fn from_attrs_owner(db: &dyn AstDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
+ pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, owner.value, &hygiene)
}
@@ -87,7 +87,7 @@ impl RawAttrs {
/// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
// FIXME: This should return a different type
- pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs {
+ pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs {
let has_cfg_attrs = self
.iter()
.any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]));
@@ -199,7 +199,7 @@ impl fmt::Display for AttrInput {
impl Attr {
fn from_src(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
ast: ast::Meta,
hygiene: &Hygiene,
id: AttrId,
@@ -221,7 +221,7 @@ impl Attr {
}
fn from_tt(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
tt: &tt::Subtree,
hygiene: &Hygiene,
id: AttrId,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 906ca991d..277ecd939 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -1,6 +1,6 @@
//! Builtin attributes.
-use crate::{db::AstDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
+use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
@@ -12,7 +12,7 @@ macro_rules! register_builtin {
impl BuiltinAttrExpander {
pub fn expand(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -60,7 +60,7 @@ pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
}
fn dummy_attr_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -90,7 +90,7 @@ fn dummy_attr_expand(
/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
/// [`hir::Semantics`] to make this work.
fn derive_attr_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index 060a68054..5c1a75132 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -9,7 +9,7 @@ use syntax::{
match_ast,
};
-use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
+use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
macro_rules! register_builtin {
( $($trait:ident => $expand:ident),* ) => {
@@ -21,7 +21,7 @@ macro_rules! register_builtin {
impl BuiltinDeriveExpander {
pub fn expand(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -141,7 +141,7 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu
ExpandResult::ok(expanded)
}
-fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree {
+fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
// FIXME: make hygiene works for builtin derive macro
// such that $crate can be used here.
let cg = db.crate_graph();
@@ -158,7 +158,7 @@ fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree {
}
fn copy_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -167,7 +167,7 @@ fn copy_expand(
}
fn clone_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -176,7 +176,7 @@ fn clone_expand(
}
fn default_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -185,7 +185,7 @@ fn default_expand(
}
fn debug_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -194,7 +194,7 @@ fn debug_expand(
}
fn hash_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -202,13 +202,17 @@ fn hash_expand(
expand_simple_derive(tt, quote! { #krate::hash::Hash })
}
-fn eq_expand(db: &dyn AstDatabase, id: MacroCallId, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn eq_expand(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, quote! { #krate::cmp::Eq })
}
fn partial_eq_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -217,7 +221,7 @@ fn partial_eq_expand(
}
fn ord_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -226,7 +230,7 @@ fn ord_expand(
}
fn partial_ord_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index 9f3fa73d4..44510f2b7 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -10,7 +10,7 @@ use syntax::{
};
use crate::{
- db::AstDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
+ db::ExpandDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@@ -28,7 +28,7 @@ macro_rules! register_builtin {
impl BuiltinFnLikeExpander {
pub fn expand(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -42,7 +42,7 @@ macro_rules! register_builtin {
impl EagerExpander {
pub fn expand(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -121,7 +121,7 @@ const DOLLAR_CRATE: tt::Ident =
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
fn module_path_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -130,7 +130,7 @@ fn module_path_expand(
}
fn line_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -144,7 +144,7 @@ fn line_expand(
}
fn log_syntax_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -152,7 +152,7 @@ fn log_syntax_expand(
}
fn trace_macros_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -160,7 +160,7 @@ fn trace_macros_expand(
}
fn stringify_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -174,7 +174,7 @@ fn stringify_expand(
}
fn column_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -188,7 +188,7 @@ fn column_expand(
}
fn assert_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -206,7 +206,7 @@ fn assert_expand(
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
quote! {{
- if !#cond {
+ if !(#cond) {
#DOLLAR_CRATE::panic!(##panic_args);
}
}}
@@ -218,7 +218,7 @@ fn assert_expand(
}
fn file_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -234,7 +234,7 @@ fn file_expand(
}
fn format_args_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -276,7 +276,7 @@ fn format_args_expand(
}
fn asm_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -304,7 +304,7 @@ fn asm_expand(
}
fn global_asm_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -313,7 +313,7 @@ fn global_asm_expand(
}
fn cfg_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -325,7 +325,7 @@ fn cfg_expand(
}
fn panic_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -343,7 +343,7 @@ fn panic_expand(
}
fn unreachable_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -379,7 +379,7 @@ fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> {
}
fn compile_error_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -395,7 +395,7 @@ fn compile_error_expand(
}
fn concat_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -441,7 +441,7 @@ fn concat_expand(
}
fn concat_bytes_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -507,7 +507,7 @@ fn concat_bytes_expand_subtree(
}
fn concat_idents_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -529,7 +529,7 @@ fn concat_idents_expand(
}
fn relative_file(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
call_id: MacroCallId,
path_str: &str,
allow_recursion: bool,
@@ -558,7 +558,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
}
fn include_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -583,7 +583,7 @@ fn include_expand(
}
fn include_bytes_expand(
- _db: &dyn AstDatabase,
+ _db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -606,7 +606,7 @@ fn include_bytes_expand(
}
fn include_str_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -637,13 +637,13 @@ fn include_str_expand(
ExpandResult::ok(ExpandedEager::new(quote!(#text)))
}
-fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
+fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
let krate = db.lookup_intern_macro_call(arg_id).krate;
db.crate_graph()[krate].env.get(key)
}
fn env_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@@ -679,7 +679,7 @@ fn env_expand(
}
fn option_env_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 76016274f..45572499e 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -44,7 +44,7 @@ pub enum TokenExpander {
impl TokenExpander {
fn expand(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@@ -83,9 +83,8 @@ impl TokenExpander {
}
}
-// FIXME: rename to ExpandDatabase
-#[salsa::query_group(AstDatabaseStorage)]
-pub trait AstDatabase: SourceDatabase {
+#[salsa::query_group(ExpandDatabaseStorage)]
+pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real
@@ -138,7 +137,7 @@ pub trait AstDatabase: SourceDatabase {
/// token. The `token_to_map` mapped down into the expansion, with the mapped
/// token returned.
pub fn expand_speculative(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
actual_macro_call: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
@@ -211,7 +210,7 @@ pub fn expand_speculative(
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
- expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
+ expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
@@ -236,12 +235,12 @@ pub fn expand_speculative(
Some((node.syntax_node(), token))
}
-fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
+fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
Arc::new(map)
}
-fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::MacroFile(macro_file) => {
@@ -253,13 +252,13 @@ fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNod
}
fn parse_macro_expansion(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
let _p = profile::span("parse_macro_expansion");
- let result = db.macro_expand(macro_file.macro_call_id);
+ let mbe::ValueResult { value, err } = db.macro_expand(macro_file.macro_call_id);
- if let Some(err) = &result.err {
+ if let Some(err) = &err {
// Note:
// The final goal we would like to make all parse_macro success,
// such that the following log will not call anyway.
@@ -280,9 +279,9 @@ fn parse_macro_expansion(
parents
);
}
- let tt = match result.value {
+ let tt = match value {
Some(tt) => tt,
- None => return ExpandResult { value: None, err: result.err },
+ None => return ExpandResult { value: None, err },
};
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
@@ -292,11 +291,11 @@ fn parse_macro_expansion(
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
- ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: result.err }
+ ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err }
}
fn macro_arg(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
let arg = db.macro_arg_text(id)?;
@@ -357,7 +356,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
-fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
let loc = db.lookup_intern_macro_call(id);
let arg = loc.kind.arg(db)?;
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
@@ -380,7 +379,10 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
Some(arg.green().into())
}
-fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+fn macro_def(
+ db: &dyn ExpandDatabase,
+ id: MacroDefId,
+) -> Result<Arc<TokenExpander>, mbe::ParseError> {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
let (mac, def_site_token_map) = match ast_id.to_node(db) {
@@ -419,7 +421,10 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>,
}
}
-fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
+fn macro_expand(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+) -> ExpandResult<Option<Arc<tt::Subtree>>> {
let _p = profile::span("macro_expand");
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
if let Some(eager) = &loc.eager {
@@ -469,11 +474,11 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar
ExpandResult { value: Some(Arc::new(tt)), err }
}
-fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
+fn macro_expand_error(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
db.macro_expand(macro_call).err
}
-fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
+fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let macro_arg = match db.macro_arg(id) {
Some(it) => it,
@@ -499,14 +504,14 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::
_ => None,
};
- expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
+ expander.expand(db, loc.def.krate, loc.krate, &macro_arg.0, attr_arg.as_ref())
}
-fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
+fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
-fn macro_expand_to(db: &dyn AstDatabase, id: MacroCallId) -> ExpandTo {
+fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
loc.kind.expand_to()
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index dfab7ec92..aca41b11f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -25,7 +25,7 @@ use syntax::{ted, SyntaxNode};
use crate::{
ast::{self, AstNode},
- db::AstDatabase,
+ db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
@@ -96,7 +96,7 @@ impl ErrorSink for &'_ mut dyn FnMut(ExpandError) {
}
pub fn expand_eager_macro(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
@@ -172,7 +172,7 @@ fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree {
}
fn lazy_expand(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
@@ -193,7 +193,7 @@ fn lazy_expand(
}
fn eager_macro_recur(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
hygiene: &Hygiene,
curr: InFile<SyntaxNode>,
krate: CrateId,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index c811d1c66..b273f2176 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -636,9 +636,8 @@ fn foo() {
if {}
}
"#,
- // the {} gets parsed as the condition, I think?
expect![[r#"
-fn foo () {if {} {}}
+fn foo () {if __ra_fixup {} {}}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index 2300ee9d0..2eb56fc9e 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -14,7 +14,7 @@ use syntax::{
};
use crate::{
- db::{self, AstDatabase},
+ db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
@@ -26,7 +26,7 @@ pub struct Hygiene {
}
impl Hygiene {
- pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
+ pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
}
@@ -37,7 +37,7 @@ impl Hygiene {
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
if let Some(frames) = &self.frames {
@@ -51,7 +51,7 @@ impl Hygiene {
Either::Left(name_ref.as_name())
}
- pub fn local_inner_macros(&self, db: &dyn AstDatabase, path: ast::Path) -> Option<CrateId> {
+ pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut current = &frames.0;
@@ -87,13 +87,13 @@ pub struct HygieneFrame {
}
impl HygieneFrames {
- fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self {
+ fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
- fn root_crate(&self, db: &dyn AstDatabase, node: &SyntaxNode) -> Option<CrateId> {
+ fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
@@ -136,7 +136,7 @@ struct HygieneInfo {
impl HygieneInfo {
fn map_ident_up(
&self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
token: TextRange,
) -> Option<(InFile<TextRange>, Origin)> {
let token_id = self.exp_map.token_by_range(token)?;
@@ -175,7 +175,7 @@ impl HygieneInfo {
}
fn make_hygiene_info(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
) -> Option<HygieneInfo> {
@@ -215,7 +215,7 @@ fn make_hygiene_info(
}
impl HygieneFrame {
- pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame {
+ pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index a52716cc0..5e99eacc1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -198,7 +198,7 @@ impl HirFileId {
/// For macro-expansion files, returns the file original source file the
/// expansion originated from.
- pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
+ pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
let mut file_id = self;
loop {
match file_id.repr() {
@@ -214,7 +214,7 @@ impl HirFileId {
}
}
- pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
+ pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
let mut level = 0;
let mut curr = self;
while let Some(macro_file) = curr.macro_file() {
@@ -227,14 +227,14 @@ impl HirFileId {
}
/// If this is a macro call, returns the syntax node of the call.
- pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
Some(loc.kind.to_node(db))
}
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> {
+ pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
let mut call =
db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db);
loop {
@@ -248,7 +248,7 @@ impl HirFileId {
}
/// Return expansion information if it is a macro-expansion file
- pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
+ pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -294,7 +294,7 @@ impl HirFileId {
}
/// Indicate it is macro file generated for builtin derive
- pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {
@@ -304,7 +304,7 @@ impl HirFileId {
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
- pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool {
+ pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -315,7 +315,7 @@ impl HirFileId {
}
/// Return whether this file is an include macro
- pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -326,7 +326,7 @@ impl HirFileId {
}
/// Return whether this file is an attr macro
- pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -338,7 +338,7 @@ impl HirFileId {
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool {
+ pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -384,7 +384,7 @@ impl HirFileId {
impl MacroDefId {
pub fn as_lazy_macro(
self,
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
) -> MacroCallId {
@@ -427,7 +427,7 @@ impl MacroCallKind {
}
}
- pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
@@ -465,7 +465,7 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
/// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
- pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange {
+ pub fn original_call_range_with_body(self, db: &dyn db::ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
@@ -490,7 +490,7 @@ impl MacroCallKind {
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
/// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
/// get only the specific derive that is being referred to.
- pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ pub fn original_call_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
@@ -529,7 +529,7 @@ impl MacroCallKind {
FileRange { range, file_id }
}
- fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
+ fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
Some(ast_id.to_node(db).token_tree()?.syntax().clone())
@@ -597,7 +597,7 @@ impl ExpansionInfo {
/// Both of these only have one simple call site input so no special handling is required here.
pub fn map_token_down(
&self,
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
item: Option<ast::Item>,
token: InFile<&SyntaxToken>,
) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
@@ -666,7 +666,7 @@ impl ExpansionInfo {
/// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
pub fn map_token_up(
&self,
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
// Fetch the id through its text range,
@@ -717,7 +717,7 @@ impl ExpansionInfo {
pub type AstId<N> = InFile<FileAstId<N>>;
impl<N: AstNode> AstId<N> {
- pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
let root = db.parse_or_expand(self.file_id).unwrap();
db.ast_id_map(self.file_id).get(self.value).to_node(&root)
}
@@ -753,7 +753,7 @@ impl<T> InFile<T> {
self.with_value(&self.value)
}
- pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
+ pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_or_expand(self.file_id).expect("source created from invalid file")
}
}
@@ -771,10 +771,19 @@ impl<T> InFile<Option<T>> {
}
}
+impl<L, R> InFile<Either<L, R>> {
+ pub fn transpose(self) -> Either<InFile<L>, InFile<R>> {
+ match self.value {
+ Either::Left(l) => Either::Left(InFile::new(self.file_id, l)),
+ Either::Right(r) => Either::Right(InFile::new(self.file_id, r)),
+ }
+ }
+}
+
impl<'a> InFile<&'a SyntaxNode> {
pub fn ancestors_with_macros(
self,
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
@@ -785,7 +794,7 @@ impl<'a> InFile<&'a SyntaxNode> {
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
@@ -806,8 +815,8 @@ impl<'a> InFile<&'a SyntaxNode> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.
- /// For the entire item `InFile::use original_file_range_full`.
- pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ /// For the entire item use [`InFile::original_file_range_full`].
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
@@ -821,8 +830,23 @@ impl<'a> InFile<&'a SyntaxNode> {
}
}
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range_with_body(db)
+ }
+ }
+ }
+
/// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match ascend_node_border_tokens(db, self) {
Some(InFile { file_id, value: (first, last) }) => {
let original_file = file_id.original_file(db);
@@ -841,7 +865,7 @@ impl<'a> InFile<&'a SyntaxNode> {
}
}
- pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {
@@ -868,13 +892,13 @@ impl<'a> InFile<&'a SyntaxNode> {
}
impl InFile<SyntaxToken> {
- pub fn upmap(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxToken>> {
+ pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
let expansion = self.file_id.expansion_info(db)?;
expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
@@ -889,7 +913,7 @@ impl InFile<SyntaxToken> {
}
/// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
@@ -908,7 +932,7 @@ impl InFile<SyntaxToken> {
pub fn ancestors_with_macros(
self,
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
self.value.parent().into_iter().flat_map({
let file_id = self.file_id;
@@ -918,7 +942,7 @@ impl InFile<SyntaxToken> {
}
fn ascend_node_border_tokens(
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
let expansion = file_id.expansion_info(db)?;
@@ -934,7 +958,7 @@ fn ascend_node_border_tokens(
}
fn ascend_call_token(
- db: &dyn db::AstDatabase,
+ db: &dyn db::ExpandDatabase,
expansion: &ExpansionInfo,
token: InFile<SyntaxToken>,
) -> Option<InFile<SyntaxToken>> {
@@ -953,7 +977,7 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
}
- pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
+ pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {
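The hunks above are mostly a mechanical rename of `db::AstDatabase` to `db::ExpandDatabase`, plus one genuinely new helper, `InFile<Either<L, R>>::transpose`. As a rough illustration of what that helper buys callers, here is a self-contained sketch on a simplified `InFile`; the real type lives in hir-expand and carries a `HirFileId`, so the `u32` file id and the `main` function below are illustrative only (requires the `either` crate).

```rust
use either::Either;

#[derive(Clone, Copy, Debug, PartialEq)]
struct InFile<T> {
    file_id: u32,
    value: T,
}

impl<T> InFile<T> {
    fn new(file_id: u32, value: T) -> Self {
        Self { file_id, value }
    }
}

impl<L, R> InFile<Either<L, R>> {
    // Move the file id inward so each branch carries its own location.
    fn transpose(self) -> Either<InFile<L>, InFile<R>> {
        match self.value {
            Either::Left(l) => Either::Left(InFile::new(self.file_id, l)),
            Either::Right(r) => Either::Right(InFile::new(self.file_id, r)),
        }
    }
}

fn main() {
    let node: InFile<Either<i32, &str>> = InFile::new(1, Either::Left(7));
    assert_eq!(node.transpose(), Either::Left(InFile::new(1, 7)));
}
```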
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index d7586d129..e9393cc89 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -6,7 +6,7 @@ use std::{
};
use crate::{
- db::AstDatabase,
+ db::ExpandDatabase,
hygiene::Hygiene,
name::{known, Name},
};
@@ -37,7 +37,11 @@ pub enum PathKind {
}
impl ModPath {
- pub fn from_src(db: &dyn AstDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
+ pub fn from_src(
+ db: &dyn ExpandDatabase,
+ path: ast::Path,
+ hygiene: &Hygiene,
+ ) -> Option<ModPath> {
convert_path(db, None, path, hygiene)
}
@@ -162,7 +166,7 @@ impl From<Name> for ModPath {
}
fn convert_path(
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
hygiene: &Hygiene,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index 3f4d2540c..d758e9302 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -3,22 +3,20 @@
use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
-use crate::{db::AstDatabase, tt, ExpandError, ExpandResult};
+use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ProcMacroExpander {
- krate: CrateId,
proc_macro_id: Option<ProcMacroId>,
}
impl ProcMacroExpander {
- pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> Self {
- Self { krate, proc_macro_id: Some(proc_macro_id) }
+ pub fn new(proc_macro_id: ProcMacroId) -> Self {
+ Self { proc_macro_id: Some(proc_macro_id) }
}
- pub fn dummy(krate: CrateId) -> Self {
- // FIXME: Should store the name for better errors
- Self { krate, proc_macro_id: None }
+ pub fn dummy() -> Self {
+ Self { proc_macro_id: None }
}
pub fn is_dummy(&self) -> bool {
@@ -27,7 +25,8 @@ impl ProcMacroExpander {
pub fn expand(
self,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
+ def_crate: CrateId,
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
@@ -35,7 +34,7 @@ impl ProcMacroExpander {
match self.proc_macro_id {
Some(id) => {
let krate_graph = db.crate_graph();
- let proc_macros = match &krate_graph[self.krate].proc_macro {
+ let proc_macros = match &krate_graph[def_crate].proc_macro {
Ok(proc_macros) => proc_macros,
Err(_) => {
never!("Non-dummy expander even though there are no proc macros");
@@ -84,7 +83,7 @@ impl ProcMacroExpander {
}
None => ExpandResult::with_err(
tt::Subtree::empty(),
- ExpandError::UnresolvedProcMacro(self.krate),
+ ExpandError::UnresolvedProcMacro(def_crate),
),
}
}
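The net effect of this hunk is that `ProcMacroExpander` no longer remembers its defining crate; `expand` now receives `def_crate` (alongside the calling crate) from the caller. A toy sketch of that refactor shape, with made-up types standing in for the real database, crate ids and token trees:

```rust
// Illustrative only: `Expander`, the u32 crate ids and the String payloads are
// stand-ins, not rust-analyzer's ProcMacroExpander / tt::Subtree.
#[derive(Clone, Copy)]
struct Expander {
    proc_macro_id: Option<u32>,
}

impl Expander {
    fn new(proc_macro_id: u32) -> Self {
        Self { proc_macro_id: Some(proc_macro_id) }
    }
    fn dummy() -> Self {
        Self { proc_macro_id: None }
    }
    // The defining crate is now an argument instead of a stored field.
    fn expand(self, def_crate: u32, input: &str) -> Result<String, String> {
        match self.proc_macro_id {
            Some(id) => Ok(format!("crate {def_crate}: macro {id} expanded `{input}`")),
            None => Err(format!("unresolved proc macro in crate {def_crate}")),
        }
    }
}

fn main() {
    assert!(Expander::new(0).expand(1, "derive(Foo)").is_ok());
    assert!(Expander::dummy().expand(1, "derive(Foo)").is_err());
}
```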
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index a8b8d5222..9b3296df2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -18,13 +18,14 @@ arrayvec = "0.7.2"
bitflags = "1.3.2"
smallvec.workspace = true
ena = "0.14.0"
+either = "1.7.0"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.88.0", default-features = false }
-chalk-ir = "0.88.0"
-chalk-recursive = { version = "0.88.0", default-features = false }
-chalk-derive = "0.88.0"
+chalk-solve = { version = "0.89.0", default-features = false }
+chalk-ir = "0.89.0"
+chalk-recursive = { version = "0.89.0", default-features = false }
+chalk-derive = "0.89.0"
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
once_cell = "1.17.0"
typed-arena = "2.0.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
index 8faef7bf7..03e944359 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -152,6 +152,15 @@ impl TyBuilder<()> {
TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner)
}
+ // FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well
+ pub fn discr_ty() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Int(chalk_ir::IntTy::I128)).intern(Interner)
+ }
+
+ pub fn bool() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Bool).intern(Interner)
+ }
+
pub fn usize() -> Ty {
TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner)
}
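The new `discr_ty` hard-codes `i128` for enum discriminants; the FIXME notes that rustc instead uses the ADT's repr type. The sketch below only demonstrates why `i128` is a workable interim choice and where it falls short; it is not rust-analyzer code.

```rust
fn main() {
    // Every signed and unsigned discriminant up to 64 bits fits in i128 losslessly.
    assert_eq!(i128::from(i64::MIN), -9_223_372_036_854_775_808);
    assert_eq!(i128::from(u64::MAX), 18_446_744_073_709_551_615);
    // The caveat behind the FIXME: a #[repr(u128)] discriminant above i128::MAX would not fit.
    assert!(u128::MAX > i128::MAX as u128);
}
```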
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 6989e9fb9..28ae4c349 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -540,8 +540,7 @@ pub(crate) fn trait_datum_query(
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
- let well_known = lang_attr(db.upcast(), trait_)
- .and_then(|name| well_known_trait_from_lang_item(LangItem::from_str(&name)?));
+ let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item);
let trait_datum = TraitDatum {
id: trait_id,
binders: make_binders(db, &generic_params, trait_datum_bound),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index 45c975dfc..214189492 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -12,8 +12,8 @@ use hir_def::{
use crate::{
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
- CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
- QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+ CallableDefId, CallableSig, DynTy, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
+ QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
};
pub trait TyExt {
@@ -22,6 +22,7 @@ pub trait TyExt {
fn is_floating_point(&self) -> bool;
fn is_never(&self) -> bool;
fn is_unknown(&self) -> bool;
+ fn contains_unknown(&self) -> bool;
fn is_ty_var(&self) -> bool;
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
@@ -76,6 +77,10 @@ impl TyExt for Ty {
matches!(self.kind(Interner), TyKind::Error)
}
+ fn contains_unknown(&self) -> bool {
+ self.data(Interner).flags.contains(TypeFlags::HAS_ERROR)
+ }
+
fn is_ty_var(&self) -> bool {
matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
}
@@ -373,6 +378,19 @@ impl ProjectionTyExt for ProjectionTy {
}
}
+pub trait DynTyExt {
+ fn principal(&self) -> Option<&TraitRef>;
+}
+
+impl DynTyExt for DynTy {
+ fn principal(&self) -> Option<&TraitRef> {
+ self.bounds.skip_binders().interned().get(0).and_then(|b| match b.skip_binders() {
+ crate::WhereClause::Implemented(trait_ref) => Some(trait_ref),
+ _ => None,
+ })
+ }
+}
+
pub trait TraitRefExt {
fn hir_trait_id(&self) -> TraitId;
}
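The new `DynTyExt::principal` picks the first `Implemented` bound out of a `dyn` type's bound list. A simplified, self-contained model of that lookup; the `Bound` enum and trait names below are illustrative, not chalk's types:

```rust
#[derive(Debug, PartialEq)]
enum Bound {
    Implemented(&'static str), // stands in for WhereClause::Implemented(TraitRef)
    AliasEq(&'static str),     // e.g. an associated-type equality bound
}

// A `dyn Trait<Item = u8>` type lists its principal trait as the first bound.
fn principal(bounds: &[Bound]) -> Option<&'static str> {
    bounds.first().and_then(|b| match b {
        Bound::Implemented(t) => Some(*t),
        _ => None,
    })
}

fn main() {
    let dyn_iterator = [Bound::Implemented("Iterator"), Bound::AliasEq("Item = u8")];
    assert_eq!(principal(&dyn_iterator), Some("Iterator"));
    assert_eq!(principal(&[Bound::AliasEq("Item = u8")]), None);
}
```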
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 8df70330f..5830c4898 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -1,30 +1,25 @@
//! Constant evaluation details
-use std::{
- collections::HashMap,
- fmt::{Display, Write},
-};
-
-use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
+use base_db::CrateId;
+use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData};
use hir_def::{
- builtin_type::BuiltinInt,
- expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
+ expr::Expr,
path::ModPath,
- resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
- src::HasChildSource,
- type_ref::ConstScalar,
- ConstId, DefWithBodyId, EnumVariantId, Lookup,
+ resolver::{Resolver, ValueNs},
+ type_ref::ConstRef,
+ ConstId, EnumVariantId,
};
-use la_arena::{Arena, Idx, RawIdx};
+use la_arena::{Idx, RawIdx};
use stdx::never;
-use syntax::ast::HasName;
use crate::{
- db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
- utils::Generics, Const, ConstData, ConstValue, GenericArg, InferenceResult, Interner, Ty,
- TyBuilder, TyKind,
+ db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode,
+ to_placeholder_idx, utils::Generics, Const, ConstData, ConstScalar, ConstValue, GenericArg,
+ Interner, MemoryMap, Ty, TyBuilder,
};
+use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
+
/// Extension trait for [`Const`]
pub trait ConstExt {
/// Is a [`Const`] unknown?
@@ -53,346 +48,24 @@ impl ConstExt for Const {
}
}
-pub struct ConstEvalCtx<'a> {
- pub db: &'a dyn HirDatabase,
- pub owner: DefWithBodyId,
- pub exprs: &'a Arena<Expr>,
- pub pats: &'a Arena<Pat>,
- pub local_data: HashMap<PatId, ComputedExpr>,
- infer: &'a InferenceResult,
-}
-
-impl ConstEvalCtx<'_> {
- fn expr_ty(&mut self, expr: ExprId) -> Ty {
- self.infer[expr].clone()
- }
-}
-
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstEvalError {
- NotSupported(&'static str),
- SemanticError(&'static str),
- Loop,
- IncompleteExpr,
- Panic(String),
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ComputedExpr {
- Literal(Literal),
- Enum(String, EnumVariantId, Literal),
- Tuple(Box<[ComputedExpr]>),
+ MirLowerError(MirLowerError),
+ MirEvalError(MirEvalError),
}
-impl Display for ComputedExpr {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- ComputedExpr::Literal(l) => match l {
- Literal::Int(x, _) => {
- if *x >= 10 {
- write!(f, "{x} ({x:#X})")
- } else {
- x.fmt(f)
- }
- }
- Literal::Uint(x, _) => {
- if *x >= 10 {
- write!(f, "{x} ({x:#X})")
- } else {
- x.fmt(f)
- }
- }
- Literal::Float(x, _) => x.fmt(f),
- Literal::Bool(x) => x.fmt(f),
- Literal::Char(x) => std::fmt::Debug::fmt(x, f),
- Literal::String(x) => std::fmt::Debug::fmt(x, f),
- Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
- },
- ComputedExpr::Enum(name, _, _) => name.fmt(f),
- ComputedExpr::Tuple(t) => {
- f.write_char('(')?;
- for x in &**t {
- x.fmt(f)?;
- f.write_str(", ")?;
- }
- f.write_char(')')
- }
+impl From<MirLowerError> for ConstEvalError {
+ fn from(value: MirLowerError) -> Self {
+ match value {
+ MirLowerError::ConstEvalError(e) => *e,
+ _ => ConstEvalError::MirLowerError(value),
}
}
}
-fn scalar_max(scalar: &Scalar) -> i128 {
- match scalar {
- Scalar::Bool => 1,
- Scalar::Char => u32::MAX as i128,
- Scalar::Int(x) => match x {
- IntTy::Isize => isize::MAX as i128,
- IntTy::I8 => i8::MAX as i128,
- IntTy::I16 => i16::MAX as i128,
- IntTy::I32 => i32::MAX as i128,
- IntTy::I64 => i64::MAX as i128,
- IntTy::I128 => i128::MAX,
- },
- Scalar::Uint(x) => match x {
- chalk_ir::UintTy::Usize => usize::MAX as i128,
- chalk_ir::UintTy::U8 => u8::MAX as i128,
- chalk_ir::UintTy::U16 => u16::MAX as i128,
- chalk_ir::UintTy::U32 => u32::MAX as i128,
- chalk_ir::UintTy::U64 => u64::MAX as i128,
- chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now
- },
- Scalar::Float(_) => 0,
- }
-}
-
-fn is_valid(scalar: &Scalar, value: i128) -> bool {
- if value < 0 {
- !matches!(scalar, Scalar::Uint(_)) && -scalar_max(scalar) - 1 <= value
- } else {
- value <= scalar_max(scalar)
- }
-}
-
-fn get_name(ctx: &mut ConstEvalCtx<'_>, variant: EnumVariantId) -> String {
- let loc = variant.parent.lookup(ctx.db.upcast());
- let children = variant.parent.child_source(ctx.db.upcast());
- let item_tree = loc.id.item_tree(ctx.db.upcast());
-
- let variant_name = children.value[variant.local_id].name();
- let enum_name = item_tree[loc.id.value].name.to_string();
- enum_name + "::" + &variant_name.unwrap().to_string()
-}
-
-pub fn eval_const(
- expr_id: ExprId,
- ctx: &mut ConstEvalCtx<'_>,
-) -> Result<ComputedExpr, ConstEvalError> {
- let u128_to_i128 = |it: u128| -> Result<i128, ConstEvalError> {
- it.try_into().map_err(|_| ConstEvalError::NotSupported("u128 is too big"))
- };
-
- let expr = &ctx.exprs[expr_id];
- match expr {
- Expr::Missing => match ctx.owner {
- // evaluate the implicit variant index of an enum variant without expression
- // FIXME: This should return the type of the enum representation
- DefWithBodyId::VariantId(variant) => {
- let prev_idx: u32 = variant.local_id.into_raw().into();
- let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
- let value = match prev_idx {
- Some(local_id) => {
- let prev_variant = EnumVariantId { local_id, parent: variant.parent };
- 1 + match ctx.db.const_eval_variant(prev_variant)? {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => {
- return Err(ConstEvalError::NotSupported(
- "Enum can't contain this kind of value",
- ))
- }
- }
- }
- _ => 0,
- };
- Ok(ComputedExpr::Literal(Literal::Int(value, Some(BuiltinInt::I128))))
- }
- _ => Err(ConstEvalError::IncompleteExpr),
- },
- Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
- &Expr::UnaryOp { expr, op } => {
- let ty = &ctx.expr_ty(expr);
- let ev = eval_const(expr, ctx)?;
- match op {
- hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
- hir_def::expr::UnaryOp::Not => {
- let v = match ev {
- ComputedExpr::Literal(Literal::Bool(b)) => {
- return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
- }
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- let r = match ty.kind(Interner) {
- TyKind::Scalar(Scalar::Uint(x)) => match x {
- chalk_ir::UintTy::U8 => !(v as u8) as i128,
- chalk_ir::UintTy::U16 => !(v as u16) as i128,
- chalk_ir::UintTy::U32 => !(v as u32) as i128,
- chalk_ir::UintTy::U64 => !(v as u64) as i128,
- chalk_ir::UintTy::U128 => {
- return Err(ConstEvalError::NotSupported("negation of u128"))
- }
- chalk_ir::UintTy::Usize => !(v as usize) as i128,
- },
- TyKind::Scalar(Scalar::Int(x)) => match x {
- chalk_ir::IntTy::I8 => !(v as i8) as i128,
- chalk_ir::IntTy::I16 => !(v as i16) as i128,
- chalk_ir::IntTy::I32 => !(v as i32) as i128,
- chalk_ir::IntTy::I64 => !(v as i64) as i128,
- chalk_ir::IntTy::I128 => !v,
- chalk_ir::IntTy::Isize => !(v as isize) as i128,
- },
- _ => return Err(ConstEvalError::NotSupported("unreachable?")),
- };
- Ok(ComputedExpr::Literal(Literal::Int(r, None)))
- }
- hir_def::expr::UnaryOp::Neg => {
- let v = match ev {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- Ok(ComputedExpr::Literal(Literal::Int(
- v.checked_neg().ok_or_else(|| {
- ConstEvalError::Panic("overflow in negation".to_string())
- })?,
- None,
- )))
- }
- }
- }
- &Expr::BinaryOp { lhs, rhs, op } => {
- let ty = &ctx.expr_ty(lhs);
- let lhs = eval_const(lhs, ctx)?;
- let rhs = eval_const(rhs, ctx)?;
- let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
- let v1 = match lhs {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- let v2 = match rhs {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- match op {
- BinaryOp::ArithOp(b) => {
- let panic_arith = ConstEvalError::Panic(
- "attempt to run invalid arithmetic operation".to_string(),
- );
- let r = match b {
- ArithOp::Add => v1.checked_add(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Mul => v1.checked_mul(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Sub => v1.checked_sub(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Div => v1.checked_div(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Rem => v1.checked_rem(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Shl => v1
- .checked_shl(v2.try_into().map_err(|_| panic_arith.clone())?)
- .ok_or_else(|| panic_arith.clone())?,
- ArithOp::Shr => v1
- .checked_shr(v2.try_into().map_err(|_| panic_arith.clone())?)
- .ok_or_else(|| panic_arith.clone())?,
- ArithOp::BitXor => v1 ^ v2,
- ArithOp::BitOr => v1 | v2,
- ArithOp::BitAnd => v1 & v2,
- };
- if let TyKind::Scalar(s) = ty.kind(Interner) {
- if !is_valid(s, r) {
- return Err(panic_arith);
- }
- }
- Ok(ComputedExpr::Literal(Literal::Int(r, None)))
- }
- BinaryOp::LogicOp(_) => Err(ConstEvalError::SemanticError("logic op on numbers")),
- _ => Err(ConstEvalError::NotSupported("bin op on this operators")),
- }
- }
- Expr::Block { statements, tail, .. } => {
- let mut prev_values = HashMap::<PatId, Option<ComputedExpr>>::default();
- for statement in &**statements {
- match *statement {
- hir_def::expr::Statement::Let { pat: pat_id, initializer, .. } => {
- let pat = &ctx.pats[pat_id];
- match pat {
- Pat::Bind { subpat, .. } if subpat.is_none() => (),
- _ => {
- return Err(ConstEvalError::NotSupported("complex patterns in let"))
- }
- };
- let value = match initializer {
- Some(x) => eval_const(x, ctx)?,
- None => continue,
- };
- if !prev_values.contains_key(&pat_id) {
- let prev = ctx.local_data.insert(pat_id, value);
- prev_values.insert(pat_id, prev);
- } else {
- ctx.local_data.insert(pat_id, value);
- }
- }
- hir_def::expr::Statement::Expr { .. } => {
- return Err(ConstEvalError::NotSupported("this kind of statement"))
- }
- }
- }
- let r = match tail {
- &Some(x) => eval_const(x, ctx),
- None => Ok(ComputedExpr::Tuple(Box::new([]))),
- };
- // clean up local data, so caller will receive the exact map that passed to us
- for (name, val) in prev_values {
- match val {
- Some(x) => ctx.local_data.insert(name, x),
- None => ctx.local_data.remove(&name),
- };
- }
- r
- }
- Expr::Path(p) => {
- let resolver = resolver_for_expr(ctx.db.upcast(), ctx.owner, expr_id);
- let pr = resolver
- .resolve_path_in_value_ns(ctx.db.upcast(), p.mod_path())
- .ok_or(ConstEvalError::SemanticError("unresolved path"))?;
- let pr = match pr {
- ResolveValueResult::ValueNs(v) => v,
- ResolveValueResult::Partial(..) => {
- return match ctx
- .infer
- .assoc_resolutions_for_expr(expr_id)
- .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))?
- .0
- {
- hir_def::AssocItemId::FunctionId(_) => {
- Err(ConstEvalError::NotSupported("assoc function"))
- }
- // FIXME use actual impl for trait assoc const
- hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c),
- hir_def::AssocItemId::TypeAliasId(_) => {
- Err(ConstEvalError::NotSupported("assoc type alias"))
- }
- };
- }
- };
- match pr {
- ValueNs::LocalBinding(pat_id) => {
- let r = ctx
- .local_data
- .get(&pat_id)
- .ok_or(ConstEvalError::NotSupported("Unexpected missing local"))?;
- Ok(r.clone())
- }
- ValueNs::ConstId(id) => ctx.db.const_eval(id),
- ValueNs::GenericParam(_) => {
- Err(ConstEvalError::NotSupported("const generic without substitution"))
- }
- ValueNs::EnumVariantId(id) => match ctx.db.const_eval_variant(id)? {
- ComputedExpr::Literal(lit) => {
- Ok(ComputedExpr::Enum(get_name(ctx, id), id, lit))
- }
- _ => Err(ConstEvalError::NotSupported(
- "Enums can't evalute to anything but numbers",
- )),
- },
- _ => Err(ConstEvalError::NotSupported("path that are not const or local")),
- }
- }
- // FIXME: Handle the cast target
- &Expr::Cast { expr, .. } => match eval_const(expr, ctx)? {
- ComputedExpr::Enum(_, _, lit) => Ok(ComputedExpr::Literal(lit)),
- _ => Err(ConstEvalError::NotSupported("Can't cast these types")),
- },
- _ => Err(ConstEvalError::NotSupported("This kind of expression")),
+impl From<MirEvalError> for ConstEvalError {
+ fn from(value: MirEvalError) -> Self {
+ ConstEvalError::MirEvalError(value)
}
}
@@ -449,68 +122,102 @@ pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
.intern(Interner)
}
+/// Interns a constant scalar with the given type
+pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: CrateId) -> Const {
+ let bytes = match value {
+ ConstRef::Int(i) => {
+ // FIXME: We should handle failure of layout better.
+ let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
+ ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
+ }
+ ConstRef::UInt(i) => {
+ let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
+ ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
+ }
+ ConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
+ ConstRef::Char(c) => {
+ ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default())
+ }
+ ConstRef::Unknown => ConstScalar::Unknown,
+ };
+ intern_const_scalar(bytes, ty)
+}
+
/// Interns a possibly-unknown target usize
-pub fn usize_const(value: Option<u128>) -> Const {
- intern_const_scalar(value.map_or(ConstScalar::Unknown, ConstScalar::UInt), TyBuilder::usize())
+pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) -> Const {
+ intern_const_ref(
+ db,
+ &value.map_or(ConstRef::Unknown, ConstRef::UInt),
+ TyBuilder::usize(),
+ krate,
+ )
+}
+
+pub fn try_const_usize(c: &Const) -> Option<u128> {
+ match &c.data(Interner).value {
+ chalk_ir::ConstValue::BoundVar(_) => None,
+ chalk_ir::ConstValue::InferenceVar(_) => None,
+ chalk_ir::ConstValue::Placeholder(_) => None,
+ chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
+ _ => None,
+ },
+ }
}
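`try_const_usize` reads a concrete const back out of its little-endian byte representation via `pad16`. The helper below is a guess at what a `pad16`-style function does (zero- or sign-extend to 16 bytes so the value can be reinterpreted as `u128`/`i128`); the real implementation lives in the new `mir` module and may differ.

```rust
fn pad16(b: &[u8], is_signed: bool) -> [u8; 16] {
    // Sign-extend when the value is signed and its top bit is set, else zero-extend.
    let fill = if is_signed && b.last().map_or(false, |&x| x & 0x80 != 0) { 0xFF } else { 0x00 };
    let mut out = [fill; 16];
    out[..b.len()].copy_from_slice(b); // assumes b.len() <= 16
    out
}

fn main() {
    assert_eq!(u128::from_le_bytes(pad16(&5u32.to_le_bytes(), false)), 5);
    assert_eq!(i128::from_le_bytes(pad16(&(-1i8).to_le_bytes(), true)), -1);
}
```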
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &[String],
_: &ConstId,
-) -> Result<ComputedExpr, ConstEvalError> {
- Err(ConstEvalError::Loop)
+) -> Result<Const, ConstEvalError> {
+ Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
-pub(crate) fn const_eval_variant_recover(
+pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
_: &[String],
_: &EnumVariantId,
-) -> Result<ComputedExpr, ConstEvalError> {
- Err(ConstEvalError::Loop)
+) -> Result<i128, ConstEvalError> {
+ Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
-pub(crate) fn const_eval_variant_query(
+pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
const_id: ConstId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<Const, ConstEvalError> {
let def = const_id.into();
- let body = db.body(def);
- let infer = &db.infer(def);
- let result = eval_const(
- body.body_expr,
- &mut ConstEvalCtx {
- db,
- owner: const_id.into(),
- exprs: &body.exprs,
- pats: &body.pats,
- local_data: HashMap::default(),
- infer,
- },
- );
- result
+ let body = db.mir_body(def)?;
+ let c = interpret_mir(db, &body, false)?;
+ Ok(c)
}
-pub(crate) fn const_eval_query_variant(
+pub(crate) fn const_eval_discriminant_variant(
db: &dyn HirDatabase,
variant_id: EnumVariantId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<i128, ConstEvalError> {
let def = variant_id.into();
let body = db.body(def);
- let infer = &db.infer(def);
- eval_const(
- body.body_expr,
- &mut ConstEvalCtx {
- db,
- owner: def,
- exprs: &body.exprs,
- pats: &body.pats,
- local_data: HashMap::default(),
- infer,
- },
- )
+ if body.exprs[body.body_expr] == Expr::Missing {
+ let prev_idx: u32 = variant_id.local_id.into_raw().into();
+ let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
+ let value = match prev_idx {
+ Some(local_id) => {
+ let prev_variant = EnumVariantId { local_id, parent: variant_id.parent };
+ 1 + db.const_eval_discriminant(prev_variant)?
+ }
+ _ => 0,
+ };
+ return Ok(value);
+ }
+ let mir_body = db.mir_body(def)?;
+ let c = interpret_mir(db, &mir_body, false)?;
+ let c = try_const_usize(&c).unwrap() as i128;
+ Ok(c)
}
+// FIXME: Ideally constants in const eval should have separate body (issue #7434), and this function should
+// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
+// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const(
expr: Idx<Expr>,
mode: ParamLoweringMode,
@@ -518,28 +225,20 @@ pub(crate) fn eval_to_const(
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
+ let db = ctx.db;
if let Expr::Path(p) = &ctx.body.exprs[expr] {
- let db = ctx.db;
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
return c;
}
}
- let body = ctx.body.clone();
- let mut ctx = ConstEvalCtx {
- db: ctx.db,
- owner: ctx.owner,
- exprs: &body.exprs,
- pats: &body.pats,
- local_data: HashMap::default(),
- infer: &ctx.result,
- };
- let computed_expr = eval_const(expr, &mut ctx);
- let const_scalar = match computed_expr {
- Ok(ComputedExpr::Literal(literal)) => literal.into(),
- _ => ConstScalar::Unknown,
- };
- intern_const_scalar(const_scalar, TyBuilder::usize())
+ let infer = ctx.clone().resolve_all();
+ if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
+ if let Ok(result) = interpret_mir(db, &mir_body, true) {
+ return result;
+ }
+ }
+ unknown_const(infer[expr].clone())
}
#[cfg(test)]
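With this rewrite, `const_eval_query` lowers the body to MIR (`db.mir_body`) and interprets it, and `const_eval_discriminant_variant` only special-cases the implicit `Expr::Missing` discriminant. That implicit rule in isolation, as a tiny standalone helper (illustrative, not rust-analyzer API):

```rust
// Implicit enum discriminants: previous discriminant + 1, or 0 for the first variant.
fn implicit_discriminant(prev: Option<i128>) -> i128 {
    prev.map_or(0, |p| p + 1)
}

fn main() {
    // enum E { A, B = 10, C }  =>  A = 0, B = 10, C = 11
    let a = implicit_discriminant(None);
    let b = 10;
    let c = implicit_discriminant(Some(b));
    assert_eq!((a, b, c), (0, 10, 11));
}
```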
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 3c930c077..6a29e8ce5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1,24 +1,44 @@
use base_db::fixture::WithFixture;
-use hir_def::{db::DefDatabase, expr::Literal};
+use hir_def::db::DefDatabase;
-use crate::{consteval::ComputedExpr, db::HirDatabase, test_db::TestDB};
+use crate::{
+ consteval::try_const_usize, db::HirDatabase, test_db::TestDB, Const, ConstScalar, Interner,
+};
-use super::ConstEvalError;
+use super::{
+ super::mir::{MirEvalError, MirLowerError},
+ ConstEvalError,
+};
+fn simplify(e: ConstEvalError) -> ConstEvalError {
+ match e {
+ ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => {
+ simplify(ConstEvalError::MirEvalError(*e))
+ }
+ _ => e,
+ }
+}
+
+#[track_caller]
fn check_fail(ra_fixture: &str, error: ConstEvalError) {
- assert_eq!(eval_goal(ra_fixture), Err(error));
+ assert_eq!(eval_goal(ra_fixture).map_err(simplify), Err(error));
}
+#[track_caller]
fn check_number(ra_fixture: &str, answer: i128) {
let r = eval_goal(ra_fixture).unwrap();
- match r {
- ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer),
- ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128),
- x => panic!("Expected number but found {x:?}"),
+ match &r.data(Interner).value {
+ chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(b, _) => {
+ assert_eq!(b, &answer.to_le_bytes()[0..b.len()]);
+ }
+ x => panic!("Expected number but found {:?}", x),
+ },
+ _ => panic!("result of const eval wasn't a concrete const"),
}
}
-fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
+fn eval_goal(ra_fixture: &str) -> Result<Const, ConstEvalError> {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
@@ -42,21 +62,18 @@ fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
#[test]
fn add() {
check_number(r#"const GOAL: usize = 2 + 2;"#, 4);
+ check_number(r#"const GOAL: i32 = -2 + --5;"#, 3);
+ check_number(r#"const GOAL: i32 = 7 - 5;"#, 2);
+ check_number(r#"const GOAL: i32 = 7 + (1 - 5);"#, 3);
}
#[test]
fn bit_op() {
check_number(r#"const GOAL: u8 = !0 & !(!0 >> 1)"#, 128);
check_number(r#"const GOAL: i8 = !0 & !(!0 >> 1)"#, 0);
- // FIXME: rustc evaluate this to -128
- check_fail(
- r#"const GOAL: i8 = 1 << 7"#,
- ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
- );
- check_fail(
- r#"const GOAL: i8 = 1 << 8"#,
- ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
- );
+ check_number(r#"const GOAL: i8 = 1 << 7"#, (1i8 << 7) as i128);
+ // FIXME: report panic here
+ check_number(r#"const GOAL: i8 = 1 << 8"#, 0);
}
#[test]
@@ -74,6 +91,803 @@ fn locals() {
}
#[test]
+fn references() {
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let x = 3;
+ let y = &mut x;
+ *y = 5;
+ x
+ };
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ struct Foo(i32);
+ impl Foo {
+ fn method(&mut self, x: i32) {
+ self.0 = 2 * self.0 + x;
+ }
+ }
+ const GOAL: i32 = {
+ let mut x = Foo(3);
+ x.method(5);
+ x.0
+ };
+ "#,
+ 11,
+ );
+}
+
+#[test]
+fn reference_autoderef() {
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let x = 3;
+ let y = &mut x;
+ let y: &mut usize = &mut y;
+ *y = 5;
+ x
+ };
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let x = 3;
+ let y = &&&&&&&x;
+ let z: &usize = &y;
+ *z
+ };
+ "#,
+ 3,
+ );
+ check_number(
+ r#"
+ struct Foo<T> { x: T }
+ impl<T> Foo<T> {
+ fn foo(&mut self) -> T { self.x }
+ }
+ fn f(i: &mut &mut Foo<Foo<i32>>) -> i32 {
+ ((**i).x).foo()
+ }
+ fn g(i: Foo<Foo<i32>>) -> i32 {
+ i.x.foo()
+ }
+ const GOAL: i32 = f(&mut &mut Foo { x: Foo { x: 3 } }) + g(Foo { x: Foo { x: 5 } });
+ "#,
+ 8,
+ );
+}
+
+#[test]
+fn overloaded_deref() {
+ // FIXME: We should support this.
+ check_fail(
+ r#"
+ //- minicore: deref_mut
+ struct Foo;
+
+ impl core::ops::Deref for Foo {
+ type Target = i32;
+ fn deref(&self) -> &i32 {
+ &5
+ }
+ }
+
+ const GOAL: i32 = {
+ let x = Foo;
+ let y = &*x;
+ *y + *x
+ };
+ "#,
+ ConstEvalError::MirLowerError(MirLowerError::NotSupported(
+ "explicit overloaded deref".into(),
+ )),
+ );
+}
+
+#[test]
+fn overloaded_deref_autoref() {
+ check_number(
+ r#"
+ //- minicore: deref_mut
+ struct Foo;
+ struct Bar;
+
+ impl core::ops::Deref for Foo {
+ type Target = Bar;
+ fn deref(&self) -> &Bar {
+ &Bar
+ }
+ }
+
+ impl Bar {
+ fn method(&self) -> i32 {
+ 5
+ }
+ }
+
+ const GOAL: i32 = Foo.method();
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn function_call() {
+ check_number(
+ r#"
+ const fn f(x: usize) -> usize {
+ 2 * x + 5
+ }
+ const GOAL: usize = f(3);
+ "#,
+ 11,
+ );
+ check_number(
+ r#"
+ const fn add(x: usize, y: usize) -> usize {
+ x + y
+ }
+ const GOAL: usize = add(add(1, 2), add(3, add(4, 5)));
+ "#,
+ 15,
+ );
+}
+
+#[test]
+fn intrinsics() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn size_of<T>() -> usize;
+ }
+
+ const GOAL: usize = size_of::<i32>();
+ "#,
+ 4,
+ );
+}
+
+#[test]
+fn trait_basic() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ const GOAL: u8 = {
+ let x = 3;
+ Foo::f(&x)
+ };
+ "#,
+ 36,
+ );
+}
+
+#[test]
+fn trait_method() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ const GOAL: u8 = {
+ let x = 3;
+ x.f()
+ };
+ "#,
+ 36,
+ );
+}
+
+#[test]
+fn generic_fn() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for () {
+ fn f(&self) -> u8 {
+ 0
+ }
+ }
+
+ struct Succ<S>(S);
+
+ impl<T: Foo> Foo for Succ<T> {
+ fn f(&self) -> u8 {
+ self.0.f() + 1
+ }
+ }
+
+ const GOAL: u8 = Succ(Succ(())).f();
+ "#,
+ 2,
+ );
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ fn foof<T: Foo>(x: T, y: T) -> u8 {
+ x.f() + y.f()
+ }
+
+ const GOAL: u8 = foof(2, 5);
+ "#,
+ 73,
+ );
+ check_number(
+ r#"
+ fn bar<A, B>(a: A, b: B) -> B {
+ b
+ }
+ const GOAL: u8 = bar("hello", 12);
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ fn bar<A, B>(a: A, b: B) -> B {
+ b
+ }
+ fn foo<T>(x: [T; 2]) -> T {
+ bar(x[0], x[1])
+ }
+
+ const GOAL: u8 = foo([2, 5]);
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ fn foof(x: impl Foo, y: impl Foo) -> impl Foo {
+ x.f() + y.f()
+ }
+
+ const GOAL: u8 = foof(2, 5).f();
+ "#,
+ 106,
+ );
+ check_number(
+ r#"
+ struct Foo<T>(T, T, (T, T));
+ trait S {
+ fn sum(&self) -> i64;
+ }
+ impl S for i64 {
+ fn sum(&self) -> i64 {
+ *self
+ }
+ }
+ impl<T: S> S for Foo<T> {
+ fn sum(&self) -> i64 {
+ self.0.sum() + self.1.sum() + self.2 .0.sum() + self.2 .1.sum()
+ }
+ }
+
+ fn foo() -> Foo<impl S> {
+ Foo(
+ Foo(1i64, 2, (3, 4)),
+ Foo(5, 6, (7, 8)),
+ (
+ Foo(9, 10, (11, 12)),
+ Foo(13, 14, (15, 16)),
+ ),
+ )
+ }
+ const GOAL: i64 = foo().sum();
+ "#,
+ 136,
+ );
+}
+
+#[test]
+fn ifs() {
+ check_number(
+ r#"
+ const fn f(b: bool) -> u8 {
+ if b { 1 } else { 10 }
+ }
+
+ const GOAL: u8 = f(true) + f(true) + f(false);
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ const fn max(a: i32, b: i32) -> i32 {
+ if a < b { b } else { a }
+ }
+
+ const GOAL: i32 = max(max(1, max(10, 3)), 0-122);
+ "#,
+ 10,
+ );
+
+ check_number(
+ r#"
+ const fn max(a: &i32, b: &i32) -> &i32 {
+ if *a < *b { b } else { a }
+ }
+
+ const GOAL: i32 = *max(max(&1, max(&10, &3)), &5);
+ "#,
+ 10,
+ );
+}
+
+#[test]
+fn loops() {
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let mut x = 0;
+ loop {
+ x = x + 1;
+ while true {
+ break;
+ }
+ x = x + 1;
+ if x == 2 {
+ continue;
+ }
+ break;
+ }
+ x
+ };
+ "#,
+ 4,
+ );
+}
+
+#[test]
+fn for_loops() {
+ check_number(
+ r#"
+ //- minicore: iterator
+
+ struct Range {
+ start: u8,
+ end: u8,
+ }
+
+ impl Iterator for Range {
+ type Item = u8;
+ fn next(&mut self) -> Option<u8> {
+ if self.start >= self.end {
+ None
+ } else {
+ let r = self.start;
+ self.start = self.start + 1;
+ Some(r)
+ }
+ }
+ }
+
+ const GOAL: u8 = {
+ let mut sum = 0;
+ let ar = Range { start: 1, end: 11 };
+ for i in ar {
+ sum = sum + i;
+ }
+ sum
+ };
+ "#,
+ 55,
+ );
+}
+
+#[test]
+fn recursion() {
+ check_number(
+ r#"
+ const fn fact(k: i32) -> i32 {
+ if k > 0 { fact(k - 1) * k } else { 1 }
+ }
+
+ const GOAL: i32 = fact(5);
+ "#,
+ 120,
+ );
+}
+
+#[test]
+fn structs() {
+ check_number(
+ r#"
+ struct Point {
+ x: i32,
+ y: i32,
+ }
+
+ const GOAL: i32 = {
+ let p = Point { x: 5, y: 2 };
+ let y = 1;
+ let x = 3;
+ let q = Point { y, x };
+ p.x + p.y + p.x + q.y + q.y + q.x
+ };
+ "#,
+ 17,
+ );
+}
+
+#[test]
+fn unions() {
+ check_number(
+ r#"
+ union U {
+ f1: i64,
+ f2: (i32, i32),
+ }
+
+ const GOAL: i32 = {
+ let p = U { f1: 0x0123ABCD0123DCBA };
+ let p = unsafe { p.f2 };
+ p.0 + p.1 + p.1
+ };
+ "#,
+ 0x0123ABCD * 2 + 0x0123DCBA,
+ );
+}
+
+#[test]
+fn tuples() {
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let a = (10, 20, 3, 15);
+ a.1
+ };
+ "#,
+ 20,
+ );
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let mut a = (10, 20, 3, 15);
+ a.1 = 2;
+ a.0 + a.1 + a.2 + a.3
+ };
+ "#,
+ 30,
+ );
+ check_number(
+ r#"
+ struct TupleLike(i32, u8, i64, u16);
+ const GOAL: u8 = {
+ let a = TupleLike(10, 20, 3, 15);
+ a.1
+ };
+ "#,
+ 20,
+ );
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ match (&(2 + 2), &4) {
+ (left_val, right_val) => {
+ if !(*left_val == *right_val) {
+ 2
+ } else {
+ 5
+ }
+ }
+ }
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn path_pattern_matching() {
+ check_number(
+ r#"
+ enum Season {
+ Spring,
+ Summer,
+ Fall,
+ Winter,
+ }
+
+ use Season::*;
+
+ const fn f(x: Season) -> i32 {
+ match x {
+ Spring => 1,
+ Summer => 2,
+ Fall => 3,
+ Winter => 4,
+ }
+ }
+ const GOAL: i32 = f(Spring) + 10 * f(Summer) + 100 * f(Fall) + 1000 * f(Winter);
+ "#,
+ 4321,
+ );
+}
+
+#[test]
+fn pattern_matching_ergonomics() {
+ check_number(
+ r#"
+ const fn f(x: &(u8, u8)) -> u8 {
+ match x {
+ (a, b) => *a + *b
+ }
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn let_else() {
+ check_number(
+ r#"
+ const fn f(x: &(u8, u8)) -> u8 {
+ let (a, b) = x;
+ *a + *b
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ enum SingleVariant {
+ Var(u8, u8),
+ }
+ const fn f(x: &&&&&SingleVariant) -> u8 {
+ let SingleVariant::Var(a, b) = x;
+ *a + *b
+ }
+ const GOAL: u8 = f(&&&&&SingleVariant::Var(2, 3));
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const fn f(x: Option<i32>) -> i32 {
+ let Some(x) = x else { return 10 };
+ 2 * x
+ }
+ const GOAL: i32 = f(Some(1000)) + f(None);
+ "#,
+ 2010,
+ );
+}
+
+#[test]
+fn function_param_patterns() {
+ check_number(
+ r#"
+ const fn f((a, b): &(u8, u8)) -> u8 {
+ *a + *b
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ const fn f(c @ (a, b): &(u8, u8)) -> u8 {
+ *a + *b + c.0 + (*c).1
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 10,
+ );
+ check_number(
+ r#"
+ const fn f(ref a: u8) -> u8 {
+ *a
+ }
+ const GOAL: u8 = f(2);
+ "#,
+ 2,
+ );
+ check_number(
+ r#"
+ struct Foo(u8);
+ impl Foo {
+ const fn f(&self, (a, b): &(u8, u8)) -> u8 {
+ self.0 + *a + *b
+ }
+ }
+ const GOAL: u8 = Foo(4).f(&(2, 3));
+ "#,
+ 9,
+ );
+}
+
+#[test]
+fn options() {
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: u8 = {
+ let x = Some(2);
+ match x {
+ Some(y) => 2 * y,
+ _ => 10,
+ }
+ };
+ "#,
+ 4,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ fn f(x: Option<Option<i32>>) -> i32 {
+ if let Some(y) = x && let Some(z) = y {
+ z
+ } else if let Some(y) = x {
+ 1
+ } else {
+ 0
+ }
+ }
+ const GOAL: i32 = f(Some(Some(10))) + f(Some(None)) + f(None);
+ "#,
+ 11,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: u8 = {
+ let x = None;
+ match x {
+ Some(y) => 2 * y,
+ _ => 10,
+ }
+ };
+ "#,
+ 10,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: Option<&u8> = None;
+ "#,
+ 0,
+ );
+}
+
+#[test]
+fn or_pattern() {
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let (a | a) = 2;
+ a
+ };
+ "#,
+ 2,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const fn f(x: Option<i32>) -> i32 {
+ let (Some(a) | Some(a)) = x else { return 2; };
+ a
+ }
+ const GOAL: i32 = f(Some(10)) + f(None);
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const fn f(x: Option<i32>, y: Option<i32>) -> i32 {
+ match (x, y) {
+ (Some(x), Some(y)) => x * y,
+ (Some(a), _) | (_, Some(a)) => a,
+ _ => 10,
+ }
+ }
+ const GOAL: i32 = f(Some(10), Some(20)) + f(Some(30), None) + f(None, Some(40)) + f(None, None);
+ "#,
+ 280,
+ );
+}
+
+#[test]
+fn array_and_index() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: u8 = {
+ let a = [10, 20, 3, 15];
+ let x: &[u8] = &a;
+ x[1]
+ };
+ "#,
+ 20,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: usize = [1, 2, 3][2];"#,
+ 3,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: usize = { let a = [1, 2, 3]; let x: &[i32] = &a; x.len() };"#,
+ 3,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: usize = [1, 2, 3, 4, 5].len();"#,
+ 5,
+ );
+}
+
+#[test]
+fn byte_string() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: u8 = {
+ let a = b"hello";
+ let x: &[u8] = a;
+ x[0]
+ };
+ "#,
+ 104,
+ );
+}
+
+#[test]
fn consts() {
check_number(
r#"
@@ -92,41 +906,35 @@ fn enums() {
r#"
enum E {
F1 = 1,
- F2 = 2 * E::F1 as u8,
- F3 = 3 * E::F2 as u8,
+ F2 = 2 * E::F1 as isize, // Rustc expects an isize here
+ F3 = 3 * E::F2 as isize,
}
- const GOAL: i32 = E::F3 as u8;
+ const GOAL: u8 = E::F3 as u8;
"#,
6,
);
check_number(
r#"
enum E { F1 = 1, F2, }
- const GOAL: i32 = E::F2 as u8;
+ const GOAL: u8 = E::F2 as u8;
"#,
2,
);
check_number(
r#"
enum E { F1, }
- const GOAL: i32 = E::F1 as u8;
+ const GOAL: u8 = E::F1 as u8;
"#,
0,
);
let r = eval_goal(
r#"
- enum E { A = 1, }
+ enum E { A = 1, B }
const GOAL: E = E::A;
"#,
)
.unwrap();
- match r {
- ComputedExpr::Enum(name, _, Literal::Uint(val, _)) => {
- assert_eq!(name, "E::A");
- assert_eq!(val, 1);
- }
- x => panic!("Expected enum but found {x:?}"),
- }
+ assert_eq!(try_const_usize(&r), Some(1));
}
#[test]
@@ -138,7 +946,19 @@ fn const_loop() {
const F2: i32 = 2 * F1;
const GOAL: i32 = F3;
"#,
- ConstEvalError::Loop,
+ ConstEvalError::MirLowerError(MirLowerError::Loop),
+ );
+}
+
+#[test]
+fn const_transfer_memory() {
+ check_number(
+ r#"
+ const A1: &i32 = &2;
+ const A2: &i32 = &5;
+ const GOAL: i32 = *A1 + *A2;
+ "#,
+ 7,
);
}
@@ -157,7 +977,20 @@ fn const_impl_assoc() {
}
#[test]
-fn const_generic_subst() {
+fn const_generic_subst_fn() {
+ check_number(
+ r#"
+ const fn f<const A: usize>(x: usize) -> usize {
+ A * x + 5
+ }
+ const GOAL: usize = f::<2>(3);
+ "#,
+ 11,
+ );
+}
+
+#[test]
+fn const_generic_subst_assoc_const_impl() {
// FIXME: this should evaluate to 5
check_fail(
r#"
@@ -167,7 +1000,7 @@ fn const_generic_subst() {
}
const GOAL: usize = Adder::<2, 3>::VAL;
"#,
- ConstEvalError::NotSupported("const generic without substitution"),
+ ConstEvalError::MirEvalError(MirEvalError::TypeError("missing generic arg")),
);
}
@@ -185,6 +1018,58 @@ fn const_trait_assoc() {
}
const GOAL: usize = U0::VAL;
"#,
- ConstEvalError::IncompleteExpr,
+ ConstEvalError::MirLowerError(MirLowerError::IncompleteExpr),
+ );
+}
+
+#[test]
+fn exec_limits() {
+ check_fail(
+ r#"
+ const GOAL: usize = loop {};
+ "#,
+ ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
+ );
+ check_fail(
+ r#"
+ const fn f(x: i32) -> i32 {
+ f(x + 1)
+ }
+ const GOAL: i32 = f(0);
+ "#,
+ ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
+ );
+ // Reasonable code should still work
+ check_number(
+ r#"
+ const fn nth_odd(n: i32) -> i32 {
+ 2 * n - 1
+ }
+ const fn f(n: i32) -> i32 {
+ let sum = 0;
+ let i = 0;
+ while i < n {
+ i = i + 1;
+ sum = sum + nth_odd(i);
+ }
+ sum
+ }
+ const GOAL: i32 = f(10000);
+ "#,
+ 10000 * 10000,
+ );
+}
+
+#[test]
+fn type_error() {
+ let e = eval_goal(
+ r#"
+ const GOAL: u8 = {
+ let x: u16 = 2;
+ let y: (u8, u8) = x;
+ y.0
+ };
+ "#,
);
+ assert!(matches!(e, Err(ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_)))));
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index d45e2a943..304c78767 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -16,10 +16,12 @@ use smallvec::SmallVec;
use crate::{
chalk_db,
- consteval::{ComputedExpr, ConstEvalError},
+ consteval::ConstEvalError,
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
- Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig,
- QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId, ValueTyDefId,
+ mir::{BorrowckResult, MirBody, MirLowerError},
+ Binders, CallableDefId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner,
+ PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId,
+ ValueTyDefId,
};
use hir_expand::name::Name;
@@ -32,6 +34,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::infer::infer_query)]
fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+ #[salsa::invoke(crate::mir::mir_body_query)]
+ #[salsa::cycle(crate::mir::mir_body_recover)]
+ fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
+
+ #[salsa::invoke(crate::mir::borrowck_query)]
+ fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<BorrowckResult>, MirLowerError>;
+
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
@@ -46,13 +55,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::const_param_ty_query)]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
- #[salsa::invoke(crate::consteval::const_eval_variant_query)]
+ #[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
- fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
+ fn const_eval(&self, def: ConstId) -> Result<Const, ConstEvalError>;
- #[salsa::invoke(crate::consteval::const_eval_query_variant)]
- #[salsa::cycle(crate::consteval::const_eval_variant_recover)]
- fn const_eval_variant(&self, def: EnumVariantId) -> Result<ComputedExpr, ConstEvalError>;
+ #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
+ #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
+ fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
#[salsa::invoke(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
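`const_eval` and `const_eval_discriminant` keep their `#[salsa::cycle(...)]` recover hooks, which is what turns mutually recursive constants (as in the `const_loop` test) into a `Loop` error rather than infinite recursion. A toy model of that cycle detection, without salsa; the names and the memo-free evaluator are made up for illustration:

```rust
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum EvalError {
    Loop,
}

// Evaluate a const that may refer to another const by name; report a cycle
// instead of recursing forever, like the *_recover functions above.
fn eval(name: &str, deps: &HashMap<&str, &str>, in_progress: &mut Vec<String>) -> Result<i32, EvalError> {
    if in_progress.iter().any(|n| n.as_str() == name) {
        return Err(EvalError::Loop);
    }
    in_progress.push(name.to_string());
    let result = match deps.get(name) {
        Some(&dep) => eval(dep, deps, in_progress),
        None => Ok(1),
    };
    in_progress.pop();
    result
}

fn main() {
    // Roughly the shape of the `const_loop` fixture: F1 -> F3 -> F2 -> F1.
    let deps = HashMap::from([("F1", "F3"), ("F3", "F2"), ("F2", "F1")]);
    assert_eq!(eval("F1", &deps, &mut Vec::new()), Err(EvalError::Loop));
    assert_eq!(eval("LEAF", &deps, &mut Vec::new()), Ok(1));
}
```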
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index 37eb06be1..4b147b997 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -11,3 +11,9 @@ pub use crate::diagnostics::{
},
unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
};
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct IncoherentImpl {
+ pub file_id: hir_expand::HirFileId,
+ pub impl_: syntax::AstPtr<syntax::ast::Impl>,
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index f7031a854..d36b93e3b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -178,6 +178,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()),
AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()),
+ AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
// These warnings should not explore macro definitions at all
@@ -234,8 +235,8 @@ impl<'a> DeclValidator<'a> {
let pats_replacements = body
.pats
.iter()
- .filter_map(|(id, pat)| match pat {
- Pat::Bind { name, .. } => Some((id, name)),
+ .filter_map(|(pat_id, pat)| match pat {
+ Pat::Bind { id, .. } => Some((pat_id, &body.bindings[*id].name)),
_ => None,
})
.filter_map(|(id, bind_name)| {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index 3286dcb5a..2e9066788 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -5,11 +5,11 @@
use std::fmt;
use std::sync::Arc;
+use either::Either;
use hir_def::lang_item::LangItem;
use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
use hir_def::{ItemContainerId, Lookup};
use hir_expand::name;
-use itertools::Either;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use typed_arena::Arena;
@@ -84,7 +84,7 @@ impl ExprValidator {
match expr {
Expr::Match { expr, arms } => {
- self.validate_match(id, *expr, arms, db, self.infer.clone());
+ self.validate_match(id, *expr, arms, db);
}
Expr::Call { .. } | Expr::MethodCall { .. } => {
self.validate_call(db, id, expr, &mut filter_map_next_checker);
@@ -147,16 +147,15 @@ impl ExprValidator {
fn validate_match(
&mut self,
- id: ExprId,
match_expr: ExprId,
+ scrutinee_expr: ExprId,
arms: &[MatchArm],
db: &dyn HirDatabase,
- infer: Arc<InferenceResult>,
) {
let body = db.body(self.owner);
- let match_expr_ty = &infer[match_expr];
- if match_expr_ty.is_unknown() {
+ let scrut_ty = &self.infer[scrutinee_expr];
+ if scrut_ty.is_unknown() {
return;
}
@@ -166,23 +165,23 @@ impl ExprValidator {
let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false;
for arm in arms {
- if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
+ if let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) {
// We only include patterns whose type matches the type
- // of the match expression. If we had an InvalidMatchArmPattern
+ // of the scrutinee expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
- // will automatically de-reference the match expression type if
+ // will automatically de-reference the scrutinee expression type if
// necessary.
//
// FIXME we should use the type checker for this.
- if (pat_ty == match_expr_ty
- || match_expr_ty
+ if (pat_ty == scrut_ty
+ || scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
- && types_of_subpatterns_do_match(arm.pat, &body, &infer)
+ && types_of_subpatterns_do_match(arm.pat, &body, &self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
@@ -206,7 +205,7 @@ impl ExprValidator {
return;
}
- let report = compute_match_usefulness(&cx, &m_arms, match_expr_ty);
+ let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);
// FIXME Report unreacheble arms
// https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
@@ -214,8 +213,8 @@ impl ExprValidator {
let witnesses = report.non_exhaustiveness_witnesses;
if !witnesses.is_empty() {
self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
- match_expr: id,
- uncovered_patterns: missing_match_arms(&cx, match_expr_ty, witnesses, arms),
+ match_expr,
+ uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, arms),
});
}
}
@@ -379,7 +378,7 @@ fn missing_match_arms<'p>(
arms: &[MatchArm],
) -> String {
struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
- impl<'a, 'p> fmt::Display for DisplayWitness<'a, 'p> {
+ impl fmt::Display for DisplayWitness<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let DisplayWitness(witness, cx) = *self;
let pat = witness.to_pat(cx);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
index 8b0f051b4..859a37804 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -146,8 +146,9 @@ impl<'a> PatCtxt<'a> {
PatKind::Leaf { subpatterns }
}
- hir_def::expr::Pat::Bind { ref name, subpat, .. } => {
+ hir_def::expr::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.pat_binding_modes[&pat];
+ let name = &self.body.bindings[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 431ab949b..d25c0ccf0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -94,8 +94,10 @@ fn walk_unsafe(
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
- Expr::Unsafe { body: child } => {
- return walk_unsafe(db, infer, def, body, *child, true, unsafe_expr_cb);
+ Expr::Unsafe { .. } => {
+ return expr.walk_child_exprs(|child| {
+ walk_unsafe(db, infer, def, body, child, true, unsafe_expr_cb);
+ });
}
_ => {}
}
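The `unsafe_check` change stops assuming an unsafe block has a single `body` child and instead walks every child expression with `inside_unsafe_block` set. A self-contained model of that traversal on a toy expression tree (not hir_def's `Expr`):

```rust
enum Expr {
    Unsafe(Vec<Expr>),
    Call(Vec<Expr>),
    RawDeref,
}

// Record, for every "unsafe operation" we meet, whether it sits inside an unsafe block.
fn walk(expr: &Expr, inside_unsafe: bool, flags: &mut Vec<bool>) {
    match expr {
        Expr::RawDeref => flags.push(inside_unsafe),
        // New behaviour: recurse into *all* children with the flag set,
        // not just a single body expression.
        Expr::Unsafe(children) => children.iter().for_each(|c| walk(c, true, flags)),
        Expr::Call(children) => children.iter().for_each(|c| walk(c, inside_unsafe, flags)),
    }
}

fn main() {
    let e = Expr::Unsafe(vec![Expr::RawDeref, Expr::Call(vec![Expr::RawDeref])]);
    let mut flags = Vec::new();
    walk(&e, false, &mut flags);
    assert_eq!(flags, vec![true, true]); // both raw derefs are covered by the block
}
```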
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index b22064d8c..bd3eccfe4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -5,8 +5,9 @@
use std::fmt::{self, Debug};
use base_db::CrateId;
-use chalk_ir::BoundVar;
+use chalk_ir::{BoundVar, TyKind};
use hir_def::{
+ adt::VariantData,
body,
db::DefDatabase,
find_path,
@@ -14,9 +15,9 @@ use hir_def::{
item_scope::ItemInNs,
lang_item::{LangItem, LangItemTarget},
path::{Path, PathKind},
- type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
+ type_ref::{TraitBoundModifier, TypeBound, TypeRef},
visibility::Visibility,
- HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId,
+ HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use intern::{Internable, Interned};
@@ -25,14 +26,17 @@ use smallvec::SmallVec;
use crate::{
db::HirDatabase,
- from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
+ from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
+ layout::layout_of_ty,
+ lt_from_placeholder_idx,
mapping::from_chalk,
+ mir::pad16,
primitive, to_assoc_type_id,
utils::{self, generics},
- AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
- GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
- OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, Substitution, TraitRef,
- TraitRefExt, Ty, TyExt, TyKind, WhereClause,
+ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
+ DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
+ MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
+ Substitution, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
};
pub trait HirWrite: fmt::Write {
@@ -362,20 +366,176 @@ impl HirDisplay for GenericArg {
impl HirDisplay for Const {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let data = self.interned();
- match data.value {
+ match &data.value {
ConstValue::BoundVar(idx) => idx.hir_fmt(f),
ConstValue::InferenceVar(..) => write!(f, "#c#"),
ConstValue::Placeholder(idx) => {
- let id = from_placeholder_idx(f.db, idx);
+ let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.type_or_consts[id.local_id];
write!(f, "{}", param_data.name().unwrap())
}
- ConstValue::Concrete(c) => write!(f, "{}", c.interned),
+ ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
+ ConstScalar::Unknown => f.write_char('_'),
+ },
}
}
}
+pub struct HexifiedConst(pub Const);
+
+impl HirDisplay for HexifiedConst {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = &self.0.data(Interner);
+ if let TyKind::Scalar(s) = data.ty.kind(Interner) {
+ if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
+ if let ConstValue::Concrete(c) = &data.value {
+ if let ConstScalar::Bytes(b, m) = &c.interned {
+ let value = u128::from_le_bytes(pad16(b, false));
+ if value >= 10 {
+ render_const_scalar(f, &b, m, &data.ty)?;
+ return write!(f, " ({:#X})", value);
+ }
+ }
+ }
+ }
+ }
+ self.0.hir_fmt(f)
+ }
+}
+
+fn render_const_scalar(
+ f: &mut HirFormatter<'_>,
+ b: &[u8],
+ memory_map: &MemoryMap,
+ ty: &Ty,
+) -> Result<(), HirDisplayError> {
+ match ty.kind(Interner) {
+ chalk_ir::TyKind::Scalar(s) => match s {
+ Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
+ Scalar::Char => {
+ let x = u128::from_le_bytes(pad16(b, false)) as u32;
+ let Ok(c) = char::try_from(x) else {
+ return f.write_str("<unicode-error>");
+ };
+ write!(f, "{c:?}")
+ }
+ Scalar::Int(_) => {
+ let x = i128::from_le_bytes(pad16(b, true));
+ write!(f, "{x}")
+ }
+ Scalar::Uint(_) => {
+ let x = u128::from_le_bytes(pad16(b, false));
+ write!(f, "{x}")
+ }
+ Scalar::Float(fl) => match fl {
+ chalk_ir::FloatTy::F32 => {
+ let x = f32::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{x:?}")
+ }
+ chalk_ir::FloatTy::F64 => {
+ let x = f64::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{x:?}")
+ }
+ },
+ },
+ chalk_ir::TyKind::Ref(_, _, t) => match t.kind(Interner) {
+ chalk_ir::TyKind::Str => {
+ let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
+ let bytes = memory_map.0.get(&addr).map(|x| &**x).unwrap_or(&[]);
+ let s = std::str::from_utf8(bytes).unwrap_or("<utf8-error>");
+ write!(f, "{s:?}")
+ }
+ _ => f.write_str("<ref-not-supported>"),
+ },
+ chalk_ir::TyKind::Tuple(_, subst) => {
+ // FIXME: Remove this line. If the target data layout is independent
+ // of the krate, then `db.target_data_layout` and its callers such as `layout_of_ty` don't need
+ // to take a krate. Otherwise, we need to obtain the krate from the final callers of the hir display
+ // infrastructure and store it here as a field on `f`.
+ let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
+ let Ok(layout) = layout_of_ty(f.db, ty, krate) else {
+ return f.write_str("<layout-error>");
+ };
+ f.write_str("(")?;
+ let mut first = true;
+ for (id, ty) in subst.iter(Interner).enumerate() {
+ if first {
+ first = false;
+ } else {
+ f.write_str(", ")?;
+ }
+ let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
+ let offset = layout.fields.offset(id).bytes_usize();
+ let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
+ f.write_str("<layout-error>")?;
+ continue;
+ };
+ let size = layout.size.bytes_usize();
+ render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)?;
+ }
+ f.write_str(")")
+ }
+ chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
+ hir_def::AdtId::StructId(s) => {
+ let data = f.db.struct_data(s);
+ let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone()) else {
+ return f.write_str("<layout-error>");
+ };
+ match data.variant_data.as_ref() {
+ VariantData::Record(fields) | VariantData::Tuple(fields) => {
+ let field_types = f.db.field_types(s.into());
+ let krate = adt.0.module(f.db.upcast()).krate();
+ let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
+ let offset = layout
+ .fields
+ .offset(u32::from(id.into_raw()) as usize)
+ .bytes_usize();
+ let ty = field_types[id].clone().substitute(Interner, subst);
+ let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
+ return f.write_str("<layout-error>");
+ };
+ let size = layout.size.bytes_usize();
+ render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)
+ };
+ let mut it = fields.iter();
+ if matches!(data.variant_data.as_ref(), VariantData::Record(_)) {
+ write!(f, "{} {{", data.name)?;
+ if let Some((id, data)) = it.next() {
+ write!(f, " {}: ", data.name)?;
+ render_field(f, id)?;
+ }
+ for (id, data) in it {
+ write!(f, ", {}: ", data.name)?;
+ render_field(f, id)?;
+ }
+ write!(f, " }}")?;
+ } else {
+ let mut it = it.map(|x| x.0);
+ write!(f, "{}(", data.name)?;
+ if let Some(id) = it.next() {
+ render_field(f, id)?;
+ }
+ for id in it {
+ write!(f, ", ")?;
+ render_field(f, id)?;
+ }
+ write!(f, ")")?;
+ }
+ return Ok(());
+ }
+ VariantData::Unit => write!(f, "{}", data.name),
+ }
+ }
+ hir_def::AdtId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name),
+ hir_def::AdtId::EnumId(_) => f.write_str("<enum-not-supported>"),
+ },
+ chalk_ir::TyKind::FnDef(..) => ty.hir_fmt(f),
+ _ => f.write_str("<not-supported>"),
+ }
+}
+
impl HirDisplay for BoundVar {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "?{}.{}", self.debruijn.depth(), self.index)
@@ -614,8 +774,9 @@ impl HirDisplay for Ty {
{
return true;
}
- if let Some(ConstValue::Concrete(c)) =
- parameter.constant(Interner).map(|x| x.data(Interner).value)
+ if let Some(ConstValue::Concrete(c)) = parameter
+ .constant(Interner)
+ .map(|x| &x.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
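Aside: `render_const_scalar` above interprets a constant's little-endian byte representation according to its scalar type, going through the `pad16` helper (imported from `mir` but not shown in this hunk) before the `u128`/`i128` conversion. A stand-alone sketch of what such a helper presumably does, assuming it simply zero- or sign-extends the bytes to 16:

// Assumed behaviour of a `pad16`-style helper: extend a little-endian byte
// slice to 16 bytes (sign-extending when the value is signed) so it can be
// read with `u128::from_le_bytes`/`i128::from_le_bytes`.
fn pad16(b: &[u8], is_signed: bool) -> [u8; 16] {
    let negative = is_signed && b.last().map_or(false, |&x| x & 0x80 != 0);
    let mut out = [if negative { 0xFF } else { 0x00 }; 16];
    out[..b.len()].copy_from_slice(b);
    out
}

fn main() {
    assert_eq!(u128::from_le_bytes(pad16(&42u32.to_le_bytes(), false)), 42);
    assert_eq!(i128::from_le_bytes(pad16(&(-1i8).to_le_bytes(), true)), -1);
}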
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 767afdf9e..7de5b4295 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -17,11 +17,12 @@ use std::ops::Index;
use std::sync::Arc;
use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
+use either::Either;
use hir_def::{
body::Body,
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
data::{ConstData, StaticData},
- expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId},
+ expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
lang_item::{LangItem, LangItemTarget},
layout::Integer,
path::Path,
@@ -30,10 +31,9 @@ use hir_def::{
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
};
-use hir_expand::name::name;
-use itertools::Either;
+use hir_expand::name::{name, Name};
use la_arena::ArenaMap;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
use stdx::always;
use crate::{
@@ -66,8 +66,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
let mut ctx = InferenceContext::new(db, def, &body, resolver);
match def {
+ DefWithBodyId::FunctionId(f) => {
+ ctx.collect_fn(f);
+ }
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
- DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
DefWithBodyId::VariantId(v) => {
ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() {
@@ -144,44 +146,6 @@ impl Default for BindingMode {
}
}
-/// Used to generalize patterns and assignee expressions.
-trait PatLike: Into<ExprOrPatId> + Copy {
- type BindingMode: Copy;
-
- fn infer(
- this: &mut InferenceContext<'_>,
- id: Self,
- expected_ty: &Ty,
- default_bm: Self::BindingMode,
- ) -> Ty;
-}
-
-impl PatLike for ExprId {
- type BindingMode = ();
-
- fn infer(
- this: &mut InferenceContext<'_>,
- id: Self,
- expected_ty: &Ty,
- _: Self::BindingMode,
- ) -> Ty {
- this.infer_assignee_expr(id, expected_ty)
- }
-}
-
-impl PatLike for PatId {
- type BindingMode = BindingMode;
-
- fn infer(
- this: &mut InferenceContext<'_>,
- id: Self,
- expected_ty: &Ty,
- default_bm: Self::BindingMode,
- ) -> Ty {
- this.infer_pat(id, expected_ty, default_bm)
- }
-}
-
#[derive(Debug)]
pub(crate) struct InferOk<T> {
value: T,
@@ -200,11 +164,45 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum InferenceDiagnostic {
- NoSuchField { expr: ExprId },
- PrivateField { expr: ExprId, field: FieldId },
- PrivateAssocItem { id: ExprOrPatId, item: AssocItemId },
- BreakOutsideOfLoop { expr: ExprId, is_break: bool },
- MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
+ NoSuchField {
+ expr: ExprId,
+ },
+ PrivateField {
+ expr: ExprId,
+ field: FieldId,
+ },
+ PrivateAssocItem {
+ id: ExprOrPatId,
+ item: AssocItemId,
+ },
+ UnresolvedField {
+ expr: ExprId,
+ receiver: Ty,
+ name: Name,
+ method_with_same_name_exists: bool,
+ },
+ UnresolvedMethodCall {
+ expr: ExprId,
+ receiver: Ty,
+ name: Name,
+ /// Contains the type the field resolves to
+ field_with_same_name: Option<Ty>,
+ },
+ // FIXME: Make this proper
+ BreakOutsideOfLoop {
+ expr: ExprId,
+ is_break: bool,
+ bad_value_break: bool,
+ },
+ MismatchedArgCount {
+ call_expr: ExprId,
+ expected: usize,
+ found: usize,
+ },
+ ExpectedFunction {
+ call_expr: ExprId,
+ found: Ty,
+ },
}
/// A mismatch between an expected and an inferred type.
@@ -293,8 +291,10 @@ pub enum Adjust {
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
/// The target type is `U` in both cases, with the region and mutability
/// being those shared by both the receiver and the returned reference.
+///
+/// Mutability is `None` when we are not sure.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct OverloadedDeref(pub Mutability);
+pub struct OverloadedDeref(pub Option<Mutability>);
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AutoBorrow {
@@ -354,7 +354,10 @@ pub struct InferenceResult {
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
pub type_of_pat: ArenaMap<PatId, Ty>,
+ pub type_of_binding: ArenaMap<BindingId, Ty>,
pub type_of_rpit: ArenaMap<RpitId, Ty>,
+ /// Type of the result of calling `.into_iter()` on the iterable of a `for` loop. The `ExprId` is that of the whole `for` loop expression.
+ pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Interned common types to return references to.
standard_types: InternedStandardTypes,
@@ -389,18 +392,15 @@ impl InferenceResult {
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
self.type_mismatches.get(&pat.into())
}
+ pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch)> {
+ self.type_mismatches.iter().map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
+ }
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
_ => None,
})
}
- pub fn pat_type_mismatches(&self) -> impl Iterator<Item = (PatId, &TypeMismatch)> {
- self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
- ExprOrPatId::PatId(pat) => Some((pat, mismatch)),
- _ => None,
- })
- }
}
impl Index<ExprId> for InferenceResult {
@@ -419,6 +419,14 @@ impl Index<PatId> for InferenceResult {
}
}
+impl Index<BindingId> for InferenceResult {
+ type Output = Ty;
+
+ fn index(&self, b: BindingId) -> &Ty {
+ self.type_of_binding.get(b).unwrap_or(&self.standard_types.unknown)
+ }
+}
+
/// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)]
pub(crate) struct InferenceContext<'a> {
@@ -428,14 +436,19 @@ pub(crate) struct InferenceContext<'a> {
pub(crate) resolver: Resolver,
table: unify::InferenceTable<'a>,
trait_env: Arc<TraitEnvironment>,
+ /// The traits in scope, disregarding block modules. This is used for caching purposes.
+ traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult,
/// The return type of the function being inferred, the closure or async block if we're
/// currently within one.
///
/// We might consider using a nested inference context for checking
- /// closures, but currently this is the only field that will change there,
- /// so it doesn't make sense.
+ /// closures so we can swap all shared things out at once.
return_ty: Ty,
+ /// If `Some`, this stores coercion information for returned
+ /// expressions. If `None`, this is in a context where return is
+ /// inappropriate, such as a const expression.
+ return_coercion: Option<CoerceMany>,
/// The resume type and the yield type, respectively, of the generator being inferred.
resume_yield_tys: Option<(Ty, Ty)>,
diverges: Diverges,
@@ -447,7 +460,7 @@ struct BreakableContext {
/// Whether this context contains at least one break expression.
may_break: bool,
/// The coercion target of the context.
- coerce: CoerceMany,
+ coerce: Option<CoerceMany>,
/// The optional label of the context.
label: Option<name::Name>,
kind: BreakableKind,
@@ -503,16 +516,22 @@ impl<'a> InferenceContext<'a> {
trait_env,
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
resume_yield_tys: None,
+ return_coercion: None,
db,
owner,
body,
+ traits_in_scope: resolver.traits_in_scope(db.upcast()),
resolver,
diverges: Diverges::Maybe,
breakables: Vec::new(),
}
}
- fn resolve_all(self) -> InferenceResult {
+ // FIXME: This function should be private to this module. It is currently only used from consteval, since we need
+ // an `InferenceResult` in the middle of inference. See the FIXME comment in `consteval::eval_to_const`. If you
+ // use this function for another workaround, mention it here. If you really need this function and believe that
+ // there is no problem in it being `pub(crate)`, remove this comment.
+ pub(crate) fn resolve_all(self) -> InferenceResult {
let InferenceContext { mut table, mut result, .. } = self;
table.fallback_if_possible();
@@ -528,13 +547,46 @@ impl<'a> InferenceContext<'a> {
for ty in result.type_of_pat.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
- for ty in result.type_of_rpit.iter_mut().map(|x| x.1) {
+ for ty in result.type_of_binding.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for ty in result.type_of_rpit.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for ty in result.type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for mismatch in result.type_mismatches.values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
}
+ result.diagnostics.retain_mut(|diagnostic| {
+ if let InferenceDiagnostic::ExpectedFunction { found: ty, .. }
+ | InferenceDiagnostic::UnresolvedField { receiver: ty, .. }
+ | InferenceDiagnostic::UnresolvedMethodCall { receiver: ty, .. } = diagnostic
+ {
+ *ty = table.resolve_completely(ty.clone());
+ // FIXME: Remove this when we are on par with rustc in terms of inference
+ if ty.contains_unknown() {
+ return false;
+ }
+
+ if let InferenceDiagnostic::UnresolvedMethodCall { field_with_same_name, .. } =
+ diagnostic
+ {
+ let clear = if let Some(ty) = field_with_same_name {
+ *ty = table.resolve_completely(ty.clone());
+ ty.contains_unknown()
+ } else {
+ false
+ };
+ if clear {
+ *field_with_same_name = None;
+ }
+ }
+ }
+ true
+ });
for (_, subst) in result.method_resolutions.values_mut() {
*subst = table.resolve_completely(subst.clone());
}
@@ -580,7 +632,7 @@ impl<'a> InferenceContext<'a> {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
- self.infer_pat(*pat, &ty, BindingMode::default());
+ self.infer_top_pat(*pat, &ty);
}
let error_ty = &TypeRef::Error;
let return_ty = if data.has_async_kw() {
@@ -632,10 +684,19 @@ impl<'a> InferenceContext<'a> {
};
self.return_ty = self.normalize_associated_types_in(return_ty);
+ self.return_coercion = Some(CoerceMany::new(self.return_ty.clone()));
}
fn infer_body(&mut self) {
- self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
+ match self.return_coercion {
+ Some(_) => self.infer_return(self.body.body_expr),
+ None => {
+ _ = self.infer_expr_coerce(
+ self.body.body_expr,
+ &Expectation::has_type(self.return_ty.clone()),
+ )
+ }
+ }
}
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
@@ -662,12 +723,15 @@ impl<'a> InferenceContext<'a> {
self.result.type_of_pat.insert(pat, ty);
}
+ fn write_binding_ty(&mut self, id: BindingId, ty: Ty) {
+ self.result.type_of_binding.insert(id, ty);
+ }
+
fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
self.result.diagnostics.push(diagnostic);
}
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
- // FIXME use right resolver for block
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let ty = ctx.lower_ty(type_ref);
let ty = self.insert_type_vars(ty);
@@ -681,11 +745,9 @@ impl<'a> InferenceContext<'a> {
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
let data = c.data(Interner);
- match data.value {
+ match &data.value {
ConstValue::Concrete(cc) => match cc.interned {
- hir_def::type_ref::ConstScalar::Unknown => {
- self.table.new_const_var(data.ty.clone())
- }
+ crate::ConstScalar::Unknown => self.table.new_const_var(data.ty.clone()),
_ => c,
},
_ => c,
@@ -785,12 +847,11 @@ impl<'a> InferenceContext<'a> {
Some(path) => path,
None => return (self.err_ty(), None),
};
- let resolver = &self.resolver;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
// FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
let (resolution, unresolved) = if value_ns {
- match resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
+ match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
Some(ResolveValueResult::ValueNs(value)) => match value {
ValueNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true);
@@ -811,7 +872,7 @@ impl<'a> InferenceContext<'a> {
None => return (self.err_ty(), None),
}
} else {
- match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
Some(it) => it,
None => return (self.err_ty(), None),
}
@@ -866,7 +927,10 @@ impl<'a> InferenceContext<'a> {
// FIXME potentially resolve assoc type
(self.err_ty(), None)
}
- TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
+ TypeNs::AdtId(AdtId::EnumId(_))
+ | TypeNs::BuiltinType(_)
+ | TypeNs::TraitId(_)
+ | TypeNs::TraitAliasId(_) => {
// FIXME diagnostic
(self.err_ty(), None)
}
@@ -1018,6 +1082,15 @@ impl<'a> InferenceContext<'a> {
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
Some(struct_.into())
}
+
+ fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> {
+ let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable();
+ if b_traits.peek().is_some() {
+ Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect())
+ } else {
+ Either::Right(&self.traits_in_scope)
+ }
+ }
}
/// When inferring an expression, we propagate downward whatever type hint we
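Aside: `get_traits_in_scope` above avoids recomputing the trait set on every method/field lookup by handing back a reference to the cached `traits_in_scope` set, and only builds an owned union when block-scoped traits are actually present. A minimal sketch of the same borrow-or-own pattern, with std's `Cow` and `HashSet` standing in for `Either` and `FxHashSet`:

use std::borrow::Cow;
use std::collections::HashSet;

// Common case: return the cached set by reference. Only when block-scoped
// traits exist do we pay for cloning and extending it.
fn traits_in_scope<'a>(
    cached: &'a HashSet<u32>,
    block_traits: impl IntoIterator<Item = u32>,
) -> Cow<'a, HashSet<u32>> {
    let mut block_traits = block_traits.into_iter().peekable();
    if block_traits.peek().is_some() {
        let mut owned = cached.clone();
        owned.extend(block_traits);
        Cow::Owned(owned)
    } else {
        Cow::Borrowed(cached)
    }
}

fn main() {
    let cached: HashSet<u32> = [1, 2].into_iter().collect();
    assert!(matches!(traits_in_scope(&cached, std::iter::empty()), Cow::Borrowed(_)));
    assert_eq!(traits_in_scope(&cached, [3]).len(), 3);
}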
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 3293534a0..48c915302 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -50,11 +50,44 @@ fn success(
#[derive(Clone, Debug)]
pub(super) struct CoerceMany {
expected_ty: Ty,
+ final_ty: Option<Ty>,
}
impl CoerceMany {
pub(super) fn new(expected: Ty) -> Self {
- CoerceMany { expected_ty: expected }
+ CoerceMany { expected_ty: expected, final_ty: None }
+ }
+
+ /// Returns the "expected type" with which this coercion was
+ /// constructed. This represents the "downward propagated" type
+ /// that was given to us at the start of typing whatever construct
+ /// we are typing (e.g., the match expression).
+ ///
+ /// Typically, this is used as the expected type when
+ /// type-checking each of the alternative expressions whose types
+ /// we are trying to merge.
+ pub(super) fn expected_ty(&self) -> Ty {
+ self.expected_ty.clone()
+ }
+
+ /// Returns the current "merged type", representing our best-guess
+ /// at the LUB of the expressions we've seen so far (if any). This
+ /// isn't *final* until you call `self.complete()`, which will return
+ /// the merged type.
+ pub(super) fn merged_ty(&self) -> Ty {
+ self.final_ty.clone().unwrap_or_else(|| self.expected_ty.clone())
+ }
+
+ pub(super) fn complete(self, ctx: &mut InferenceContext<'_>) -> Ty {
+ if let Some(final_ty) = self.final_ty {
+ final_ty
+ } else {
+ ctx.result.standard_types.never.clone()
+ }
+ }
+
+ pub(super) fn coerce_forced_unit(&mut self, ctx: &mut InferenceContext<'_>) {
+ self.coerce(ctx, None, &ctx.result.standard_types.unit.clone())
}
/// Merge two types from different branches, with possible coercion.
@@ -76,25 +109,25 @@ impl CoerceMany {
// Special case: two function types. Try to coerce both to
// pointers to have a chance at getting a match. See
// https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
- let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
+ let sig = match (self.merged_ty().kind(Interner), expr_ty.kind(Interner)) {
(TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
// FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
// we should be coercing the closure to a fn pointer of the safety of the FnDef
cov_mark::hit!(coerce_fn_reification);
let sig =
- self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
+ self.merged_ty().callable_sig(ctx.db).expect("FnDef without callable sig");
Some(sig)
}
_ => None,
};
if let Some(sig) = sig {
let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
- let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
+ let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty);
let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
if let (Ok(result1), Ok(result2)) = (result1, result2) {
ctx.table.register_infer_ok(result1);
ctx.table.register_infer_ok(result2);
- return self.expected_ty = target_ty;
+ return self.final_ty = Some(target_ty);
}
}
@@ -102,25 +135,20 @@ impl CoerceMany {
// type is a type variable and the new one is `!`, trying it the other
// way around first would mean we make the type variable `!`, instead of
// just marking it as possibly diverging.
- if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
- /* self.expected_ty is already correct */
- } else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
- self.expected_ty = expr_ty;
+ if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty()) {
+ self.final_ty = Some(res);
+ } else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty) {
+ self.final_ty = Some(res);
} else {
if let Some(id) = expr {
ctx.result.type_mismatches.insert(
id.into(),
- TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
+ TypeMismatch { expected: self.merged_ty().clone(), actual: expr_ty.clone() },
);
}
cov_mark::hit!(coerce_merge_fail_fallback);
- /* self.expected_ty is already correct */
}
}
-
- pub(super) fn complete(self) -> Ty {
- self.expected_ty
- }
}
pub fn could_coerce(
@@ -665,7 +693,7 @@ pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adju
.iter()
.map(|(kind, _source)| match kind {
// We do not know what kind of deref we require at this point yet
- AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Overloaded => Some(OverloadedDeref(None)),
AutoderefKind::Builtin => None,
})
.zip(targets)
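Aside: the `CoerceMany` rework above separates the downward-propagated expectation (`expected_ty`) from the running best guess at the branches' least upper bound (`final_ty`), with `complete()` now falling back to `!` when no branch ever contributed a type. A toy model of that control flow, using simplified types and a stand-in for the real coercion logic:

#[derive(Clone, Debug, PartialEq)]
enum Ty { Never, I32, Unknown }

struct CoerceMany { expected_ty: Ty, final_ty: Option<Ty> }

impl CoerceMany {
    fn new(expected: Ty) -> Self { CoerceMany { expected_ty: expected, final_ty: None } }

    // Best guess so far at the merged type; before any branch is seen it is
    // just the expectation.
    fn merged_ty(&self) -> Ty {
        self.final_ty.clone().unwrap_or_else(|| self.expected_ty.clone())
    }

    // Stand-in for coercion: `!` coerces into anything, otherwise the branch
    // type must equal the merged type (a mismatch would be diagnosed).
    fn coerce(&mut self, branch_ty: Ty) {
        let merged = self.merged_ty();
        self.final_ty = Some(match (merged, branch_ty) {
            (Ty::Never, other) | (other, Ty::Never) => other,
            (m, b) if m == b => m,
            _ => Ty::Unknown,
        });
    }

    // No branch at all (e.g. an empty match) merges to `!`, as in the patch.
    fn complete(self) -> Ty { self.final_ty.unwrap_or(Ty::Never) }
}

fn main() {
    let mut c = CoerceMany::new(Ty::I32);
    c.coerce(Ty::Never); // e.g. an arm that only `return`s
    c.coerce(Ty::I32);
    assert_eq!(c.complete(), Ty::I32);
}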
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 175fded8c..ee186673e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -15,7 +15,6 @@ use hir_def::{
generics::TypeOrConstParamData,
lang_item::LangItem,
path::{GenericArg, GenericArgs},
- resolver::resolver_for_expr,
ConstParamId, FieldId, ItemContainerId, Lookup,
};
use hir_expand::name::{name, Name};
@@ -25,7 +24,9 @@ use syntax::ast::RangeOp;
use crate::{
autoderef::{self, Autoderef},
consteval,
- infer::{coerce::CoerceMany, find_continuable, BreakableKind},
+ infer::{
+ coerce::CoerceMany, find_continuable, pat::contains_explicit_ref_binding, BreakableKind,
+ },
lower::{
const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
},
@@ -39,8 +40,8 @@ use crate::{
};
use super::{
- coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
- Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+ coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges, Expectation,
+ InferenceContext, InferenceDiagnostic, TypeMismatch,
};
impl<'a> InferenceContext<'a> {
@@ -58,6 +59,10 @@ impl<'a> InferenceContext<'a> {
ty
}
+ pub(crate) fn infer_expr_no_expect(&mut self, tgt_expr: ExprId) -> Ty {
+ self.infer_expr_inner(tgt_expr, &Expectation::None)
+ }
+
/// Infer type of expression with possibly implicit coerce to the expected type.
/// Return the type after possible coercion.
pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
@@ -78,6 +83,30 @@ impl<'a> InferenceContext<'a> {
}
}
+ pub(super) fn infer_expr_coerce_never(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(expr, expected);
+ // While we don't allow *arbitrary* coercions here, we *do* allow
+ // coercions from ! to `expected`.
+ if ty.is_never() {
+ if let Some(adjustments) = self.result.expr_adjustments.get(&expr) {
+ return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments {
+ target.clone()
+ } else {
+ self.err_ty()
+ };
+ }
+
+ let adj_ty = self.table.new_type_var();
+ self.write_expr_adj(
+ expr,
+ vec![Adjustment { kind: Adjust::NeverToAny, target: adj_ty.clone() }],
+ );
+ adj_ty
+ } else {
+ ty
+ }
+ }
+
fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
self.db.unwind_if_cancelled();
@@ -85,7 +114,7 @@ impl<'a> InferenceContext<'a> {
Expr::Missing => self.err_ty(),
&Expr::If { condition, then_branch, else_branch } => {
let expected = &expected.adjust_for_branches(&mut self.table);
- self.infer_expr(
+ self.infer_expr_coerce_never(
condition,
&Expectation::HasType(self.result.standard_types.bool_.clone()),
);
@@ -97,59 +126,39 @@ impl<'a> InferenceContext<'a> {
both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table));
coerce.coerce(self, Some(then_branch), &then_ty);
- let else_ty = match else_branch {
- Some(else_branch) => self.infer_expr_inner(else_branch, expected),
- None => TyBuilder::unit(),
- };
+ match else_branch {
+ Some(else_branch) => {
+ let else_ty = self.infer_expr_inner(else_branch, expected);
+ coerce.coerce(self, Some(else_branch), &else_ty);
+ }
+ None => {
+ coerce.coerce_forced_unit(self);
+ }
+ }
both_arms_diverge &= self.diverges;
- // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
- coerce.coerce(self, else_branch, &else_ty);
self.diverges = condition_diverges | both_arms_diverge;
- coerce.complete()
+ coerce.complete(self)
}
&Expr::Let { pat, expr } => {
let input_ty = self.infer_expr(expr, &Expectation::none());
- self.infer_pat(pat, &input_ty, BindingMode::default());
+ self.infer_top_pat(pat, &input_ty);
self.result.standard_types.bool_.clone()
}
Expr::Block { statements, tail, label, id: _ } => {
- let old_resolver = mem::replace(
- &mut self.resolver,
- resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
- );
- let ty = match label {
- Some(_) => {
- let break_ty = self.table.new_type_var();
- let (breaks, ty) = self.with_breakable_ctx(
- BreakableKind::Block,
- break_ty.clone(),
- *label,
- |this| {
- this.infer_block(
- tgt_expr,
- statements,
- *tail,
- &Expectation::has_type(break_ty),
- )
- },
- );
- breaks.unwrap_or(ty)
- }
- None => self.infer_block(tgt_expr, statements, *tail, expected),
- };
- self.resolver = old_resolver;
- ty
+ self.infer_block(tgt_expr, statements, *tail, *label, expected)
+ }
+ Expr::Unsafe { id: _, statements, tail } => {
+ self.infer_block(tgt_expr, statements, *tail, None, expected)
}
- Expr::Unsafe { body } => self.infer_expr(*body, expected),
- Expr::Const { body } => {
- self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
- this.infer_expr(*body, expected)
+ Expr::Const { id: _, statements, tail } => {
+ self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+ this.infer_block(tgt_expr, statements, *tail, None, expected)
})
.1
}
- Expr::TryBlock { body } => {
+ Expr::TryBlock { id: _, statements, tail } => {
// The type that is returned from the try block
let try_ty = self.table.new_type_var();
if let Some(ty) = expected.only_has_type(&mut self.table) {
@@ -160,28 +169,41 @@ impl<'a> InferenceContext<'a> {
let ok_ty =
self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_output());
- self.with_breakable_ctx(BreakableKind::Block, ok_ty.clone(), None, |this| {
- this.infer_expr(*body, &Expectation::has_type(ok_ty));
- });
-
+ self.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ None,
+ &Expectation::has_type(ok_ty.clone()),
+ );
try_ty
}
- Expr::Async { body } => {
+ Expr::Async { id: _, statements, tail } => {
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+ let prev_ret_coercion =
+ mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let (_, inner_ty) =
- self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
- this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty))
+ self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+ this.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ None,
+ &Expectation::has_type(ret_ty),
+ )
});
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
+ self.return_coercion = prev_ret_coercion;
// Use the first type parameter as the output type of future.
// existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
- let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let impl_trait_id =
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, tgt_expr);
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
.intern(Interner)
@@ -191,7 +213,7 @@ impl<'a> InferenceContext<'a> {
// let ty = expected.coercion_target_type(&mut self.table);
let ty = self.table.new_type_var();
let (breaks, ()) =
- self.with_breakable_ctx(BreakableKind::Loop, ty, label, |this| {
+ self.with_breakable_ctx(BreakableKind::Loop, Some(ty), label, |this| {
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
});
@@ -204,7 +226,7 @@ impl<'a> InferenceContext<'a> {
}
}
&Expr::While { condition, body, label } => {
- self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
+ self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
this.infer_expr(
condition,
&Expectation::HasType(this.result.standard_types.bool_.clone()),
@@ -220,11 +242,13 @@ impl<'a> InferenceContext<'a> {
let iterable_ty = self.infer_expr(iterable, &Expectation::none());
let into_iter_ty =
self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
- let pat_ty =
- self.resolve_associated_type(into_iter_ty, self.resolve_iterator_item());
+ let pat_ty = self
+ .resolve_associated_type(into_iter_ty.clone(), self.resolve_iterator_item());
+
+ self.result.type_of_for_iterator.insert(tgt_expr, into_iter_ty);
- self.infer_pat(pat, &pat_ty, BindingMode::default());
- self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
+ self.infer_top_pat(pat, &pat_ty);
+ self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
});
@@ -251,7 +275,23 @@ impl<'a> InferenceContext<'a> {
Some(type_ref) => self.make_ty(type_ref),
None => self.table.new_type_var(),
};
- sig_tys.push(ret_ty.clone());
+ if let ClosureKind::Async = closure_kind {
+ // Use the first type parameter as the output type of future.
+ // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
+ let impl_trait_id =
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ sig_tys.push(
+ TyKind::OpaqueType(
+ opaque_ty_id,
+ Substitution::from1(Interner, ret_ty.clone()),
+ )
+ .intern(Interner),
+ );
+ } else {
+ sig_tys.push(ret_ty.clone());
+ }
+
let sig_ty = TyKind::Function(FnPointer {
num_binders: 0,
sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
@@ -262,33 +302,38 @@ impl<'a> InferenceContext<'a> {
})
.intern(Interner);
- let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) {
- // FIXME: report error when there are more than 1 parameter.
- let resume_ty = match sig_tys.first() {
- // When `sig_tys.len() == 1` the first type is the return type, not the
- // first parameter type.
- Some(ty) if sig_tys.len() > 1 => ty.clone(),
- _ => self.result.standard_types.unit.clone(),
- };
- let yield_ty = self.table.new_type_var();
-
- let subst = TyBuilder::subst_for_generator(self.db, self.owner)
- .push(resume_ty.clone())
- .push(yield_ty.clone())
- .push(ret_ty.clone())
- .build();
+ let (ty, resume_yield_tys) = match closure_kind {
+ ClosureKind::Generator(_) => {
+ // FIXME: report error when there are more than 1 parameter.
+ let resume_ty = match sig_tys.first() {
+ // When `sig_tys.len() == 1` the first type is the return type, not the
+ // first parameter type.
+ Some(ty) if sig_tys.len() > 1 => ty.clone(),
+ _ => self.result.standard_types.unit.clone(),
+ };
+ let yield_ty = self.table.new_type_var();
+
+ let subst = TyBuilder::subst_for_generator(self.db, self.owner)
+ .push(resume_ty.clone())
+ .push(yield_ty.clone())
+ .push(ret_ty.clone())
+ .build();
- let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
- let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
+ let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
+ let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
- (generator_ty, Some((resume_ty, yield_ty)))
- } else {
- let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
- let closure_ty =
- TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
- .intern(Interner);
+ (generator_ty, Some((resume_ty, yield_ty)))
+ }
+ ClosureKind::Closure | ClosureKind::Async => {
+ let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
+ let closure_ty = TyKind::Closure(
+ closure_id,
+ Substitution::from1(Interner, sig_ty.clone()),
+ )
+ .intern(Interner);
- (closure_ty, None)
+ (closure_ty, None)
+ }
};
// Eagerly try to relate the closure type with the expected
@@ -297,21 +342,25 @@ impl<'a> InferenceContext<'a> {
self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected);
// Now go through the argument patterns
- for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
- self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
+ for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) {
+ self.infer_top_pat(*arg_pat, &arg_ty);
}
+ // FIXME: lift these out into a struct
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+ let prev_ret_coercion =
+ mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let prev_resume_yield_tys =
mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
- self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
- this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+ self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+ this.infer_return(*body);
});
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
+ self.return_coercion = prev_ret_coercion;
self.resume_yield_tys = prev_resume_yield_tys;
ty
@@ -348,7 +397,13 @@ impl<'a> InferenceContext<'a> {
}
(params, ret_ty)
}
- None => (Vec::new(), self.err_ty()), // FIXME diagnostic
+ None => {
+ self.result.diagnostics.push(InferenceDiagnostic::ExpectedFunction {
+ call_expr: tgt_expr,
+ found: callee_ty.clone(),
+ });
+ (Vec::new(), self.err_ty())
+ }
};
let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
self.register_obligations_for_call(&callee_ty);
@@ -381,92 +436,109 @@ impl<'a> InferenceContext<'a> {
Expr::Match { expr, arms } => {
let input_ty = self.infer_expr(*expr, &Expectation::none());
- let expected = expected.adjust_for_branches(&mut self.table);
-
- let result_ty = if arms.is_empty() {
+ if arms.is_empty() {
+ self.diverges = Diverges::Always;
self.result.standard_types.never.clone()
} else {
- expected.coercion_target_type(&mut self.table)
- };
- let mut coerce = CoerceMany::new(result_ty);
-
- let matchee_diverges = self.diverges;
- let mut all_arms_diverge = Diverges::Always;
-
- for arm in arms.iter() {
- self.diverges = Diverges::Maybe;
- let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
- if let Some(guard_expr) = arm.guard {
- self.infer_expr(
- guard_expr,
- &Expectation::HasType(self.result.standard_types.bool_.clone()),
- );
+ let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut all_arms_diverge = Diverges::Always;
+ for arm in arms.iter() {
+ let input_ty = self.resolve_ty_shallow(&input_ty);
+ self.infer_top_pat(arm.pat, &input_ty);
}
- let arm_ty = self.infer_expr_inner(arm.expr, &expected);
- all_arms_diverge &= self.diverges;
- coerce.coerce(self, Some(arm.expr), &arm_ty);
- }
+ let expected = expected.adjust_for_branches(&mut self.table);
+ let result_ty = match &expected {
+ // We don't coerce to `()` so that if the match expression is a
+ // statement its branches can have any consistent type.
+ Expectation::HasType(ty) if *ty != self.result.standard_types.unit => {
+ ty.clone()
+ }
+ _ => self.table.new_type_var(),
+ };
+ let mut coerce = CoerceMany::new(result_ty);
+
+ for arm in arms.iter() {
+ if let Some(guard_expr) = arm.guard {
+ self.diverges = Diverges::Maybe;
+ self.infer_expr_coerce_never(
+ guard_expr,
+ &Expectation::HasType(self.result.standard_types.bool_.clone()),
+ );
+ }
+ self.diverges = Diverges::Maybe;
- self.diverges = matchee_diverges | all_arms_diverge;
+ let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+ all_arms_diverge &= self.diverges;
+ coerce.coerce(self, Some(arm.expr), &arm_ty);
+ }
- coerce.complete()
+ self.diverges = matchee_diverges | all_arms_diverge;
+
+ coerce.complete(self)
+ }
}
Expr::Path(p) => {
- // FIXME this could be more efficient...
- let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
- self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
+ let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
+ let ty = self.infer_path(p, tgt_expr.into()).unwrap_or_else(|| self.err_ty());
+ self.resolver.reset_to_guard(g);
+ ty
}
Expr::Continue { label } => {
if let None = find_continuable(&mut self.breakables, label.as_ref()) {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
is_break: false,
+ bad_value_break: false,
});
};
self.result.standard_types.never.clone()
}
Expr::Break { expr, label } => {
let val_ty = if let Some(expr) = *expr {
- self.infer_expr(expr, &Expectation::none())
+ let opt_coerce_to = match find_breakable(&mut self.breakables, label.as_ref()) {
+ Some(ctxt) => match &ctxt.coerce {
+ Some(coerce) => coerce.expected_ty(),
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+ expr: tgt_expr,
+ is_break: true,
+ bad_value_break: true,
+ });
+ self.err_ty()
+ }
+ },
+ None => self.err_ty(),
+ };
+ self.infer_expr_inner(expr, &Expectation::HasType(opt_coerce_to))
} else {
TyBuilder::unit()
};
match find_breakable(&mut self.breakables, label.as_ref()) {
- Some(ctxt) => {
- // avoiding the borrowck
- let mut coerce = mem::replace(
- &mut ctxt.coerce,
- CoerceMany::new(expected.coercion_target_type(&mut self.table)),
- );
-
- // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
- coerce.coerce(self, *expr, &val_ty);
-
- let ctxt = find_breakable(&mut self.breakables, label.as_ref())
- .expect("breakable stack changed during coercion");
- ctxt.coerce = coerce;
- ctxt.may_break = true;
- }
+ Some(ctxt) => match ctxt.coerce.take() {
+ Some(mut coerce) => {
+ coerce.coerce(self, *expr, &val_ty);
+
+ // Avoiding borrowck
+ let ctxt = find_breakable(&mut self.breakables, label.as_ref())
+ .expect("breakable stack changed during coercion");
+ ctxt.may_break = true;
+ ctxt.coerce = Some(coerce);
+ }
+ None => ctxt.may_break = true,
+ },
None => {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
is_break: true,
+ bad_value_break: false,
});
}
}
self.result.standard_types.never.clone()
}
- Expr::Return { expr } => {
- if let Some(expr) = expr {
- self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
- } else {
- let unit = TyBuilder::unit();
- let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
- }
- self.result.standard_types.never.clone()
- }
+ &Expr::Return { expr } => self.infer_expr_return(expr),
Expr::Yield { expr } => {
if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
if let Some(expr) = expr {
@@ -483,7 +555,7 @@ impl<'a> InferenceContext<'a> {
}
Expr::Yeet { expr } => {
if let &Some(expr) = expr {
- self.infer_expr_inner(expr, &Expectation::None);
+ self.infer_expr_no_expect(expr);
}
self.result.standard_types.never.clone()
}
@@ -524,71 +596,7 @@ impl<'a> InferenceContext<'a> {
}
ty
}
- Expr::Field { expr, name } => {
- let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
-
- let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
- let mut private_field = None;
- let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
- let (field_id, parameters) = match derefed_ty.kind(Interner) {
- TyKind::Tuple(_, substs) => {
- return name.as_tuple_index().and_then(|idx| {
- substs
- .as_slice(Interner)
- .get(idx)
- .map(|a| a.assert_ty_ref(Interner))
- .cloned()
- });
- }
- TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
- let local_id = self.db.struct_data(*s).variant_data.field(name)?;
- let field = FieldId { parent: (*s).into(), local_id };
- (field, parameters.clone())
- }
- TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
- let local_id = self.db.union_data(*u).variant_data.field(name)?;
- let field = FieldId { parent: (*u).into(), local_id };
- (field, parameters.clone())
- }
- _ => return None,
- };
- let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
- .is_visible_from(self.db.upcast(), self.resolver.module());
- if !is_visible {
- if private_field.is_none() {
- private_field = Some(field_id);
- }
- return None;
- }
- // can't have `write_field_resolution` here because `self.table` is borrowed :(
- self.result.field_resolutions.insert(tgt_expr, field_id);
- let ty = self.db.field_types(field_id.parent)[field_id.local_id]
- .clone()
- .substitute(Interner, &parameters);
- Some(ty)
- });
- let ty = match ty {
- Some(ty) => {
- let adjustments = auto_deref_adjust_steps(&autoderef);
- self.write_expr_adj(*expr, adjustments);
- let ty = self.insert_type_vars(ty);
- let ty = self.normalize_associated_types_in(ty);
- ty
- }
- _ => {
- // Write down the first private field resolution if we found no field
- // This aids IDE features for private fields like goto def
- if let Some(field) = private_field {
- self.result.field_resolutions.insert(tgt_expr, field);
- self.result
- .diagnostics
- .push(InferenceDiagnostic::PrivateField { expr: tgt_expr, field });
- }
- self.err_ty()
- }
- };
- ty
- }
+ Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name),
Expr::Await { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
@@ -611,7 +619,7 @@ impl<'a> InferenceContext<'a> {
Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref);
// FIXME: propagate the "castable to" expectation
- let _inner_ty = self.infer_expr_inner(*expr, &Expectation::None);
+ let _inner_ty = self.infer_expr_no_expect(*expr);
// FIXME check the cast...
cast_ty
}
@@ -807,49 +815,7 @@ impl<'a> InferenceContext<'a> {
TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
}
- Expr::Array(array) => {
- let elem_ty =
- match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
- Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
- _ => self.table.new_type_var(),
- };
- let mut coerce = CoerceMany::new(elem_ty.clone());
-
- let expected = Expectation::has_type(elem_ty.clone());
- let len = match array {
- Array::ElementList { elements, .. } => {
- for &expr in elements.iter() {
- let cur_elem_ty = self.infer_expr_inner(expr, &expected);
- coerce.coerce(self, Some(expr), &cur_elem_ty);
- }
- consteval::usize_const(Some(elements.len() as u128))
- }
- &Array::Repeat { initializer, repeat } => {
- self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
- self.infer_expr(
- repeat,
- &Expectation::HasType(
- TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
- ),
- );
-
- if let Some(g_def) = self.owner.as_generic_def_id() {
- let generics = generics(self.db.upcast(), g_def);
- consteval::eval_to_const(
- repeat,
- ParamLoweringMode::Placeholder,
- self,
- || generics,
- DebruijnIndex::INNERMOST,
- )
- } else {
- consteval::usize_const(None)
- }
- }
- };
-
- TyKind::Array(coerce.complete(), len).intern(Interner)
- }
+ Expr::Array(array) => self.infer_expr_array(array, expected),
Expr::Literal(lit) => match lit {
Literal::Bool(..) => self.result.standard_types.bool_.clone(),
Literal::String(..) => {
@@ -859,7 +825,11 @@ impl<'a> InferenceContext<'a> {
Literal::ByteString(bs) => {
let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
- let len = consteval::usize_const(Some(bs.len() as u128));
+ let len = consteval::usize_const(
+ self.db,
+ Some(bs.len() as u128),
+ self.resolver.krate(),
+ );
let array_type = TyKind::Array(byte_type, len).intern(Interner);
TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
@@ -904,6 +874,97 @@ impl<'a> InferenceContext<'a> {
ty
}
+ fn infer_expr_array(
+ &mut self,
+ array: &Array,
+ expected: &Expectation,
+ ) -> chalk_ir::Ty<Interner> {
+ let elem_ty = match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
+ Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
+ _ => self.table.new_type_var(),
+ };
+
+ let krate = self.resolver.krate();
+
+ let expected = Expectation::has_type(elem_ty.clone());
+ let (elem_ty, len) = match array {
+ Array::ElementList { elements, .. } if elements.is_empty() => {
+ (elem_ty, consteval::usize_const(self.db, Some(0), krate))
+ }
+ Array::ElementList { elements, .. } => {
+ let mut coerce = CoerceMany::new(elem_ty.clone());
+ for &expr in elements.iter() {
+ let cur_elem_ty = self.infer_expr_inner(expr, &expected);
+ coerce.coerce(self, Some(expr), &cur_elem_ty);
+ }
+ (
+ coerce.complete(self),
+ consteval::usize_const(self.db, Some(elements.len() as u128), krate),
+ )
+ }
+ &Array::Repeat { initializer, repeat } => {
+ self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty.clone()));
+ self.infer_expr(
+ repeat,
+ &Expectation::HasType(
+ TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
+ ),
+ );
+
+ (
+ elem_ty,
+ if let Some(g_def) = self.owner.as_generic_def_id() {
+ let generics = generics(self.db.upcast(), g_def);
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ || generics,
+ DebruijnIndex::INNERMOST,
+ )
+ } else {
+ consteval::usize_const(self.db, None, krate)
+ },
+ )
+ }
+ };
+
+ TyKind::Array(elem_ty, len).intern(Interner)
+ }
+
+ pub(super) fn infer_return(&mut self, expr: ExprId) {
+ let ret_ty = self
+ .return_coercion
+ .as_mut()
+ .expect("infer_return called outside function body")
+ .expected_ty();
+ let return_expr_ty = self.infer_expr_inner(expr, &Expectation::HasType(ret_ty));
+ let mut coerce_many = self.return_coercion.take().unwrap();
+ coerce_many.coerce(self, Some(expr), &return_expr_ty);
+ self.return_coercion = Some(coerce_many);
+ }
+
+ fn infer_expr_return(&mut self, expr: Option<ExprId>) -> Ty {
+ match self.return_coercion {
+ Some(_) => {
+ if let Some(expr) = expr {
+ self.infer_return(expr);
+ } else {
+ let mut coerce = self.return_coercion.take().unwrap();
+ coerce.coerce_forced_unit(self);
+ self.return_coercion = Some(coerce);
+ }
+ }
+ None => {
+ // FIXME: diagnose return outside of function
+ if let Some(expr) = expr {
+ self.infer_expr_no_expect(expr);
+ }
+ }
+ }
+ self.result.standard_types.never.clone()
+ }
+
fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
if let Some(box_id) = self.resolve_boxed_box() {
let table = &mut self.table;
@@ -982,8 +1043,11 @@ impl<'a> InferenceContext<'a> {
// type and length). This should not be just an error type,
// because we are to compute the unifiability of this type and
// `rhs_ty` in the end of this function to issue type mismatches.
- _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
- .intern(Interner),
+ _ => TyKind::Array(
+ self.err_ty(),
+ crate::consteval::usize_const(self.db, None, self.resolver.krate()),
+ )
+ .intern(Interner),
}
}
Expr::RecordLit { path, fields, .. } => {
@@ -1123,65 +1187,211 @@ impl<'a> InferenceContext<'a> {
expr: ExprId,
statements: &[Statement],
tail: Option<ExprId>,
+ label: Option<LabelId>,
expected: &Expectation,
) -> Ty {
- for stmt in statements {
- match stmt {
- Statement::Let { pat, type_ref, initializer, else_branch } => {
- let decl_ty = type_ref
- .as_ref()
- .map(|tr| self.make_ty(tr))
- .unwrap_or_else(|| self.err_ty());
-
- // Always use the declared type when specified
- let mut ty = decl_ty.clone();
-
- if let Some(expr) = initializer {
- let actual_ty =
- self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
- if decl_ty.is_unknown() {
- ty = actual_ty;
+ let coerce_ty = expected.coercion_target_type(&mut self.table);
+ let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
+
+ let (break_ty, ty) =
+ self.with_breakable_ctx(BreakableKind::Block, Some(coerce_ty.clone()), label, |this| {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, type_ref, initializer, else_branch } => {
+ let decl_ty = type_ref
+ .as_ref()
+ .map(|tr| this.make_ty(tr))
+ .unwrap_or_else(|| this.table.new_type_var());
+
+ let ty = if let Some(expr) = initializer {
+ let ty = if contains_explicit_ref_binding(&this.body, *pat) {
+ this.infer_expr(*expr, &Expectation::has_type(decl_ty.clone()))
+ } else {
+ this.infer_expr_coerce(
+ *expr,
+ &Expectation::has_type(decl_ty.clone()),
+ )
+ };
+ if type_ref.is_some() {
+ decl_ty
+ } else {
+ ty
+ }
+ } else {
+ decl_ty
+ };
+
+ this.infer_top_pat(*pat, &ty);
+
+ if let Some(expr) = else_branch {
+ let previous_diverges =
+ mem::replace(&mut this.diverges, Diverges::Maybe);
+ this.infer_expr_coerce(
+ *expr,
+ &Expectation::HasType(this.result.standard_types.never.clone()),
+ );
+ this.diverges = previous_diverges;
+ }
+ }
+ &Statement::Expr { expr, has_semi } => {
+ if has_semi {
+ this.infer_expr(expr, &Expectation::none());
+ } else {
+ this.infer_expr_coerce(
+ expr,
+ &Expectation::HasType(this.result.standard_types.unit.clone()),
+ );
+ }
}
}
+ }
- if let Some(expr) = else_branch {
- self.infer_expr_coerce(
- *expr,
- &Expectation::HasType(self.result.standard_types.never.clone()),
- );
+ // FIXME: This should make use of the breakable CoerceMany
+ if let Some(expr) = tail {
+ this.infer_expr_coerce(expr, expected)
+ } else {
+ // Citing rustc: if there is no explicit tail expression,
+ // that is typically equivalent to a tail expression
+ // of `()` -- except if the block diverges. In that
+ // case, there is no value supplied from the tail
+ // expression (assuming there are no other breaks,
+ // this implies that the type of the block will be
+ // `!`).
+ if this.diverges.is_always() {
+ // we don't even make an attempt at coercion
+ this.table.new_maybe_never_var()
+ } else if let Some(t) = expected.only_has_type(&mut this.table) {
+ if this
+ .coerce(Some(expr), &this.result.standard_types.unit.clone(), &t)
+ .is_err()
+ {
+ this.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch {
+ expected: t.clone(),
+ actual: this.result.standard_types.unit.clone(),
+ },
+ );
+ }
+ t
+ } else {
+ this.result.standard_types.unit.clone()
}
+ }
+ });
+ self.resolver.reset_to_guard(g);
+
+ break_ty.unwrap_or(ty)
+ }
- self.infer_pat(*pat, &ty, BindingMode::default());
+ fn lookup_field(
+ &mut self,
+ receiver_ty: &Ty,
+ name: &Name,
+ ) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> {
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone());
+ let mut private_field = None;
+ let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
+ let (field_id, parameters) = match derefed_ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ return name.as_tuple_index().and_then(|idx| {
+ substs
+ .as_slice(Interner)
+ .get(idx)
+ .map(|a| a.assert_ty_ref(Interner))
+ .cloned()
+ .map(|ty| (None, ty))
+ });
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
+ let local_id = self.db.struct_data(*s).variant_data.field(name)?;
+ let field = FieldId { parent: (*s).into(), local_id };
+ (field, parameters.clone())
}
- Statement::Expr { expr, .. } => {
- self.infer_expr(*expr, &Expectation::none());
+ TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
+ let local_id = self.db.union_data(*u).variant_data.field(name)?;
+ let field = FieldId { parent: (*u).into(), local_id };
+ (field, parameters.clone())
}
+ _ => return None,
+ };
+ let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+ .is_visible_from(self.db.upcast(), self.resolver.module());
+ if !is_visible {
+ if private_field.is_none() {
+ private_field = Some((field_id, parameters));
+ }
+ return None;
}
- }
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &parameters);
+ Some((Some(field_id), ty))
+ });
- if let Some(expr) = tail {
- self.infer_expr_coerce(expr, expected)
- } else {
- // Citing rustc: if there is no explicit tail expression,
- // that is typically equivalent to a tail expression
- // of `()` -- except if the block diverges. In that
- // case, there is no value supplied from the tail
- // expression (assuming there are no other breaks,
- // this implies that the type of the block will be
- // `!`).
- if self.diverges.is_always() {
- // we don't even make an attempt at coercion
- self.table.new_maybe_never_var()
- } else if let Some(t) = expected.only_has_type(&mut self.table) {
- if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
- self.result.type_mismatches.insert(
- expr.into(),
- TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
- );
+ Some(match res {
+ Some((field_id, ty)) => {
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ (ty, field_id, adjustments, true)
+ }
+ None => {
+ let (field_id, subst) = private_field?;
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &subst);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ (ty, Some(field_id), adjustments, false)
+ }
+ })
+ }
+
+ fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
+ let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
+ match self.lookup_field(&receiver_ty, name) {
+ Some((ty, field_id, adjustments, is_public)) => {
+ self.write_expr_adj(receiver, adjustments);
+ if let Some(field_id) = field_id {
+ self.result.field_resolutions.insert(tgt_expr, field_id);
}
- t
- } else {
- TyBuilder::unit()
+ if !is_public {
+ if let Some(field) = field_id {
+ // FIXME: Merge this diagnostic into UnresolvedField?
+ self.result
+ .diagnostics
+ .push(InferenceDiagnostic::PrivateField { expr: tgt_expr, field });
+ }
+ }
+ ty
+ }
+ None => {
+ // no field found,
+ let method_with_same_name_exists = {
+ self.get_traits_in_scope();
+
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+ method_resolution::lookup_method(
+ self.db,
+ &canonicalized_receiver.value,
+ self.trait_env.clone(),
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
+ VisibleFromModule::Filter(self.resolver.module()),
+ name,
+ )
+ .is_some()
+ };
+ self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField {
+ expr: tgt_expr,
+ receiver: receiver_ty,
+ name: name.clone(),
+ method_with_same_name_exists,
+ });
+ self.err_ty()
}
}
}
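The `method_with_same_name_exists` flag computed in `infer_field_access` above feeds the `UnresolvedField` diagnostic so an IDE can point from a failed field access to an identically named method. A small illustrative snippet (the erroneous form is kept in a comment so the code compiles):

```rust
// Writing `v.len` (a field access) where only a method `len` exists is the
// typical mistake this diagnostic targets.
fn length(v: Vec<i32>) -> usize {
    // let n = v.len; // error: no field `len` on `Vec<i32>`, but a method `len` exists
    v.len() // the resolvable form: a method call
}
```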
@@ -1198,13 +1408,11 @@ impl<'a> InferenceContext<'a> {
let receiver_ty = self.infer_expr(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
- let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
-
let resolved = method_resolution::lookup_method(
self.db,
&canonicalized_receiver.value,
self.trait_env.clone(),
- &traits_in_scope,
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
method_name,
);
@@ -1223,11 +1431,30 @@ impl<'a> InferenceContext<'a> {
}
(ty, self.db.value_ty(func.into()), substs)
}
- None => (
- receiver_ty,
- Binders::empty(Interner, self.err_ty()),
- Substitution::empty(Interner),
- ),
+ None => {
+ let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
+ {
+ Some((ty, field_id, adjustments, _public)) => {
+ self.write_expr_adj(receiver, adjustments);
+ if let Some(field_id) = field_id {
+ self.result.field_resolutions.insert(tgt_expr, field_id);
+ }
+ Some(ty)
+ }
+ None => None,
+ };
+ self.result.diagnostics.push(InferenceDiagnostic::UnresolvedMethodCall {
+ expr: tgt_expr,
+ receiver: receiver_ty.clone(),
+ name: method_name.clone(),
+ field_with_same_name: field_with_same_name_exists,
+ });
+ (
+ receiver_ty,
+ Binders::empty(Interner, self.err_ty()),
+ Substitution::empty(Interner),
+ )
+ }
};
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
@@ -1636,16 +1863,16 @@ impl<'a> InferenceContext<'a> {
fn with_breakable_ctx<T>(
&mut self,
kind: BreakableKind,
- ty: Ty,
+ ty: Option<Ty>,
label: Option<LabelId>,
cb: impl FnOnce(&mut Self) -> T,
) -> (Option<Ty>, T) {
self.breakables.push({
let label = label.map(|label| self.body[label].name.clone());
- BreakableContext { kind, may_break: false, coerce: CoerceMany::new(ty), label }
+ BreakableContext { kind, may_break: false, coerce: ty.map(CoerceMany::new), label }
});
let res = cb(self);
let ctx = self.breakables.pop().expect("breakable stack broken");
- (ctx.may_break.then(|| ctx.coerce.complete()), res)
+ (if ctx.may_break { ctx.coerce.map(|ctx| ctx.complete(self)) } else { None }, res)
}
}
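A hedged, user-level illustration of the breakable-block context with an optional coercion target introduced above (`with_breakable_ctx(BreakableKind::Block, Some(coerce_ty), ..)`): when a target exists, `break` values and the tail expression are coerced toward the same expected type. Plain Rust label-break-value, not rust-analyzer API:

```rust
// Both the `break 'blk 1` value and the tail `0` are coerced to the block's
// expected type `i64`.
fn pick(flag: bool) -> i64 {
    'blk: {
        if flag {
            break 'blk 1; // contributes a value to the breakable context
        }
        0 // tail expression, coerced to the same target type
    }
}
```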
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index f154dac8e..5f839fc30 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -4,22 +4,57 @@ use std::iter::repeat_with;
use chalk_ir::Mutability;
use hir_def::{
- expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
+ body::Body,
+ expr::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
path::Path,
- type_ref::ConstScalar,
};
use hir_expand::name::Name;
use crate::{
- consteval::intern_const_scalar,
+ consteval::{try_const_usize, usize_const},
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
lower::lower_to_chalk_mutability,
primitive::UintTy,
- static_lifetime, ConcreteConst, ConstValue, Interner, Scalar, Substitution, Ty, TyBuilder,
- TyExt, TyKind,
+ static_lifetime, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt, TyKind,
};
-use super::PatLike;
+/// Used to generalize patterns and assignee expressions.
+pub(super) trait PatLike: Into<ExprOrPatId> + Copy {
+ type BindingMode: Copy;
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ default_bm: Self::BindingMode,
+ ) -> Ty;
+}
+
+impl PatLike for ExprId {
+ type BindingMode = ();
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ (): Self::BindingMode,
+ ) -> Ty {
+ this.infer_assignee_expr(id, expected_ty)
+ }
+}
+
+impl PatLike for PatId {
+ type BindingMode = BindingMode;
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ default_bm: Self::BindingMode,
+ ) -> Ty {
+ this.infer_pat(id, expected_ty, default_bm)
+ }
+}
impl<'a> InferenceContext<'a> {
/// Infers type for tuple struct pattern or its corresponding assignee expression.
@@ -112,6 +147,7 @@ impl<'a> InferenceContext<'a> {
ellipsis: Option<usize>,
subs: &[T],
) -> Ty {
+ let expected = self.resolve_ty_shallow(expected);
let expectations = match expected.as_tuple() {
Some(parameters) => &*parameters.as_slice(Interner),
_ => &[],
@@ -145,12 +181,11 @@ impl<'a> InferenceContext<'a> {
.intern(Interner)
}
- pub(super) fn infer_pat(
- &mut self,
- pat: PatId,
- expected: &Ty,
- mut default_bm: BindingMode,
- ) -> Ty {
+ pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty) {
+ self.infer_pat(pat, expected, BindingMode::default());
+ }
+
+ fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
if is_non_ref_pat(self.body, pat) {
@@ -185,30 +220,17 @@ impl<'a> InferenceContext<'a> {
self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
}
Pat::Or(pats) => {
- if let Some((first_pat, rest)) = pats.split_first() {
- let ty = self.infer_pat(*first_pat, &expected, default_bm);
- for pat in rest {
- self.infer_pat(*pat, &expected, default_bm);
- }
- ty
- } else {
- self.err_ty()
+ for pat in pats.iter() {
+ self.infer_pat(*pat, &expected, default_bm);
}
+ expected.clone()
}
- Pat::Ref { pat, mutability } => {
- let mutability = lower_to_chalk_mutability(*mutability);
- let expectation = match expected.as_reference() {
- Some((inner_ty, _lifetime, exp_mut)) => {
- if mutability != exp_mut {
- // FIXME: emit type error?
- }
- inner_ty.clone()
- }
- _ => self.result.standard_types.unknown.clone(),
- };
- let subty = self.infer_pat(*pat, &expectation, default_bm);
- TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
- }
+ &Pat::Ref { pat, mutability } => self.infer_ref_pat(
+ pat,
+ lower_to_chalk_mutability(mutability),
+ &expected,
+ default_bm,
+ ),
Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
.infer_tuple_struct_pat_like(
p.as_deref(),
@@ -223,72 +245,14 @@ impl<'a> InferenceContext<'a> {
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
}
Pat::Path(path) => {
- // FIXME use correct resolver for the surrounding expression
- let resolver = self.resolver.clone();
- self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
+ // FIXME update resolver for the surrounding expression
+ self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty())
}
- Pat::Bind { mode, name: _, subpat } => {
- let mode = if mode == &BindingAnnotation::Unannotated {
- default_bm
- } else {
- BindingMode::convert(*mode)
- };
- self.result.pat_binding_modes.insert(pat, mode);
-
- let inner_ty = match subpat {
- Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
- None => expected,
- };
- let inner_ty = self.insert_type_vars_shallow(inner_ty);
-
- let bound_ty = match mode {
- BindingMode::Ref(mutability) => {
- TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
- .intern(Interner)
- }
- BindingMode::Move => inner_ty.clone(),
- };
- self.write_pat_ty(pat, bound_ty);
- return inner_ty;
+ Pat::Bind { id, subpat } => {
+ return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected);
}
Pat::Slice { prefix, slice, suffix } => {
- let elem_ty = match expected.kind(Interner) {
- TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
- _ => self.err_ty(),
- };
-
- for &pat_id in prefix.iter().chain(suffix.iter()) {
- self.infer_pat(pat_id, &elem_ty, default_bm);
- }
-
- if let &Some(slice_pat_id) = slice {
- let rest_pat_ty = match expected.kind(Interner) {
- TyKind::Array(_, length) => {
- let len = match length.data(Interner).value {
- ConstValue::Concrete(ConcreteConst {
- interned: ConstScalar::UInt(len),
- }) => len.checked_sub((prefix.len() + suffix.len()) as u128),
- _ => None,
- };
- TyKind::Array(
- elem_ty.clone(),
- intern_const_scalar(
- len.map_or(ConstScalar::Unknown, |len| ConstScalar::UInt(len)),
- TyBuilder::usize(),
- ),
- )
- }
- _ => TyKind::Slice(elem_ty.clone()),
- }
- .intern(Interner);
- self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
- }
-
- match expected.kind(Interner) {
- TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
- _ => TyKind::Slice(elem_ty),
- }
- .intern(Interner)
+ self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm)
}
Pat::Wild => expected.clone(),
Pat::Range { start, end } => {
@@ -296,27 +260,10 @@ impl<'a> InferenceContext<'a> {
self.infer_expr(*end, &Expectation::has_type(start_ty))
}
&Pat::Lit(expr) => {
- // FIXME: using `Option` here is a workaround until we can use if-let chains in stable.
- let mut pat_ty = None;
-
- // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
- if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] {
- if let Some((inner, ..)) = expected.as_reference() {
- let inner = self.resolve_ty_shallow(inner);
- if matches!(inner.kind(Interner), TyKind::Slice(_)) {
- let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
- let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
- let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty)
- .intern(Interner);
- self.write_expr_ty(expr, ty.clone());
- pat_ty = Some(ty);
- }
- }
- }
-
- pat_ty.unwrap_or_else(|| {
- self.infer_expr(expr, &Expectation::has_type(expected.clone()))
- })
+ // Don't emit type mismatches again, the expression lowering already did that.
+ let ty = self.infer_lit_pat(expr, &expected);
+ self.write_pat_ty(pat, ty.clone());
+ return ty;
}
Pat::Box { inner } => match self.resolve_boxed_box() {
Some(box_adt) => {
@@ -345,7 +292,8 @@ impl<'a> InferenceContext<'a> {
};
// use a new type variable if we got error type here
let ty = self.insert_type_vars_shallow(ty);
- if !self.unify(&ty, &expected) {
+        // FIXME: This never check is odd, but required with how we do inference right now
+ if !expected.is_never() && !self.unify(&ty, &expected) {
self.result
.type_mismatches
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
@@ -353,6 +301,111 @@ impl<'a> InferenceContext<'a> {
self.write_pat_ty(pat, ty.clone());
ty
}
+
+ fn infer_ref_pat(
+ &mut self,
+ pat: PatId,
+ mutability: Mutability,
+ expected: &Ty,
+ default_bm: BindingMode,
+ ) -> Ty {
+ let expectation = match expected.as_reference() {
+ Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(),
+ _ => self.result.standard_types.unknown.clone(),
+ };
+ let subty = self.infer_pat(pat, &expectation, default_bm);
+ TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+ }
+
+ fn infer_bind_pat(
+ &mut self,
+ pat: PatId,
+ binding: BindingId,
+ default_bm: BindingMode,
+ subpat: Option<PatId>,
+ expected: &Ty,
+ ) -> Ty {
+ let Binding { mode, .. } = self.body.bindings[binding];
+ let mode = if mode == BindingAnnotation::Unannotated {
+ default_bm
+ } else {
+ BindingMode::convert(mode)
+ };
+ self.result.pat_binding_modes.insert(pat, mode);
+
+ let inner_ty = match subpat {
+ Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
+ None => expected.clone(),
+ };
+ let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+ let bound_ty = match mode {
+ BindingMode::Ref(mutability) => {
+ TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner)
+ }
+ BindingMode::Move => inner_ty.clone(),
+ };
+ self.write_pat_ty(pat, bound_ty.clone());
+ self.write_binding_ty(binding, bound_ty);
+ return inner_ty;
+ }
+
+ fn infer_slice_pat(
+ &mut self,
+ expected: &Ty,
+ prefix: &[PatId],
+ slice: &Option<PatId>,
+ suffix: &[PatId],
+ default_bm: BindingMode,
+ ) -> Ty {
+ let elem_ty = match expected.kind(Interner) {
+ TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ for &pat_id in prefix.iter().chain(suffix.iter()) {
+ self.infer_pat(pat_id, &elem_ty, default_bm);
+ }
+
+ if let &Some(slice_pat_id) = slice {
+ let rest_pat_ty = match expected.kind(Interner) {
+ TyKind::Array(_, length) => {
+ let len = try_const_usize(length);
+ let len =
+ len.and_then(|len| len.checked_sub((prefix.len() + suffix.len()) as u128));
+ TyKind::Array(elem_ty.clone(), usize_const(self.db, len, self.resolver.krate()))
+ }
+ _ => TyKind::Slice(elem_ty.clone()),
+ }
+ .intern(Interner);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+ }
+
+ match expected.kind(Interner) {
+ TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+ _ => TyKind::Slice(elem_ty),
+ }
+ .intern(Interner)
+ }
+
+ fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty {
+ // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
+ if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] {
+ if let Some((inner, ..)) = expected.as_reference() {
+ let inner = self.resolve_ty_shallow(inner);
+ if matches!(inner.kind(Interner), TyKind::Slice(_)) {
+ let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
+ let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
+ let ty =
+ TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner);
+ self.write_expr_ty(expr, ty.clone());
+ return ty;
+ }
+ }
+ }
+
+ self.infer_expr(expr, &Expectation::has_type(expected.clone()))
+ }
}
fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
@@ -369,11 +422,22 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
Pat::Lit(expr) => {
!matches!(body[*expr], Expr::Literal(Literal::String(..) | Literal::ByteString(..)))
}
- Pat::Bind {
- mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
- subpat: Some(subpat),
- ..
- } => is_non_ref_pat(body, *subpat),
+ Pat::Bind { id, subpat: Some(subpat), .. }
+ if matches!(
+ body.bindings[*id].mode,
+ BindingAnnotation::Mutable | BindingAnnotation::Unannotated
+ ) =>
+ {
+ is_non_ref_pat(body, *subpat)
+ }
Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
}
}
+
+pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
+ let mut res = false;
+ body.walk_pats(pat_id, &mut |pat| {
+ res |= matches!(pat, Pat::Bind { id, .. } if body.bindings[*id].mode == BindingAnnotation::Ref);
+ });
+ res
+}
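The length arithmetic in `infer_slice_pat` above (`len - prefix - suffix` via `checked_sub`) corresponds to how array rest patterns behave in plain Rust; a small illustrative example, not rust-analyzer internals:

```rust
// With a scrutinee of known length 5, one prefix element and one suffix
// element, the rest pattern binds an array of length 3.
fn middle(a: [i32; 5]) -> [i32; 3] {
    let [first, rest @ .., last] = a; // rest: [i32; 3] == [i32; 5 - 1 - 1]
    let _ = (first, last);
    rest
}
```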
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index 0a8527afb..2267fedaa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -3,7 +3,7 @@
use chalk_ir::cast::Cast;
use hir_def::{
path::{Path, PathSegment},
- resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ resolver::{ResolveValueResult, TypeNs, ValueNs},
AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
};
use hir_expand::name::Name;
@@ -21,55 +21,42 @@ use crate::{
use super::{ExprOrPatId, InferenceContext, TraitRef};
impl<'a> InferenceContext<'a> {
- pub(super) fn infer_path(
- &mut self,
- resolver: &Resolver,
- path: &Path,
- id: ExprOrPatId,
- ) -> Option<Ty> {
- let ty = self.resolve_value_path(resolver, path, id)?;
+ pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
+ let ty = self.resolve_value_path(path, id)?;
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
Some(ty)
}
- fn resolve_value_path(
- &mut self,
- resolver: &Resolver,
- path: &Path,
- id: ExprOrPatId,
- ) -> Option<Ty> {
+ fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
- if path.segments().is_empty() {
- // This can't actually happen syntax-wise
- return None;
- }
+ let Some(last) = path.segments().last() else { return None };
let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
- self.resolve_ty_assoc_item(
- ty,
- path.segments().last().expect("path had at least one segment").name,
- id,
- )?
+ self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
} else {
+ // FIXME: report error, unresolved first path segment
let value_or_partial =
- resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+ self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None),
- ResolveValueResult::Partial(def, remaining_index) => {
- self.resolve_assoc_item(def, path, remaining_index, id)?
- }
+ ResolveValueResult::Partial(def, remaining_index) => self
+ .resolve_assoc_item(def, path, remaining_index, id)
+ .map(|(it, substs)| (it, Some(substs)))?,
}
};
let typable: ValueTyDefId = match value {
- ValueNs::LocalBinding(pat) => {
- let ty = self.result.type_of_pat.get(pat)?.clone();
- return Some(ty);
- }
+ ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
+ Some(ty) => return Some(ty.clone()),
+ None => {
+ never!("uninferred pattern?");
+ return None;
+ }
+ },
ValueNs::FunctionId(it) => it.into(),
ValueNs::ConstId(it) => it.into(),
ValueNs::StaticId(it) => it.into(),
@@ -91,7 +78,7 @@ impl<'a> InferenceContext<'a> {
let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
return Some(ty);
} else {
- // FIXME: diagnostic, invalid Self reference
+ // FIXME: report error, invalid Self reference
return None;
}
}
@@ -126,7 +113,7 @@ impl<'a> InferenceContext<'a> {
path: &Path,
remaining_index: usize,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
assert!(remaining_index < path.segments().len());
// there may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from
@@ -179,7 +166,7 @@ impl<'a> InferenceContext<'a> {
trait_ref: TraitRef,
segment: PathSegment<'_>,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
let trait_ = trait_ref.hir_trait_id();
let item =
self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
@@ -215,7 +202,7 @@ impl<'a> InferenceContext<'a> {
};
self.write_assoc_resolution(id, item, trait_ref.substitution.clone());
- Some((def, Some(trait_ref.substitution)))
+ Some((def, trait_ref.substitution))
}
fn resolve_ty_assoc_item(
@@ -223,7 +210,7 @@ impl<'a> InferenceContext<'a> {
ty: Ty,
name: &Name,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
if let TyKind::Error = ty.kind(Interner) {
return None;
}
@@ -233,70 +220,66 @@ impl<'a> InferenceContext<'a> {
}
let canonical_ty = self.canonicalize(ty.clone());
- let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
let mut not_visible = None;
let res = method_resolution::iterate_method_candidates(
&canonical_ty.value,
self.db,
self.table.trait_env.clone(),
- &traits_in_scope,
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
Some(name),
method_resolution::LookupMode::Path,
|_ty, item, visible| {
- let (def, container) = match item {
- AssocItemId::FunctionId(f) => {
- (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
- }
- AssocItemId::ConstId(c) => {
- (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
- }
- AssocItemId::TypeAliasId(_) => unreachable!(),
- };
- let substs = match container {
- ItemContainerId::ImplId(impl_id) => {
- let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
- .fill_with_inference_vars(&mut self.table)
- .build();
- let impl_self_ty =
- self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
- self.unify(&impl_self_ty, &ty);
- impl_substs
- }
- ItemContainerId::TraitId(trait_) => {
- // we're picking this method
- let trait_ref = TyBuilder::trait_ref(self.db, trait_)
- .push(ty.clone())
- .fill_with_inference_vars(&mut self.table)
- .build();
- self.push_obligation(trait_ref.clone().cast(Interner));
- trait_ref.substitution
- }
- ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
- never!("assoc item contained in module/extern block");
- return None;
- }
- };
-
if visible {
- Some((def, item, Some(substs), true))
+ Some((item, true))
} else {
if not_visible.is_none() {
- not_visible = Some((def, item, Some(substs), false));
+ not_visible = Some((item, false));
}
None
}
},
);
let res = res.or(not_visible);
- if let Some((_, item, Some(ref substs), visible)) = res {
- self.write_assoc_resolution(id, item, substs.clone());
- if !visible {
- self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item })
+ let (item, visible) = res?;
+
+ let (def, container) = match item {
+ AssocItemId::FunctionId(f) => {
+ (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container),
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+ let substs = match container {
+ ItemContainerId::ImplId(impl_id) => {
+ let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ let impl_self_ty = self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
+ self.unify(&impl_self_ty, &ty);
+ impl_substs
}
+ ItemContainerId::TraitId(trait_) => {
+ // we're picking this method
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(ty.clone())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.push_obligation(trait_ref.clone().cast(Interner));
+ trait_ref.substitution
+ }
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+ never!("assoc item contained in module/extern block");
+ return None;
+ }
+ };
+
+ self.write_assoc_resolution(id, item, substs.clone());
+ if !visible {
+ self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item });
}
- res.map(|(def, _, substs, _)| (def, substs))
+ Some((def, substs))
}
fn resolve_enum_variant_on_ty(
@@ -304,7 +287,7 @@ impl<'a> InferenceContext<'a> {
ty: &Ty,
name: &Name,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
let ty = self.resolve_ty_shallow(ty);
let (enum_id, subst) = match ty.as_adt() {
Some((AdtId::EnumId(e), subst)) => (e, subst),
@@ -314,6 +297,6 @@ impl<'a> InferenceContext<'a> {
let local_id = enum_data.variant(name)?;
let variant = EnumVariantId { parent: enum_id, local_id };
self.write_variant_resolution(id, variant.into());
- Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+ Some((ValueNs::EnumVariantId(variant), subst.clone()))
}
}
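A hedged illustration of the `path.type_anchor()` case handled in `resolve_value_path` above: the last path segment names an associated item that is looked up on the explicitly written type. Plain Rust, with an illustrative `Zero` trait:

```rust
// `<u8>::ZERO` is a type-anchored value path: `<u8>` anchors the lookup and
// the final segment `ZERO` resolves to an associated item of a trait in scope.
trait Zero {
    const ZERO: Self;
}

impl Zero for u8 {
    const ZERO: Self = 0;
}

fn zero_u8() -> u8 {
    <u8>::ZERO
}
```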
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index 46ed3533c..504f0743a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -704,14 +704,13 @@ impl<'a> fmt::Debug for InferenceTable<'a> {
mod resolve {
use super::InferenceTable;
use crate::{
- ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
- Interner, Lifetime, Ty, TyVariableKind, VariableKind,
+ ConcreteConst, Const, ConstData, ConstScalar, ConstValue, DebruijnIndex, GenericArg,
+ InferenceVar, Interner, Lifetime, Ty, TyVariableKind, VariableKind,
};
use chalk_ir::{
cast::Cast,
fold::{TypeFoldable, TypeFolder},
};
- use hir_def::type_ref::ConstScalar;
#[derive(chalk_derive::FallibleTypeFolder)]
#[has_interner(Interner)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
index 0c547192a..36af78153 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
@@ -6,12 +6,12 @@ use chalk_ir::{
DebruijnIndex,
};
use hir_def::{
- adt::VariantData, attr::Attrs, type_ref::ConstScalar, visibility::Visibility, AdtId,
- EnumVariantId, HasModule, Lookup, ModuleId, VariantId,
+ adt::VariantData, attr::Attrs, visibility::Visibility, AdtId, EnumVariantId, HasModule, Lookup,
+ ModuleId, VariantId,
};
use crate::{
- db::HirDatabase, Binders, ConcreteConst, Const, ConstValue, Interner, Substitution, Ty, TyKind,
+ consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
};
/// Checks whether a type is visibly uninhabited from a particular module.
@@ -69,7 +69,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
- TyKind::Array(item_ty, len) => match try_usize_const(len) {
+ TyKind::Array(item_ty, len) => match try_const_usize(len) {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
@@ -160,14 +160,3 @@ impl UninhabitedFrom<'_> {
}
}
}
-
-fn try_usize_const(c: &Const) -> Option<u128> {
- let data = &c.data(Interner);
- if data.ty.kind(Interner) != &TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)) {
- return None;
- }
- match data.value {
- ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(value) }) => Some(value),
- _ => None,
- }
-}
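The array rule used by the `try_const_usize` match above, shown in plain Rust: `[T; 0]` is inhabited even when `T` is not, while `[T; n]` for `n >= 1` inherits `T`'s uninhabitedness. `Void` here is just a stand-in for any uninhabited type:

```rust
// A zero-length array of an uninhabited type still has a value, so the
// visitor treats `Some(0) | None` as opaquely inhabited.
enum Void {}

fn empty() -> [Void; 0] {
    []
}
```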
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
index 7bf73560c..aea7e9762 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -1,10 +1,10 @@
//! Implementation of the Chalk `Interner` trait, which allows customizing the
//! representation of the various objects Chalk deals with (types, goals etc.).
-use crate::{chalk_db, tls, GenericArg};
+use crate::{chalk_db, tls, ConstScalar, GenericArg};
use base_db::salsa::InternId;
use chalk_ir::{Goal, GoalData};
-use hir_def::{type_ref::ConstScalar, TypeAliasId};
+use hir_def::TypeAliasId;
use intern::{impl_internable, Interned};
use smallvec::SmallVec;
use std::{fmt, sync::Arc};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index f21b4f84c..b95bb01fc 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -11,7 +11,7 @@ use hir_def::{
};
use stdx::never;
-use crate::{db::HirDatabase, Interner, Substitution, Ty};
+use crate::{consteval::try_const_usize, db::HirDatabase, Interner, Substitution, Ty};
use self::adt::struct_variant_idx;
pub use self::{
@@ -122,17 +122,9 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
- let count = match count.data(Interner).value {
- chalk_ir::ConstValue::Concrete(c) => match c.interned {
- hir_def::type_ref::ConstScalar::Int(x) => x as u64,
- hir_def::type_ref::ConstScalar::UInt(x) => x as u64,
- hir_def::type_ref::ConstScalar::Unknown => {
- user_error!("unknown const generic parameter")
- }
- _ => user_error!("mismatched type of const generic parameter"),
- },
- _ => return Err(LayoutError::HasPlaceholder),
- };
+ let count = try_const_usize(&count).ok_or(LayoutError::UserError(
+ "mismatched type of const generic parameter".to_string(),
+ ))? as u64;
let element = layout_of_ty(db, element, krate)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index cb7968c14..b22d0fe8d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -76,17 +76,8 @@ pub fn layout_of_adt_query(
|min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
variants.iter_enumerated().filter_map(|(id, _)| {
let AdtId::EnumId(e) = def else { return None };
- let d = match db
- .const_eval_variant(EnumVariantId { parent: e, local_id: id.0 })
- .ok()?
- {
- crate::consteval::ComputedExpr::Literal(l) => match l {
- hir_def::expr::Literal::Int(i, _) => i,
- hir_def::expr::Literal::Uint(i, _) => i as i128,
- _ => return None,
- },
- _ => return None,
- };
+ let d =
+ db.const_eval_discriminant(EnumVariantId { parent: e, local_id: id.0 }).ok()?;
Some((id, d))
}),
// FIXME: The current code for niche-filling relies on variant indices
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index 067bdc960..a8971fde3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -65,25 +65,17 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
})
.unwrap();
let hir_body = db.body(adt_id.into());
- let pat = hir_body
- .pats
- .iter()
- .find(|x| match x.1 {
- hir_def::expr::Pat::Bind { name, .. } => name.to_smol_str() == "goal",
- _ => false,
- })
- .unwrap()
- .0;
+ let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
let infer = db.infer(adt_id.into());
- let goal_ty = infer.type_of_pat[pat].clone();
+ let goal_ty = infer.type_of_binding[b].clone();
layout_of_ty(&db, &goal_ty, module_id.krate())
}
#[track_caller]
fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
let l = eval_goal(ra_fixture, minicore).unwrap();
- assert_eq!(l.size.bytes(), size);
- assert_eq!(l.align.abi.bytes(), align);
+ assert_eq!(l.size.bytes(), size, "size mismatch");
+ assert_eq!(l.align.abi.bytes(), align, "align mismatch");
}
#[track_caller]
@@ -300,4 +292,9 @@ fn enums_with_discriminants() {
C, // implicitly becomes 256, so we need two bytes
}
}
+ size_and_align! {
+ enum Goal {
+ A = 1, // This one is (perhaps surprisingly) zero sized.
+ }
+ }
}
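The "perhaps surprisingly zero sized" case added to the layout tests above can be checked with plain Rust: a single-variant, fieldless enum needs no discriminant storage at runtime, even when an explicit discriminant value is written.

```rust
// The discriminant value is statically known, so no tag byte is stored;
// it remains observable through an `as` cast.
enum Goal {
    A = 1,
}

fn main() {
    assert_eq!(std::mem::size_of::<Goal>(), 0);
    assert_eq!(Goal::A as i32, 1);
}
```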
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 59a5ef8c1..9c63d67ab 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -13,6 +13,7 @@ mod builder;
mod chalk_db;
mod chalk_ext;
pub mod consteval;
+pub mod mir;
mod infer;
mod inhabitedness;
mod interner;
@@ -34,7 +35,7 @@ mod tests;
#[cfg(test)]
mod test_db;
-use std::sync::Arc;
+use std::{collections::HashMap, hash::Hash, sync::Arc};
use chalk_ir::{
fold::{Shift, TypeFoldable},
@@ -42,10 +43,11 @@ use chalk_ir::{
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
NoSolution, TyData,
};
+use either::Either;
use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
use hir_expand::name;
-use itertools::Either;
use la_arena::{Arena, Idx};
+use mir::MirEvalError;
use rustc_hash::FxHashSet;
use traits::FnTrait;
use utils::Generics;
@@ -145,6 +147,49 @@ pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
pub type Guidance = chalk_solve::Guidance<Interner>;
pub type WhereClause = chalk_ir::WhereClause<Interner>;
+/// A constant can have references to other things. The memory map's job is to
+/// hold the necessary bits of memory of the const eval session to keep the
+/// constant meaningful.
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+pub struct MemoryMap(pub HashMap<usize, Vec<u8>>);
+
+impl MemoryMap {
+ fn insert(&mut self, addr: usize, x: Vec<u8>) {
+ self.0.insert(addr, x);
+ }
+
+    /// This function converts each address using a function `f`, which gets the byte intervals and assigns
+    /// an address to them. It is useful when you want to load a constant with a memory map into a new memory.
+    /// You can pass an allocator function as `f` and it will return a mapping of old addresses to new addresses.
+ fn transform_addresses(
+ &self,
+ mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
+ ) -> Result<HashMap<usize, usize>, MirEvalError> {
+ self.0.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
+ }
+}
+
+/// A concrete constant value
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ConstScalar {
+ Bytes(Vec<u8>, MemoryMap),
+ /// Case of an unknown value that rustc might know but we don't
+ // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
+ // constants
+ // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
+ // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
+ Unknown,
+}
+
+impl Hash for ConstScalar {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ core::mem::discriminant(self).hash(state);
+ if let ConstScalar::Bytes(b, _) = self {
+ b.hash(state)
+ }
+ }
+}
+
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics(db.upcast(), id.parent).param_idx(id)
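A standalone sketch of the `MemoryMap::transform_addresses` helper added in this file: walk an address-to-bytes map, hand each byte block to an allocator-like closure, and collect a mapping from old to new addresses. The free function, the `String` error type, and the names here are illustrative only, not the rust-analyzer API.

```rust
use std::collections::HashMap;

fn transform_addresses(
    map: &HashMap<usize, Vec<u8>>,
    mut alloc: impl FnMut(&[u8]) -> Result<usize, String>,
) -> Result<HashMap<usize, usize>, String> {
    // Each entry's bytes are passed to `alloc`, which decides the new address;
    // the first error aborts the whole collection.
    map.iter().map(|(&old, bytes)| Ok((old, alloc(bytes)?))).collect()
}
```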
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 299646737..23b15087e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -16,6 +16,7 @@ use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
+use either::Either;
use hir_def::{
adt::StructKind,
body::{Expander, LowerCtx},
@@ -26,16 +27,13 @@ use hir_def::{
lang_item::{lang_attr, LangItem},
path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs},
- type_ref::{
- ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
- },
+ type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
};
use hir_expand::{name::Name, ExpandResult};
use intern::Interned;
-use itertools::Either;
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
@@ -44,7 +42,7 @@ use syntax::ast;
use crate::{
all_super_traits,
- consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
+ consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
db::HirDatabase,
make_binders,
mapping::{from_chalk_trait_id, ToChalk},
@@ -524,6 +522,10 @@ impl<'a> TyLoweringContext<'a> {
};
return (ty, None);
}
+ TypeNs::TraitAliasId(_) => {
+ // FIXME(trait_alias): Implement trait alias.
+ return (TyKind::Error.intern(Interner), None);
+ }
TypeNs::GenericParam(param_id) => {
let generics = generics(
self.db.upcast(),
@@ -879,6 +881,7 @@ impl<'a> TyLoweringContext<'a> {
) -> Option<TraitRef> {
let resolved =
match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
+ // FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr,
_ => return None,
};
@@ -968,7 +971,7 @@ impl<'a> TyLoweringContext<'a> {
// - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
// the builtin impls are supported by Chalk, we ignore them here.
if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
- if lang == "drop" || lang == "destruct" {
+ if matches!(lang, LangItem::Drop | LangItem::Destruct) {
return false;
}
}
@@ -1444,6 +1447,7 @@ pub(crate) fn trait_environment_query(
GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
GenericDefId::AdtId(_) => None,
GenericDefId::TraitId(_) => None,
+ GenericDefId::TraitAliasId(_) => None,
GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
GenericDefId::ImplId(_) => None,
GenericDefId::EnumVariantId(_) => None,
@@ -1583,10 +1587,10 @@ pub(crate) fn generic_defaults_recover(
.iter_id()
.map(|id| {
let val = match id {
- itertools::Either::Left(_) => {
+ Either::Left(_) => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
- itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders(db, &generic_params, val)
})
@@ -1919,7 +1923,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
arg: &'a GenericArg,
this: &mut T,
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
- for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
+ for_const: impl FnOnce(&mut T, &ConstRefOrPath, Ty) -> Const + 'a,
) -> Option<crate::GenericArg> {
let kind = match kind_id {
Either::Left(_) => ParamKind::Type,
@@ -1947,7 +1951,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
let p = p.mod_path();
if p.kind == PathKind::Plain {
if let [n] = p.segments() {
- let c = ConstScalarOrPath::Path(n.clone());
+ let c = ConstRefOrPath::Path(n.clone());
return Some(
GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
);
@@ -1964,14 +1968,14 @@ pub(crate) fn const_or_path_to_chalk(
db: &dyn HirDatabase,
resolver: &Resolver,
expected_ty: Ty,
- value: &ConstScalarOrPath,
+ value: &ConstRefOrPath,
mode: ParamLoweringMode,
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
match value {
- ConstScalarOrPath::Scalar(s) => intern_const_scalar(*s, expected_ty),
- ConstScalarOrPath::Path(n) => {
+ ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
+ ConstRefOrPath::Path(n) => {
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
path_to_const(db, resolver, &path, mode, args, debruijn)
.unwrap_or_else(|| unknown_const(expected_ty))
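A hedged illustration of the `PathKind::Plain` single-segment case in `generic_arg_to_chalk` above: a bare name used as a generic argument may name a const parameter, which is why it is lowered through `ConstRefOrPath::Path` rather than being treated as a type. Plain Rust:

```rust
// `N` in `Buf<N>` and in `[0u8; N]` is exactly such a single-segment const path.
struct Buf<const N: usize>([u8; N]);

fn make<const N: usize>() -> Buf<N> {
    Buf([0u8; N])
}
```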
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index 8c7714b9a..f3a27632b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -19,13 +19,13 @@ use stdx::never;
use crate::{
autoderef::{self, AutoderefKind},
db::HirDatabase,
- from_foreign_def_id,
+ from_chalk_trait_id, from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
- AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
- Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, InEnvironment,
+ Interner, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
};
/// This is used as a key for indexing impls.
@@ -266,11 +266,12 @@ impl TraitImpls {
#[derive(Debug, Eq, PartialEq)]
pub struct InherentImpls {
map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+ invalid_impls: Vec<ImplId>,
}
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let mut impls = Self { map: FxHashMap::default() };
+ let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate);
impls.collect_def_map(db, &crate_def_map);
@@ -283,7 +284,7 @@ impl InherentImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
- let mut impls = Self { map: FxHashMap::default() };
+ let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
if let Some(block_def_map) = db.block_def_map(block) {
impls.collect_def_map(db, &block_def_map);
impls.shrink_to_fit();
@@ -306,11 +307,17 @@ impl InherentImpls {
}
let self_ty = db.impl_self_ty(impl_id);
- let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
- if let Some(fp) = fp {
- self.map.entry(fp).or_default().push(impl_id);
+ let self_ty = self_ty.skip_binders();
+
+ match is_inherent_impl_coherent(db, def_map, &data, self_ty) {
+ true => {
+ // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+ if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) {
+ self.map.entry(fp).or_default().push(impl_id);
+ }
+ }
+ false => self.invalid_impls.push(impl_id),
}
- // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
}
// To better support custom derives, collect impls in all unnamed const items.
@@ -334,6 +341,10 @@ impl InherentImpls {
pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
self.map.values().flat_map(|v| v.iter().copied())
}
+
+ pub fn invalid_impls(&self) -> &[ImplId] {
+ &self.invalid_impls
+ }
}
pub(crate) fn incoherent_inherent_impl_crates(
@@ -579,8 +590,8 @@ impl ReceiverAdjustments {
ty = new_ty.clone();
adjust.push(Adjustment {
kind: Adjust::Deref(match kind {
- // FIXME should we know the mutability here?
- AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ // FIXME should we know the mutability here, when autoref is `None`?
+ AutoderefKind::Overloaded => Some(OverloadedDeref(self.autoref)),
AutoderefKind::Builtin => None,
}),
target: new_ty,
@@ -660,10 +671,10 @@ pub fn lookup_impl_const(
env: Arc<TraitEnvironment>,
const_id: ConstId,
subs: Substitution,
-) -> ConstId {
+) -> (ConstId, Substitution) {
let trait_id = match const_id.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
- _ => return const_id,
+ _ => return (const_id, subs),
};
let substitution = Substitution::from_iter(Interner, subs.iter(Interner));
let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution };
@@ -671,12 +682,14 @@ pub fn lookup_impl_const(
let const_data = db.const_data(const_id);
let name = match const_data.name.as_ref() {
Some(name) => name,
- None => return const_id,
+ None => return (const_id, subs),
};
lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
- .and_then(|assoc| if let AssocItemId::ConstId(id) = assoc { Some(id) } else { None })
- .unwrap_or(const_id)
+ .and_then(
+ |assoc| if let (AssocItemId::ConstId(id), s) = assoc { Some((id, s)) } else { None },
+ )
+ .unwrap_or((const_id, subs))
}
/// Looks up the impl method that actually runs for the trait method `func`.
@@ -687,10 +700,10 @@ pub fn lookup_impl_method(
env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
-) -> FunctionId {
+) -> (FunctionId, Substitution) {
let trait_id = match func.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
- _ => return func,
+ _ => return (func, fn_subst),
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
@@ -701,8 +714,14 @@ pub fn lookup_impl_method(
let name = &db.function_data(func).name;
lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
- .and_then(|assoc| if let AssocItemId::FunctionId(id) = assoc { Some(id) } else { None })
- .unwrap_or(func)
+ .and_then(|assoc| {
+ if let (AssocItemId::FunctionId(id), subst) = assoc {
+ Some((id, subst))
+ } else {
+ None
+ }
+ })
+ .unwrap_or((func, fn_subst))
}
fn lookup_impl_assoc_item_for_trait_ref(
@@ -710,7 +729,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
name: &Name,
-) -> Option<AssocItemId> {
+) -> Option<(AssocItemId, Substitution)> {
let self_ty = trait_ref.self_type_parameter(Interner);
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
let impls = db.trait_impls_in_deps(env.krate);
@@ -718,8 +737,8 @@ fn lookup_impl_assoc_item_for_trait_ref(
let table = InferenceTable::new(db, env);
- let impl_data = find_matching_impl(impls, table, trait_ref)?;
- impl_data.items.iter().find_map(|&it| match it {
+ let (impl_data, impl_subst) = find_matching_impl(impls, table, trait_ref)?;
+ let item = impl_data.items.iter().find_map(|&it| match it {
AssocItemId::FunctionId(f) => {
(db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
}
@@ -730,14 +749,15 @@ fn lookup_impl_assoc_item_for_trait_ref(
.map(|n| n == name)
.and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
AssocItemId::TypeAliasId(_) => None,
- })
+ })?;
+ Some((item, impl_subst))
}
fn find_matching_impl(
mut impls: impl Iterator<Item = ImplId>,
mut table: InferenceTable<'_>,
actual_trait_ref: TraitRef,
-) -> Option<Arc<ImplData>> {
+) -> Option<(Arc<ImplData>, Substitution)> {
let db = table.db;
loop {
let impl_ = impls.next()?;
@@ -758,7 +778,7 @@ fn find_matching_impl(
.into_iter()
.map(|b| b.cast(Interner));
let goal = crate::Goal::all(Interner, wcs);
- table.try_obligation(goal).map(|_| impl_data)
+ table.try_obligation(goal).map(|_| (impl_data, table.resolve_completely(impl_substs)))
});
if r.is_some() {
break r;
@@ -766,6 +786,69 @@ fn find_matching_impl(
}
}
+fn is_inherent_impl_coherent(
+ db: &dyn HirDatabase,
+ def_map: &DefMap,
+ impl_data: &ImplData,
+ self_ty: &Ty,
+) -> bool {
+ let self_ty = self_ty.kind(Interner);
+ let impl_allowed = match self_ty {
+ TyKind::Tuple(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Never
+ | TyKind::Raw(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::Slice(_)
+ | TyKind::Str
+ | TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(),
+
+ &TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(),
+ TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
+ from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate() == def_map.krate()
+ }),
+
+ _ => true,
+ };
+ impl_allowed || {
+ let rustc_has_incoherent_inherent_impls = match self_ty {
+ TyKind::Tuple(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Never
+ | TyKind::Raw(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::Slice(_)
+ | TyKind::Str
+ | TyKind::Scalar(_) => true,
+
+ &TyKind::Adt(AdtId(adt), _) => match adt {
+ hir_def::AdtId::StructId(it) => {
+ db.struct_data(it).rustc_has_incoherent_inherent_impls
+ }
+ hir_def::AdtId::UnionId(it) => {
+ db.union_data(it).rustc_has_incoherent_inherent_impls
+ }
+ hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls,
+ },
+ TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
+ db.trait_data(from_chalk_trait_id(trait_ref.trait_id))
+ .rustc_has_incoherent_inherent_impls
+ }),
+
+ _ => false,
+ };
+ rustc_has_incoherent_inherent_impls
+ && !impl_data.items.is_empty()
+ && impl_data.items.iter().copied().all(|assoc| match assoc {
+ AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
+ AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
+ })
+ }
+}
+
pub fn iterate_path_candidates(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
@@ -821,9 +904,9 @@ pub fn iterate_method_candidates_dyn(
let mut table = InferenceTable::new(db, env.clone());
let ty = table.instantiate_canonical(ty.clone());
- let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+ let deref_chain = autoderef_method_receiver(&mut table, ty);
- let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+ let result = deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
iterate_method_candidates_with_autoref(
&receiver_ty,
adj,
@@ -867,16 +950,20 @@ fn iterate_method_candidates_with_autoref(
return ControlFlow::Continue(());
}
- iterate_method_candidates_by_receiver(
- receiver_ty,
- first_adjustment.clone(),
- db,
- env.clone(),
- traits_in_scope,
- visible_from_module,
- name,
- &mut callback,
- )?;
+ let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| {
+ iterate_method_candidates_by_receiver(
+ receiver_ty,
+ first_adjustment,
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )
+ };
+
+ iterate_method_candidates_by_receiver(receiver_ty, first_adjustment.clone())?;
let refed = Canonical {
value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
@@ -884,16 +971,7 @@ fn iterate_method_candidates_with_autoref(
binders: receiver_ty.binders.clone(),
};
- iterate_method_candidates_by_receiver(
- &refed,
- first_adjustment.with_autoref(Mutability::Not),
- db,
- env.clone(),
- traits_in_scope,
- visible_from_module,
- name,
- &mut callback,
- )?;
+ iterate_method_candidates_by_receiver(&refed, first_adjustment.with_autoref(Mutability::Not))?;
let ref_muted = Canonical {
value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
@@ -904,12 +982,6 @@ fn iterate_method_candidates_with_autoref(
iterate_method_candidates_by_receiver(
&ref_muted,
first_adjustment.with_autoref(Mutability::Mut),
- db,
- env,
- traits_in_scope,
- visible_from_module,
- name,
- &mut callback,
)
}
@@ -1210,8 +1282,8 @@ pub fn resolve_indexing_op(
) -> Option<ReceiverAdjustments> {
let mut table = InferenceTable::new(db, env.clone());
let ty = table.instantiate_canonical(ty);
- let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
- for (ty, adj) in deref_chain.into_iter().zip(adj) {
+ let deref_chain = autoderef_method_receiver(&mut table, ty);
+ for (ty, adj) in deref_chain {
let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
return Some(adj);
@@ -1421,25 +1493,24 @@ fn generic_implements_goal(
fn autoderef_method_receiver(
table: &mut InferenceTable<'_>,
ty: Ty,
-) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
- let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
+) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
+ let mut deref_chain: Vec<_> = Vec::new();
let mut autoderef = autoderef::Autoderef::new(table, ty);
while let Some((ty, derefs)) = autoderef.next() {
- deref_chain.push(autoderef.table.canonicalize(ty).value);
- adjustments.push(ReceiverAdjustments {
- autoref: None,
- autoderefs: derefs,
- unsize_array: false,
- });
+ deref_chain.push((
+ autoderef.table.canonicalize(ty).value,
+ ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false },
+ ));
}
// As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
- if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
- deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
- adjustments.last().cloned(),
- ) {
+ if let Some((TyKind::Array(parameters, _), binders, adj)) =
+ deref_chain.last().map(|(ty, adj)| (ty.value.kind(Interner), ty.binders.clone(), adj))
+ {
let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
- deref_chain.push(Canonical { value: unsized_ty, binders });
- adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
+ deref_chain.push((
+ Canonical { value: unsized_ty, binders },
+ ReceiverAdjustments { unsize_array: true, ..adj.clone() },
+ ));
}
- (deref_chain, adjustments)
+ deref_chain
}
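The final array-unsizing step in `autoderef_method_receiver` above corresponds to the user-visible behaviour that slice methods are callable on array receivers; a small illustrative example, not rust-analyzer internals:

```rust
// `iter` is defined on `[i32]`; after the deref chain, the receiver
// `[i32; 4]` is additionally offered unsized as `[i32]`, so the call resolves.
fn first_even(xs: [i32; 4]) -> Option<i32> {
    xs.iter().copied().find(|x| x % 2 == 0)
}
```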
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
new file mode 100644
index 000000000..7c1cbbdf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -0,0 +1,863 @@
+//! MIR definitions and implementation
+
+use std::{fmt::Display, iter};
+
+use crate::{
+ infer::PointerCast, Const, ConstScalar, InferenceResult, Interner, MemoryMap, Substitution, Ty,
+};
+use chalk_ir::Mutability;
+use hir_def::{
+ expr::{BindingId, Expr, ExprId, Ordering, PatId},
+ DefWithBodyId, FieldId, UnionId, VariantId,
+};
+use la_arena::{Arena, ArenaMap, Idx, RawIdx};
+
+mod eval;
+mod lower;
+mod borrowck;
+mod pretty;
+
+pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
+pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError};
+pub use lower::{lower_to_mir, mir_body_query, mir_body_recover, MirLowerError};
+use smallvec::{smallvec, SmallVec};
+use stdx::impl_from;
+
+use super::consteval::{intern_const_scalar, try_const_usize};
+
+pub type BasicBlockId = Idx<BasicBlock>;
+pub type LocalId = Idx<Local>;
+
+fn return_slot() -> LocalId {
+ LocalId::from_raw(RawIdx::from(0))
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Local {
+ pub ty: Ty,
+}
+
+/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
+/// the memory model. One proposal for a definition of values can be found [on UCG][value-def].
+///
+/// [value-def]: https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/value-domain.md
+///
+/// The most common way to create values is via loading a place. Loading a place is an operation
+/// which reads the memory of the place and converts it to a value. This is a fundamentally *typed*
+/// operation. The nature of the value produced depends on the type of the conversion. Furthermore,
+/// there may be other effects: if the type has a validity constraint loading the place might be UB
+/// if the validity constraint is not met.
+///
+/// **Needs clarification:** Ralf proposes that loading a place not have side-effects.
+/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
+/// something we can even decide without knowing more about Rust's memory model?
+///
+/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
+/// currently implements it, but it seems like this may be something to check against in the
+/// validator.
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Operand {
+ /// Creates a value by loading the given place.
+ ///
+ /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
+ /// is no such requirement.
+ Copy(Place),
+
+ /// Creates a value by loading the place, just like the `Copy` operand.
+ ///
+ /// This *may* additionally overwrite the place with `uninit` bytes, depending on how we decide
+ /// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
+ /// place without first re-initializing it.
+ ///
+ /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
+ Move(Place),
+ /// Constants are already semantically values, and remain unchanged.
+ Constant(Const),
+}
+
+impl Operand {
+ fn from_concrete_const(data: Vec<u8>, memory_map: MemoryMap, ty: Ty) -> Self {
+ Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty))
+ }
+
+ fn from_bytes(data: Vec<u8>, ty: Ty) -> Self {
+ Operand::from_concrete_const(data, MemoryMap::default(), ty)
+ }
+
+ fn const_zst(ty: Ty) -> Operand {
+ Self::from_bytes(vec![], ty)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ProjectionElem<V, T> {
+ Deref,
+ Field(FieldId),
+ TupleField(usize),
+ Index(V),
+ ConstantIndex { offset: u64, min_length: u64, from_end: bool },
+ Subslice { from: u64, to: u64, from_end: bool },
+ //Downcast(Option<Symbol>, VariantIdx),
+ OpaqueCast(T),
+}
+
+type PlaceElem = ProjectionElem<LocalId, Ty>;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Place {
+ pub local: LocalId,
+ pub projection: Vec<PlaceElem>,
+}
+
+impl From<LocalId> for Place {
+ fn from(local: LocalId) -> Self {
+ Self { local, projection: vec![] }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum AggregateKind {
+ /// The type is the element type
+ Array(Ty),
+ /// The type is the tuple type
+ Tuple(Ty),
+ Adt(VariantId, Substitution),
+ Union(UnionId, FieldId),
+ //Closure(LocalDefId, SubstsRef),
+ //Generator(LocalDefId, SubstsRef, Movability),
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct SwitchTargets {
+ /// Possible values. The locations to branch to in each case
+ /// are found in the corresponding indices from the `targets` vector.
+ values: SmallVec<[u128; 1]>,
+
+ /// Possible branch sites. The last element of this vector is used
+ /// for the otherwise branch, so targets.len() == values.len() + 1
+ /// should hold.
+ //
+ // This invariant is quite non-obvious and also could be improved.
+ // One way to make this invariant is to have something like this instead:
+ //
+ // branches: Vec<(ConstInt, BasicBlock)>,
+ // otherwise: Option<BasicBlock> // exhaustive if None
+ //
+ // However we’ve decided to keep this as-is until we find a case
+ // where some other approach seems to be strictly better than the others.
+ targets: SmallVec<[BasicBlockId; 2]>,
+}
+
+impl SwitchTargets {
+ /// Creates switch targets from an iterator of values and target blocks.
+ ///
+ /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
+ /// `goto otherwise;`.
+ pub fn new(
+ targets: impl Iterator<Item = (u128, BasicBlockId)>,
+ otherwise: BasicBlockId,
+ ) -> Self {
+ let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
+ targets.push(otherwise);
+ Self { values, targets }
+ }
+
+ /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
+ /// and to `else_` if not.
+ pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
+ Self { values: smallvec![value], targets: smallvec![then, else_] }
+ }
+
+ /// Returns the fallback target that is jumped to when none of the values match the operand.
+ pub fn otherwise(&self) -> BasicBlockId {
+ *self.targets.last().unwrap()
+ }
+
+ /// Returns an iterator over the switch targets.
+ ///
+ /// The iterator will yield tuples containing the value and corresponding target to jump to, not
+ /// including the `otherwise` fallback target.
+ ///
+ /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
+ pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
+ iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
+ }
+
+ /// Returns a slice with all possible jump targets (including the fallback target).
+ pub fn all_targets(&self) -> &[BasicBlockId] {
+ &self.targets
+ }
+
+ /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
+ /// specific value. This cannot fail, as it'll return the `otherwise`
+ /// branch if there's not a specific match for the value.
+ pub fn target_for_value(&self, value: u128) -> BasicBlockId {
+ self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
+ }
+}
+
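
For intuition, a compilable std-only mirror of the `SwitchTargets` invariant above (`targets.len() == values.len() + 1`, with the last target acting as the `otherwise` fallback), using a plain `u32` as a stand-in for `BasicBlockId`:

```rust
// Std-only mirror of the SwitchTargets shape from the diff.
#[derive(Debug)]
struct SwitchTargets {
    values: Vec<u128>,
    targets: Vec<u32>, // stand-in for BasicBlockId
}

impl SwitchTargets {
    fn static_if(value: u128, then: u32, else_: u32) -> Self {
        Self { values: vec![value], targets: vec![then, else_] }
    }

    fn otherwise(&self) -> u32 {
        *self.targets.last().unwrap()
    }

    fn target_for_value(&self, value: u128) -> u32 {
        self.values
            .iter()
            .zip(&self.targets)
            .find_map(|(v, t)| (*v == value).then_some(*t))
            .unwrap_or_else(|| self.otherwise())
    }
}

fn main() {
    // A `SwitchInt` on a bool-like discriminant: 0 jumps to block 1, anything else to block 2.
    let targets = SwitchTargets::static_if(0, 1, 2);
    assert_eq!(targets.target_for_value(0), 1);
    assert_eq!(targets.target_for_value(1), 2); // falls through to `otherwise`
    println!("{targets:?}");
}
```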
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Terminator {
+ /// Block has one successor; we continue execution there.
+ Goto { target: BasicBlockId },
+
+ /// Switches based on the computed value.
+ ///
+ /// First, evaluates the `discr` operand. The type of the operand must be a signed or unsigned
+ /// integer, char, or bool, and must match the given type. Then, if the list of switch targets
+ /// contains the computed value, continues execution at the associated basic block. Otherwise,
+ /// continues execution at the "otherwise" basic block.
+ ///
+ /// Target values may not appear more than once.
+ SwitchInt {
+ /// The discriminant value being tested.
+ discr: Operand,
+
+ targets: SwitchTargets,
+ },
+
+ /// Indicates that the landing pad is finished and that the process should continue unwinding.
+ ///
+ /// Like a return, this marks the end of this invocation of the function.
+ ///
+ /// Only permitted in cleanup blocks. `Resume` is not permitted with `-C unwind=abort` after
+ /// deaggregation runs.
+ Resume,
+
+ /// Indicates that the landing pad is finished and that the process should abort.
+ ///
+ /// Used to prevent unwinding for foreign items or with `-C unwind=abort`. Only permitted in
+ /// cleanup blocks.
+ Abort,
+
+ /// Returns from the function.
+ ///
+ /// Like function calls, the exact semantics of returns in Rust are unclear. Returning very
+ /// likely at least assigns the value currently in the return place (`_0`) to the place
+ /// specified in the associated `Call` terminator in the calling function, as if assigned via
+ /// `dest = move _0`. It might additionally do other things, like have side-effects in the
+ /// aliasing model.
+ ///
+ /// If the body is a generator body, this has slightly different semantics; it instead causes a
+ /// `GeneratorState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
+ /// to the return place.
+ Return,
+
+ /// Indicates a terminator that can never be reached.
+ ///
+ /// Executing this terminator is UB.
+ Unreachable,
+
+ /// The behavior of this statement differs significantly before and after drop elaboration.
+ /// After drop elaboration, `Drop` executes the drop glue for the specified place, after which
+ /// it continues execution/unwinds at the given basic blocks. It is possible that executing drop
+ /// glue is special - this would be part of Rust's memory model. (**FIXME**: do we have an
+ /// issue tracking whether drop glue has any interesting semantics in addition to those of a function
+ /// call?)
+ ///
+ /// `Drop` before drop elaboration is a *conditional* execution of the drop glue. Specifically, the
+ /// `Drop` will be executed if...
+ ///
+ /// **Needs clarification**: End of that sentence. This in effect should document the exact
+ /// behavior of drop elaboration. The following sounds vaguely right, but I'm not quite sure:
+ ///
+ /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
+ /// > the place or one of its "parents" occurred more recently than a move out of it. This does not
+ /// > consider indirect assignments.
+ Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },
+
+ /// Drops the place and assigns a new value to it.
+ ///
+ /// This first performs the exact same operation as the pre drop-elaboration `Drop` terminator;
+ /// it then additionally assigns the `value` to the `place` as if by an assignment statement.
+ /// This assignment occurs both in the unwind and the regular code paths. The semantics are best
+ /// explained by the elaboration:
+ ///
+ /// ```ignore (MIR)
+ /// BB0 {
+ /// DropAndReplace(P <- V, goto BB1, unwind BB2)
+ /// }
+ /// ```
+ ///
+ /// becomes
+ ///
+ /// ```ignore (MIR)
+ /// BB0 {
+ /// Drop(P, goto BB1, unwind BB2)
+ /// }
+ /// BB1 {
+ /// // P is now uninitialized
+ /// P <- V
+ /// }
+ /// BB2 {
+ /// // P is now uninitialized -- its dtor panicked
+ /// P <- V
+ /// }
+ /// ```
+ ///
+ /// Disallowed after drop elaboration.
+ DropAndReplace {
+ place: Place,
+ value: Operand,
+ target: BasicBlockId,
+ unwind: Option<BasicBlockId>,
+ },
+
+ /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
+ /// the referred to function. The operand types must match the argument types of the function.
+ /// The return place type must match the return type. The type of the `func` operand must be
+ /// callable, meaning either a function pointer, a function type, or a closure type.
+ ///
+ /// **Needs clarification**: The exact semantics of this. Current backends rely on `move`
+ /// operands not aliasing the return place. It is unclear how this is justified in MIR, see
+ /// [#71117].
+ ///
+ /// [#71117]: https://github.com/rust-lang/rust/issues/71117
+ Call {
+ /// The function that’s being called.
+ func: Operand,
+ /// Arguments the function is called with.
+ /// These are owned by the callee, which is free to modify them.
+ /// This allows the memory occupied by "by-value" arguments to be
+ /// reused across function calls without duplicating the contents.
+ args: Vec<Operand>,
+ /// Where the returned value will be written
+ destination: Place,
+ /// Where to go after this call returns. If none, the call necessarily diverges.
+ target: Option<BasicBlockId>,
+ /// Cleanups to be done if the call unwinds.
+ cleanup: Option<BasicBlockId>,
+ /// `true` if this is from a call in HIR rather than from an overloaded
+ /// operator. True for overloaded function call.
+ from_hir_call: bool,
+ // This `Span` is the span of the function, without the dot and receiver
+ // (e.g. `foo(a, b)` in `x.foo(a, b)`).
+ //fn_span: Span,
+ },
+
+ /// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`,
+ /// initiates a panic. Initiating a panic corresponds to a `Call` terminator with some
+ /// unspecified constant as the function to call, all the operands stored in the `AssertMessage`
+ /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
+ /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
+ /// assertion does not fail, execution continues at the specified basic block.
+ Assert {
+ cond: Operand,
+ expected: bool,
+ //msg: AssertMessage,
+ target: BasicBlockId,
+ cleanup: Option<BasicBlockId>,
+ },
+
+ /// Marks a suspend point.
+ ///
+ /// Like `Return` terminators in generator bodies, this computes `value` and then a
+ /// `GeneratorState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
+ /// the return place of the function calling this one, and execution continues in the calling
+ /// function. When next invoked with the same first argument, execution of this function
+ /// continues at the `resume` basic block, with the second argument written to the `resume_arg`
+ /// place. If the generator is dropped before then, the `drop` basic block is invoked.
+ ///
+ /// Not permitted in bodies that are not generator bodies, or after generator lowering.
+ ///
+ /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
+ Yield {
+ /// The value to return.
+ value: Operand,
+ /// Where to resume to.
+ resume: BasicBlockId,
+ /// The place to store the resume argument in.
+ resume_arg: Place,
+ /// Cleanup to be done if the generator is dropped at this suspend point.
+ drop: Option<BasicBlockId>,
+ },
+
+ /// Indicates the end of dropping a generator.
+ ///
+ /// Semantically just a `return` (from the generator's drop glue). Only permitted in the same situations
+ /// as `yield`.
+ ///
+ /// **Needs clarification**: Is that even correct? The generator drop code is always confusing
+ /// to me, because it's not even really in the current body.
+ ///
+ /// **Needs clarification**: Are there type system constraints on these terminators? Should
+ /// there be a "block type" like `cleanup` blocks for them?
+ GeneratorDrop,
+
+ /// A block where control flow only ever takes one real path, but borrowck needs to be more
+ /// conservative.
+ ///
+ /// At runtime this is semantically just a goto.
+ ///
+ /// Disallowed after drop elaboration.
+ FalseEdge {
+ /// The target normal control flow will take.
+ real_target: BasicBlockId,
+ /// A block control flow could conceptually jump to, but won't in
+ /// practice.
+ imaginary_target: BasicBlockId,
+ },
+
+ /// A terminator for blocks that only take one path in reality, but where we reserve the right
+ /// to unwind in borrowck, even if it won't happen in practice. This can arise in infinite loops
+ /// with no function calls for example.
+ ///
+ /// At runtime this is semantically just a goto.
+ ///
+ /// Disallowed after drop elaboration.
+ FalseUnwind {
+ /// The target normal control flow will take.
+ real_target: BasicBlockId,
+ /// The imaginary cleanup block link. This particular path will never be taken
+ /// in practice, but in order to avoid fragility we want to always
+ /// consider it in borrowck. We don't want to accept programs which
+ /// pass borrowck only when `panic=abort` or some assertions are disabled
+ /// due to release vs. debug mode builds. This needs to be an `Option` because
+ /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
+ unwind: Option<BasicBlockId>,
+ },
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum BorrowKind {
+ /// Data must be immutable and is aliasable.
+ Shared,
+
+ /// The immediately borrowed place must be immutable, but projections from
+ /// it don't need to be. For example, a shallow borrow of `a.b` doesn't
+ /// conflict with a mutable borrow of `a.b.c`.
+ ///
+ /// This is used when lowering matches: when matching on a place we want to
+ /// ensure that the place has the same value from the start of the match until
+ /// an arm is selected. This prevents this code from compiling:
+ /// ```compile_fail,E0510
+ /// let mut x = &Some(0);
+ /// match *x {
+ /// None => (),
+ /// Some(_) if { x = &None; false } => (),
+ /// Some(_) => (),
+ /// }
+ /// ```
+ /// This can't be a shared borrow because mutably borrowing `(*x as Some).0`
+ /// should not prevent `if let None = x { ... }`, for example, because the
+ /// mutating `(*x as Some).0` can't affect the discriminant of `x`.
+ /// We can also report errors with this kind of borrow differently.
+ Shallow,
+
+ /// Data must be immutable but not aliasable. This kind of borrow
+ /// cannot currently be expressed by the user and is used only in
+ /// implicit closure bindings. It is needed when the closure is
+ /// borrowing or mutating a mutable referent, e.g.:
+ /// ```
+ /// let mut z = 3;
+ /// let x: &mut isize = &mut z;
+ /// let y = || *x += 5;
+ /// ```
+ /// If we were to try to translate this closure into a more explicit
+ /// form, we'd encounter an error with the code as written:
+ /// ```compile_fail,E0594
+ /// struct Env<'a> { x: &'a &'a mut isize }
+ /// let mut z = 3;
+ /// let x: &mut isize = &mut z;
+ /// let y = (&mut Env { x: &x }, fn_ptr); // Closure is pair of env and fn
+ /// fn fn_ptr(env: &mut Env) { **env.x += 5; }
+ /// ```
+ /// This is then illegal because you cannot mutate an `&mut` found
+ /// in an aliasable location. To solve, you'd have to translate with
+ /// an `&mut` borrow:
+ /// ```compile_fail,E0596
+ /// struct Env<'a> { x: &'a mut &'a mut isize }
+ /// let mut z = 3;
+ /// let x: &mut isize = &mut z;
+ /// let y = (&mut Env { x: &mut x }, fn_ptr); // changed from &x to &mut x
+ /// fn fn_ptr(env: &mut Env) { **env.x += 5; }
+ /// ```
+ /// Now the assignment to `**env.x` is legal, but creating a
+ /// mutable pointer to `x` is not because `x` is not mutable. We
+ /// could fix this by declaring `x` as `let mut x`. This is ok in
+ /// user code, if awkward, but extra weird for closures, since the
+ /// borrow is hidden.
+ ///
+ /// So we introduce a "unique imm" borrow -- the referent is
+ /// immutable, but not aliasable. This solves the problem. For
+ /// simplicity, we don't give users a way to express this
+ /// borrow; it's just used when translating closures.
+ Unique,
+
+ /// Data is mutable and not aliasable.
+ Mut {
+ /// `true` if this borrow arose from method-call auto-ref
+ /// (i.e., `adjustment::Adjust::Borrow`).
+ allow_two_phase_borrow: bool,
+ },
+}
+
+impl BorrowKind {
+ fn from_hir(m: hir_def::type_ref::Mutability) -> Self {
+ match m {
+ hir_def::type_ref::Mutability::Shared => BorrowKind::Shared,
+ hir_def::type_ref::Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
+ }
+ }
+
+ fn from_chalk(m: Mutability) -> Self {
+ match m {
+ Mutability::Not => BorrowKind::Shared,
+ Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum UnOp {
+ /// The `!` operator for logical inversion
+ Not,
+ /// The `-` operator for negation
+ Neg,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum BinOp {
+ /// The `+` operator (addition)
+ Add,
+ /// The `-` operator (subtraction)
+ Sub,
+ /// The `*` operator (multiplication)
+ Mul,
+ /// The `/` operator (division)
+ ///
+ /// Division by zero is UB, because the compiler should have inserted checks
+ /// prior to this.
+ Div,
+ /// The `%` operator (modulus)
+ ///
+ /// Using zero as the modulus (second operand) is UB, because the compiler
+ /// should have inserted checks prior to this.
+ Rem,
+ /// The `^` operator (bitwise xor)
+ BitXor,
+ /// The `&` operator (bitwise and)
+ BitAnd,
+ /// The `|` operator (bitwise or)
+ BitOr,
+ /// The `<<` operator (shift left)
+ ///
+ /// The offset is truncated to the size of the first operand before shifting.
+ Shl,
+ /// The `>>` operator (shift right)
+ ///
+ /// The offset is truncated to the size of the first operand before shifting.
+ Shr,
+ /// The `==` operator (equality)
+ Eq,
+ /// The `<` operator (less than)
+ Lt,
+ /// The `<=` operator (less than or equal to)
+ Le,
+ /// The `!=` operator (not equal to)
+ Ne,
+ /// The `>=` operator (greater than or equal to)
+ Ge,
+ /// The `>` operator (greater than)
+ Gt,
+ /// The `ptr.offset` operator
+ Offset,
+}
+
+impl Display for BinOp {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str(match self {
+ BinOp::Add => "+",
+ BinOp::Sub => "-",
+ BinOp::Mul => "*",
+ BinOp::Div => "/",
+ BinOp::Rem => "%",
+ BinOp::BitXor => "^",
+ BinOp::BitAnd => "&",
+ BinOp::BitOr => "|",
+ BinOp::Shl => "<<",
+ BinOp::Shr => ">>",
+ BinOp::Eq => "==",
+ BinOp::Lt => "<",
+ BinOp::Le => "<=",
+ BinOp::Ne => "!=",
+ BinOp::Ge => ">=",
+ BinOp::Gt => ">",
+ BinOp::Offset => "`offset`",
+ })
+ }
+}
+
+impl From<hir_def::expr::ArithOp> for BinOp {
+ fn from(value: hir_def::expr::ArithOp) -> Self {
+ match value {
+ hir_def::expr::ArithOp::Add => BinOp::Add,
+ hir_def::expr::ArithOp::Mul => BinOp::Mul,
+ hir_def::expr::ArithOp::Sub => BinOp::Sub,
+ hir_def::expr::ArithOp::Div => BinOp::Div,
+ hir_def::expr::ArithOp::Rem => BinOp::Rem,
+ hir_def::expr::ArithOp::Shl => BinOp::Shl,
+ hir_def::expr::ArithOp::Shr => BinOp::Shr,
+ hir_def::expr::ArithOp::BitXor => BinOp::BitXor,
+ hir_def::expr::ArithOp::BitOr => BinOp::BitOr,
+ hir_def::expr::ArithOp::BitAnd => BinOp::BitAnd,
+ }
+ }
+}
+
+impl From<hir_def::expr::CmpOp> for BinOp {
+ fn from(value: hir_def::expr::CmpOp) -> Self {
+ match value {
+ hir_def::expr::CmpOp::Eq { negated: false } => BinOp::Eq,
+ hir_def::expr::CmpOp::Eq { negated: true } => BinOp::Ne,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: false } => BinOp::Ge,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: true } => BinOp::Gt,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: false } => BinOp::Le,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: true } => BinOp::Lt,
+ }
+ }
+}
+
+impl From<Operand> for Rvalue {
+ fn from(x: Operand) -> Self {
+ Self::Use(x)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum CastKind {
+ /// An exposing pointer to address cast. A cast between a pointer and an integer type, or
+ /// between a function pointer and an integer type.
+ /// See the docs on `expose_addr` for more details.
+ PointerExposeAddress,
+ /// An address-to-pointer cast that picks up an exposed provenance.
+ /// See the docs on `from_exposed_addr` for more details.
+ PointerFromExposedAddress,
+ /// All sorts of pointer-to-pointer casts. Note that reference-to-raw-ptr casts are
+ /// translated into `&raw mut/const *r`, i.e., they are not actually casts.
+ Pointer(PointerCast),
+ /// Cast into a dyn* object.
+ DynStar,
+ IntToInt,
+ FloatToInt,
+ FloatToFloat,
+ IntToFloat,
+ PtrToPtr,
+ FnPtrToPtr,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Rvalue {
+ /// Yields the operand unchanged
+ Use(Operand),
+
+ /// Creates an array where each element is the value of the operand.
+ ///
+ /// This is the cause of a bug in the case where the repetition count is zero because the value
+ /// is not dropped, see [#74836].
+ ///
+ /// Corresponds to source code like `[x; 32]`.
+ ///
+ /// [#74836]: https://github.com/rust-lang/rust/issues/74836
+ //Repeat(Operand, ty::Const),
+
+ /// Creates a reference of the indicated kind to the place.
+ ///
+ /// There is not much to document here, because besides the obvious parts the semantics of this
+ /// are essentially entirely a part of the aliasing model. There are many UCG issues discussing
+ /// exactly what the behavior of this operation should be.
+ ///
+ /// `Shallow` borrows are disallowed after drop lowering.
+ Ref(BorrowKind, Place),
+
+ /// Creates a pointer/reference to the given thread local.
+ ///
+ /// The yielded type is a `*mut T` if the static is mutable, otherwise if the static is extern a
+ /// `*const T`, and if neither of those apply a `&T`.
+ ///
+ /// **Note:** This is a runtime operation that actually executes code and is in this sense more
+ /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
+ /// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
+ ///
+ /// **Needs clarification**: Are there weird additional semantics here related to the runtime
+ /// nature of this operation?
+ //ThreadLocalRef(DefId),
+
+ /// Creates a pointer with the indicated mutability to the place.
+ ///
+ /// This is generated by pointer casts like `&v as *const _` or raw address of expressions like
+ /// `&raw v` or `addr_of!(v)`.
+ ///
+ /// Like with references, the semantics of this operation are heavily dependent on the aliasing
+ /// model.
+ //AddressOf(Mutability, Place),
+
+ /// Yields the length of the place, as a `usize`.
+ ///
+ /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
+ /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
+ /// ill-formed for places of other types.
+ Len(Place),
+
+ /// Performs essentially all of the casts that can be performed via `as`.
+ ///
+ /// This allows for casts from/to a variety of types.
+ ///
+ /// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
+ /// `ArrayToPointer` and `MutToConstPointer` are special.
+ Cast(CastKind, Operand, Ty),
+
+ // FIXME link to `pointer::offset` when it hits stable.
+ /// * `Offset` has the same semantics as `pointer::offset`, except that the second
+ /// parameter may be a `usize` as well.
+ /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
+ /// raw pointers, or function pointers and return a `bool`. The types of the operands must be
+ /// matching, up to the usual caveat of the lifetimes in function pointers.
+ /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
+ /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is
+ /// truncated as needed.
+ /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
+ /// types and return a value of that type.
+ /// * The remaining operations accept signed integers, unsigned integers, or floats with
+ /// matching types and return a value of that type.
+ //BinaryOp(BinOp, Box<(Operand, Operand)>),
+
+ /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
+ ///
+ /// When overflow checking is disabled and we are generating run-time code, the error condition
+ /// is false. Otherwise, and always during CTFE, the error condition is determined as described
+ /// below.
+ ///
+ /// For addition, subtraction, and multiplication on integers the error condition is set when
+ /// the infinite precision result would be unequal to the actual result.
+ ///
+ /// For shift operations on integers the error condition is set when the value of the right-hand
+ /// side is greater than or equal to the number of bits in the type of the left-hand side, or
+ /// when the value of the right-hand side is negative.
+ ///
+ /// Other combinations of types and operators are unsupported.
+ CheckedBinaryOp(BinOp, Operand, Operand),
+
+ /// Computes a value as described by the operation.
+ //NullaryOp(NullOp, Ty),
+
+ /// Exactly like `BinaryOp`, but with fewer operands.
+ ///
+ /// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
+ /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
+ /// return a value with the same type as their operand.
+ UnaryOp(UnOp, Operand),
+
+ /// Computes the discriminant of the place, returning it as an integer of type
+ /// [`discriminant_ty`]. Returns zero for types without discriminant.
+ ///
+ /// The validity requirements for the underlying value are undecided for this rvalue, see
+ /// [#91095]. Note too that the value of the discriminant is not the same thing as the
+ /// variant index; use [`discriminant_for_variant`] to convert.
+ ///
+ /// [`discriminant_ty`]: crate::ty::Ty::discriminant_ty
+ /// [#91095]: https://github.com/rust-lang/rust/issues/91095
+ /// [`discriminant_for_variant`]: crate::ty::Ty::discriminant_for_variant
+ Discriminant(Place),
+
+ /// Creates an aggregate value, like a tuple or struct.
+ ///
+ /// This is needed because dataflow analysis needs to distinguish
+ /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
+ /// has a destructor.
+ ///
+ /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After
+ /// generator lowering, `Generator` aggregate kinds are disallowed too.
+ Aggregate(AggregateKind, Vec<Operand>),
+
+ /// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
+ ///
+ /// This is different from a normal transmute because dataflow analysis will treat the box as
+ /// initialized but its content as uninitialized. Like other pointer casts, this in general
+ /// affects alias analysis.
+ ShallowInitBox(Operand, Ty),
+
+ /// A CopyForDeref is equivalent to a read from a place at the
+ /// codegen level, but is treated specially by drop elaboration. When such a read happens, it
+ /// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
+ /// that the only use of the returned value is a deref operation, immediately
+ /// followed by one or more projections. Drop elaboration treats this rvalue as if the
+ /// read never happened and just projects further. This allows simplifying various MIR
+ /// optimizations and codegen backends that previously had to handle deref operations anywhere
+ /// in a place.
+ CopyForDeref(Place),
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum StatementKind {
+ Assign(Place, Rvalue),
+ //FakeRead(Box<(FakeReadCause, Place)>),
+ //SetDiscriminant {
+ // place: Box<Place>,
+ // variant_index: VariantIdx,
+ //},
+ Deinit(Place),
+ StorageLive(LocalId),
+ StorageDead(LocalId),
+ //Retag(RetagKind, Box<Place>),
+ //AscribeUserType(Place, UserTypeProjection, Variance),
+ //Intrinsic(Box<NonDivergingIntrinsic>),
+ Nop,
+}
+impl StatementKind {
+ fn with_span(self, span: MirSpan) -> Statement {
+ Statement { kind: self, span }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Statement {
+ pub kind: StatementKind,
+ pub span: MirSpan,
+}
+
+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct BasicBlock {
+ /// List of statements in this block.
+ pub statements: Vec<Statement>,
+
+ /// Terminator for this block.
+ ///
+ /// N.B., this should generally ONLY be `None` during construction.
+ /// Therefore, you should generally access it via the
+ /// `terminator()` or `terminator_mut()` methods. The only
+ /// exception is that certain passes, such as `simplify_cfg`, swap
+ /// out the terminator temporarily with `None` while they continue
+ /// to recurse over the set of basic blocks.
+ pub terminator: Option<Terminator>,
+
+ /// If true, this block lies on an unwind path. This is used
+ /// during codegen where distinct kinds of basic blocks may be
+ /// generated (particularly for MSVC cleanup). Unwind blocks must
+ /// only branch to other unwind blocks.
+ pub is_cleanup: bool,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct MirBody {
+ pub basic_blocks: Arena<BasicBlock>,
+ pub locals: Arena<Local>,
+ pub start_block: BasicBlockId,
+ pub owner: DefWithBodyId,
+ pub arg_count: usize,
+ pub binding_locals: ArenaMap<BindingId, LocalId>,
+ pub param_locals: Vec<LocalId>,
+}
+
+fn const_as_usize(c: &Const) -> usize {
+ try_const_usize(c).unwrap() as usize
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum MirSpan {
+ ExprId(ExprId),
+ PatId(PatId),
+ Unknown,
+}
+
+impl_from!(ExprId, PatId for MirSpan);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
new file mode 100644
index 000000000..c8729af86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -0,0 +1,223 @@
+//! MIR borrow checker, which is used in diagnostics like `unused_mut`
+
+// Currently this is an ad-hoc implementation, only useful for mutability analysis. Feel free to remove all of it
+// if needed when implementing a proper borrow checker.
+
+use std::sync::Arc;
+
+use hir_def::DefWithBodyId;
+use la_arena::ArenaMap;
+use stdx::never;
+
+use crate::db::HirDatabase;
+
+use super::{
+ BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, Place, ProjectionElem,
+ Rvalue, StatementKind, Terminator,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+/// Stores the spans which imply that the local should be mutable.
+pub enum MutabilityReason {
+ Mut { spans: Vec<MirSpan> },
+ Not,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct BorrowckResult {
+ pub mir_body: Arc<MirBody>,
+ pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
+}
+
+pub fn borrowck_query(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+) -> Result<Arc<BorrowckResult>, MirLowerError> {
+ let _p = profile::span("borrowck_query");
+ let body = db.mir_body(def)?;
+ let r = BorrowckResult { mutability_of_locals: mutability_of_locals(&body), mir_body: body };
+ Ok(Arc::new(r))
+}
+
+fn is_place_direct(lvalue: &Place) -> bool {
+ !lvalue.projection.iter().any(|x| *x == ProjectionElem::Deref)
+}
+
+enum ProjectionCase {
+ /// The place is the local itself, with no projections
+ Direct,
+ /// The place is a field or an index/slice of the local
+ DirectPart,
+ /// The place goes through a deref
+ Indirect,
+}
+
+fn place_case(lvalue: &Place) -> ProjectionCase {
+ let mut is_part_of = false;
+ for proj in lvalue.projection.iter().rev() {
+ match proj {
+ ProjectionElem::Deref => return ProjectionCase::Indirect, // It's indirect
+ ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::Field(_)
+ | ProjectionElem::TupleField(_)
+ | ProjectionElem::Index(_) => {
+ is_part_of = true;
+ }
+ ProjectionElem::OpaqueCast(_) => (),
+ }
+ }
+ if is_part_of {
+ ProjectionCase::DirectPart
+ } else {
+ ProjectionCase::Direct
+ }
+}
+
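
A std-only sketch of the classification rule implemented by `place_case` above, with a simplified projection enum (the real one carries `FieldId`s and index locals): a deref anywhere makes the access indirect; otherwise any field or index projection makes it a part of the local.

```rust
// Simplified mirror of the projection classification used by this borrow checker.
enum Proj {
    Deref,
    Field,
    Index,
}

#[derive(Debug, PartialEq)]
enum ProjectionCase {
    Direct,
    DirectPart,
    Indirect,
}

fn classify(projection: &[Proj]) -> ProjectionCase {
    let mut is_part_of = false;
    for proj in projection.iter().rev() {
        match proj {
            // Any deref means we write through a pointer, not into the local itself.
            Proj::Deref => return ProjectionCase::Indirect,
            Proj::Field | Proj::Index => is_part_of = true,
        }
    }
    if is_part_of {
        ProjectionCase::DirectPart
    } else {
        ProjectionCase::Direct
    }
}

fn main() {
    assert_eq!(classify(&[]), ProjectionCase::Direct); // x = ...
    assert_eq!(classify(&[Proj::Field]), ProjectionCase::DirectPart); // x.f = ...
    assert_eq!(classify(&[Proj::Index]), ProjectionCase::DirectPart); // x[i] = ...
    assert_eq!(classify(&[Proj::Deref, Proj::Field]), ProjectionCase::Indirect); // (*x).f = ...
}
```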
+/// Returns a map from basic blocks to the set of locals that might have been initialized at some
+/// point before the start of the block. Only `StorageDead` can remove a local from this map; we
+/// ignore `Uninit`, `drop`, and similar operations after initialization.
+fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
+ let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
+ body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
+ fn dfs(
+ body: &MirBody,
+ b: BasicBlockId,
+ l: LocalId,
+ result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
+ ) {
+ let mut is_ever_initialized = result[b][l]; // It must already be filled, as we use it as the visited mark for the DFS
+ let block = &body.basic_blocks[b];
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(p, _) => {
+ if p.projection.len() == 0 && p.local == l {
+ is_ever_initialized = true;
+ }
+ }
+ StatementKind::StorageDead(p) => {
+ if *p == l {
+ is_ever_initialized = false;
+ }
+ }
+ StatementKind::Deinit(_) | StatementKind::Nop | StatementKind::StorageLive(_) => (),
+ }
+ }
+ let Some(terminator) = &block.terminator else {
+ never!("Terminator should be none only in construction");
+ return;
+ };
+ let targets = match terminator {
+ Terminator::Goto { target } => vec![*target],
+ Terminator::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
+ Terminator::Resume
+ | Terminator::Abort
+ | Terminator::Return
+ | Terminator::Unreachable => vec![],
+ Terminator::Call { target, cleanup, destination, .. } => {
+ if destination.projection.len() == 0 && destination.local == l {
+ is_ever_initialized = true;
+ }
+ target.into_iter().chain(cleanup.into_iter()).copied().collect()
+ }
+ Terminator::Drop { .. }
+ | Terminator::DropAndReplace { .. }
+ | Terminator::Assert { .. }
+ | Terminator::Yield { .. }
+ | Terminator::GeneratorDrop
+ | Terminator::FalseEdge { .. }
+ | Terminator::FalseUnwind { .. } => {
+ never!("We don't emit these MIR terminators yet");
+ vec![]
+ }
+ };
+ for target in targets {
+ if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
+ result[target].insert(l, is_ever_initialized);
+ dfs(body, target, l, result);
+ }
+ }
+ }
+ for &l in &body.param_locals {
+ result[body.start_block].insert(l, true);
+ dfs(body, body.start_block, l, &mut result);
+ }
+ for l in body.locals.iter().map(|x| x.0) {
+ if !result[body.start_block].contains_idx(l) {
+ result[body.start_block].insert(l, false);
+ dfs(body, body.start_block, l, &mut result);
+ }
+ }
+ result
+}
+
+fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
+ let mut result: ArenaMap<LocalId, MutabilityReason> =
+ body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
+ let mut push_mut_span = |local, span| match &mut result[local] {
+ MutabilityReason::Mut { spans } => spans.push(span),
+ x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
+ };
+ let ever_init_maps = ever_initialized_map(body);
+ for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
+ let block = &body.basic_blocks[block_id];
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(place, value) => {
+ match place_case(place) {
+ ProjectionCase::Direct => {
+ if ever_init_map.get(place.local).copied().unwrap_or_default() {
+ push_mut_span(place.local, statement.span);
+ } else {
+ ever_init_map.insert(place.local, true);
+ }
+ }
+ ProjectionCase::DirectPart => {
+ // Partial initialization is not supported, so it is definitely `mut`
+ push_mut_span(place.local, statement.span);
+ }
+ ProjectionCase::Indirect => (),
+ }
+ if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
+ if is_place_direct(p) {
+ push_mut_span(p.local, statement.span);
+ }
+ }
+ }
+ StatementKind::StorageDead(p) => {
+ ever_init_map.insert(*p, false);
+ }
+ StatementKind::Deinit(_) | StatementKind::StorageLive(_) | StatementKind::Nop => (),
+ }
+ }
+ let Some(terminator) = &block.terminator else {
+ never!("Terminator should be none only in construction");
+ continue;
+ };
+ match terminator {
+ Terminator::Goto { .. }
+ | Terminator::Resume
+ | Terminator::Abort
+ | Terminator::Return
+ | Terminator::Unreachable
+ | Terminator::FalseEdge { .. }
+ | Terminator::FalseUnwind { .. }
+ | Terminator::GeneratorDrop
+ | Terminator::SwitchInt { .. }
+ | Terminator::Drop { .. }
+ | Terminator::DropAndReplace { .. }
+ | Terminator::Assert { .. }
+ | Terminator::Yield { .. } => (),
+ Terminator::Call { destination, .. } => {
+ if destination.projection.len() == 0 {
+ if ever_init_map.get(destination.local).copied().unwrap_or_default() {
+ push_mut_span(destination.local, MirSpan::Unknown);
+ } else {
+ ever_init_map.insert(destination.local, true);
+ }
+ }
+ }
+ }
+ }
+ result
+}
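
The rule `mutability_of_locals` applies can be summarised as: a local must be `mut` if it is assigned again after it has ever been initialized, or if a `&mut` borrow of it is taken. A small std-only sketch of that rule for a single straight-line block (the real analysis additionally tracks `StorageDead` and propagates the ever-initialized map across the CFG):

```rust
use std::collections::{HashMap, HashSet};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Local(u32);

enum Stmt {
    Assign(Local),
    MutBorrow(Local),
}

fn needs_mut(stmts: &[Stmt]) -> HashSet<Local> {
    let mut ever_init: HashMap<Local, bool> = HashMap::new();
    let mut must_be_mut = HashSet::new();
    for stmt in stmts {
        match stmt {
            Stmt::Assign(l) => {
                if *ever_init.get(l).unwrap_or(&false) {
                    must_be_mut.insert(*l); // re-assignment of an already initialized local
                } else {
                    ever_init.insert(*l, true); // first assignment: a plain `let` is fine
                }
            }
            Stmt::MutBorrow(l) => {
                must_be_mut.insert(*l); // `&mut` borrow forces `mut`
            }
        }
    }
    must_be_mut
}

fn main() {
    // let a = 1; a = 2;            -> `a` must be `mut`
    // let b = 1; let _r = &mut b;  -> `b` must be `mut`
    // let c = 1;                   -> `c` stays immutable
    let (a, b, c) = (Local(0), Local(1), Local(2));
    let stmts = [
        Stmt::Assign(a), Stmt::Assign(a),
        Stmt::Assign(b), Stmt::MutBorrow(b),
        Stmt::Assign(c),
    ];
    let m = needs_mut(&stmts);
    assert!(m.contains(&a) && m.contains(&b) && !m.contains(&c));
}
```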
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
new file mode 100644
index 000000000..c5d843d9e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -0,0 +1,1253 @@
+//! This module provides a MIR interpreter, which is used in const eval.
+
+use std::{borrow::Cow, collections::HashMap, iter};
+
+use base_db::CrateId;
+use chalk_ir::{
+ fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
+ DebruijnIndex, TyKind,
+};
+use hir_def::{
+ builtin_type::BuiltinType,
+ lang_item::{lang_attr, LangItem},
+ layout::{Layout, LayoutError, RustcEnumVariantIdx, TagEncoding, Variants},
+ AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, Lookup, VariantId,
+};
+use intern::Interned;
+use la_arena::ArenaMap;
+
+use crate::{
+ consteval::{intern_const_scalar, ConstEvalError},
+ db::HirDatabase,
+ from_placeholder_idx,
+ infer::{normalize, PointerCast},
+ layout::layout_of_ty,
+ mapping::from_chalk,
+ method_resolution::lookup_impl_method,
+ CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, Ty, TyBuilder, TyExt,
+};
+
+use super::{
+ const_as_usize, return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError,
+ Operand, Place, ProjectionElem, Rvalue, StatementKind, Terminator, UnOp,
+};
+
+pub struct Evaluator<'a> {
+ db: &'a dyn HirDatabase,
+ stack: Vec<u8>,
+ heap: Vec<u8>,
+ crate_id: CrateId,
+ // FIXME: This is a workaround, see the comment on `interpret_mir`
+ assert_placeholder_ty_is_unused: bool,
+ /// A general limit on execution, to prevent non-terminating programs from breaking the r-a main process
+ execution_limit: usize,
+ /// An additional limit on stack depth, to prevent stack overflow
+ stack_depth_limit: usize,
+}
+
+#[derive(Debug, Clone, Copy)]
+enum Address {
+ Stack(usize),
+ Heap(usize),
+}
+
+use Address::*;
+
+struct Interval {
+ addr: Address,
+ size: usize,
+}
+
+impl Interval {
+ fn new(addr: Address, size: usize) -> Self {
+ Self { addr, size }
+ }
+
+ fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
+ memory.read_memory(self.addr, self.size)
+ }
+}
+
+enum IntervalOrOwned {
+ Owned(Vec<u8>),
+ Borrowed(Interval),
+}
+impl IntervalOrOwned {
+ pub(crate) fn to_vec(self, memory: &Evaluator<'_>) -> Result<Vec<u8>> {
+ Ok(match self {
+ IntervalOrOwned::Owned(o) => o,
+ IntervalOrOwned::Borrowed(b) => b.get(memory)?.to_vec(),
+ })
+ }
+}
+
+macro_rules! from_bytes {
+ ($ty:tt, $value:expr) => {
+ ($ty::from_le_bytes(match ($value).try_into() {
+ Ok(x) => x,
+ Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+ }))
+ };
+}
+
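
A std-only illustration of what the `from_bytes!` macro above does: reinterpret a little-endian byte slice as a fixed-width integer, failing when the slice length does not match the integer size.

```rust
fn u32_from_bytes(bytes: &[u8]) -> Result<u32, &'static str> {
    // Same shape as the macro: try_into checks the length, from_le_bytes decodes.
    let arr: [u8; 4] = bytes.try_into().map_err(|_| "mismatched size")?;
    Ok(u32::from_le_bytes(arr))
}

fn main() {
    assert_eq!(u32_from_bytes(&[1, 0, 0, 0]), Ok(1));
    assert_eq!(u32_from_bytes(&[1, 0, 0]), Err("mismatched size"));
}
```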
+impl Address {
+ fn from_bytes(x: &[u8]) -> Result<Self> {
+ Ok(Address::from_usize(from_bytes!(usize, x)))
+ }
+
+ fn from_usize(x: usize) -> Self {
+ if x > usize::MAX / 2 {
+ Stack(usize::MAX - x)
+ } else {
+ Heap(x)
+ }
+ }
+
+ fn to_bytes(&self) -> Vec<u8> {
+ usize::to_le_bytes(self.to_usize()).to_vec()
+ }
+
+ fn to_usize(&self) -> usize {
+ let as_num = match self {
+ Stack(x) => usize::MAX - *x,
+ Heap(x) => *x,
+ };
+ as_num
+ }
+
+ fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
+ match self {
+ Stack(x) => Stack(f(*x)),
+ Heap(x) => Heap(f(*x)),
+ }
+ }
+
+ fn offset(&self, offset: usize) -> Address {
+ self.map(|x| x + offset)
+ }
+}
+
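
The `Address` scheme above packs two regions into one pointer-sized integer: heap offsets use the lower half of the `usize` range, and stack offsets are folded into the upper half as `usize::MAX - offset`. A std-only round-trip check of that encoding:

```rust
// Std-only mirror of the evaluator's address encoding.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Address {
    Stack(usize),
    Heap(usize),
}

impl Address {
    fn from_usize(x: usize) -> Self {
        if x > usize::MAX / 2 {
            Address::Stack(usize::MAX - x) // upper half encodes stack offsets
        } else {
            Address::Heap(x) // lower half encodes heap offsets
        }
    }

    fn to_usize(self) -> usize {
        match self {
            Address::Stack(x) => usize::MAX - x,
            Address::Heap(x) => x,
        }
    }
}

fn main() {
    for addr in [Address::Heap(16), Address::Stack(16)] {
        // Round-trips through the integer encoding without losing the region.
        assert_eq!(Address::from_usize(addr.to_usize()), addr);
    }
    println!("both addresses round-trip");
}
```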
+#[derive(Clone, PartialEq, Eq)]
+pub enum MirEvalError {
+ ConstEvalError(Box<ConstEvalError>),
+ LayoutError(LayoutError, Ty),
+ /// Means that the code had type errors (or mismatched args) and we shouldn't generate MIR in the first place.
+ TypeError(&'static str),
+ /// Means that the code had undefined behavior. We don't try to actively detect UB, but when it is
+ /// detected, this kind of error is used.
+ UndefinedBehavior(&'static str),
+ Panic,
+ MirLowerError(FunctionId, MirLowerError),
+ TypeIsUnsized(Ty, &'static str),
+ NotSupported(String),
+ InvalidConst(Const),
+ InFunction(FunctionId, Box<MirEvalError>),
+ ExecutionLimitExceeded,
+ StackOverflow,
+ TargetDataLayoutNotAvailable,
+}
+
+impl std::fmt::Debug for MirEvalError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::ConstEvalError(arg0) => f.debug_tuple("ConstEvalError").field(arg0).finish(),
+ Self::LayoutError(arg0, arg1) => {
+ f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
+ }
+ Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(),
+ Self::UndefinedBehavior(arg0) => {
+ f.debug_tuple("UndefinedBehavior").field(arg0).finish()
+ }
+ Self::Panic => write!(f, "Panic"),
+ Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"),
+ Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
+ Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
+ Self::StackOverflow => write!(f, "stack overflow"),
+ Self::MirLowerError(arg0, arg1) => {
+ f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
+ }
+ Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
+ Self::InvalidConst(arg0) => {
+ let data = &arg0.data(Interner);
+ f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish()
+ }
+ Self::InFunction(func, e) => {
+ let mut e = &**e;
+ let mut stack = vec![*func];
+ while let Self::InFunction(f, next_e) = e {
+ e = &next_e;
+ stack.push(*f);
+ }
+ f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
+ }
+ }
+ }
+}
+
+macro_rules! not_supported {
+ ($x: expr) => {
+ return Err(MirEvalError::NotSupported(format!($x)))
+ };
+}
+
+impl From<ConstEvalError> for MirEvalError {
+ fn from(value: ConstEvalError) -> Self {
+ // Every const-eval error is wrapped unchanged; no per-variant handling is needed here.
+ MirEvalError::ConstEvalError(Box::new(value))
+ }
+}
+
+type Result<T> = std::result::Result<T, MirEvalError>;
+
+struct Locals<'a> {
+ ptr: &'a ArenaMap<LocalId, Address>,
+ body: &'a MirBody,
+ subst: &'a Substitution,
+}
+
+pub fn interpret_mir(
+ db: &dyn HirDatabase,
+ body: &MirBody,
+ // FIXME: This is a workaround. Ideally, const generics should have a separate body (issue #7434), but now
+ // they share their body with their parent, so in MIR lowering we have locals of the parent body, which
+ // might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
+ // a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
+ // (and probably should) do better here, for example by excluding bindings outside of the target expression.
+ assert_placeholder_ty_is_unused: bool,
+) -> Result<Const> {
+ let ty = body.locals[return_slot()].ty.clone();
+ let mut evaluator =
+ Evaluator::new(db, body.owner.module(db.upcast()).krate(), assert_placeholder_ty_is_unused);
+ let bytes = evaluator.interpret_mir_with_no_arg(&body)?;
+ let memory_map = evaluator.create_memory_map(
+ &bytes,
+ &ty,
+ &Locals { ptr: &ArenaMap::new(), body: &body, subst: &Substitution::empty(Interner) },
+ )?;
+ return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
+}
+
+impl Evaluator<'_> {
+ pub fn new<'a>(
+ db: &'a dyn HirDatabase,
+ crate_id: CrateId,
+ assert_placeholder_ty_is_unused: bool,
+ ) -> Evaluator<'a> {
+ Evaluator {
+ stack: vec![0],
+ heap: vec![0],
+ db,
+ crate_id,
+ assert_placeholder_ty_is_unused,
+ stack_depth_limit: 100,
+ execution_limit: 100_000,
+ }
+ }
+
+ fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> {
+ Ok(self.place_addr_and_ty(p, locals)?.0)
+ }
+
+ fn ptr_size(&self) -> usize {
+ match self.db.target_data_layout(self.crate_id) {
+ Some(x) => x.pointer_size.bytes_usize(),
+ None => 8,
+ }
+ }
+
+ fn place_addr_and_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<(Address, Ty)> {
+ let mut addr = locals.ptr[p.local];
+ let mut ty: Ty =
+ self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?;
+ for proj in &p.projection {
+ match proj {
+ ProjectionElem::Deref => {
+ ty = match &ty.data(Interner).kind {
+ TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
+ _ => {
+ return Err(MirEvalError::TypeError(
+ "Overloaded deref in MIR is disallowed",
+ ))
+ }
+ };
+ let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
+ addr = Address::from_usize(x);
+ }
+ ProjectionElem::Index(op) => {
+ let offset =
+ from_bytes!(usize, self.read_memory(locals.ptr[*op], self.ptr_size())?);
+ match &ty.data(Interner).kind {
+ TyKind::Ref(_, _, inner) => match &inner.data(Interner).kind {
+ TyKind::Slice(inner) => {
+ ty = inner.clone();
+ let ty_size = self.size_of_sized(
+ &ty,
+ locals,
+ "slice inner type should be sized",
+ )?;
+ let value = self.read_memory(addr, self.ptr_size() * 2)?;
+ addr = Address::from_bytes(&value[0..8])?.offset(ty_size * offset);
+ }
+ x => not_supported!("MIR index for ref type {x:?}"),
+ },
+ TyKind::Array(inner, _) | TyKind::Slice(inner) => {
+ ty = inner.clone();
+ let ty_size = self.size_of_sized(
+ &ty,
+ locals,
+ "array inner type should be sized",
+ )?;
+ addr = addr.offset(ty_size * offset);
+ }
+ x => not_supported!("MIR index for type {x:?}"),
+ }
+ }
+ &ProjectionElem::TupleField(f) => match &ty.data(Interner).kind {
+ TyKind::Tuple(_, subst) => {
+ let layout = self.layout(&ty)?;
+ ty = subst
+ .as_slice(Interner)
+ .get(f)
+ .ok_or(MirEvalError::TypeError("not enough tuple fields"))?
+ .assert_ty_ref(Interner)
+ .clone();
+ let offset = layout.fields.offset(f).bytes_usize();
+ addr = addr.offset(offset);
+ }
+ _ => return Err(MirEvalError::TypeError("Only tuple has tuple fields")),
+ },
+ ProjectionElem::Field(f) => match &ty.data(Interner).kind {
+ TyKind::Adt(adt, subst) => {
+ let layout = self.layout_adt(adt.0, subst.clone())?;
+ let variant_layout = match &layout.variants {
+ Variants::Single { .. } => &layout,
+ Variants::Multiple { variants, .. } => {
+ &variants[match f.parent {
+ hir_def::VariantId::EnumVariantId(x) => {
+ RustcEnumVariantIdx(x.local_id)
+ }
+ _ => {
+ return Err(MirEvalError::TypeError(
+ "Multivariant layout only happens for enums",
+ ))
+ }
+ }]
+ }
+ };
+ ty = self.db.field_types(f.parent)[f.local_id]
+ .clone()
+ .substitute(Interner, subst);
+ let offset = variant_layout
+ .fields
+ .offset(u32::from(f.local_id.into_raw()) as usize)
+ .bytes_usize();
+ addr = addr.offset(offset);
+ }
+ _ => return Err(MirEvalError::TypeError("Only adt has fields")),
+ },
+ ProjectionElem::ConstantIndex { .. } => {
+ not_supported!("constant index")
+ }
+ ProjectionElem::Subslice { .. } => not_supported!("subslice"),
+ ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
+ }
+ }
+ Ok((addr, ty))
+ }
+
+ fn layout(&self, ty: &Ty) -> Result<Layout> {
+ layout_of_ty(self.db, ty, self.crate_id)
+ .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))
+ }
+
+ fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Layout> {
+ self.db.layout_of_adt(adt, subst.clone()).map_err(|e| {
+ MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
+ })
+ }
+
+ fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> {
+ Ok(self.place_addr_and_ty(p, locals)?.1)
+ }
+
+ fn operand_ty<'a>(&'a self, o: &'a Operand, locals: &'a Locals<'a>) -> Result<Ty> {
+ Ok(match o {
+ Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
+ Operand::Constant(c) => c.data(Interner).ty.clone(),
+ })
+ }
+
+ fn interpret_mir(
+ &mut self,
+ body: &MirBody,
+ args: impl Iterator<Item = Vec<u8>>,
+ subst: Substitution,
+ ) -> Result<Vec<u8>> {
+ if let Some(x) = self.stack_depth_limit.checked_sub(1) {
+ self.stack_depth_limit = x;
+ } else {
+ return Err(MirEvalError::StackOverflow);
+ }
+ let mut current_block_idx = body.start_block;
+ let mut locals = Locals { ptr: &ArenaMap::new(), body: &body, subst: &subst };
+ let (locals_ptr, stack_size) = {
+ let mut stack_ptr = self.stack.len();
+ let addr = body
+ .locals
+ .iter()
+ .map(|(id, x)| {
+ let size =
+ self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?;
+ let my_ptr = stack_ptr;
+ stack_ptr += size;
+ Ok((id, Stack(my_ptr)))
+ })
+ .collect::<Result<ArenaMap<LocalId, _>>>()?;
+ let stack_size = stack_ptr - self.stack.len();
+ (addr, stack_size)
+ };
+ locals.ptr = &locals_ptr;
+ self.stack.extend(iter::repeat(0).take(stack_size));
+ let mut remain_args = body.arg_count;
+ for ((_, addr), value) in locals_ptr.iter().skip(1).zip(args) {
+ self.write_memory(*addr, &value)?;
+ if remain_args == 0 {
+ return Err(MirEvalError::TypeError("more arguments provided"));
+ }
+ remain_args -= 1;
+ }
+ if remain_args > 0 {
+ return Err(MirEvalError::TypeError("not enough arguments provided"));
+ }
+ loop {
+ let current_block = &body.basic_blocks[current_block_idx];
+ if let Some(x) = self.execution_limit.checked_sub(1) {
+ self.execution_limit = x;
+ } else {
+ return Err(MirEvalError::ExecutionLimitExceeded);
+ }
+ for statement in &current_block.statements {
+ match &statement.kind {
+ StatementKind::Assign(l, r) => {
+ let addr = self.place_addr(l, &locals)?;
+ let result = self.eval_rvalue(r, &locals)?.to_vec(&self)?;
+ self.write_memory(addr, &result)?;
+ }
+ StatementKind::Deinit(_) => not_supported!("de-init statement"),
+ StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Nop => (),
+ }
+ }
+ let Some(terminator) = current_block.terminator.as_ref() else {
+ not_supported!("block without terminator");
+ };
+ match terminator {
+ Terminator::Goto { target } => {
+ current_block_idx = *target;
+ }
+ Terminator::Call {
+ func,
+ args,
+ destination,
+ target,
+ cleanup: _,
+ from_hir_call: _,
+ } => {
+ let fn_ty = self.operand_ty(func, &locals)?;
+ match &fn_ty.data(Interner).kind {
+ TyKind::FnDef(def, generic_args) => {
+ let def: CallableDefId = from_chalk(self.db, *def);
+ let generic_args = self.subst_filler(generic_args, &locals);
+ match def {
+ CallableDefId::FunctionId(def) => {
+ let arg_bytes = args
+ .iter()
+ .map(|x| {
+ Ok(self
+ .eval_operand(x, &locals)?
+ .get(&self)?
+ .to_owned())
+ })
+ .collect::<Result<Vec<_>>>()?
+ .into_iter();
+ let function_data = self.db.function_data(def);
+ let is_intrinsic = match &function_data.abi {
+ Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
+ None => match def.lookup(self.db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+ let id = block.lookup(self.db.upcast()).id;
+ id.item_tree(self.db.upcast())[id.value]
+ .abi
+ .as_deref()
+ == Some("rust-intrinsic")
+ }
+ _ => false,
+ },
+ };
+ let result = if is_intrinsic {
+ self.exec_intrinsic(
+ function_data
+ .name
+ .as_text()
+ .unwrap_or_default()
+ .as_str(),
+ arg_bytes,
+ generic_args,
+ &locals,
+ )?
+ } else if let Some(x) = self.detect_lang_function(def) {
+ self.exec_lang_item(x, arg_bytes)?
+ } else {
+ let trait_env = {
+ let Some(d) = body.owner.as_generic_def_id() else {
+ not_supported!("trait resolving in non generic def id");
+ };
+ self.db.trait_environment(d)
+ };
+ let (imp, generic_args) = lookup_impl_method(
+ self.db,
+ trait_env,
+ def,
+ generic_args.clone(),
+ );
+ let generic_args =
+ self.subst_filler(&generic_args, &locals);
+ let def = imp.into();
+ let mir_body = self
+ .db
+ .mir_body(def)
+ .map_err(|e| MirEvalError::MirLowerError(imp, e))?;
+ self.interpret_mir(&mir_body, arg_bytes, generic_args)
+ .map_err(|e| {
+ MirEvalError::InFunction(imp, Box::new(e))
+ })?
+ };
+ let dest_addr = self.place_addr(destination, &locals)?;
+ self.write_memory(dest_addr, &result)?;
+ }
+ CallableDefId::StructId(id) => {
+ let (size, variant_layout, tag) = self.layout_of_variant(
+ id.into(),
+ generic_args.clone(),
+ &locals,
+ )?;
+ let result = self.make_by_layout(
+ size,
+ &variant_layout,
+ tag,
+ args,
+ &locals,
+ )?;
+ let dest_addr = self.place_addr(destination, &locals)?;
+ self.write_memory(dest_addr, &result)?;
+ }
+ CallableDefId::EnumVariantId(id) => {
+ let (size, variant_layout, tag) = self.layout_of_variant(
+ id.into(),
+ generic_args.clone(),
+ &locals,
+ )?;
+ let result = self.make_by_layout(
+ size,
+ &variant_layout,
+ tag,
+ args,
+ &locals,
+ )?;
+ let dest_addr = self.place_addr(destination, &locals)?;
+ self.write_memory(dest_addr, &result)?;
+ }
+ }
+ current_block_idx =
+ target.expect("broken mir, function without target");
+ }
+ _ => not_supported!("unknown function type"),
+ }
+ }
+ Terminator::SwitchInt { discr, targets } => {
+ let val = u128::from_le_bytes(pad16(
+ self.eval_operand(discr, &locals)?.get(&self)?,
+ false,
+ ));
+ current_block_idx = targets.target_for_value(val);
+ }
+ Terminator::Return => {
+ let ty = body.locals[return_slot()].ty.clone();
+ self.stack_depth_limit += 1;
+ return Ok(self
+ .read_memory(
+ locals.ptr[return_slot()],
+ self.size_of_sized(&ty, &locals, "return type")?,
+ )?
+ .to_owned());
+ }
+ Terminator::Unreachable => {
+ return Err(MirEvalError::UndefinedBehavior("unreachable executed"))
+ }
+ _ => not_supported!("unknown terminator"),
+ }
+ }
+ }
+
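
The interpreter loop above always has the same shape: decrement the execution budget, run the statements of the current block, then let the terminator choose the next block until a `Return` is reached. A toy std-only version of that control loop, with an integer accumulator standing in for real locals and memory:

```rust
// Toy mirror of the block-at-a-time interpreter loop.
enum Stmt {
    Add(i64),
    Mul(i64),
}

enum Terminator {
    Goto(usize),
    Return,
}

struct Block {
    statements: Vec<Stmt>,
    terminator: Terminator,
}

fn run(blocks: &[Block], start: usize, mut execution_limit: usize) -> Option<i64> {
    let mut acc = 0_i64;
    let mut current = start;
    loop {
        // Mirrors the `ExecutionLimitExceeded` guard in the real evaluator.
        execution_limit = execution_limit.checked_sub(1)?;
        let block = &blocks[current];
        for stmt in &block.statements {
            match stmt {
                Stmt::Add(n) => acc += n,
                Stmt::Mul(n) => acc *= n,
            }
        }
        match block.terminator {
            Terminator::Goto(target) => current = target,
            Terminator::Return => return Some(acc),
        }
    }
}

fn main() {
    let blocks = vec![
        Block { statements: vec![Stmt::Add(2), Stmt::Mul(3)], terminator: Terminator::Goto(1) },
        Block { statements: vec![Stmt::Add(-1)], terminator: Terminator::Return },
    ];
    assert_eq!(run(&blocks, 0, 100), Some(5));
}
```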
+ fn eval_rvalue<'a>(
+ &'a mut self,
+ r: &'a Rvalue,
+ locals: &'a Locals<'a>,
+ ) -> Result<IntervalOrOwned> {
+ use IntervalOrOwned::*;
+ Ok(match r {
+ Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
+ Rvalue::Ref(_, p) => {
+ let addr = self.place_addr(p, locals)?;
+ Owned(addr.to_bytes())
+ }
+ Rvalue::Len(_) => not_supported!("rvalue len"),
+ Rvalue::UnaryOp(op, val) => {
+ let mut c = self.eval_operand(val, locals)?.get(&self)?;
+ let mut ty = self.operand_ty(val, locals)?;
+ while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
+ ty = z.clone();
+ let size = self.size_of_sized(&ty, locals, "operand of unary op")?;
+ c = self.read_memory(Address::from_bytes(c)?, size)?;
+ }
+ let mut c = c.to_vec();
+ if ty.as_builtin() == Some(BuiltinType::Bool) {
+ c[0] = 1 - c[0];
+ } else {
+ match op {
+ UnOp::Not => c.iter_mut().for_each(|x| *x = !*x),
+ UnOp::Neg => {
+ c.iter_mut().for_each(|x| *x = !*x);
+ for k in c.iter_mut() {
+ let o;
+ (*k, o) = k.overflowing_add(1);
+ if !o {
+ break;
+ }
+ }
+ }
+ }
+ }
+ Owned(c)
+ }
+ Rvalue::CheckedBinaryOp(op, lhs, rhs) => {
+ let lc = self.eval_operand(lhs, locals)?;
+ let rc = self.eval_operand(rhs, locals)?;
+ let mut lc = lc.get(&self)?;
+ let mut rc = rc.get(&self)?;
+ let mut ty = self.operand_ty(lhs, locals)?;
+ while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
+ ty = z.clone();
+ let size = self.size_of_sized(&ty, locals, "operand of binary op")?;
+ lc = self.read_memory(Address::from_bytes(lc)?, size)?;
+ rc = self.read_memory(Address::from_bytes(rc)?, size)?;
+ }
+ let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
+ let l128 = i128::from_le_bytes(pad16(lc, is_signed));
+ let r128 = i128::from_le_bytes(pad16(rc, is_signed));
+ match op {
+ BinOp::Ge | BinOp::Gt | BinOp::Le | BinOp::Lt | BinOp::Eq | BinOp::Ne => {
+ let r = match op {
+ BinOp::Ge => l128 >= r128,
+ BinOp::Gt => l128 > r128,
+ BinOp::Le => l128 <= r128,
+ BinOp::Lt => l128 < r128,
+ BinOp::Eq => l128 == r128,
+ BinOp::Ne => l128 != r128,
+ _ => unreachable!(),
+ };
+ let r = r as u8;
+ Owned(vec![r])
+ }
+ BinOp::BitAnd
+ | BinOp::BitOr
+ | BinOp::BitXor
+ | BinOp::Add
+ | BinOp::Mul
+ | BinOp::Div
+ | BinOp::Rem
+ | BinOp::Sub => {
+ let r = match op {
+ BinOp::Add => l128.overflowing_add(r128).0,
+ BinOp::Mul => l128.overflowing_mul(r128).0,
+ BinOp::Div => l128.checked_div(r128).ok_or(MirEvalError::Panic)?,
+ BinOp::Rem => l128.checked_rem(r128).ok_or(MirEvalError::Panic)?,
+ BinOp::Sub => l128.overflowing_sub(r128).0,
+ BinOp::BitAnd => l128 & r128,
+ BinOp::BitOr => l128 | r128,
+ BinOp::BitXor => l128 ^ r128,
+ _ => unreachable!(),
+ };
+ let r = r.to_le_bytes();
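+                        // Overflow check: every byte beyond the operand's width must be the
+                        // sign-extension filler (0, or 0xff for signed types), otherwise the
+                        // result did not fit.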
+ for &k in &r[lc.len()..] {
+ if k != 0 && (k != 255 || !is_signed) {
+ return Err(MirEvalError::Panic);
+ }
+ }
+ Owned(r[0..lc.len()].into())
+ }
+ BinOp::Shl | BinOp::Shr => {
+                        let shift_amount = if r128 < 0 {
+                            return Err(MirEvalError::Panic);
+                        } else if r128 >= 128 {
+                            return Err(MirEvalError::Panic);
+                        } else {
+                            r128 as u8
+                        };
+                        let r = match op {
+                            BinOp::Shl => l128 << shift_amount,
+                            BinOp::Shr => l128 >> shift_amount,
+ _ => unreachable!(),
+ };
+ Owned(r.to_le_bytes()[0..lc.len()].into())
+ }
+ BinOp::Offset => not_supported!("offset binop"),
+ }
+ }
+ Rvalue::Discriminant(p) => {
+ let ty = self.place_ty(p, locals)?;
+ let bytes = self.eval_place(p, locals)?.get(&self)?;
+ let layout = self.layout(&ty)?;
+ match layout.variants {
+ Variants::Single { .. } => Owned(0u128.to_le_bytes().to_vec()),
+ Variants::Multiple { tag, tag_encoding, .. } => {
+ let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
+ not_supported!("missing target data layout");
+ };
+ let size = tag.size(&*target_data_layout).bytes_usize();
+ let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
+ match tag_encoding {
+ TagEncoding::Direct => {
+ let tag = &bytes[offset..offset + size];
+ Owned(pad16(tag, false).to_vec())
+ }
+ TagEncoding::Niche { untagged_variant, niche_start, .. } => {
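+                            // Niche encoding: the discriminant is recovered as `tag - niche_start`;
+                            // if that value matches no declared discriminant, the value belongs to
+                            // the untagged variant.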
+ let tag = &bytes[offset..offset + size];
+ let candidate_discriminant = i128::from_le_bytes(pad16(tag, false))
+ .wrapping_sub(niche_start as i128);
+ let enum_id = match ty.kind(Interner) {
+ TyKind::Adt(e, _) => match e.0 {
+ AdtId::EnumId(e) => e,
+ _ => not_supported!("Non enum with multi variant layout"),
+ },
+ _ => not_supported!("Non adt with multi variant layout"),
+ };
+ let enum_data = self.db.enum_data(enum_id);
+ let result = 'b: {
+ for (local_id, _) in enum_data.variants.iter() {
+ if candidate_discriminant
+ == self.db.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id,
+ })?
+ {
+ break 'b candidate_discriminant;
+ }
+ }
+ self.db.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id: untagged_variant.0,
+ })?
+ };
+ Owned(result.to_le_bytes().to_vec())
+ }
+ }
+ }
+ }
+ }
+ Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
+ Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
+ Rvalue::Aggregate(kind, values) => match kind {
+ AggregateKind::Array(_) => {
+ let mut r = vec![];
+ for x in values {
+ let value = self.eval_operand(x, locals)?.get(&self)?;
+ r.extend(value);
+ }
+ Owned(r)
+ }
+ AggregateKind::Tuple(ty) => {
+ let layout = self.layout(&ty)?;
+ Owned(self.make_by_layout(
+ layout.size.bytes_usize(),
+ &layout,
+ None,
+ values,
+ locals,
+ )?)
+ }
+ AggregateKind::Union(x, f) => {
+ let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
+ let offset = layout
+ .fields
+ .offset(u32::from(f.local_id.into_raw()) as usize)
+ .bytes_usize();
+ let op = self.eval_operand(&values[0], locals)?.get(&self)?;
+ let mut result = vec![0; layout.size.bytes_usize()];
+ result[offset..offset + op.len()].copy_from_slice(op);
+ Owned(result)
+ }
+ AggregateKind::Adt(x, subst) => {
+ let (size, variant_layout, tag) =
+ self.layout_of_variant(*x, subst.clone(), locals)?;
+ Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?)
+ }
+ },
+ Rvalue::Cast(kind, operand, target_ty) => match kind {
+ CastKind::PointerExposeAddress => not_supported!("exposing pointer address"),
+ CastKind::PointerFromExposedAddress => {
+ not_supported!("creating pointer from exposed address")
+ }
+ CastKind::Pointer(cast) => match cast {
+ PointerCast::Unsize => {
+ let current_ty = self.operand_ty(operand, locals)?;
+ match &target_ty.data(Interner).kind {
+ TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
+ match &ty.data(Interner).kind {
+ TyKind::Slice(_) => match &current_ty.data(Interner).kind {
+ TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
+ match &ty.data(Interner).kind {
+ TyKind::Array(_, size) => {
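+                                            // `&[T; N]` -> `&[T]` unsizing: build a fat
+                                            // pointer from the array address plus the
+                                            // constant length `N`.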
+ let addr = self
+ .eval_operand(operand, locals)?
+ .get(&self)?;
+ let len = const_as_usize(size);
+ let mut r = Vec::with_capacity(16);
+ r.extend(addr.iter().copied());
+ r.extend(len.to_le_bytes().into_iter());
+ Owned(r)
+ }
+ _ => {
+ not_supported!("slice unsizing from non arrays")
+ }
+ }
+ }
+ _ => not_supported!("slice unsizing from non pointers"),
+ },
+ TyKind::Dyn(_) => not_supported!("dyn pointer unsize cast"),
+ _ => not_supported!("unknown unsized cast"),
+ }
+ }
+ _ => not_supported!("unsized cast on unknown pointer type"),
+ }
+ }
+ x => not_supported!("pointer cast {x:?}"),
+ },
+ CastKind::DynStar => not_supported!("dyn star cast"),
+ CastKind::IntToInt => {
+ // FIXME: handle signed cast
+ let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false);
+ let dest_size =
+ self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
+ Owned(current[0..dest_size].to_vec())
+ }
+ CastKind::FloatToInt => not_supported!("float to int cast"),
+ CastKind::FloatToFloat => not_supported!("float to float cast"),
+                CastKind::IntToFloat => not_supported!("int to float cast"),
+ CastKind::PtrToPtr => not_supported!("ptr to ptr cast"),
+ CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
+ },
+ })
+ }
+
+ fn layout_of_variant(
+ &mut self,
+ x: VariantId,
+ subst: Substitution,
+ locals: &Locals<'_>,
+ ) -> Result<(usize, Layout, Option<(usize, usize, i128)>)> {
+ let adt = x.adt_id();
+ if let DefWithBodyId::VariantId(f) = locals.body.owner {
+ if let VariantId::EnumVariantId(x) = x {
+ if AdtId::from(f.parent) == adt {
+                // Computing the exact size of enums requires resolving the enum discriminants. In order to prevent loops (and
+                // infinite-sized type errors) we use a dummy layout.
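+                // A hypothetical example of such a loop: `enum E { A = 1, B = size_of::<E>() as isize }`,
+                // where evaluating the discriminant of `B` needs the layout of `E` itself.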
+ let i = self.db.const_eval_discriminant(x)?;
+ return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
+ }
+ }
+ }
+ let layout = self.layout_adt(adt, subst)?;
+ Ok(match layout.variants {
+ Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
+ Variants::Multiple { variants, tag, tag_encoding, .. } => {
+ let cx = self
+ .db
+ .target_data_layout(self.crate_id)
+ .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
+ let enum_variant_id = match x {
+ VariantId::EnumVariantId(x) => x,
+ _ => not_supported!("multi variant layout for non-enums"),
+ };
+ let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id);
+ let mut discriminant = self.db.const_eval_discriminant(enum_variant_id)?;
+ let variant_layout = variants[rustc_enum_variant_idx].clone();
+ let have_tag = match tag_encoding {
+ TagEncoding::Direct => true,
+ TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {
+ discriminant = discriminant.wrapping_add(niche_start as i128);
+ untagged_variant != rustc_enum_variant_idx
+ }
+ };
+ (
+ layout.size.bytes_usize(),
+ variant_layout,
+ if have_tag {
+ Some((
+ layout.fields.offset(0).bytes_usize(),
+ tag.size(&*cx).bytes_usize(),
+ discriminant,
+ ))
+ } else {
+ None
+ },
+ )
+ }
+ })
+ }
+
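+    /// Builds the raw bytes of a value with the given variant layout: allocates a zeroed buffer of
+    /// `size` bytes, writes the discriminant `tag` (given as `(offset, size, value)`) if present, and
+    /// copies each evaluated operand to its field offset.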
+ fn make_by_layout(
+ &mut self,
+        size: usize, // Not necessarily equal to variant_layout.size
+ variant_layout: &Layout,
+ tag: Option<(usize, usize, i128)>,
+ values: &Vec<Operand>,
+ locals: &Locals<'_>,
+ ) -> Result<Vec<u8>> {
+ let mut result = vec![0; size];
+ if let Some((offset, size, value)) = tag {
+ result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
+ }
+ for (i, op) in values.iter().enumerate() {
+ let offset = variant_layout.fields.offset(i).bytes_usize();
+ let op = self.eval_operand(op, locals)?.get(&self)?;
+ result[offset..offset + op.len()].copy_from_slice(op);
+ }
+ Ok(result)
+ }
+
+ fn eval_operand(&mut self, x: &Operand, locals: &Locals<'_>) -> Result<Interval> {
+ Ok(match x {
+ Operand::Copy(p) | Operand::Move(p) => self.eval_place(p, locals)?,
+ Operand::Constant(konst) => {
+ let data = &konst.data(Interner);
+ match &data.value {
+ chalk_ir::ConstValue::BoundVar(b) => {
+ let c = locals
+ .subst
+ .as_slice(Interner)
+ .get(b.index)
+ .ok_or(MirEvalError::TypeError("missing generic arg"))?
+ .assert_const_ref(Interner);
+ self.eval_operand(&Operand::Constant(c.clone()), locals)?
+ }
+ chalk_ir::ConstValue::InferenceVar(_) => {
+ not_supported!("inference var constant")
+ }
+ chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"),
+ chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(v, memory_map) => {
+ let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
+ let patch_map = memory_map.transform_addresses(|b| {
+ let addr = self.heap_allocate(b.len());
+ self.write_memory(addr, b)?;
+ Ok(addr.to_usize())
+ })?;
+ let size = self.size_of(&data.ty, locals)?.unwrap_or(v.len());
+ if size != v.len() {
+ // Handle self enum
+ if size == 16 && v.len() < 16 {
+ v = Cow::Owned(pad16(&v, false).to_vec());
+ } else if size < 16 && v.len() == 16 {
+ v = Cow::Owned(v[0..size].to_vec());
+ } else {
+ return Err(MirEvalError::InvalidConst(konst.clone()));
+ }
+ }
+ let addr = self.heap_allocate(size);
+ self.write_memory(addr, &v)?;
+ self.patch_addresses(&patch_map, addr, &data.ty, locals)?;
+ Interval::new(addr, size)
+ }
+ ConstScalar::Unknown => not_supported!("evaluating unknown const"),
+ },
+ }
+ }
+ })
+ }
+
+ fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
+ let addr = self.place_addr(p, locals)?;
+ Ok(Interval::new(
+ addr,
+ self.size_of_sized(&self.place_ty(p, locals)?, locals, "type of this place")?,
+ ))
+ }
+
+ fn read_memory(&self, addr: Address, size: usize) -> Result<&[u8]> {
+ let (mem, pos) = match addr {
+ Stack(x) => (&self.stack, x),
+ Heap(x) => (&self.heap, x),
+ };
+ mem.get(pos..pos + size).ok_or(MirEvalError::UndefinedBehavior("out of bound memory read"))
+ }
+
+ fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
+ let (mem, pos) = match addr {
+ Stack(x) => (&mut self.stack, x),
+ Heap(x) => (&mut self.heap, x),
+ };
+ mem.get_mut(pos..pos + r.len())
+ .ok_or(MirEvalError::UndefinedBehavior("out of bound memory write"))?
+ .copy_from_slice(r);
+ Ok(())
+ }
+
+ fn size_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Option<usize>> {
+ if let DefWithBodyId::VariantId(f) = locals.body.owner {
+ if let Some((adt, _)) = ty.as_adt() {
+ if AdtId::from(f.parent) == adt {
+                    // Computing the exact size of enums requires resolving the enum discriminants. In order to prevent
+                    // loops (and infinite-sized type errors) we use a dummy size.
+ return Ok(Some(16));
+ }
+ }
+ }
+ let ty = &self.ty_filler(ty, locals.subst, locals.body.owner)?;
+ let layout = self.layout(ty);
+ if self.assert_placeholder_ty_is_unused {
+ if matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) {
+ return Ok(Some(0));
+ }
+ }
+ let layout = layout?;
+ Ok(layout.is_sized().then(|| layout.size.bytes_usize()))
+ }
+
+    /// A version of `self.size_of` which returns an error if the type is unsized. The `what` argument
+    /// should be something that completes this sentence: `error: type {ty} was unsized. {what} should be sized`
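+    ///
+    /// For example, `self.size_of_sized(&ty, &locals, "return type")?` fails with an error saying that
+    /// the return type should be sized.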
+ fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> {
+ match self.size_of(ty, locals)? {
+ Some(x) => Ok(x),
+ None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
+ }
+ }
+
+ /// Uses `ty_filler` to fill an entire subst
+ fn subst_filler(&self, subst: &Substitution, locals: &Locals<'_>) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ subst.iter(Interner).map(|x| match x.data(Interner) {
+ chalk_ir::GenericArgData::Ty(ty) => {
+ let Ok(ty) = self.ty_filler(ty, locals.subst, locals.body.owner) else {
+ return x.clone();
+ };
+ chalk_ir::GenericArgData::Ty(ty).intern(Interner)
+ }
+ _ => x.clone(),
+ }),
+ )
+ }
+
+    /// This function substitutes placeholders of the body with the provided subst, effectively playing
+    /// the role of monomorphization. In addition to placeholders, it substitutes opaque types (return
+    /// position impl traits) with their underlying type.
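+    ///
+    /// For example (a sketch): with `subst = [i32]`, a placeholder standing for the owner's type
+    /// parameter `T` becomes `i32`, and a return-position `impl Trait` is replaced by the hidden type
+    /// inferred for it.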
+ fn ty_filler(&self, ty: &Ty, subst: &Substitution, owner: DefWithBodyId) -> Result<Ty> {
+ struct Filler<'a> {
+ db: &'a dyn HirDatabase,
+ subst: &'a Substitution,
+ skip_params: usize,
+ }
+ impl FallibleTypeFolder<Interner> for Filler<'_> {
+ type Error = MirEvalError;
+
+ fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn try_fold_ty(
+ &mut self,
+ ty: Ty,
+ outer_binder: DebruijnIndex,
+ ) -> std::result::Result<Ty, Self::Error> {
+ match ty.kind(Interner) {
+ TyKind::OpaqueType(id, subst) => {
+ let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
+ match impl_trait_id {
+ crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let infer = self.db.infer(func.into());
+ let filler = &mut Filler { db: self.db, subst, skip_params: 0 };
+ filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
+ }
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
+ not_supported!("async block impl trait");
+ }
+ }
+ }
+ _ => ty.try_super_fold_with(self.as_dyn(), outer_binder),
+ }
+ }
+
+ fn try_fold_free_placeholder_ty(
+ &mut self,
+ idx: chalk_ir::PlaceholderIndex,
+ _outer_binder: DebruijnIndex,
+ ) -> std::result::Result<Ty, Self::Error> {
+ let x = from_placeholder_idx(self.db, idx);
+ Ok(self
+ .subst
+ .as_slice(Interner)
+ .get((u32::from(x.local_id.into_raw()) as usize) + self.skip_params)
+ .and_then(|x| x.ty(Interner))
+ .ok_or(MirEvalError::TypeError("Generic arg not provided"))?
+ .clone())
+ }
+ }
+ let filler = &mut Filler { db: self.db, subst, skip_params: 0 };
+ Ok(normalize(self.db, owner, ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST)?))
+ }
+
+ fn heap_allocate(&mut self, s: usize) -> Address {
+ let pos = self.heap.len();
+ self.heap.extend(iter::repeat(0).take(s));
+ Address::Heap(pos)
+ }
+
+ pub fn interpret_mir_with_no_arg(&mut self, body: &MirBody) -> Result<Vec<u8>> {
+ self.interpret_mir(&body, vec![].into_iter(), Substitution::empty(Interner))
+ }
+
+ fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
+ let candidate = lang_attr(self.db.upcast(), def)?;
+ // filter normal lang functions out
+ if [LangItem::IntoIterIntoIter, LangItem::IteratorNext].contains(&candidate) {
+ return None;
+ }
+ Some(candidate)
+ }
+
+ fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> {
+ // FIXME: support indirect references
+ let mut mm = MemoryMap::default();
+ match ty.kind(Interner) {
+ TyKind::Ref(_, _, t) => {
+ let size = self.size_of(t, locals)?;
+ match size {
+ Some(size) => {
+ let addr_usize = from_bytes!(usize, bytes);
+ mm.insert(
+ addr_usize,
+ self.read_memory(Address::from_usize(addr_usize), size)?.to_vec(),
+ )
+ }
+ None => {
+ let element_size = match t.kind(Interner) {
+ TyKind::Str => 1,
+ TyKind::Slice(t) => {
+ self.size_of_sized(t, locals, "slice inner type")?
+ }
+ _ => return Ok(mm), // FIXME: support other kind of unsized types
+ };
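+                        // Unsized pointee: `bytes` is a fat pointer, with the data address in the
+                        // first half and the length (element count) in the second half.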
+ let (addr, meta) = bytes.split_at(bytes.len() / 2);
+ let size = element_size * from_bytes!(usize, meta);
+ let addr = Address::from_bytes(addr)?;
+ mm.insert(addr.to_usize(), self.read_memory(addr, size)?.to_vec());
+ }
+ }
+ }
+ _ => (),
+ }
+ Ok(mm)
+ }
+
+ fn patch_addresses(
+ &mut self,
+ patch_map: &HashMap<usize, usize>,
+ addr: Address,
+ ty: &Ty,
+ locals: &Locals<'_>,
+ ) -> Result<()> {
+ // FIXME: support indirect references
+ let my_size = self.size_of_sized(ty, locals, "value to patch address")?;
+ match ty.kind(Interner) {
+ TyKind::Ref(_, _, t) => {
+ let size = self.size_of(t, locals)?;
+ match size {
+ Some(_) => {
+ let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
+ if let Some(x) = patch_map.get(&current) {
+ self.write_memory(addr, &x.to_le_bytes())?;
+ }
+ }
+ None => {
+ let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
+ if let Some(x) = patch_map.get(&current) {
+ self.write_memory(addr, &x.to_le_bytes())?;
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+ Ok(())
+ }
+
+ fn exec_intrinsic(
+ &self,
+ as_str: &str,
+ _arg_bytes: impl Iterator<Item = Vec<u8>>,
+ generic_args: Substitution,
+ locals: &Locals<'_>,
+ ) -> Result<Vec<u8>> {
+ match as_str {
+ "size_of" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
+ };
+ let size = self.size_of(ty, locals)?;
+ match size {
+ Some(x) => Ok(x.to_le_bytes().to_vec()),
+ None => return Err(MirEvalError::TypeError("size_of arg is unsized")),
+ }
+ }
+ _ => not_supported!("unknown intrinsic {as_str}"),
+ }
+ }
+
+ pub(crate) fn exec_lang_item(
+ &self,
+ x: LangItem,
+ mut args: std::vec::IntoIter<Vec<u8>>,
+ ) -> Result<Vec<u8>> {
+ use LangItem::*;
+ match x {
+ PanicFmt | BeginPanic => Err(MirEvalError::Panic),
+ SliceLen => {
+ let arg = args
+ .next()
+ .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
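+                // The `&[T]` argument is a fat pointer `(data_ptr, len)`, so its second half holds
+                // the length.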
+ let ptr_size = arg.len() / 2;
+ Ok(arg[ptr_size..].into())
+ }
+ x => not_supported!("Executing lang item {x:?}"),
+ }
+ }
+}
+
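+/// Zero- or sign-extends `x` to 16 little-endian bytes. For example, `pad16(&[0xff], false)` yields
+/// `255` followed by fifteen zero bytes, while `pad16(&[0xff], true)` yields sixteen `0xff` bytes
+/// (i.e. `-1` as an `i128`).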
+pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {
+    let is_negative = is_signed && x.last().unwrap_or(&0) >= &128;
+ let fill_with = if is_negative { 255 } else { 0 };
+ x.iter()
+ .copied()
+ .chain(iter::repeat(fill_with))
+ .take(16)
+ .collect::<Vec<u8>>()
+ .try_into()
+ .expect("iterator take is not working")
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
new file mode 100644
index 000000000..c4dd7c0ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -0,0 +1,1581 @@
+//! This module generates a polymorphic MIR from a HIR body.
+
+use std::{iter, mem, sync::Arc};
+
+use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
+use hir_def::{
+ body::Body,
+ expr::{
+ Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId,
+ RecordLitField,
+ },
+ lang_item::{LangItem, LangItemTarget},
+ layout::LayoutError,
+ path::Path,
+ resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ DefWithBodyId, EnumVariantId, HasModule,
+};
+use hir_expand::name::Name;
+use la_arena::ArenaMap;
+
+use crate::{
+ consteval::ConstEvalError, db::HirDatabase, display::HirDisplay, infer::TypeMismatch,
+ inhabitedness::is_ty_uninhabited_from, layout::layout_of_ty, mapping::ToChalk, static_lifetime,
+ utils::generics, Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
+};
+
+use super::*;
+
+mod as_place;
+
+#[derive(Debug, Clone, Copy)]
+struct LoopBlocks {
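+    /// The block that `continue` and the end of the loop body jump back to.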
+ begin: BasicBlockId,
+ /// `None` for loops that are not terminating
+ end: Option<BasicBlockId>,
+}
+
+struct MirLowerCtx<'a> {
+ result: MirBody,
+ owner: DefWithBodyId,
+ current_loop_blocks: Option<LoopBlocks>,
+ discr_temp: Option<Place>,
+ db: &'a dyn HirDatabase,
+ body: &'a Body,
+ infer: &'a InferenceResult,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum MirLowerError {
+ ConstEvalError(Box<ConstEvalError>),
+ LayoutError(LayoutError),
+ IncompleteExpr,
+ UnresolvedName(String),
+ RecordLiteralWithoutPath,
+ UnresolvedMethod,
+ UnresolvedField,
+ MissingFunctionDefinition,
+ TypeMismatch(TypeMismatch),
+    /// This should never happen. Type mismatch should catch everything.
+ TypeError(&'static str),
+ NotSupported(String),
+ ContinueWithoutLoop,
+ BreakWithoutLoop,
+ Loop,
+    /// Something that should never happen and is definitely a bug, but we don't want to panic if it happens
+ ImplementationError(&'static str),
+ LangItemNotFound(LangItem),
+ MutatingRvalue,
+}
+
+macro_rules! not_supported {
+ ($x: expr) => {
+ return Err(MirLowerError::NotSupported(format!($x)))
+ };
+}
+
+macro_rules! implementation_error {
+ ($x: expr) => {{
+ ::stdx::never!("MIR lower implementation bug: {}", $x);
+ return Err(MirLowerError::ImplementationError($x));
+ }};
+}
+
+impl From<ConstEvalError> for MirLowerError {
+ fn from(value: ConstEvalError) -> Self {
+ match value {
+ ConstEvalError::MirLowerError(e) => e,
+ _ => MirLowerError::ConstEvalError(Box::new(value)),
+ }
+ }
+}
+
+impl From<LayoutError> for MirLowerError {
+ fn from(value: LayoutError) -> Self {
+ MirLowerError::LayoutError(value)
+ }
+}
+
+impl MirLowerError {
+ fn unresolved_path(db: &dyn HirDatabase, p: &Path) -> Self {
+ Self::UnresolvedName(p.display(db).to_string())
+ }
+}
+
+type Result<T> = std::result::Result<T, MirLowerError>;
+
+impl MirLowerCtx<'_> {
+ fn temp(&mut self, ty: Ty) -> Result<LocalId> {
+ if matches!(ty.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
+ implementation_error!("unsized temporaries");
+ }
+ Ok(self.result.locals.alloc(Local { ty }))
+ }
+
+ fn lower_expr_to_some_operand(
+ &mut self,
+ expr_id: ExprId,
+ current: BasicBlockId,
+ ) -> Result<Option<(Operand, BasicBlockId)>> {
+ if !self.has_adjustments(expr_id) {
+ match &self.body.exprs[expr_id] {
+ Expr::Literal(l) => {
+ let ty = self.expr_ty(expr_id);
+ return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
+ }
+ _ => (),
+ }
+ }
+ let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else {
+ return Ok(None);
+ };
+ Ok(Some((Operand::Copy(p), current)))
+ }
+
+ fn lower_expr_to_place_with_adjust(
+ &mut self,
+ expr_id: ExprId,
+ place: Place,
+ current: BasicBlockId,
+ adjustments: &[Adjustment],
+ ) -> Result<Option<BasicBlockId>> {
+ match adjustments.split_last() {
+ Some((last, rest)) => match &last.kind {
+ Adjust::NeverToAny => {
+ let temp = self.temp(TyKind::Never.intern(Interner))?;
+ self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
+ }
+ Adjust::Deref(_) => {
+ let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? else {
+ return Ok(None);
+ };
+ self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
+ Ok(Some(current))
+ }
+ Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
+ let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
+ return Ok(None);
+ };
+ let bk = BorrowKind::from_chalk(*m);
+ self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
+ Ok(Some(current))
+ }
+ Adjust::Pointer(cast) => {
+ let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
+ return Ok(None);
+ };
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::Cast(
+ CastKind::Pointer(cast.clone()),
+ Operand::Copy(p).into(),
+ last.target.clone(),
+ ),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ },
+ None => self.lower_expr_to_place_without_adjust(expr_id, place, current),
+ }
+ }
+
+ fn lower_expr_to_place(
+ &mut self,
+ expr_id: ExprId,
+ place: Place,
+ prev_block: BasicBlockId,
+ ) -> Result<Option<BasicBlockId>> {
+ if let Some(adjustments) = self.infer.expr_adjustments.get(&expr_id) {
+ return self.lower_expr_to_place_with_adjust(expr_id, place, prev_block, adjustments);
+ }
+ self.lower_expr_to_place_without_adjust(expr_id, place, prev_block)
+ }
+
+ fn lower_expr_to_place_without_adjust(
+ &mut self,
+ expr_id: ExprId,
+ place: Place,
+ mut current: BasicBlockId,
+ ) -> Result<Option<BasicBlockId>> {
+ match &self.body.exprs[expr_id] {
+ Expr::Missing => Err(MirLowerError::IncompleteExpr),
+ Expr::Path(p) => {
+ let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
+ let pr = resolver
+ .resolve_path_in_value_ns(self.db.upcast(), p.mod_path())
+ .ok_or_else(unresolved_name)?;
+ let pr = match pr {
+ ResolveValueResult::ValueNs(v) => v,
+ ResolveValueResult::Partial(..) => {
+ if let Some(assoc) = self
+ .infer
+ .assoc_resolutions_for_expr(expr_id)
+ {
+ match assoc.0 {
+ hir_def::AssocItemId::ConstId(c) => {
+ self.lower_const(c, current, place, expr_id.into())?;
+ return Ok(Some(current))
+ },
+ _ => not_supported!("associated functions and types"),
+ }
+ } else if let Some(variant) = self
+ .infer
+ .variant_resolution_for_expr(expr_id)
+ {
+ match variant {
+ VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
+ VariantId::StructId(s) => ValueNs::StructId(s),
+ VariantId::UnionId(_) => implementation_error!("Union variant as path"),
+ }
+ } else {
+ return Err(unresolved_name());
+ }
+ }
+ };
+ match pr {
+ ValueNs::LocalBinding(pat_id) => {
+ self.push_assignment(
+ current,
+ place,
+ Operand::Copy(self.result.binding_locals[pat_id].into()).into(),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ ValueNs::ConstId(const_id) => {
+ self.lower_const(const_id, current, place, expr_id.into())?;
+ Ok(Some(current))
+ }
+ ValueNs::EnumVariantId(variant_id) => {
+ let ty = self.infer.type_of_expr[expr_id].clone();
+ let current = self.lower_enum_variant(
+ variant_id,
+ current,
+ place,
+ ty,
+ vec![],
+ expr_id.into(),
+ )?;
+ Ok(Some(current))
+ }
+ ValueNs::GenericParam(p) => {
+ let Some(def) = self.owner.as_generic_def_id() else {
+ not_supported!("owner without generic def id");
+ };
+ let gen = generics(self.db.upcast(), def);
+ let ty = self.expr_ty(expr_id);
+ self.push_assignment(
+ current,
+ place,
+ Operand::Constant(
+ ConstData {
+ ty,
+ value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
+ DebruijnIndex::INNERMOST,
+ gen.param_idx(p.into()).ok_or(MirLowerError::TypeError(
+ "fail to lower const generic param",
+ ))?,
+ )),
+ }
+ .intern(Interner),
+ )
+ .into(),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ ValueNs::StructId(_) => {
+                        // It's probably a unit struct or a zero-sized function, so no action is needed.
+ Ok(Some(current))
+ }
+ x => {
+ not_supported!("unknown name {x:?} in value name space");
+ }
+ }
+ }
+ Expr::If { condition, then_branch, else_branch } => {
+ let Some((discr, current)) = self.lower_expr_to_some_operand(*condition, current)? else {
+ return Ok(None);
+ };
+ let start_of_then = self.new_basic_block();
+ let end_of_then =
+ self.lower_expr_to_place(*then_branch, place.clone(), start_of_then)?;
+ let start_of_else = self.new_basic_block();
+ let end_of_else = if let Some(else_branch) = else_branch {
+ self.lower_expr_to_place(*else_branch, place, start_of_else)?
+ } else {
+ Some(start_of_else)
+ };
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr,
+ targets: SwitchTargets::static_if(1, start_of_then, start_of_else),
+ },
+ );
+ Ok(self.merge_blocks(end_of_then, end_of_else))
+ }
+ Expr::Let { pat, expr } => {
+ let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ let (then_target, else_target) = self.pattern_match(
+ current,
+ None,
+ cond_place,
+ self.expr_ty_after_adjustments(*expr),
+ *pat,
+ BindingAnnotation::Unannotated,
+ )?;
+ self.write_bytes_to_place(
+ then_target,
+ place.clone(),
+ vec![1],
+ TyBuilder::bool(),
+ MirSpan::Unknown,
+ )?;
+ if let Some(else_target) = else_target {
+ self.write_bytes_to_place(
+ else_target,
+ place,
+ vec![0],
+ TyBuilder::bool(),
+ MirSpan::Unknown,
+ )?;
+ }
+ Ok(self.merge_blocks(Some(then_target), else_target))
+ }
+ Expr::Unsafe { id: _, statements, tail } => {
+ self.lower_block_to_place(None, statements, current, *tail, place)
+ }
+ Expr::Block { id: _, statements, tail, label } => {
+ self.lower_block_to_place(*label, statements, current, *tail, place)
+ }
+ Expr::Loop { body, label } => self.lower_loop(current, *label, |this, begin| {
+ if let Some((_, block)) = this.lower_expr_as_place(begin, *body, true)? {
+ this.set_goto(block, begin);
+ }
+ Ok(())
+ }),
+ Expr::While { condition, body, label } => {
+ self.lower_loop(current, *label, |this, begin| {
+ let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else {
+ return Ok(());
+ };
+ let end = this.current_loop_end()?;
+ let after_cond = this.new_basic_block();
+ this.set_terminator(
+ to_switch,
+ Terminator::SwitchInt {
+ discr,
+ targets: SwitchTargets::static_if(1, after_cond, end),
+ },
+ );
+ if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
+ this.set_goto(block, begin);
+ }
+ Ok(())
+ })
+ }
+ &Expr::For { iterable, pat, body, label } => {
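+                // Desugar `for <pat> in <iterable> <body>` roughly into:
+                //     let mut iter = IntoIterator::into_iter(iterable);
+                //     loop {
+                //         match Iterator::next(&mut iter) {
+                //             Some(<pat>) => <body>,
+                //             None => break,
+                //         }
+                //     }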
+ let into_iter_fn = self.resolve_lang_item(LangItem::IntoIterIntoIter)?
+ .as_function().ok_or(MirLowerError::LangItemNotFound(LangItem::IntoIterIntoIter))?;
+ let iter_next_fn = self.resolve_lang_item(LangItem::IteratorNext)?
+ .as_function().ok_or(MirLowerError::LangItemNotFound(LangItem::IteratorNext))?;
+ let option_some = self.resolve_lang_item(LangItem::OptionSome)?
+ .as_enum_variant().ok_or(MirLowerError::LangItemNotFound(LangItem::OptionSome))?;
+ let option = option_some.parent;
+ let into_iter_fn_op = Operand::const_zst(
+ TyKind::FnDef(
+ self.db.intern_callable_def(CallableDefId::FunctionId(into_iter_fn)).into(),
+ Substitution::from1(Interner, self.expr_ty(iterable))
+ ).intern(Interner));
+ let iter_next_fn_op = Operand::const_zst(
+ TyKind::FnDef(
+ self.db.intern_callable_def(CallableDefId::FunctionId(iter_next_fn)).into(),
+ Substitution::from1(Interner, self.expr_ty(iterable))
+ ).intern(Interner));
+ let &Some(iterator_ty) = &self.infer.type_of_for_iterator.get(&expr_id) else {
+ return Err(MirLowerError::TypeError("unknown for loop iterator type"));
+ };
+ let ref_mut_iterator_ty = TyKind::Ref(Mutability::Mut, static_lifetime(), iterator_ty.clone()).intern(Interner);
+ let item_ty = &self.infer.type_of_pat[pat];
+ let option_item_ty = TyKind::Adt(chalk_ir::AdtId(option.into()), Substitution::from1(Interner, item_ty.clone())).intern(Interner);
+ let iterator_place: Place = self.temp(iterator_ty.clone())?.into();
+ let option_item_place: Place = self.temp(option_item_ty.clone())?.into();
+ let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty)?.into();
+ let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false)?
+ else {
+ return Ok(None);
+ };
+ self.push_assignment(current, ref_mut_iterator_place.clone(), Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, iterator_place), expr_id.into());
+ self.lower_loop(current, label, |this, begin| {
+ let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false)?
+ else {
+ return Ok(());
+ };
+ let end = this.current_loop_end()?;
+ let (current, _) = this.pattern_matching_variant(
+ option_item_ty.clone(),
+ BindingAnnotation::Unannotated,
+ option_item_place.into(),
+ option_some.into(),
+ current,
+ pat.into(),
+ Some(end),
+ &[pat], &None)?;
+ if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? {
+ this.set_goto(block, begin);
+ }
+ Ok(())
+ })
+ },
+ Expr::Call { callee, args, .. } => {
+ let callee_ty = self.expr_ty_after_adjustments(*callee);
+ match &callee_ty.data(Interner).kind {
+ chalk_ir::TyKind::FnDef(..) => {
+ let func = Operand::from_bytes(vec![], callee_ty.clone());
+ self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id))
+ }
+ TyKind::Scalar(_)
+ | TyKind::Tuple(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Adt(_, _)
+ | TyKind::Str
+ | TyKind::Foreign(_)
+ | TyKind::Slice(_) => {
+ return Err(MirLowerError::TypeError("function call on data type"))
+ }
+ TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition),
+ TyKind::AssociatedType(_, _)
+ | TyKind::Raw(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::OpaqueType(_, _)
+ | TyKind::Never
+ | TyKind::Closure(_, _)
+ | TyKind::Generator(_, _)
+ | TyKind::GeneratorWitness(_, _)
+ | TyKind::Placeholder(_)
+ | TyKind::Dyn(_)
+ | TyKind::Alias(_)
+ | TyKind::Function(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _) => not_supported!("dynamic function call"),
+ }
+ }
+ Expr::MethodCall { receiver, args, .. } => {
+ let (func_id, generic_args) =
+ self.infer.method_resolution(expr_id).ok_or(MirLowerError::UnresolvedMethod)?;
+ let ty = chalk_ir::TyKind::FnDef(
+ CallableDefId::FunctionId(func_id).to_chalk(self.db),
+ generic_args,
+ )
+ .intern(Interner);
+ let func = Operand::from_bytes(vec![], ty);
+ self.lower_call_and_args(
+ func,
+ iter::once(*receiver).chain(args.iter().copied()),
+ place,
+ current,
+ self.is_uninhabited(expr_id),
+ )
+ }
+ Expr::Match { expr, arms } => {
+ let Some((cond_place, mut current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
+ return Ok(None);
+ };
+ let cond_ty = self.expr_ty_after_adjustments(*expr);
+ let mut end = None;
+ for MatchArm { pat, guard, expr } in arms.iter() {
+ if guard.is_some() {
+ not_supported!("pattern matching with guard");
+ }
+ let (then, otherwise) = self.pattern_match(
+ current,
+ None,
+ cond_place.clone(),
+ cond_ty.clone(),
+ *pat,
+ BindingAnnotation::Unannotated,
+ )?;
+ if let Some(block) = self.lower_expr_to_place(*expr, place.clone(), then)? {
+ let r = end.get_or_insert_with(|| self.new_basic_block());
+ self.set_goto(block, *r);
+ }
+ match otherwise {
+ Some(o) => current = o,
+ None => {
+ // The current pattern was irrefutable, so there is no need to generate code
+                            // for the rest of the patterns
+ break;
+ }
+ }
+ }
+ if self.is_unterminated(current) {
+ self.set_terminator(current, Terminator::Unreachable);
+ }
+ Ok(end)
+ }
+ Expr::Continue { label } => match label {
+ Some(_) => not_supported!("continue with label"),
+ None => {
+ let loop_data =
+ self.current_loop_blocks.ok_or(MirLowerError::ContinueWithoutLoop)?;
+ self.set_goto(current, loop_data.begin);
+ Ok(None)
+ }
+ },
+ Expr::Break { expr, label } => {
+ if expr.is_some() {
+ not_supported!("break with value");
+ }
+ match label {
+ Some(_) => not_supported!("break with label"),
+ None => {
+ let end =
+ self.current_loop_end()?;
+ self.set_goto(current, end);
+ Ok(None)
+ }
+ }
+ }
+ Expr::Return { expr } => {
+ if let Some(expr) = expr {
+ if let Some(c) = self.lower_expr_to_place(*expr, return_slot().into(), current)? {
+ current = c;
+ } else {
+ return Ok(None);
+ }
+ }
+ self.set_terminator(current, Terminator::Return);
+ Ok(None)
+ }
+ Expr::Yield { .. } => not_supported!("yield"),
+ Expr::RecordLit { fields, path, .. } => {
+ let variant_id = self
+ .infer
+ .variant_resolution_for_expr(expr_id)
+ .ok_or_else(|| match path {
+ Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
+ None => MirLowerError::RecordLiteralWithoutPath,
+ })?;
+ let subst = match self.expr_ty(expr_id).kind(Interner) {
+ TyKind::Adt(_, s) => s.clone(),
+ _ => not_supported!("Non ADT record literal"),
+ };
+ let variant_data = variant_id.variant_data(self.db.upcast());
+ match variant_id {
+ VariantId::EnumVariantId(_) | VariantId::StructId(_) => {
+ let mut operands = vec![None; variant_data.fields().len()];
+ for RecordLitField { name, expr } in fields.iter() {
+ let field_id =
+ variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
+ let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ operands[u32::from(field_id.into_raw()) as usize] = Some(op);
+ }
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::Aggregate(
+ AggregateKind::Adt(variant_id, subst),
+ operands.into_iter().map(|x| x).collect::<Option<_>>().ok_or(
+ MirLowerError::TypeError("missing field in record literal"),
+ )?,
+ ),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ VariantId::UnionId(union_id) => {
+ let [RecordLitField { name, expr }] = fields.as_ref() else {
+ not_supported!("Union record literal with more than one field");
+ };
+ let local_id =
+ variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
+ let mut place = place;
+ place
+ .projection
+ .push(PlaceElem::Field(FieldId { parent: union_id.into(), local_id }));
+ self.lower_expr_to_place(*expr, place, current)
+ }
+ }
+ }
+ Expr::Await { .. } => not_supported!("await"),
+ Expr::Try { .. } => not_supported!("? operator"),
+ Expr::Yeet { .. } => not_supported!("yeet"),
+ Expr::TryBlock { .. } => not_supported!("try block"),
+ Expr::Async { .. } => not_supported!("async block"),
+ Expr::Const { .. } => not_supported!("anonymous const block"),
+ Expr::Cast { expr, type_ref: _ } => {
+ let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ return Ok(None);
+ };
+ let source_ty = self.infer[*expr].clone();
+ let target_ty = self.infer[expr_id].clone();
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ Expr::Ref { expr, rawness: _, mutability } => {
+ let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ let bk = BorrowKind::from_hir(*mutability);
+ self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::Box { .. } => not_supported!("box expression"),
+ Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::expr::UnaryOp::Deref, .. } => {
+ let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else {
+ return Ok(None);
+ };
+ self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::UnaryOp { expr, op: op @ (hir_def::expr::UnaryOp::Not | hir_def::expr::UnaryOp::Neg) } => {
+ let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ return Ok(None);
+ };
+ let operation = match op {
+ hir_def::expr::UnaryOp::Not => UnOp::Not,
+ hir_def::expr::UnaryOp::Neg => UnOp::Neg,
+ _ => unreachable!(),
+ };
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::UnaryOp(operation, operand),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ },
+ Expr::BinaryOp { lhs, rhs, op } => {
+ let op = op.ok_or(MirLowerError::IncompleteExpr)?;
+ if let hir_def::expr::BinaryOp::Assignment { op } = op {
+ if op.is_some() {
+ not_supported!("assignment with arith op (like +=)");
+ }
+ let Some((lhs_place, current)) =
+ self.lower_expr_as_place(current, *lhs, false)?
+ else {
+ return Ok(None);
+ };
+ let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ return Ok(None);
+ };
+ self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
+ return Ok(Some(current));
+ }
+ let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
+ return Ok(None);
+ };
+ let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ return Ok(None);
+ };
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::CheckedBinaryOp(
+ match op {
+ hir_def::expr::BinaryOp::LogicOp(op) => match op {
+ hir_def::expr::LogicOp::And => BinOp::BitAnd, // FIXME: make these short circuit
+ hir_def::expr::LogicOp::Or => BinOp::BitOr,
+ },
+ hir_def::expr::BinaryOp::ArithOp(op) => BinOp::from(op),
+ hir_def::expr::BinaryOp::CmpOp(op) => BinOp::from(op),
+ hir_def::expr::BinaryOp::Assignment { .. } => unreachable!(), // handled above
+ },
+ lhs_op,
+ rhs_op,
+ ),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ Expr::Range { .. } => not_supported!("range"),
+ Expr::Closure { .. } => not_supported!("closure"),
+ Expr::Tuple { exprs, is_assignee_expr: _ } => {
+ let Some(values) = exprs
+ .iter()
+ .map(|x| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
+ else {
+ return Ok(None);
+ };
+ let r = Rvalue::Aggregate(
+ AggregateKind::Tuple(self.expr_ty(expr_id)),
+ values,
+ );
+ self.push_assignment(current, place, r, expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::Array(l) => match l {
+ Array::ElementList { elements, .. } => {
+ let elem_ty = match &self.expr_ty(expr_id).data(Interner).kind {
+ TyKind::Array(ty, _) => ty.clone(),
+ _ => {
+ return Err(MirLowerError::TypeError(
+ "Array expression with non array type",
+ ))
+ }
+ };
+ let Some(values) = elements
+ .iter()
+ .map(|x| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
+ else {
+ return Ok(None);
+ };
+ let r = Rvalue::Aggregate(
+ AggregateKind::Array(elem_ty),
+ values,
+ );
+ self.push_assignment(current, place, r, expr_id.into());
+ Ok(Some(current))
+ }
+ Array::Repeat { .. } => not_supported!("array repeat"),
+ },
+ Expr::Literal(l) => {
+ let ty = self.expr_ty(expr_id);
+ let op = self.lower_literal_to_operand(ty, l)?;
+ self.push_assignment(current, place, op.into(), expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::Underscore => not_supported!("underscore"),
+ }
+ }
+
+ fn push_field_projection(&self, place: &mut Place, expr_id: ExprId) -> Result<()> {
+ if let Expr::Field { expr, name } = &self.body[expr_id] {
+ if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) {
+ let index = name
+ .as_tuple_index()
+ .ok_or(MirLowerError::TypeError("named field on tuple"))?;
+ place.projection.push(ProjectionElem::TupleField(index))
+ } else {
+ let field =
+ self.infer.field_resolution(expr_id).ok_or(MirLowerError::UnresolvedField)?;
+ place.projection.push(ProjectionElem::Field(field));
+ }
+ } else {
+ not_supported!("")
+ }
+ Ok(())
+ }
+
+ fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
+ let size = layout_of_ty(self.db, &ty, self.owner.module(self.db.upcast()).krate())?
+ .size
+ .bytes_usize();
+ let bytes = match l {
+ hir_def::expr::Literal::String(b) => {
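+                // A string literal becomes a fat-pointer constant: address 0 within the constant's
+                // own memory map (relocated by the evaluator) plus the byte length.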
+ let b = b.as_bytes();
+ let mut data = vec![];
+ data.extend(0usize.to_le_bytes());
+ data.extend(b.len().to_le_bytes());
+ let mut mm = MemoryMap::default();
+ mm.insert(0, b.to_vec());
+ return Ok(Operand::from_concrete_const(data, mm, ty));
+ }
+ hir_def::expr::Literal::ByteString(b) => {
+ let mut data = vec![];
+ data.extend(0usize.to_le_bytes());
+ data.extend(b.len().to_le_bytes());
+ let mut mm = MemoryMap::default();
+ mm.insert(0, b.to_vec());
+ return Ok(Operand::from_concrete_const(data, mm, ty));
+ }
+ hir_def::expr::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
+ hir_def::expr::Literal::Bool(b) => vec![*b as u8],
+ hir_def::expr::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(),
+ hir_def::expr::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(),
+ hir_def::expr::Literal::Float(f, _) => match size {
+ 8 => f.into_f64().to_le_bytes().into(),
+ 4 => f.into_f32().to_le_bytes().into(),
+ _ => {
+ return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes"))
+ }
+ },
+ };
+ Ok(Operand::from_concrete_const(bytes, MemoryMap::default(), ty))
+ }
+
+ fn new_basic_block(&mut self) -> BasicBlockId {
+ self.result.basic_blocks.alloc(BasicBlock::default())
+ }
+
+ fn lower_const(
+ &mut self,
+ const_id: hir_def::ConstId,
+ prev_block: BasicBlockId,
+ place: Place,
+ span: MirSpan,
+ ) -> Result<()> {
+ let c = self.db.const_eval(const_id)?;
+ self.write_const_to_place(c, prev_block, place, span)
+ }
+
+ fn write_const_to_place(
+ &mut self,
+ c: Const,
+ prev_block: BasicBlockId,
+ place: Place,
+ span: MirSpan,
+ ) -> Result<()> {
+ self.push_assignment(prev_block, place, Operand::Constant(c).into(), span);
+ Ok(())
+ }
+
+ fn write_bytes_to_place(
+ &mut self,
+ prev_block: BasicBlockId,
+ place: Place,
+ cv: Vec<u8>,
+ ty: Ty,
+ span: MirSpan,
+ ) -> Result<()> {
+ self.push_assignment(prev_block, place, Operand::from_bytes(cv, ty).into(), span);
+ Ok(())
+ }
+
+ fn lower_enum_variant(
+ &mut self,
+ variant_id: EnumVariantId,
+ prev_block: BasicBlockId,
+ place: Place,
+ ty: Ty,
+ fields: Vec<Operand>,
+ span: MirSpan,
+ ) -> Result<BasicBlockId> {
+ let subst = match ty.kind(Interner) {
+ TyKind::Adt(_, subst) => subst.clone(),
+ _ => not_supported!("Non ADT enum"),
+ };
+ self.push_assignment(
+ prev_block,
+ place,
+ Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst), fields),
+ span,
+ );
+ Ok(prev_block)
+ }
+
+ fn lower_call_and_args(
+ &mut self,
+ func: Operand,
+ args: impl Iterator<Item = ExprId>,
+ place: Place,
+ mut current: BasicBlockId,
+ is_uninhabited: bool,
+ ) -> Result<Option<BasicBlockId>> {
+ let Some(args) = args
+ .map(|arg| {
+ if let Some((temp, c)) = self.lower_expr_to_some_operand(arg, current)? {
+ current = c;
+ Ok(Some(temp))
+ } else {
+ Ok(None)
+ }
+ })
+ .collect::<Result<Option<Vec<_>>>>()?
+ else {
+ return Ok(None);
+ };
+ self.lower_call(func, args, place, current, is_uninhabited)
+ }
+
+ fn lower_call(
+ &mut self,
+ func: Operand,
+ args: Vec<Operand>,
+ place: Place,
+ current: BasicBlockId,
+ is_uninhabited: bool,
+ ) -> Result<Option<BasicBlockId>> {
+ let b = if is_uninhabited { None } else { Some(self.new_basic_block()) };
+ self.set_terminator(
+ current,
+ Terminator::Call {
+ func,
+ args,
+ destination: place,
+ target: b,
+ cleanup: None,
+ from_hir_call: true,
+ },
+ );
+ Ok(b)
+ }
+
+ fn is_unterminated(&mut self, source: BasicBlockId) -> bool {
+ self.result.basic_blocks[source].terminator.is_none()
+ }
+
+ fn set_terminator(&mut self, source: BasicBlockId, terminator: Terminator) {
+ self.result.basic_blocks[source].terminator = Some(terminator);
+ }
+
+ fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId) {
+ self.set_terminator(source, Terminator::Goto { target });
+ }
+
+ fn expr_ty(&self, e: ExprId) -> Ty {
+ self.infer[e].clone()
+ }
+
+ fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
+ let mut ty = None;
+ if let Some(x) = self.infer.expr_adjustments.get(&e) {
+ if let Some(x) = x.last() {
+ ty = Some(x.target.clone());
+ }
+ }
+ ty.unwrap_or_else(|| self.expr_ty(e))
+ }
+
+ fn push_statement(&mut self, block: BasicBlockId, statement: Statement) {
+ self.result.basic_blocks[block].statements.push(statement);
+ }
+
+ fn push_assignment(
+ &mut self,
+ block: BasicBlockId,
+ place: Place,
+ rvalue: Rvalue,
+ span: MirSpan,
+ ) {
+ self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span));
+ }
+
+    /// Takes a `current` unterminated block, appends some statements and possibly a terminator to it to check whether
+    /// the pattern matches and to write bindings, and returns two unterminated blocks: one for the matched path (which
+    /// can be the `current` block) and one for the mismatched path. If the input pattern is irrefutable, the
+    /// mismatched path block is `None`.
+    ///
+    /// By default, it will create a new block for the mismatched path. If you already have one, you can provide it via
+    /// the `current_else` argument to save an unnecessary jump. If `current_else` isn't `None`, the resulting mismatched
+    /// path won't be `None` either. Note that this function will add jumps to the beginning of the `current_else` block,
+    /// so it should be an empty block.
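+    ///
+    /// For example (a sketch), matching the pattern `Some(x)` against a place of type `Option<i32>`
+    /// emits a switch on `Rvalue::Discriminant` of that place: the matched block binds `x` from the
+    /// variant's field, and the mismatched block receives control for any other discriminant.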
+ fn pattern_match(
+ &mut self,
+ mut current: BasicBlockId,
+ mut current_else: Option<BasicBlockId>,
+ mut cond_place: Place,
+ mut cond_ty: Ty,
+ pattern: PatId,
+ mut binding_mode: BindingAnnotation,
+ ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+ Ok(match &self.body.pats[pattern] {
+ Pat::Missing => return Err(MirLowerError::IncompleteExpr),
+ Pat::Wild => (current, current_else),
+ Pat::Tuple { args, ellipsis } => {
+ pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
+ let subst = match cond_ty.kind(Interner) {
+ TyKind::Tuple(_, s) => s,
+ _ => {
+ return Err(MirLowerError::TypeError(
+ "non tuple type matched with tuple pattern",
+ ))
+ }
+ };
+ self.pattern_match_tuple_like(
+ current,
+ current_else,
+ args.iter().enumerate().map(|(i, x)| {
+ (
+ PlaceElem::TupleField(i),
+ *x,
+ subst.at(Interner, i).assert_ty_ref(Interner).clone(),
+ )
+ }),
+ *ellipsis,
+ &cond_place,
+ binding_mode,
+ )?
+ }
+ Pat::Or(pats) => {
+ let then_target = self.new_basic_block();
+ let mut finished = false;
+ for pat in &**pats {
+ let (next, next_else) = self.pattern_match(
+ current,
+ None,
+ cond_place.clone(),
+ cond_ty.clone(),
+ *pat,
+ binding_mode,
+ )?;
+ self.set_goto(next, then_target);
+ match next_else {
+ Some(t) => {
+ current = t;
+ }
+ None => {
+ finished = true;
+ break;
+ }
+ }
+ }
+ if !finished {
+ let ce = *current_else.get_or_insert_with(|| self.new_basic_block());
+ self.set_goto(current, ce);
+ }
+ (then_target, current_else)
+ }
+ Pat::Record { .. } => not_supported!("record pattern"),
+ Pat::Range { .. } => not_supported!("range pattern"),
+ Pat::Slice { .. } => not_supported!("slice pattern"),
+ Pat::Path(_) => {
+ let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
+ not_supported!("unresolved variant");
+ };
+ self.pattern_matching_variant(
+ cond_ty,
+ binding_mode,
+ cond_place,
+ variant,
+ current,
+ pattern.into(),
+ current_else,
+ &[],
+ &None,
+ )?
+ }
+ Pat::Lit(l) => {
+ let then_target = self.new_basic_block();
+ let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
+ match &self.body.exprs[*l] {
+ Expr::Literal(l) => match l {
+ hir_def::expr::Literal::Int(x, _) => {
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr: Operand::Copy(cond_place),
+ targets: SwitchTargets::static_if(
+ *x as u128,
+ then_target,
+ else_target,
+ ),
+ },
+ );
+ }
+ hir_def::expr::Literal::Uint(x, _) => {
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr: Operand::Copy(cond_place),
+ targets: SwitchTargets::static_if(*x, then_target, else_target),
+ },
+ );
+ }
+ _ => not_supported!("non int path literal"),
+ },
+ _ => not_supported!("expression path literal"),
+ }
+ (then_target, Some(else_target))
+ }
+ Pat::Bind { id, subpat } => {
+ let target_place = self.result.binding_locals[*id];
+ let mode = self.body.bindings[*id].mode;
+ if let Some(subpat) = subpat {
+ (current, current_else) = self.pattern_match(
+ current,
+ current_else,
+ cond_place.clone(),
+ cond_ty,
+ *subpat,
+ binding_mode,
+ )?
+ }
+ if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) {
+ binding_mode = mode;
+ }
+ self.push_storage_live(*id, current);
+ self.push_assignment(
+ current,
+ target_place.into(),
+ match binding_mode {
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable => {
+ Operand::Copy(cond_place).into()
+ }
+ BindingAnnotation::Ref => Rvalue::Ref(BorrowKind::Shared, cond_place),
+ BindingAnnotation::RefMut => Rvalue::Ref(
+ BorrowKind::Mut { allow_two_phase_borrow: false },
+ cond_place,
+ ),
+ },
+ pattern.into(),
+ );
+ (current, current_else)
+ }
+ Pat::TupleStruct { path: _, args, ellipsis } => {
+ let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
+ not_supported!("unresolved variant");
+ };
+ self.pattern_matching_variant(
+ cond_ty,
+ binding_mode,
+ cond_place,
+ variant,
+ current,
+ pattern.into(),
+ current_else,
+ args,
+ ellipsis,
+ )?
+ }
+ Pat::Ref { .. } => not_supported!("& pattern"),
+ Pat::Box { .. } => not_supported!("box pattern"),
+ Pat::ConstBlock(_) => not_supported!("const block pattern"),
+ })
+ }
+
+ fn pattern_matching_variant(
+ &mut self,
+ mut cond_ty: Ty,
+ mut binding_mode: BindingAnnotation,
+ mut cond_place: Place,
+ variant: VariantId,
+ current: BasicBlockId,
+ span: MirSpan,
+ current_else: Option<BasicBlockId>,
+ args: &[PatId],
+ ellipsis: &Option<usize>,
+ ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+ pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
+ let subst = match cond_ty.kind(Interner) {
+ TyKind::Adt(_, s) => s,
+ _ => return Err(MirLowerError::TypeError("non adt type matched with tuple struct")),
+ };
+ let fields_type = self.db.field_types(variant);
+ Ok(match variant {
+ VariantId::EnumVariantId(v) => {
+ let e = self.db.const_eval_discriminant(v)? as u128;
+ let next = self.new_basic_block();
+ let tmp = self.discr_temp_place();
+ self.push_assignment(
+ current,
+ tmp.clone(),
+ Rvalue::Discriminant(cond_place.clone()),
+ span,
+ );
+ let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr: Operand::Copy(tmp),
+ targets: SwitchTargets::static_if(e, next, else_target),
+ },
+ );
+ let enum_data = self.db.enum_data(v.parent);
+ let fields =
+ enum_data.variants[v.local_id].variant_data.fields().iter().map(|(x, _)| {
+ (
+ PlaceElem::Field(FieldId { parent: v.into(), local_id: x }),
+ fields_type[x].clone().substitute(Interner, subst),
+ )
+ });
+ self.pattern_match_tuple_like(
+ next,
+ Some(else_target),
+ args.iter().zip(fields).map(|(x, y)| (y.0, *x, y.1)),
+ *ellipsis,
+ &cond_place,
+ binding_mode,
+ )?
+ }
+ VariantId::StructId(s) => {
+ let struct_data = self.db.struct_data(s);
+ let fields = struct_data.variant_data.fields().iter().map(|(x, _)| {
+ (
+ PlaceElem::Field(FieldId { parent: s.into(), local_id: x }),
+ fields_type[x].clone().substitute(Interner, subst),
+ )
+ });
+ self.pattern_match_tuple_like(
+ current,
+ current_else,
+ args.iter().zip(fields).map(|(x, y)| (y.0, *x, y.1)),
+ *ellipsis,
+ &cond_place,
+ binding_mode,
+ )?
+ }
+ VariantId::UnionId(_) => {
+ return Err(MirLowerError::TypeError("pattern matching on union"))
+ }
+ })
+ }
+
+ fn pattern_match_tuple_like(
+ &mut self,
+ mut current: BasicBlockId,
+ mut current_else: Option<BasicBlockId>,
+ args: impl Iterator<Item = (PlaceElem, PatId, Ty)>,
+ ellipsis: Option<usize>,
+ cond_place: &Place,
+ binding_mode: BindingAnnotation,
+ ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+ if ellipsis.is_some() {
+ not_supported!("tuple like pattern with ellipsis");
+ }
+ for (proj, arg, ty) in args {
+ let mut cond_place = cond_place.clone();
+ cond_place.projection.push(proj);
+ (current, current_else) =
+ self.pattern_match(current, current_else, cond_place, ty, arg, binding_mode)?;
+ }
+ Ok((current, current_else))
+ }
+
+ fn discr_temp_place(&mut self) -> Place {
+ match &self.discr_temp {
+ Some(x) => x.clone(),
+ None => {
+ let tmp: Place =
+ self.temp(TyBuilder::discr_ty()).expect("discr_ty is never unsized").into();
+ self.discr_temp = Some(tmp.clone());
+ tmp
+ }
+ }
+ }
+
+ fn lower_loop(
+ &mut self,
+ prev_block: BasicBlockId,
+ label: Option<LabelId>,
+ f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>,
+ ) -> Result<Option<BasicBlockId>> {
+ if label.is_some() {
+ not_supported!("loop with label");
+ }
+ let begin = self.new_basic_block();
+ let prev =
+ mem::replace(&mut self.current_loop_blocks, Some(LoopBlocks { begin, end: None }));
+ self.set_goto(prev_block, begin);
+ f(self, begin)?;
+ let my = mem::replace(&mut self.current_loop_blocks, prev)
+ .ok_or(MirLowerError::ImplementationError("current_loop_blocks is corrupt"))?;
+ Ok(my.end)
+ }
+
+ fn has_adjustments(&self, expr_id: ExprId) -> bool {
+ !self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true)
+ }
+
+ fn merge_blocks(
+ &mut self,
+ b1: Option<BasicBlockId>,
+ b2: Option<BasicBlockId>,
+ ) -> Option<BasicBlockId> {
+ match (b1, b2) {
+ (None, None) => None,
+ (None, Some(b)) | (Some(b), None) => Some(b),
+ (Some(b1), Some(b2)) => {
+ let bm = self.new_basic_block();
+ self.set_goto(b1, bm);
+ self.set_goto(b2, bm);
+ Some(bm)
+ }
+ }
+ }
+
+ fn current_loop_end(&mut self) -> Result<BasicBlockId> {
+ let r = match self
+ .current_loop_blocks
+ .as_mut()
+ .ok_or(MirLowerError::ImplementationError("Current loop access out of loop"))?
+ .end
+ {
+ Some(x) => x,
+ None => {
+ let s = self.new_basic_block();
+ self.current_loop_blocks
+ .as_mut()
+ .ok_or(MirLowerError::ImplementationError("Current loop access out of loop"))?
+ .end = Some(s);
+ s
+ }
+ };
+ Ok(r)
+ }
+
+ fn is_uninhabited(&self, expr_id: ExprId) -> bool {
+ is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db)
+ }
+
+    /// This function pushes a `StorageLive` statement for the binding, and applies changes to add `StorageDead` in
+    /// the appropriate places.
+ fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) {
+        // The current implementation is wrong. It adds no `StorageDead` at the end of the scope, nor before each break
+        // and continue. It just adds a `StorageDead` before the `StorageLive`, which is not wrong, but unneeded in
+        // a proper implementation. Due to this limitation, implementing a borrow checker on top of this MIR will falsely
+ // allow this:
+ //
+ // ```
+ // let x;
+ // loop {
+ // let y = 2;
+ // x = &y;
+ // if some_condition {
+ // break; // we need to add a StorageDead(y) above this to kill the x borrow
+ // }
+ // }
+ // use(x)
+ // ```
+ // But I think this approach works for mutability analysis, as users can't write code which mutates a binding
+ // after `StorageDead`, except in loops, which are handled by this hack.
+ let span = self.body.bindings[b]
+ .definitions
+ .first()
+ .copied()
+ .map(MirSpan::PatId)
+ .unwrap_or(MirSpan::Unknown);
+ let l = self.result.binding_locals[b];
+ self.push_statement(current, StatementKind::StorageDead(l).with_span(span));
+ self.push_statement(current, StatementKind::StorageLive(l).with_span(span));
+ }
+
+ fn resolve_lang_item(&self, item: LangItem) -> Result<LangItemTarget> {
+ let crate_id = self.owner.module(self.db.upcast()).krate();
+ self.db.lang_item(crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
+ }
+
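+ /// Lowers a block body: processes the `let` and expression statements in order, then lowers the
+ /// tail expression (if any) into `place`.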
+ fn lower_block_to_place(
+ &mut self,
+ label: Option<LabelId>,
+ statements: &[hir_def::expr::Statement],
+ mut current: BasicBlockId,
+ tail: Option<ExprId>,
+ place: Place,
+ ) -> Result<Option<Idx<BasicBlock>>> {
+ if label.is_some() {
+ not_supported!("block with label");
+ }
+ for statement in statements.iter() {
+ match statement {
+ hir_def::expr::Statement::Let { pat, initializer, else_branch, type_ref: _ } => {
+ if let Some(expr_id) = initializer {
+ let else_block;
+ let Some((init_place, c)) =
+ self.lower_expr_as_place(current, *expr_id, true)?
+ else {
+ return Ok(None);
+ };
+ current = c;
+ (current, else_block) = self.pattern_match(
+ current,
+ None,
+ init_place,
+ self.expr_ty_after_adjustments(*expr_id),
+ *pat,
+ BindingAnnotation::Unannotated,
+ )?;
+ match (else_block, else_branch) {
+ (None, _) => (),
+ (Some(else_block), None) => {
+ self.set_terminator(else_block, Terminator::Unreachable);
+ }
+ (Some(else_block), Some(else_branch)) => {
+ if let Some((_, b)) =
+ self.lower_expr_as_place(else_block, *else_branch, true)?
+ {
+ self.set_terminator(b, Terminator::Unreachable);
+ }
+ }
+ }
+ } else {
+ self.body.walk_bindings_in_pat(*pat, |b| {
+ self.push_storage_live(b, current);
+ });
+ }
+ }
+ hir_def::expr::Statement::Expr { expr, has_semi: _ } => {
+ let Some((_, c)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ current = c;
+ }
+ }
+ }
+ match tail {
+ Some(tail) => self.lower_expr_to_place(tail, place, current),
+ None => Ok(Some(current)),
+ }
+ }
+}
+
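+/// Peels reference types off the scrutinee, updating the binding mode and pushing `Deref`
+/// projections, to emulate match ergonomics.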
+fn pattern_matching_dereference(
+ cond_ty: &mut Ty,
+ binding_mode: &mut BindingAnnotation,
+ cond_place: &mut Place,
+) {
+ while let Some((ty, _, mu)) = cond_ty.as_reference() {
+ if mu == Mutability::Mut && *binding_mode != BindingAnnotation::Ref {
+ *binding_mode = BindingAnnotation::RefMut;
+ } else {
+ *binding_mode = BindingAnnotation::Ref;
+ }
+ *cond_ty = ty.clone();
+ cond_place.projection.push(ProjectionElem::Deref);
+ }
+}
+
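+/// Determines the `CastKind` of an `as` cast from the source and target types; only scalar and
+/// enum-to-int casts are supported.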
+fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
+ Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
+ (TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
+ (chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
+ (chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
+ (_, chalk_ir::Scalar::Float(_)) => CastKind::IntToFloat,
+ (_, _) => CastKind::IntToInt,
+ },
+ // Enum to int casts
+ (TyKind::Scalar(_), TyKind::Adt(..)) | (TyKind::Adt(..), TyKind::Scalar(_)) => {
+ CastKind::IntToInt
+ }
+ (a, b) => not_supported!("Unknown cast between {a:?} and {b:?}"),
+ })
+}
+
+pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
+ let _p = profile::span("mir_body_query").detail(|| match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
+ DefWithBodyId::StaticId(it) => db.static_data(it).name.clone().to_string(),
+ DefWithBodyId::ConstId(it) => {
+ db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+ }
+ DefWithBodyId::VariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].name.to_string()
+ }
+ });
+ let body = db.body(def);
+ let infer = db.infer(def);
+ let result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
+ Ok(Arc::new(result))
+}
+
+pub fn mir_body_recover(
+ _db: &dyn HirDatabase,
+ _cycle: &[String],
+ _def: &DefWithBodyId,
+) -> Result<Arc<MirBody>> {
+ Err(MirLowerError::Loop)
+}
+
+pub fn lower_to_mir(
+ db: &dyn HirDatabase,
+ owner: DefWithBodyId,
+ body: &Body,
+ infer: &InferenceResult,
+ // FIXME: root_expr should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we
+ // need to take this input explicitly.
+ root_expr: ExprId,
+) -> Result<MirBody> {
+ if let Some((_, x)) = infer.type_mismatches().next() {
+ return Err(MirLowerError::TypeMismatch(x.clone()));
+ }
+ let mut basic_blocks = Arena::new();
+ let start_block =
+ basic_blocks.alloc(BasicBlock { statements: vec![], terminator: None, is_cleanup: false });
+ let mut locals = Arena::new();
+ // local 0 is the return local
+ locals.alloc(Local { ty: infer[root_expr].clone() });
+ let mut binding_locals: ArenaMap<BindingId, LocalId> = ArenaMap::new();
+ // locals 1 to param_len are for the params
+ let param_locals: Vec<LocalId> = if let DefWithBodyId::FunctionId(fid) = owner {
+ let substs = TyBuilder::placeholder_subst(db, fid);
+ let callable_sig = db.callable_item_signature(fid.into()).substitute(Interner, &substs);
+ body.params
+ .iter()
+ .zip(callable_sig.params().iter())
+ .map(|(&x, ty)| {
+ let local_id = locals.alloc(Local { ty: ty.clone() });
+ if let Pat::Bind { id, subpat: None } = body[x] {
+ if matches!(
+ body.bindings[id].mode,
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable
+ ) {
+ binding_locals.insert(id, local_id);
+ }
+ }
+ local_id
+ })
+ .collect()
+ } else {
+ if !body.params.is_empty() {
+ return Err(MirLowerError::TypeError("Unexpected parameter for non function body"));
+ }
+ vec![]
+ };
+ // and then the rest of the bindings
+ for (id, _) in body.bindings.iter() {
+ if !binding_locals.contains_idx(id) {
+ binding_locals.insert(id, locals.alloc(Local { ty: infer[id].clone() }));
+ }
+ }
+ let mir = MirBody {
+ basic_blocks,
+ locals,
+ start_block,
+ binding_locals,
+ param_locals,
+ owner,
+ arg_count: body.params.len(),
+ };
+ let mut ctx = MirLowerCtx {
+ result: mir,
+ db,
+ infer,
+ body,
+ owner,
+ current_loop_blocks: None,
+ discr_temp: None,
+ };
+ let mut current = start_block;
+ for (&param, local) in body.params.iter().zip(ctx.result.param_locals.clone().into_iter()) {
+ if let Pat::Bind { id, .. } = body[param] {
+ if local == ctx.result.binding_locals[id] {
+ continue;
+ }
+ }
+ let r = ctx.pattern_match(
+ current,
+ None,
+ local.into(),
+ ctx.result.locals[local].ty.clone(),
+ param,
+ BindingAnnotation::Unannotated,
+ )?;
+ if let Some(b) = r.1 {
+ ctx.set_terminator(b, Terminator::Unreachable);
+ }
+ current = r.0;
+ }
+ if let Some(b) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
+ ctx.result.basic_blocks[b].terminator = Some(Terminator::Return);
+ }
+ Ok(ctx.result)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
new file mode 100644
index 000000000..fe8147dcd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
@@ -0,0 +1,237 @@
+//! MIR lowering for places
+
+use super::*;
+use hir_expand::name;
+
+macro_rules! not_supported {
+ ($x: expr) => {
+ return Err(MirLowerError::NotSupported(format!($x)))
+ };
+}
+
+impl MirLowerCtx<'_> {
+ fn lower_expr_to_some_place_without_adjust(
+ &mut self,
+ expr_id: ExprId,
+ prev_block: BasicBlockId,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let ty = self.expr_ty(expr_id);
+ let place = self.temp(ty)?;
+ let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
+ return Ok(None);
+ };
+ Ok(Some((place.into(), current)))
+ }
+
+ fn lower_expr_to_some_place_with_adjust(
+ &mut self,
+ expr_id: ExprId,
+ prev_block: BasicBlockId,
+ adjustments: &[Adjustment],
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let ty =
+ adjustments.last().map(|x| x.target.clone()).unwrap_or_else(|| self.expr_ty(expr_id));
+ let place = self.temp(ty)?;
+ let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
+ return Ok(None);
+ };
+ Ok(Some((place.into(), current)))
+ }
+
+ pub(super) fn lower_expr_as_place_with_adjust(
+ &mut self,
+ current: BasicBlockId,
+ expr_id: ExprId,
+ upgrade_rvalue: bool,
+ adjustments: &[Adjustment],
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let try_rvalue = |this: &mut MirLowerCtx<'_>| {
+ if !upgrade_rvalue {
+ return Err(MirLowerError::MutatingRvalue);
+ }
+ this.lower_expr_to_some_place_with_adjust(expr_id, current, adjustments)
+ };
+ if let Some((last, rest)) = adjustments.split_last() {
+ match last.kind {
+ Adjust::Deref(None) => {
+ let Some(mut x) = self.lower_expr_as_place_with_adjust(
+ current,
+ expr_id,
+ upgrade_rvalue,
+ rest,
+ )? else {
+ return Ok(None);
+ };
+ x.0.projection.push(ProjectionElem::Deref);
+ Ok(Some(x))
+ }
+ Adjust::Deref(Some(od)) => {
+ let Some((r, current)) = self.lower_expr_as_place_with_adjust(
+ current,
+ expr_id,
+ upgrade_rvalue,
+ rest,
+ )? else {
+ return Ok(None);
+ };
+ self.lower_overloaded_deref(
+ current,
+ r,
+ rest.last()
+ .map(|x| x.target.clone())
+ .unwrap_or_else(|| self.expr_ty(expr_id)),
+ last.target.clone(),
+ expr_id.into(),
+ match od.0 {
+ Some(Mutability::Mut) => true,
+ Some(Mutability::Not) => false,
+ None => {
+ not_supported!("implicit overloaded deref with unknown mutability")
+ }
+ },
+ )
+ }
+ Adjust::NeverToAny | Adjust::Borrow(_) | Adjust::Pointer(_) => try_rvalue(self),
+ }
+ } else {
+ self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue)
+ }
+ }
+
+ pub(super) fn lower_expr_as_place(
+ &mut self,
+ current: BasicBlockId,
+ expr_id: ExprId,
+ upgrade_rvalue: bool,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ match self.infer.expr_adjustments.get(&expr_id) {
+ Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a),
+ None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue),
+ }
+ }
+
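+ /// Lowers an expression to a place, ignoring adjustments. If the expression is not a place
+ /// expression, it is materialized into a temporary (when `upgrade_rvalue` allows it).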
+ pub(super) fn lower_expr_as_place_without_adjust(
+ &mut self,
+ current: BasicBlockId,
+ expr_id: ExprId,
+ upgrade_rvalue: bool,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let try_rvalue = |this: &mut MirLowerCtx<'_>| {
+ if !upgrade_rvalue {
+ return Err(MirLowerError::MutatingRvalue);
+ }
+ this.lower_expr_to_some_place_without_adjust(expr_id, current)
+ };
+ match &self.body.exprs[expr_id] {
+ Expr::Path(p) => {
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
+ let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) else {
+ return Err(MirLowerError::unresolved_path(self.db, p));
+ };
+ let pr = match pr {
+ ResolveValueResult::ValueNs(v) => v,
+ ResolveValueResult::Partial(..) => return try_rvalue(self),
+ };
+ match pr {
+ ValueNs::LocalBinding(pat_id) => {
+ Ok(Some((self.result.binding_locals[pat_id].into(), current)))
+ }
+ _ => try_rvalue(self),
+ }
+ }
+ Expr::UnaryOp { expr, op } => match op {
+ hir_def::expr::UnaryOp::Deref => {
+ if !matches!(
+ self.expr_ty(*expr).kind(Interner),
+ TyKind::Ref(..) | TyKind::Raw(..)
+ ) {
+ let Some(_) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ not_supported!("explicit overloaded deref");
+ }
+ let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ r.projection.push(ProjectionElem::Deref);
+ Ok(Some((r, current)))
+ }
+ _ => try_rvalue(self),
+ },
+ Expr::Field { expr, .. } => {
+ let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ self.push_field_projection(&mut r, expr_id)?;
+ Ok(Some((r, current)))
+ }
+ Expr::Index { base, index } => {
+ let base_ty = self.expr_ty_after_adjustments(*base);
+ let index_ty = self.expr_ty_after_adjustments(*index);
+ if index_ty != TyBuilder::usize()
+ || !matches!(base_ty.kind(Interner), TyKind::Array(..) | TyKind::Slice(..))
+ {
+ not_supported!("overloaded index");
+ }
+ let Some((mut p_base, current)) =
+ self.lower_expr_as_place(current, *base, true)? else {
+ return Ok(None);
+ };
+ let l_index = self.temp(self.expr_ty_after_adjustments(*index))?;
+ let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
+ return Ok(None);
+ };
+ p_base.projection.push(ProjectionElem::Index(l_index));
+ Ok(Some((p_base, current)))
+ }
+ _ => try_rvalue(self),
+ }
+ }
+
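+ /// Lowers an overloaded deref: takes a reference to `place`, calls the `Deref::deref` /
+ /// `DerefMut::deref_mut` lang-item method on it, and dereferences the returned reference.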
+ fn lower_overloaded_deref(
+ &mut self,
+ current: BasicBlockId,
+ place: Place,
+ source_ty: Ty,
+ target_ty: Ty,
+ span: MirSpan,
+ mutability: bool,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let (chalk_mut, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
+ (Mutability::Not, LangItem::Deref, name![deref], BorrowKind::Shared)
+ } else {
+ (
+ Mutability::Mut,
+ LangItem::DerefMut,
+ name![deref_mut],
+ BorrowKind::Mut { allow_two_phase_borrow: false },
+ )
+ };
+ let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
+ let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
+ let ref_place: Place = self.temp(ty_ref)?.into();
+ self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
+ let deref_trait = self
+ .resolve_lang_item(trait_lang_item)?
+ .as_trait()
+ .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
+ let deref_fn = self
+ .db
+ .trait_data(deref_trait)
+ .method_by_name(&trait_method_name)
+ .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
+ let deref_fn_op = Operand::const_zst(
+ TyKind::FnDef(
+ self.db.intern_callable_def(CallableDefId::FunctionId(deref_fn)).into(),
+ Substitution::from1(Interner, source_ty),
+ )
+ .intern(Interner),
+ );
+ let mut result: Place = self.temp(target_ty_ref)?.into();
+ let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false)? else {
+ return Ok(None);
+ };
+ result.projection.push(ProjectionElem::Deref);
+ Ok(Some((result, current)))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
new file mode 100644
index 000000000..ffc08b7e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -0,0 +1,348 @@
+//! A pretty-printer for MIR.
+
+use std::fmt::{Display, Write};
+
+use hir_def::{body::Body, expr::BindingId};
+use hir_expand::name::Name;
+use la_arena::ArenaMap;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ mir::{PlaceElem, ProjectionElem, StatementKind, Terminator},
+};
+
+use super::{
+ AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, Operand, Place, Rvalue, UnOp,
+};
+
+impl MirBody {
+ pub fn pretty_print(&self, db: &dyn HirDatabase) -> String {
+ let hir_body = db.body(self.owner);
+ let mut ctx = MirPrettyCtx::new(self, &hir_body, db);
+ ctx.for_body();
+ ctx.result
+ }
+}
+
+struct MirPrettyCtx<'a> {
+ body: &'a MirBody,
+ hir_body: &'a Body,
+ db: &'a dyn HirDatabase,
+ result: String,
+ ident: String,
+ local_to_binding: ArenaMap<LocalId, BindingId>,
+}
+
+macro_rules! w {
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = write!($dst, $($arg)*); }
+ };
+}
+
+macro_rules! wln {
+ ($dst:expr) => {
+ { let _ = writeln!($dst); }
+ };
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = writeln!($dst, $($arg)*); }
+ };
+}
+
+impl Write for MirPrettyCtx<'_> {
+ fn write_str(&mut self, s: &str) -> std::fmt::Result {
+ let mut it = s.split('\n'); // note: `.lines()` is wrong here, as it would swallow a trailing newline
+ self.write(it.next().unwrap_or_default());
+ for line in it {
+ self.write_line();
+ self.write(line);
+ }
+ Ok(())
+ }
+}
+
+enum LocalName {
+ Unknown(LocalId),
+ Binding(Name, LocalId),
+}
+
+impl Display for LocalName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
+ LocalName::Binding(n, l) => write!(f, "{n}_{}", u32::from(l.into_raw())),
+ }
+ }
+}
+
+impl<'a> MirPrettyCtx<'a> {
+ fn for_body(&mut self) {
+ self.with_block(|this| {
+ this.locals();
+ wln!(this);
+ this.blocks();
+ });
+ }
+
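+ /// Writes `f`'s output inside a `{ ... }` block, indenting it by four extra spaces.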
+ fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) {
+ self.ident += "    ";
+ wln!(self, "{{");
+ f(self);
+ for _ in 0..4 {
+ self.result.pop();
+ self.ident.pop();
+ }
+ wln!(self, "}}");
+ }
+
+ fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
+ let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
+ MirPrettyCtx {
+ body,
+ db,
+ result: String::new(),
+ ident: String::new(),
+ local_to_binding,
+ hir_body,
+ }
+ }
+
+ fn write_line(&mut self) {
+ self.result.push('\n');
+ self.result += &self.ident;
+ }
+
+ fn write(&mut self, line: &str) {
+ self.result += line;
+ }
+
+ fn locals(&mut self) {
+ for (id, local) in self.body.locals.iter() {
+ wln!(self, "let {}: {};", self.local_name(id), local.ty.display(self.db));
+ }
+ }
+
+ fn local_name(&self, local: LocalId) -> LocalName {
+ match self.local_to_binding.get(local) {
+ Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
+ None => LocalName::Unknown(local),
+ }
+ }
+
+ fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String {
+ format!("'bb{}", u32::from(basic_block_id.into_raw()))
+ }
+
+ fn blocks(&mut self) {
+ for (id, block) in self.body.basic_blocks.iter() {
+ wln!(self);
+ w!(self, "{}: ", self.basic_block_id(id));
+ self.with_block(|this| {
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(l, r) => {
+ this.place(l);
+ w!(this, " = ");
+ this.rvalue(r);
+ wln!(this, ";");
+ }
+ StatementKind::StorageDead(p) => {
+ wln!(this, "StorageDead({})", this.local_name(*p));
+ }
+ StatementKind::StorageLive(p) => {
+ wln!(this, "StorageLive({})", this.local_name(*p));
+ }
+ StatementKind::Deinit(p) => {
+ w!(this, "Deinit(");
+ this.place(p);
+ wln!(this, ");");
+ }
+ StatementKind::Nop => wln!(this, "Nop;"),
+ }
+ }
+ match &block.terminator {
+ Some(terminator) => match terminator {
+ Terminator::Goto { target } => {
+ wln!(this, "goto 'bb{};", u32::from(target.into_raw()))
+ }
+ Terminator::SwitchInt { discr, targets } => {
+ w!(this, "switch ");
+ this.operand(discr);
+ w!(this, " ");
+ this.with_block(|this| {
+ for (c, b) in targets.iter() {
+ wln!(this, "{c} => {},", this.basic_block_id(b));
+ }
+ wln!(this, "_ => {},", this.basic_block_id(targets.otherwise()));
+ });
+ }
+ Terminator::Call { func, args, destination, target, .. } => {
+ w!(this, "Call ");
+ this.with_block(|this| {
+ w!(this, "func: ");
+ this.operand(func);
+ wln!(this, ",");
+ w!(this, "args: [");
+ this.operand_list(args);
+ wln!(this, "],");
+ w!(this, "destination: ");
+ this.place(destination);
+ wln!(this, ",");
+ w!(this, "target: ");
+ match target {
+ Some(t) => w!(this, "{}", this.basic_block_id(*t)),
+ None => w!(this, "<unreachable>"),
+ }
+ wln!(this, ",");
+ });
+ }
+ _ => wln!(this, "{:?};", terminator),
+ },
+ None => wln!(this, "<no-terminator>;"),
+ }
+ })
+ }
+ }
+
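+ /// Prints a place as its base local wrapped in its projections (derefs, fields, indices).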
+ fn place(&mut self, p: &Place) {
+ fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
+ let Some((last, head)) = projections.split_last() else {
+ // no projection
+ w!(this, "{}", this.local_name(local));
+ return;
+ };
+ match last {
+ ProjectionElem::Deref => {
+ w!(this, "(*");
+ f(this, local, head);
+ w!(this, ")");
+ }
+ ProjectionElem::Field(field) => {
+ let variant_data = field.parent.variant_data(this.db.upcast());
+ let name = &variant_data.fields()[field.local_id].name;
+ match field.parent {
+ hir_def::VariantId::EnumVariantId(e) => {
+ w!(this, "(");
+ f(this, local, head);
+ let variant_name =
+ &this.db.enum_data(e.parent).variants[e.local_id].name;
+ w!(this, " as {}).{}", variant_name, name);
+ }
+ hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
+ f(this, local, head);
+ w!(this, ".{name}");
+ }
+ }
+ }
+ ProjectionElem::TupleField(x) => {
+ f(this, local, head);
+ w!(this, ".{}", x);
+ }
+ ProjectionElem::Index(l) => {
+ f(this, local, head);
+ w!(this, "[{}]", this.local_name(*l));
+ }
+ x => {
+ f(this, local, head);
+ w!(this, ".{:?}", x);
+ }
+ }
+ }
+ f(self, p.local, &p.projection);
+ }
+
+ fn operand(&mut self, r: &Operand) {
+ match r {
+ Operand::Copy(p) | Operand::Move(p) => {
+ // MIR at the time of writing doesn't distinguish between move and copy, so we print them
+ // identically. Feel free to change it.
+ self.place(p);
+ }
+ Operand::Constant(c) => w!(self, "Const({})", c.display(self.db)),
+ }
+ }
+
+ fn rvalue(&mut self, r: &Rvalue) {
+ match r {
+ Rvalue::Use(op) => self.operand(op),
+ Rvalue::Ref(r, p) => {
+ match r {
+ BorrowKind::Shared => w!(self, "&"),
+ BorrowKind::Shallow => w!(self, "&shallow "),
+ BorrowKind::Unique => w!(self, "&uniq "),
+ BorrowKind::Mut { .. } => w!(self, "&mut "),
+ }
+ self.place(p);
+ }
+ Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
+ w!(self, "(");
+ self.operand_list(x);
+ w!(self, ")");
+ }
+ Rvalue::Aggregate(AggregateKind::Array(_), x) => {
+ w!(self, "[");
+ self.operand_list(x);
+ w!(self, "]");
+ }
+ Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
+ w!(self, "Adt(");
+ self.operand_list(x);
+ w!(self, ")");
+ }
+ Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
+ w!(self, "Union(");
+ self.operand_list(x);
+ w!(self, ")");
+ }
+ Rvalue::Len(p) => {
+ w!(self, "Len(");
+ self.place(p);
+ w!(self, ")");
+ }
+ Rvalue::Cast(ck, op, ty) => {
+ w!(self, "Cast({ck:?}, ");
+ self.operand(op);
+ w!(self, ", {})", ty.display(self.db));
+ }
+ Rvalue::CheckedBinaryOp(b, o1, o2) => {
+ self.operand(o1);
+ w!(self, " {b} ");
+ self.operand(o2);
+ }
+ Rvalue::UnaryOp(u, o) => {
+ let u = match u {
+ UnOp::Not => "!",
+ UnOp::Neg => "-",
+ };
+ w!(self, "{u} ");
+ self.operand(o);
+ }
+ Rvalue::Discriminant(p) => {
+ w!(self, "Discriminant(");
+ self.place(p);
+ w!(self, ")");
+ }
+ Rvalue::ShallowInitBox(op, _) => {
+ w!(self, "ShallowInitBox(");
+ self.operand(op);
+ w!(self, ")");
+ }
+ Rvalue::CopyForDeref(p) => {
+ w!(self, "CopyForDeref(");
+ self.place(p);
+ w!(self, ")");
+ }
+ }
+ }
+
+ fn operand_list(&mut self, x: &[Operand]) {
+ let mut it = x.iter();
+ if let Some(first) = it.next() {
+ self.operand(first);
+ for op in it {
+ w!(self, ", ");
+ self.operand(op);
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
index 118e5311e..8c48331b9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -9,7 +9,7 @@ use base_db::{
salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::db::AstDatabase;
+use hir_expand::db::ExpandDatabase;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::TextRange;
use test_utils::extract_annotations;
@@ -17,7 +17,7 @@ use test_utils::extract_annotations;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
- hir_expand::db::AstDatabaseStorage,
+ hir_expand::db::ExpandDatabaseStorage,
hir_def::db::InternDatabaseStorage,
hir_def::db::DefDatabaseStorage,
crate::db::HirDatabaseStorage
@@ -41,8 +41,8 @@ impl fmt::Debug for TestDB {
}
}
-impl Upcast<dyn AstDatabase> for TestDB {
- fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+impl Upcast<dyn ExpandDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index ba5d9c241..83d31f002 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -23,7 +23,7 @@ use hir_def::{
src::HasSource,
AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
};
-use hir_expand::{db::AstDatabase, InFile};
+use hir_expand::{db::ExpandDatabase, InFile};
use once_cell::race::OnceBool;
use stdx::format_to;
use syntax::{
@@ -61,22 +61,27 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
Some(tracing::subscriber::set_default(subscriber))
}
+#[track_caller]
fn check_types(ra_fixture: &str) {
check_impl(ra_fixture, false, true, false)
}
+#[track_caller]
fn check_types_source_code(ra_fixture: &str) {
check_impl(ra_fixture, false, true, true)
}
+#[track_caller]
fn check_no_mismatches(ra_fixture: &str) {
check_impl(ra_fixture, true, false, false)
}
+#[track_caller]
fn check(ra_fixture: &str) {
check_impl(ra_fixture, false, false, false)
}
+#[track_caller]
fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
let _tracing = setup_tracing();
let (db, files) = TestDB::with_many_files(ra_fixture);
@@ -158,7 +163,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
} else {
ty.display_test(&db).to_string()
};
- assert_eq!(actual, expected);
+ assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
}
@@ -174,7 +179,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
} else {
ty.display_test(&db).to_string()
};
- assert_eq!(actual, expected);
+ assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
if let Some(expected) = adjustments.remove(&range) {
let adjustments = inference_result
@@ -191,30 +196,11 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
}
}
- for (pat, mismatch) in inference_result.pat_type_mismatches() {
- let node = match pat_node(&body_source_map, pat, &db) {
- Some(value) => value,
- None => continue,
- };
- let range = node.as_ref().original_file_range(&db);
- let actual = format!(
- "expected {}, got {}",
- mismatch.expected.display_test(&db),
- mismatch.actual.display_test(&db)
- );
- match mismatches.remove(&range) {
- Some(annotation) => assert_eq!(actual, annotation),
- None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
- }
- }
- for (expr, mismatch) in inference_result.expr_type_mismatches() {
- let node = match body_source_map.expr_syntax(expr) {
- Ok(sp) => {
- let root = db.parse_or_expand(sp.file_id).unwrap();
- sp.map(|ptr| ptr.to_node(&root).syntax().clone())
- }
- Err(SyntheticSyntax) => continue,
- };
+ for (expr_or_pat, mismatch) in inference_result.type_mismatches() {
+ let Some(node) = (match expr_or_pat {
+ hir_def::expr::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
+ hir_def::expr::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
+ }) else { continue; };
let range = node.as_ref().original_file_range(&db);
let actual = format!(
"expected {}, got {}",
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
index 3e110abaf..b524922b6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
@@ -258,6 +258,7 @@ fn test() {
#[test]
fn coerce_autoderef_block() {
+ // FIXME: We should know mutability in overloaded deref
check_no_mismatches(
r#"
//- minicore: deref
@@ -267,7 +268,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
- // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
+ // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(None))), Borrow(Ref(Not))
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
index f00fa9729..1876be303 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
@@ -73,3 +73,24 @@ fn test(x: bool) -> &'static str {
"#,
);
}
+
+#[test]
+fn non_unit_block_expr_stmt_no_semi() {
+ check(
+ r#"
+fn test(x: bool) {
+ if x {
+ "notok"
+ //^^^^^^^ expected (), got &str
+ } else {
+ "ok"
+ //^^^^ expected (), got &str
+ }
+ match x { true => true, false => 0 }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected (), got bool
+ //^ expected bool, got i32
+ ()
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
index 41c53701d..378d47833 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -9,6 +9,7 @@ fn infer_slice_method() {
check_types(
r#"
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
loop {}
}
@@ -35,6 +36,7 @@ fn test() {
//- /lib.rs crate:other_crate
mod foo {
impl f32 {
+ #[rustc_allow_incoherent_impl]
pub fn foo(self) -> f32 { 0. }
}
}
@@ -47,6 +49,7 @@ fn infer_array_inherent_impl() {
check_types(
r#"
impl<T, const N: usize> [T; N] {
+ #[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
loop {}
}
@@ -1167,7 +1170,6 @@ fn test() {
123..167 '{ ...o(); }': ()
133..134 's': &S
137..151 'unsafe { f() }': &S
- 137..151 'unsafe { f() }': &S
146..147 'f': fn f() -> &S
146..149 'f()': &S
157..158 's': &S
@@ -1253,6 +1255,7 @@ fn foo<T: Trait>(a: &T) {
#[test]
fn autoderef_visibility_field() {
+ // FIXME: We should know mutability in overloaded deref
check(
r#"
//- minicore: deref
@@ -1274,7 +1277,7 @@ mod a {
mod b {
fn foo() {
let x = super::a::Bar::new().0;
- // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
+ // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(None)))
// ^^^^^^^^^^^^^^^^^^^^^^ type: char
}
}
@@ -1437,6 +1440,7 @@ fn resolve_const_generic_array_methods() {
r#"
#[lang = "array"]
impl<T, const N: usize> [T; N] {
+ #[rustc_allow_incoherent_impl]
pub fn map<F, U>(self, f: F) -> [U; N]
where
F: FnMut(T) -> U,
@@ -1445,6 +1449,7 @@ impl<T, const N: usize> [T; N] {
#[lang = "slice"]
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
pub fn map<F, U>(self, f: F) -> &[U]
where
F: FnMut(T) -> U,
@@ -1468,6 +1473,7 @@ struct Const<const N: usize>;
#[lang = "array"]
impl<T, const N: usize> [T; N] {
+ #[rustc_allow_incoherent_impl]
pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
where
F: FnMut(T) -> U,
@@ -1476,6 +1482,7 @@ impl<T, const N: usize> [T; N] {
#[lang = "slice"]
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
where
F: FnMut(T) -> U,
@@ -1874,14 +1881,14 @@ fn incoherent_impls() {
pub struct Box<T>(T);
use core::error::Error;
-#[rustc_allow_incoherent_impl]
impl dyn Error {
+ #[rustc_allow_incoherent_impl]
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error>> {
loop {}
}
}
-#[rustc_allow_incoherent_impl]
impl dyn Error + Send {
+ #[rustc_allow_incoherent_impl]
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
index 9333e2693..74bcab6ca 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -476,7 +476,7 @@ fn infer_adt_pattern() {
183..184 'x': usize
190..191 'x': usize
201..205 'E::B': E
- 209..212 'foo': bool
+ 209..212 'foo': {unknown}
216..217 '1': usize
227..231 'E::B': E
235..237 '10': usize
@@ -953,9 +953,9 @@ fn main() {
42..51 'true | ()': bool
49..51 '()': ()
57..59 '{}': ()
- 68..80 '(() | true,)': ((),)
+ 68..80 '(() | true,)': (bool,)
69..71 '()': ()
- 69..78 '() | true': ()
+ 69..78 '() | true': bool
74..78 'true': bool
74..78 'true': bool
84..86 '{}': ()
@@ -964,19 +964,15 @@ fn main() {
96..102 '_ | ()': bool
100..102 '()': ()
108..110 '{}': ()
- 119..128 '(() | _,)': ((),)
+ 119..128 '(() | _,)': (bool,)
120..122 '()': ()
- 120..126 '() | _': ()
+ 120..126 '() | _': bool
125..126 '_': bool
132..134 '{}': ()
49..51: expected bool, got ()
- 68..80: expected (bool,), got ((),)
69..71: expected bool, got ()
- 69..78: expected bool, got ()
100..102: expected bool, got ()
- 119..128: expected (bool,), got ((),)
120..122: expected bool, got ()
- 120..126: expected bool, got ()
"#]],
);
}
@@ -1092,3 +1088,19 @@ fn my_fn(foo: ...) {}
"#,
);
}
+
+#[test]
+fn ref_pat_mutability() {
+ check(
+ r#"
+fn foo() {
+ let &() = &();
+ let &mut () = &mut ();
+ let &mut () = &();
+ //^^^^^^^ expected &(), got &mut ()
+ let &() = &mut ();
+ //^^^ expected &mut (), got &()
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index de6ae7fff..689f0da44 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -270,7 +270,7 @@ fn infer_std_crash_5() {
61..320 '{ ... }': ()
75..79 'name': &{unknown}
82..166 'if doe... }': &{unknown}
- 85..98 'doesnt_matter': bool
+ 85..98 'doesnt_matter': {unknown}
99..128 '{ ... }': &{unknown}
113..118 'first': &{unknown}
134..166 '{ ... }': &{unknown}
@@ -279,7 +279,7 @@ fn infer_std_crash_5() {
181..188 'content': &{unknown}
191..313 'if ICE... }': &{unknown}
194..231 'ICE_RE..._VALUE': {unknown}
- 194..247 'ICE_RE...&name)': bool
+ 194..247 'ICE_RE...&name)': {unknown}
241..246 '&name': &&{unknown}
242..246 'name': &{unknown}
248..276 '{ ... }': &{unknown}
@@ -1015,9 +1015,9 @@ fn cfg_tail() {
20..31 '{ "first" }': ()
22..29 '"first"': &str
72..190 '{ ...] 13 }': ()
- 78..88 '{ "fake" }': &str
+ 78..88 '{ "fake" }': ()
80..86 '"fake"': &str
- 93..103 '{ "fake" }': &str
+ 93..103 '{ "fake" }': ()
95..101 '"fake"': &str
108..120 '{ "second" }': ()
110..118 '"second"': &str
@@ -1744,3 +1744,47 @@ fn foo(b: Bar) {
"#,
);
}
+
+#[test]
+fn regression_14305() {
+ check_no_mismatches(
+ r#"
+//- minicore: add
+trait Tr {}
+impl Tr for [u8; C] {}
+const C: usize = 2 + 2;
+"#,
+ );
+}
+
+#[test]
+fn regression_14164() {
+ check_types(
+ r#"
+trait Rec {
+ type K;
+ type Rebind<Tok>: Rec<K = Tok>;
+}
+
+trait Expr<K> {
+ type Part: Rec<K = K>;
+ fn foo(_: <Self::Part as Rec>::Rebind<i32>) {}
+}
+
+struct Head<K>(K);
+impl<K> Rec for Head<K> {
+ type K = K;
+ type Rebind<Tok> = Head<Tok>;
+}
+
+fn test<E>()
+where
+ E: Expr<usize, Part = Head<usize>>,
+{
+ let head;
+ //^^^^ Head<i32>
+ E::foo(head);
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 2e5787b70..13cc3fea5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -352,7 +352,6 @@ unsafe fn baz(u: MyUnion) {
71..89 'MyUnio...o: 0 }': MyUnion
86..87 '0': u32
95..113 'unsafe...(u); }': ()
- 95..113 'unsafe...(u); }': ()
104..107 'baz': fn baz(MyUnion)
104..110 'baz(u)': ()
108..109 'u': MyUnion
@@ -360,7 +359,6 @@ unsafe fn baz(u: MyUnion) {
126..146 'MyUnio... 0.0 }': MyUnion
141..144 '0.0': f32
152..170 'unsafe...(u); }': ()
- 152..170 'unsafe...(u); }': ()
161..164 'baz': fn baz(MyUnion)
161..167 'baz(u)': ()
165..166 'u': MyUnion
@@ -1118,21 +1116,22 @@ fn infer_inherent_method() {
fn infer_inherent_method_str() {
check_infer(
r#"
- #[lang = "str"]
- impl str {
- fn foo(&self) -> i32 {}
- }
+#![rustc_coherence_is_core]
+#[lang = "str"]
+impl str {
+ fn foo(&self) -> i32 {}
+}
- fn test() {
- "foo".foo();
- }
- "#,
+fn test() {
+ "foo".foo();
+}
+"#,
expect![[r#"
- 39..43 'self': &str
- 52..54 '{}': i32
- 68..88 '{ ...o(); }': ()
- 74..79 '"foo"': &str
- 74..85 '"foo".foo()': i32
+ 67..71 'self': &str
+ 80..82 '{}': i32
+ 96..116 '{ ...o(); }': ()
+ 102..107 '"foo"': &str
+ 102..113 '"foo".foo()': i32
"#]],
);
}
@@ -2077,22 +2076,17 @@ async fn main() {
16..193 '{ ...2 }; }': ()
26..27 'x': i32
30..43 'unsafe { 92 }': i32
- 30..43 'unsafe { 92 }': i32
39..41 '92': i32
53..54 'y': impl Future<Output = ()>
- 57..85 'async ...wait }': ()
57..85 'async ...wait }': impl Future<Output = ()>
- 65..77 'async { () }': ()
65..77 'async { () }': impl Future<Output = ()>
65..83 'async ....await': ()
73..75 '()': ()
95..96 'z': ControlFlow<(), ()>
- 130..140 'try { () }': ()
130..140 'try { () }': ControlFlow<(), ()>
136..138 '()': ()
150..151 'w': i32
154..166 'const { 92 }': i32
- 154..166 'const { 92 }': i32
162..164 '92': i32
176..177 't': i32
180..190 ''a: { 92 }': i32
@@ -2122,7 +2116,6 @@ fn main() {
83..84 'f': F
89..91 '{}': ()
103..231 '{ ... }); }': ()
- 109..161 'async ... }': Result<(), ()>
109..161 'async ... }': impl Future<Output = Result<(), ()>>
125..139 'return Err(())': !
132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
@@ -2134,7 +2127,6 @@ fn main() {
167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
167..228 'test(|... })': ()
172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
- 175..227 'async ... }': Result<(), ()>
175..227 'async ... }': impl Future<Output = Result<(), ()>>
191..205 'return Err(())': !
198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
@@ -2649,6 +2641,7 @@ impl<T> [T] {}
#[lang = "slice_alloc"]
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
unimplemented!()
}
@@ -2664,22 +2657,22 @@ struct Astruct;
impl B for Astruct {}
"#,
expect![[r#"
- 569..573 'self': Box<[T], A>
- 602..634 '{ ... }': Vec<T, A>
- 648..761 '{ ...t]); }': ()
- 658..661 'vec': Vec<i32, Global>
- 664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
- 664..691 '<[_]>:...1i32])': Vec<i32, Global>
- 680..690 'box [1i32]': Box<[i32; 1], Global>
- 684..690 '[1i32]': [i32; 1]
- 685..689 '1i32': i32
- 701..702 'v': Vec<Box<dyn B, Global>, Global>
- 722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
- 722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
- 740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
- 744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
- 745..756 'box Astruct': Box<Astruct, Global>
- 749..756 'Astruct': Astruct
+ 604..608 'self': Box<[T], A>
+ 637..669 '{ ... }': Vec<T, A>
+ 683..796 '{ ...t]); }': ()
+ 693..696 'vec': Vec<i32, Global>
+ 699..714 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 699..726 '<[_]>:...1i32])': Vec<i32, Global>
+ 715..725 'box [1i32]': Box<[i32; 1], Global>
+ 719..725 '[1i32]': [i32; 1]
+ 720..724 '1i32': i32
+ 736..737 'v': Vec<Box<dyn B, Global>, Global>
+ 757..774 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
+ 757..793 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
+ 775..792 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
+ 779..792 '[box Astruct]': [Box<dyn B, Global>; 1]
+ 780..791 'box Astruct': Box<Astruct, Global>
+ 784..791 'Astruct': Astruct
"#]],
)
}
@@ -3283,3 +3276,18 @@ fn func() {
"#]],
);
}
+
+#[test]
+fn issue_14275() {
+ // FIXME: evaluate const generic
+ check_types(
+ r#"
+struct Foo<const T: bool>;
+fn main() {
+ const B: bool = false;
+ let foo = Foo::<B>;
+ //^^^ Foo<_>
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index 015085bde..da76d7fd8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -83,6 +83,46 @@ async fn test() {
}
#[test]
+fn infer_async_closure() {
+ check_types(
+ r#"
+//- minicore: future, option
+async fn test() {
+ let f = async move |x: i32| x + 42;
+ f;
+// ^ |i32| -> impl Future<Output = i32>
+ let a = f(4);
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let f = async move || 42;
+ f;
+// ^ || -> impl Future<Output = i32>
+ let a = f();
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let b = ((async move || {})()).await;
+ b;
+// ^ ()
+ let c = async move || {
+ let y = None;
+ y
+ // ^ Option<u64>
+ };
+ let _: Option<u64> = c().await;
+ c;
+// ^ || -> impl Future<Output = Option<u64>>
+}
+"#,
+ );
+}
+
+#[test]
fn auto_sized_async_block() {
check_no_mismatches(
r#"
@@ -493,29 +533,30 @@ fn tuple_struct_with_fn() {
r#"
struct S(fn(u32) -> u64);
fn test() -> u64 {
- let a = S(|i| 2*i);
+ let a = S(|i| 2*i as u64);
let b = a.0(4);
a.0(2)
}"#,
expect![[r#"
- 43..101 '{ ...0(2) }': u64
+ 43..108 '{ ...0(2) }': u64
53..54 'a': S
57..58 'S': S(fn(u32) -> u64) -> S
- 57..67 'S(|i| 2*i)': S
- 59..66 '|i| 2*i': |u32| -> u64
+ 57..74 'S(|i| ...s u64)': S
+ 59..73 '|i| 2*i as u64': |u32| -> u64
60..61 'i': u32
- 63..64 '2': u32
- 63..66 '2*i': u32
+ 63..64 '2': u64
+ 63..73 '2*i as u64': u64
65..66 'i': u32
- 77..78 'b': u64
- 81..82 'a': S
- 81..84 'a.0': fn(u32) -> u64
- 81..87 'a.0(4)': u64
- 85..86 '4': u32
- 93..94 'a': S
- 93..96 'a.0': fn(u32) -> u64
- 93..99 'a.0(2)': u64
- 97..98 '2': u32
+ 65..73 'i as u64': u64
+ 84..85 'b': u64
+ 88..89 'a': S
+ 88..91 'a.0': fn(u32) -> u64
+ 88..94 'a.0(4)': u64
+ 92..93 '4': u32
+ 100..101 'a': S
+ 100..103 'a.0': fn(u32) -> u64
+ 100..106 'a.0(2)': u64
+ 104..105 '2': u32
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 70d2d5efa..34d957e26 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -5,6 +5,7 @@ use std::iter;
use base_db::CrateId;
use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex};
+use either::Either;
use hir_def::{
db::DefDatabase,
generics::{
@@ -19,7 +20,6 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::Interned;
-use itertools::Either;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
@@ -315,7 +315,10 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
- GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
+ GenericDefId::AdtId(_)
+ | GenericDefId::TraitId(_)
+ | GenericDefId::ImplId(_)
+ | GenericDefId::TraitAliasId(_) => return None,
};
match container {
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 54425d69b..db0b84ef0 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -14,7 +14,8 @@ use syntax::{ast, AstNode};
use crate::{
Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
- Macro, Module, ModuleDef, Static, Struct, Trait, TypeAlias, TypeParam, Union, Variant,
+ Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias, TypeParam, Union,
+ Variant,
};
pub trait HasAttrs {
@@ -60,6 +61,7 @@ impl_has_attrs![
(Static, StaticId),
(Const, ConstId),
(Trait, TraitId),
+ (TraitAlias, TraitAliasId),
(TypeAlias, TypeAliasId),
(Macro, MacroId),
(Function, FunctionId),
@@ -134,6 +136,7 @@ fn resolve_doc_path(
AttrDefId::StaticId(it) => it.resolver(db.upcast()),
AttrDefId::ConstId(it) => it.resolver(db.upcast()),
AttrDefId::TraitId(it) => it.resolver(db.upcast()),
+ AttrDefId::TraitAliasId(it) => it.resolver(db.upcast()),
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index e25d86784..0935b5ea5 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -5,8 +5,9 @@
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_expand::db::{
- AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroCallQuery,
- MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery,
+ AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery,
+ InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandErrorQuery,
+ MacroExpandQuery, ParseMacroExpansionQuery,
};
pub use hir_ty::db::*;
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 54d43fa8d..253d62daf 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -3,6 +3,8 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
+pub use hir_ty::diagnostics::{IncoherentImpl, IncorrectCase};
+
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
@@ -10,7 +12,7 @@ use hir_def::path::ModPath;
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
-use crate::{AssocItem, Field, MacroKind, Type};
+use crate::{AssocItem, Field, Local, MacroKind, Type};
macro_rules! diagnostics {
($($diag:ident,)*) => {
@@ -31,15 +33,18 @@ macro_rules! diagnostics {
diagnostics![
BreakOutsideOfLoop,
+ ExpectedFunction,
InactiveCode,
IncorrectCase,
InvalidDeriveTarget,
+ IncoherentImpl,
MacroError,
MalformedDerive,
MismatchedArgCount,
MissingFields,
MissingMatchArms,
MissingUnsafe,
+ NeedMut,
NoSuchField,
PrivateAssocItem,
PrivateField,
@@ -47,10 +52,13 @@ diagnostics![
TypeMismatch,
UnimplementedBuiltinMacro,
UnresolvedExternCrate,
+ UnresolvedField,
UnresolvedImport,
UnresolvedMacroCall,
+ UnresolvedMethodCall,
UnresolvedModule,
UnresolvedProcMacro,
+ UnusedMut,
];
#[derive(Debug)]
@@ -131,6 +139,28 @@ pub struct PrivateAssocItem {
}
#[derive(Debug)]
+pub struct ExpectedFunction {
+ pub call: InFile<AstPtr<ast::Expr>>,
+ pub found: Type,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedField {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+ pub receiver: Type,
+ pub name: Name,
+ pub method_with_same_name_exists: bool,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedMethodCall {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+ pub receiver: Type,
+ pub name: Name,
+ pub field_with_same_name: Option<Type>,
+}
+
+#[derive(Debug)]
pub struct PrivateField {
pub expr: InFile<AstPtr<ast::Expr>>,
pub field: Field,
@@ -140,6 +170,7 @@ pub struct PrivateField {
pub struct BreakOutsideOfLoop {
pub expr: InFile<AstPtr<ast::Expr>>,
pub is_break: bool,
+ pub bad_value_break: bool,
}
#[derive(Debug)]
@@ -171,17 +202,24 @@ pub struct MismatchedArgCount {
#[derive(Debug)]
pub struct MissingMatchArms {
- pub file: HirFileId,
- pub match_expr: AstPtr<ast::Expr>,
+ pub scrutinee_expr: InFile<AstPtr<ast::Expr>>,
pub uncovered_patterns: String,
}
#[derive(Debug)]
pub struct TypeMismatch {
- // FIXME: add mismatches in patterns as well
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr_or_pat: Either<InFile<AstPtr<ast::Expr>>, InFile<AstPtr<ast::Pat>>>,
pub expected: Type,
pub actual: Type,
}
-pub use hir_ty::diagnostics::IncorrectCase;
+#[derive(Debug)]
+pub struct NeedMut {
+ pub local: Local,
+ pub span: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct UnusedMut {
+ pub local: Local,
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index 0d1942012..5aae92efd 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -17,15 +17,23 @@ use hir_ty::{
};
use crate::{
- Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility,
- LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, TypeAlias,
- TypeOrConstParam, TypeParam, Union, Variant,
+ Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, Field, Function, GenericParam,
+ HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Trait, TraitAlias,
+ TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
};
impl HirDisplay for Function {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
- let data = f.db.function_data(self.id);
- write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let db = f.db;
+ let data = db.function_data(self.id);
+ let container = self.as_assoc_item(db).map(|it| it.container(db));
+ let mut module = self.module(db);
+ if let Some(AssocItemContainer::Impl(_)) = container {
+ // Block-local impls are "hoisted" to the nearest (non-block) module.
+ module = module.nearest_non_block_module(db);
+ }
+ let module_id = module.id;
+ write_visibility(module_id, self.visibility(db), f)?;
if data.has_default_kw() {
f.write_str("default ")?;
}
@@ -35,7 +43,7 @@ impl HirDisplay for Function {
if data.has_async_kw() {
f.write_str("async ")?;
}
- if self.is_unsafe_to_call(f.db) {
+ if self.is_unsafe_to_call(db) {
f.write_str("unsafe ")?;
}
if let Some(abi) = &data.abi {
@@ -50,7 +58,7 @@ impl HirDisplay for Function {
let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
- TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner,TypeRef::Path(p) if p.is_self_type()) =>
+ TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
{
f.write_char('&')?;
if let Some(lifetime) = lifetime {
@@ -442,8 +450,15 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(),
impl HirDisplay for Const {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
- write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
- let data = f.db.const_data(self.id);
+ let db = f.db;
+ let container = self.as_assoc_item(db).map(|it| it.container(db));
+ let mut module = self.module(db);
+ if let Some(AssocItemContainer::Impl(_)) = container {
+ // Block-local impls are "hoisted" to the nearest (non-block) module.
+ module = module.nearest_non_block_module(db);
+ }
+ write_visibility(module.id, self.visibility(db), f)?;
+ let data = db.const_data(self.id);
f.write_str("const ")?;
match &data.name {
Some(name) => write!(f, "{name}: ")?,
@@ -486,6 +501,22 @@ impl HirDisplay for Trait {
}
}
+impl HirDisplay for TraitAlias {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.trait_alias_data(self.id);
+ write!(f, "trait {}", data.name)?;
+ let def_id = GenericDefId::TraitAliasId(self.id);
+ write_generic_params(def_id, f)?;
+ f.write_str(" = ")?;
+ // FIXME: Currently we lower every bound in a trait alias as a trait bound on `Self`, i.e.
+ // `trait Foo = Bar` is stored and displayed as `trait Foo = where Self: Bar`, which might
+ // be less readable.
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
impl HirDisplay for TypeAlias {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
index f825a72c0..aaaa7abf3 100644
--- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -4,7 +4,7 @@
//! are splitting the hir.
use hir_def::{
- expr::{LabelId, PatId},
+ expr::{BindingId, LabelId},
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
ModuleDefId, VariantId,
};
@@ -37,6 +37,7 @@ from_id![
(hir_def::EnumId, crate::Enum),
(hir_def::TypeAliasId, crate::TypeAlias),
(hir_def::TraitId, crate::Trait),
+ (hir_def::TraitAliasId, crate::TraitAlias),
(hir_def::StaticId, crate::Static),
(hir_def::ConstId, crate::Const),
(hir_def::FunctionId, crate::Function),
@@ -110,6 +111,7 @@ impl From<ModuleDefId> for ModuleDef {
ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()),
ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()),
ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()),
+ ModuleDefId::TraitAliasId(it) => ModuleDef::TraitAlias(it.into()),
ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it.into()),
ModuleDefId::MacroId(it) => ModuleDef::Macro(it.into()),
@@ -127,6 +129,7 @@ impl From<ModuleDef> for ModuleDefId {
ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()),
ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()),
ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()),
+ ModuleDef::TraitAlias(it) => ModuleDefId::TraitAliasId(it.into()),
ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()),
ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()),
ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()),
@@ -172,6 +175,7 @@ impl From<GenericDef> for GenericDefId {
GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
+ GenericDef::TraitAlias(it) => GenericDefId::TraitAliasId(it.id),
GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()),
@@ -186,6 +190,7 @@ impl From<GenericDefId> for GenericDef {
GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
+ GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()),
GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()),
@@ -246,9 +251,9 @@ impl From<AssocItem> for GenericDefId {
}
}
-impl From<(DefWithBodyId, PatId)> for Local {
- fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self {
- Local { parent, pat_id }
+impl From<(DefWithBodyId, BindingId)> for Local {
+ fn from((parent, binding_id): (DefWithBodyId, BindingId)) -> Self {
+ Local { parent, binding_id }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
index f8b01db3e..9f6b5c0a9 100644
--- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -10,8 +10,9 @@ use hir_expand::InFile;
use syntax::ast;
use crate::{
- db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam, Macro,
- Module, Static, Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant,
+ db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam,
+ LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam,
+ Union, Variant,
};
pub trait HasSource {
@@ -122,6 +123,12 @@ impl HasSource for Trait {
Some(self.id.lookup(db.upcast()).source(db.upcast()))
}
}
+impl HasSource for TraitAlias {
+ type Ast = ast::TraitAlias;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
impl HasSource for TypeAlias {
type Ast = ast::TypeAlias;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
@@ -158,7 +165,7 @@ impl HasSource for Impl {
}
impl HasSource for TypeOrConstParam {
- type Ast = Either<ast::TypeOrConstParam, ast::Trait>;
+ type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let child_source = self.id.parent.child_source(db.upcast());
Some(child_source.map(|it| it[self.id.local_id].clone()))
@@ -172,3 +179,11 @@ impl HasSource for LifetimeParam {
Some(child_source.map(|it| it[self.id.local_id].clone()))
}
}
+
+impl HasSource for LocalSource {
+ type Ast = Either<ast::IdentPat, ast::SelfParam>;
+
+ fn source(self, _: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.source)
+ }
+}
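
For orientation, a minimal sketch of how the `TraitAlias` wrapper and its new `HasSource` impl above might be consumed from inside the `hir` crate; the helper name `trait_alias_decl` is an assumption for illustration, not part of this diff.

```rust
use hir_expand::InFile;
use syntax::ast;

use crate::{db::HirDatabase, HasSource, TraitAlias};

// Returns the alias' name together with its `ast::TraitAlias` source node.
fn trait_alias_decl(
    db: &dyn HirDatabase,
    alias: TraitAlias,
) -> Option<(String, InFile<ast::TraitAlias>)> {
    // `name` comes from `trait_alias_data`; `source` uses the new `HasSource` impl above.
    let name = alias.name(db).to_smol_str().to_string();
    let src = alias.source(db)?;
    Some((name, src))
}
```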
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 2cb4ed2c3..35424feec 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -41,34 +41,34 @@ use either::Either;
use hir_def::{
adt::VariantData,
body::{BodyDiagnostic, SyntheticSyntax},
- expr::{BindingAnnotation, ExprOrPatId, LabelId, Pat, PatId},
- generics::{TypeOrConstParamData, TypeParamProvenance},
+ expr::{BindingAnnotation, BindingId, ExprOrPatId, LabelId, Pat},
+ generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
item_tree::ItemTreeNode,
lang_item::{LangItem, LangItemTarget},
layout::{Layout, LayoutError, ReprOptions},
- nameres::{self, diagnostics::DefDiagnostic},
+ nameres::{self, diagnostics::DefDiagnostic, ModuleOrigin},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
src::HasSource as _,
- type_ref::ConstScalar,
AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
- TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+ TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
use hir_expand::{name::name, MacroCallKind};
use hir_ty::{
all_super_traits, autoderef,
- consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
+ consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
+ display::HexifiedConst,
layout::layout_of_ty,
method_resolution::{self, TyFingerprint},
+ mir::{self, interpret_mir},
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId,
- ConcreteConst, ConstValue, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar,
- Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
- WhereClause,
+ GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
+ TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, WhereClause,
};
use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
@@ -77,7 +77,7 @@ use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasDocComments, HasName},
- AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
+ AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
};
use crate::db::{DefDatabase, HirDatabase};
@@ -85,12 +85,12 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{HasAttrs, Namespace},
diagnostics::{
- AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
- MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
- MissingUnsafe, NoSuchField, PrivateAssocItem, PrivateField,
- ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro,
- UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule,
- UnresolvedProcMacro,
+ AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncoherentImpl,
+ IncorrectCase, InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount,
+ MissingFields, MissingMatchArms, MissingUnsafe, NeedMut, NoSuchField, PrivateAssocItem,
+ PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro,
+ UnresolvedExternCrate, UnresolvedField, UnresolvedImport, UnresolvedMacroCall,
+ UnresolvedMethodCall, UnresolvedModule, UnresolvedProcMacro, UnusedMut,
},
has_source::HasSource,
semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
@@ -130,6 +130,7 @@ pub use {
},
hir_ty::{
display::{HirDisplay, HirDisplayError, HirWrite},
+ mir::MirEvalError,
PointerCast, Safety,
},
};
@@ -272,6 +273,7 @@ pub enum ModuleDef {
Const(Const),
Static(Static),
Trait(Trait),
+ TraitAlias(TraitAlias),
TypeAlias(TypeAlias),
BuiltinType(BuiltinType),
Macro(Macro),
@@ -284,6 +286,7 @@ impl_from!(
Const,
Static,
Trait,
+ TraitAlias,
TypeAlias,
BuiltinType,
Macro
@@ -310,6 +313,7 @@ impl ModuleDef {
ModuleDef::Const(it) => Some(it.module(db)),
ModuleDef::Static(it) => Some(it.module(db)),
ModuleDef::Trait(it) => Some(it.module(db)),
+ ModuleDef::TraitAlias(it) => Some(it.module(db)),
ModuleDef::TypeAlias(it) => Some(it.module(db)),
ModuleDef::Macro(it) => Some(it.module(db)),
ModuleDef::BuiltinType(_) => None,
@@ -338,6 +342,7 @@ impl ModuleDef {
ModuleDef::Const(it) => it.name(db)?,
ModuleDef::Adt(it) => it.name(db),
ModuleDef::Trait(it) => it.name(db),
+ ModuleDef::TraitAlias(it) => it.name(db),
ModuleDef::Function(it) => it.name(db),
ModuleDef::Variant(it) => it.name(db),
ModuleDef::TypeAlias(it) => it.name(db),
@@ -356,6 +361,7 @@ impl ModuleDef {
Adt::Union(it) => it.id.into(),
},
ModuleDef::Trait(it) => it.id.into(),
+ ModuleDef::TraitAlias(it) => it.id.into(),
ModuleDef::Function(it) => it.id.into(),
ModuleDef::TypeAlias(it) => it.id.into(),
ModuleDef::Module(it) => it.id.into(),
@@ -398,6 +404,7 @@ impl ModuleDef {
ModuleDef::Module(_)
| ModuleDef::Adt(_)
| ModuleDef::Trait(_)
+ | ModuleDef::TraitAlias(_)
| ModuleDef::TypeAlias(_)
| ModuleDef::Macro(_)
| ModuleDef::BuiltinType(_) => None,
@@ -413,6 +420,7 @@ impl ModuleDef {
ModuleDef::Const(it) => it.attrs(db),
ModuleDef::Static(it) => it.attrs(db),
ModuleDef::Trait(it) => it.attrs(db),
+ ModuleDef::TraitAlias(it) => it.attrs(db),
ModuleDef::TypeAlias(it) => it.attrs(db),
ModuleDef::Macro(it) => it.attrs(db),
ModuleDef::BuiltinType(_) => return None,
@@ -429,6 +437,7 @@ impl HasVisibility for ModuleDef {
ModuleDef::Const(it) => it.visibility(db),
ModuleDef::Static(it) => it.visibility(db),
ModuleDef::Trait(it) => it.visibility(db),
+ ModuleDef::TraitAlias(it) => it.visibility(db),
ModuleDef::TypeAlias(it) => it.visibility(db),
ModuleDef::Variant(it) => it.visibility(db),
ModuleDef::Macro(it) => it.visibility(db),
@@ -488,6 +497,20 @@ impl Module {
Some(Module { id: def_map.module_id(parent_id) })
}
+ /// Finds nearest non-block ancestor `Module` (`self` included).
+ pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module {
+ let mut id = self.id;
+ loop {
+ let def_map = id.def_map(db.upcast());
+ let origin = def_map[id.local_id].origin;
+ if matches!(origin, ModuleOrigin::BlockExpr { .. }) {
+ id = id.containing_module(db.upcast()).expect("block without parent module")
+ } else {
+ return Module { id };
+ }
+ }
+ }
+
pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
let mut res = vec![self];
let mut curr = self;
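
A small sketch of the intended use of `nearest_non_block_module`, as exercised by the assist changes later in this patch; `same_crate_outside_blocks` is a hypothetical helper, not part of the diff.

```rust
use crate::{db::HirDatabase, Module};

fn same_crate_outside_blocks(db: &dyn HirDatabase, a: Module, b: Module) -> bool {
    // Block-expression modules are skipped so that crate/visibility checks compare the
    // enclosing item-level modules instead of synthetic block scopes.
    a.nearest_non_block_module(db).krate() == b.nearest_non_block_module(db).krate()
}
```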
@@ -581,11 +604,23 @@ impl Module {
}
}
+ let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
+
for impl_def in self.impl_defs(db) {
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
+ if inherent_impls.invalid_impls().contains(&impl_def.id) {
+ let loc = impl_def.id.lookup(db.upcast());
+ let tree = loc.id.item_tree(db.upcast());
+ let node = &tree[loc.id.value];
+ let file_id = loc.id.file_id();
+ let ast_id_map = db.ast_id_map(file_id);
+
+ acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
+ }
+
for item in impl_def.items(db) {
let def: DefWithBody = match item {
AssocItem::Function(it) => it.into(),
@@ -1092,8 +1127,8 @@ impl Variant {
self.source(db)?.value.expr()
}
- pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
- db.const_eval_variant(self.into())
+ pub fn eval(self, db: &dyn HirDatabase) -> Result<i128, ConstEvalError> {
+ db.const_eval_discriminant(self.into())
}
}
@@ -1170,6 +1205,25 @@ impl Adt {
}
}
+    /// Returns the lifetime of the data type.
+ pub fn lifetime(&self, db: &dyn HirDatabase) -> Option<LifetimeParamData> {
+ let resolver = match self {
+ Adt::Struct(s) => s.id.resolver(db.upcast()),
+ Adt::Union(u) => u.id.resolver(db.upcast()),
+ Adt::Enum(e) => e.id.resolver(db.upcast()),
+ };
+ resolver
+ .generic_params()
+ .and_then(|gp| {
+ (&gp.lifetimes)
+ .iter()
+ // there should only be a single lifetime
+                // but `Arena` requires using an iterator
+ .nth(0)
+ })
+ .map(|arena| arena.1.clone())
+ }
+
pub fn as_enum(&self) -> Option<Enum> {
if let Self::Enum(v) = self {
Some(*v)
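
A minimal sketch of reading the (single) declared lifetime through the new `Adt::lifetime` accessor; `adt_lifetime_name` is an illustrative helper name, not part of the diff.

```rust
use crate::{db::HirDatabase, Adt};

fn adt_lifetime_name(db: &dyn HirDatabase, adt: &Adt) -> Option<String> {
    // For `struct Foo<'a, T>(&'a T);` this would yield `'a`; `None` when the ADT
    // declares no lifetime parameter.
    adt.lifetime(db).map(|lt| lt.name.to_smol_str().to_string())
}
```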
@@ -1285,6 +1339,15 @@ impl DefWithBody {
body.pretty_print(db.upcast(), self.id())
}
+ /// A textual representation of the MIR of this def's body for debugging purposes.
+ pub fn debug_mir(self, db: &dyn HirDatabase) -> String {
+ let body = db.mir_body(self.id());
+ match body {
+ Ok(body) => body.pretty_print(db),
+ Err(e) => format!("error:\n{e:?}"),
+ }
+ }
+
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
let krate = self.module(db).id.krate();
@@ -1334,42 +1397,35 @@ impl DefWithBody {
let infer = db.infer(self.into());
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
+ let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
for d in &infer.diagnostics {
match d {
- hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
- let field = source_map.field_syntax(*expr);
+ &hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
+ let field = source_map.field_syntax(expr);
acc.push(NoSuchField { field }.into())
}
- &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => {
- let expr = source_map
- .expr_syntax(expr)
- .expect("break outside of loop in synthetic syntax");
- acc.push(BreakOutsideOfLoop { expr, is_break }.into())
+ &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop {
+ expr,
+ is_break,
+ bad_value_break,
+ } => {
+ let expr = expr_syntax(expr);
+ acc.push(BreakOutsideOfLoop { expr, is_break, bad_value_break }.into())
}
- hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
- match source_map.expr_syntax(*call_expr) {
- Ok(source_ptr) => acc.push(
- MismatchedArgCount {
- call_expr: source_ptr,
- expected: *expected,
- found: *found,
- }
+ &hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+ acc.push(
+ MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }
.into(),
- ),
- Err(SyntheticSyntax) => (),
- }
+ )
}
&hir_ty::InferenceDiagnostic::PrivateField { expr, field } => {
- let expr = source_map.expr_syntax(expr).expect("unexpected synthetic");
+ let expr = expr_syntax(expr);
let field = field.into();
acc.push(PrivateField { expr, field }.into())
}
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
- ExprOrPatId::ExprId(expr) => source_map
- .expr_syntax(expr)
- .expect("unexpected synthetic")
- .map(Either::Left),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
ExprOrPatId::PatId(pat) => source_map
.pat_syntax(pat)
.expect("unexpected synthetic")
@@ -1378,16 +1434,76 @@ impl DefWithBody {
let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
}
+ hir_ty::InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
+ let call_expr = expr_syntax(*call_expr);
+
+ acc.push(
+ ExpectedFunction {
+ call: call_expr,
+ found: Type::new(db, DefWithBodyId::from(self), found.clone()),
+ }
+ .into(),
+ )
+ }
+ hir_ty::InferenceDiagnostic::UnresolvedField {
+ expr,
+ receiver,
+ name,
+ method_with_same_name_exists,
+ } => {
+ let expr = expr_syntax(*expr);
+
+ acc.push(
+ UnresolvedField {
+ expr,
+ name: name.clone(),
+ receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
+ method_with_same_name_exists: *method_with_same_name_exists,
+ }
+ .into(),
+ )
+ }
+ hir_ty::InferenceDiagnostic::UnresolvedMethodCall {
+ expr,
+ receiver,
+ name,
+ field_with_same_name,
+ } => {
+ let expr = expr_syntax(*expr);
+
+ acc.push(
+ UnresolvedMethodCall {
+ expr,
+ name: name.clone(),
+ receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
+ field_with_same_name: field_with_same_name
+ .clone()
+ .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
+ }
+ .into(),
+ )
+ }
}
}
- for (expr, mismatch) in infer.expr_type_mismatches() {
- let expr = match source_map.expr_syntax(expr) {
- Ok(expr) => expr,
- Err(SyntheticSyntax) => continue,
+ for (pat_or_expr, mismatch) in infer.type_mismatches() {
+ let expr_or_pat = match pat_or_expr {
+ ExprOrPatId::ExprId(expr) => source_map.expr_syntax(expr).map(Either::Left),
+                    // FIXME: Re-enable these once we have fewer false positives
+ ExprOrPatId::PatId(_pat) => continue,
+ #[allow(unreachable_patterns)]
+ ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
+ };
+ let expr_or_pat = match expr_or_pat {
+ Ok(Either::Left(expr)) => Either::Left(expr),
+ Ok(Either::Right(InFile { file_id, value: Either::Left(pat) })) => {
+ Either::Right(InFile { file_id, value: pat })
+ }
+ Ok(Either::Right(_)) | Err(SyntheticSyntax) => continue,
};
+
acc.push(
TypeMismatch {
- expr,
+ expr_or_pat,
expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
}
@@ -1405,6 +1521,41 @@ impl DefWithBody {
}
}
+ let hir_body = db.body(self.into());
+
+ if let Ok(borrowck_result) = db.borrowck(self.into()) {
+ let mir_body = &borrowck_result.mir_body;
+ let mol = &borrowck_result.mutability_of_locals;
+ for (binding_id, _) in hir_body.bindings.iter() {
+ let need_mut = &mol[mir_body.binding_locals[binding_id]];
+ let local = Local { parent: self.into(), binding_id };
+ match (need_mut, local.is_mut(db)) {
+ (mir::MutabilityReason::Mut { .. }, true)
+ | (mir::MutabilityReason::Not, false) => (),
+ (mir::MutabilityReason::Mut { spans }, false) => {
+ for span in spans {
+ let span: InFile<SyntaxNodePtr> = match span {
+ mir::MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
+ Ok(s) => s.map(|x| x.into()),
+ Err(_) => continue,
+ },
+ mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
+ Ok(s) => s.map(|x| match x {
+ Either::Left(e) => e.into(),
+ Either::Right(e) => e.into(),
+ }),
+ Err(_) => continue,
+ },
+ mir::MirSpan::Unknown => continue,
+ };
+ acc.push(NeedMut { local, span }.into());
+ }
+ }
+ (mir::MutabilityReason::Not, true) => acc.push(UnusedMut { local }.into()),
+ }
+ }
+ }
+
for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
match diagnostic {
BodyValidationDiagnostic::RecordMissingFields {
@@ -1489,11 +1640,13 @@ impl DefWithBody {
if let ast::Expr::MatchExpr(match_expr) =
&source_ptr.value.to_node(&root)
{
- if let Some(match_expr) = match_expr.expr() {
+ if let Some(scrut_expr) = match_expr.expr() {
acc.push(
MissingMatchArms {
- file: source_ptr.file_id,
- match_expr: AstPtr::new(&match_expr),
+ scrutinee_expr: InFile::new(
+ source_ptr.file_id,
+ AstPtr::new(&scrut_expr),
+ ),
uncovered_patterns,
}
.into(),
@@ -1582,6 +1735,10 @@ impl Function {
.collect()
}
+ pub fn num_params(self, db: &dyn HirDatabase) -> usize {
+ db.function_data(self.id).params.len()
+ }
+
pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
if self.self_param(db).is_none() {
return None;
@@ -1639,6 +1796,14 @@ impl Function {
let def_map = db.crate_def_map(loc.krate(db).into());
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
}
+
+ pub fn eval(self, db: &dyn HirDatabase) -> Result<(), MirEvalError> {
+ let body = db
+ .mir_body(self.id.into())
+ .map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))?;
+ interpret_mir(db, &body, false)?;
+ Ok(())
+ }
}
// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
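
The new MIR hooks above (`DefWithBody::debug_mir`, `Function::eval`) could be combined roughly as in the sketch below, written as if inside the `hir` crate; `interpret_with_mir_dump` is a hypothetical helper.

```rust
use crate::{db::HirDatabase, DefWithBody, Function, MirEvalError};

fn interpret_with_mir_dump(db: &dyn HirDatabase, func: Function) -> Result<(), MirEvalError> {
    // Print the lowered MIR first; `debug_mir` renders lowering errors into the string
    // instead of failing.
    let def: DefWithBody = func.into();
    eprintln!("{}", def.debug_mir(db));
    // Then run the MIR interpreter on the body, reporting only success or failure.
    func.eval(db)
}
```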
@@ -1679,8 +1844,8 @@ impl Param {
let parent = DefWithBodyId::FunctionId(self.func.into());
let body = db.body(parent);
let pat_id = body.params[self.idx];
- if let Pat::Bind { .. } = &body[pat_id] {
- Some(Local { parent, pat_id: body.params[self.idx] })
+ if let Pat::Bind { id, .. } = &body[pat_id] {
+ Some(Local { parent, binding_id: *id })
} else {
None
}
@@ -1781,8 +1946,18 @@ impl Const {
Type::new_with_resolver_inner(db, &resolver, ty)
}
- pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
- db.const_eval(self.id)
+ pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
+ let c = db.const_eval(self.id)?;
+ let r = format!("{}", HexifiedConst(c).display(db));
+        // We want to see things like `<utf8-error>` and `<layout-error>` as they are probably bugs in our
+ // implementation, but there is no need to show things like `<enum-not-supported>` or `<ref-not-supported>` to
+ // the user.
+ if r.contains("not-supported>") {
+ return Err(ConstEvalError::MirEvalError(MirEvalError::NotSupported(
+ "rendering complex constants".to_string(),
+ )));
+ }
+ return Ok(r);
}
}
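
A sketch of the const-eval surface after this change: enum variants now evaluate to their discriminant as an `i128`, and constants render to a display string (or fail for unsupported shapes). The `show_const_values` helper is an assumption for illustration.

```rust
use hir_ty::consteval::ConstEvalError;

use crate::{db::HirDatabase, Const, Variant};

fn show_const_values(
    db: &dyn HirDatabase,
    konst: Const,
    variant: Variant,
) -> Result<(), ConstEvalError> {
    // Constants render to a human-readable string via `render_eval` ...
    let rendered: String = konst.render_eval(db)?;
    // ... while enum variants evaluate directly to their discriminant value.
    let discriminant: i128 = variant.eval(db)?;
    eprintln!("const = {rendered}, discriminant = {discriminant}");
    Ok(())
}
```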
@@ -1894,6 +2069,27 @@ impl HasVisibility for Trait {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TraitAlias {
+ pub(crate) id: TraitAliasId,
+}
+
+impl TraitAlias {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.trait_alias_data(self.id).name.clone()
+ }
+}
+
+impl HasVisibility for TraitAlias {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.trait_alias_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAlias {
pub(crate) id: TypeAliasId,
}
@@ -2265,6 +2461,7 @@ pub enum GenericDef {
Function(Function),
Adt(Adt),
Trait(Trait),
+ TraitAlias(TraitAlias),
TypeAlias(TypeAlias),
Impl(Impl),
// enum variants cannot have generics themselves, but their parent enums
@@ -2277,6 +2474,7 @@ impl_from!(
Function,
Adt(Struct, Enum, Union),
Trait,
+ TraitAlias,
TypeAlias,
Impl,
Variant,
@@ -2317,20 +2515,53 @@ impl GenericDef {
}
/// A single local definition.
-///
-/// If the definition of this is part of a "MultiLocal", that is a local that has multiple declarations due to or-patterns
-/// then this only references a single one of those.
-/// To retrieve the other locals you should use [`Local::associated_locals`]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Local {
pub(crate) parent: DefWithBodyId,
- pub(crate) pat_id: PatId,
+ pub(crate) binding_id: BindingId,
+}
+
+pub struct LocalSource {
+ pub local: Local,
+ pub source: InFile<Either<ast::IdentPat, ast::SelfParam>>,
+}
+
+impl LocalSource {
+ pub fn as_ident_pat(&self) -> Option<&ast::IdentPat> {
+ match &self.source.value {
+ Either::Left(x) => Some(x),
+ Either::Right(_) => None,
+ }
+ }
+
+ pub fn into_ident_pat(self) -> Option<ast::IdentPat> {
+ match self.source.value {
+ Either::Left(x) => Some(x),
+ Either::Right(_) => None,
+ }
+ }
+
+ pub fn original_file(&self, db: &dyn HirDatabase) -> FileId {
+ self.source.file_id.original_file(db.upcast())
+ }
+
+ pub fn name(&self) -> Option<ast::Name> {
+ self.source.value.name()
+ }
+
+ pub fn syntax(&self) -> &SyntaxNode {
+ self.source.value.syntax()
+ }
+
+ pub fn syntax_ptr(self) -> InFile<SyntaxNodePtr> {
+ self.source.map(|x| SyntaxNodePtr::new(x.syntax()))
+ }
}
impl Local {
pub fn is_param(self, db: &dyn HirDatabase) -> bool {
- let src = self.source(db);
- match src.value {
+ let src = self.primary_source(db);
+ match src.source.value {
Either::Left(pat) => pat
.syntax()
.ancestors()
@@ -2350,13 +2581,7 @@ impl Local {
pub fn name(self, db: &dyn HirDatabase) -> Name {
let body = db.body(self.parent);
- match &body[self.pat_id] {
- Pat::Bind { name, .. } => name.clone(),
- _ => {
- stdx::never!("hir::Local is missing a name!");
- Name::missing()
- }
- }
+ body[self.binding_id].name.clone()
}
pub fn is_self(self, db: &dyn HirDatabase) -> bool {
@@ -2365,15 +2590,12 @@ impl Local {
pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
let body = db.body(self.parent);
- matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
+ body[self.binding_id].mode == BindingAnnotation::Mutable
}
pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
let body = db.body(self.parent);
- matches!(
- &body[self.pat_id],
- Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
- )
+ matches!(body[self.binding_id].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
}
pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
@@ -2387,34 +2609,33 @@ impl Local {
pub fn ty(self, db: &dyn HirDatabase) -> Type {
let def = self.parent;
let infer = db.infer(def);
- let ty = infer[self.pat_id].clone();
+ let ty = infer[self.binding_id].clone();
Type::new(db, def, ty)
}
- pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
- let body = db.body(self.parent);
- body.ident_patterns_for(&self.pat_id)
+ /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;`
+ pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
+ let (body, source_map) = db.body_with_source_map(self.parent);
+ body[self.binding_id]
+ .definitions
.iter()
- .map(|&pat_id| Local { parent: self.parent, pat_id })
+ .map(|&definition| {
+ let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
+ let root = src.file_syntax(db.upcast());
+ src.map(|ast| match ast {
+ // Suspicious unwrap
+ Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
+ Either::Right(it) => Either::Right(it.to_node(&root)),
+ })
+ })
+ .map(|source| LocalSource { local: self, source })
.collect()
}
- /// If this local is part of a multi-local, retrieve the representative local.
- /// That is the local that references are being resolved to.
- pub fn representative(self, db: &dyn HirDatabase) -> Local {
- let body = db.body(self.parent);
- Local { pat_id: body.pattern_representative(self.pat_id), ..self }
- }
-
- pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
- let (_body, source_map) = db.body_with_source_map(self.parent);
- let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
- let root = src.file_syntax(db.upcast());
- src.map(|ast| match ast {
- // Suspicious unwrap
- Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
- Either::Right(it) => Either::Right(it.to_node(&root)),
- })
+ /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
+ pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
+ let all_sources = self.sources(db);
+ all_sources.into_iter().next().unwrap()
}
}
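
A minimal sketch of the reworked `Local` API above, where a local is keyed by `BindingId` and every declaration site is reachable through `sources`; `binding_decl_names` is a hypothetical helper, not part of the diff.

```rust
use crate::{db::HirDatabase, Local};

fn binding_decl_names(db: &dyn HirDatabase, local: Local) -> Vec<String> {
    // All or-pattern occurrences of the same binding come back from one `sources` call.
    local
        .sources(db)
        .into_iter()
        // `self` parameters have no `ast::Name`, so they are filtered out here.
        .filter_map(|src| src.name())
        .map(|name| name.text().to_string())
        .collect()
}
```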
@@ -3001,6 +3222,14 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
}
+ pub fn is_float(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Float(_)))
+ }
+
+ pub fn is_char(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Char))
+ }
+
pub fn is_int_or_uint(&self) -> bool {
match self.ty.kind(Interner) {
TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)) => true,
@@ -3015,6 +3244,13 @@ impl Type {
}
}
+ pub fn as_slice(&self) -> Option<Type> {
+ match &self.ty.kind(Interner) {
+ TyKind::Slice(ty) => Some(self.derived(ty.clone())),
+ _ => None,
+ }
+ }
+
pub fn strip_references(&self) -> Type {
self.derived(self.ty.strip_references().clone())
}
@@ -3190,6 +3426,14 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Raw(..))
}
+ pub fn remove_raw_ptr(&self) -> Option<Type> {
+ if let TyKind::Raw(_, ty) = self.ty.kind(Interner) {
+ Some(self.derived(ty.clone()))
+ } else {
+ None
+ }
+ }
+
pub fn contains_unknown(&self) -> bool {
// FIXME: When we get rid of `ConstScalar::Unknown`, we can just look at precomputed
// `TypeFlags` in `TyData`.
@@ -3260,12 +3504,7 @@ impl Type {
pub fn as_array(&self, _db: &dyn HirDatabase) -> Option<(Type, usize)> {
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
- match len.data(Interner).value {
- ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(len) }) => {
- Some((self.derived(ty.clone()), len as usize))
- }
- _ => None,
- }
+ try_const_usize(len).map(|x| (self.derived(ty.clone()), x as usize))
} else {
None
}
@@ -3321,6 +3560,24 @@ impl Type {
}
}
+ /// Iterates its type arguments
+ ///
+ /// It iterates the actual type arguments when concrete types are used
+ /// and otherwise the generic names.
+ /// It does not include `const` arguments.
+ ///
+ /// For code, such as:
+ /// ```text
+ /// struct Foo<T, U>
+ ///
+ /// impl<U> Foo<String, U>
+ /// ```
+ ///
+ /// It iterates:
+ /// ```text
+ /// - "String"
+ /// - "U"
+ /// ```
pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
self.ty
.strip_references()
@@ -3331,12 +3588,62 @@ impl Type {
.map(move |ty| self.derived(ty))
}
- pub fn iterate_method_candidates<T>(
+ /// Iterates its type and const arguments
+ ///
+ /// It iterates the actual type and const arguments when concrete types
+ /// are used and otherwise the generic names.
+ ///
+ /// For code, such as:
+ /// ```text
+ /// struct Foo<T, const U: usize, const X: usize>
+ ///
+ /// impl<U> Foo<String, U, 12>
+ /// ```
+ ///
+ /// It iterates:
+ /// ```text
+ /// - "String"
+ /// - "U"
+ /// - "12"
+ /// ```
+ pub fn type_and_const_arguments<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ ) -> impl Iterator<Item = SmolStr> + 'a {
+ self.ty
+ .strip_references()
+ .as_adt()
+ .into_iter()
+ .flat_map(|(_, substs)| substs.iter(Interner))
+ .filter_map(|arg| {
+ // arg can be either a `Ty` or `constant`
+ if let Some(ty) = arg.ty(Interner) {
+ Some(SmolStr::new(ty.display(db).to_string()))
+ } else if let Some(const_) = arg.constant(Interner) {
+ Some(SmolStr::new_inline(&const_.display(db).to_string()))
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Combines lifetime indicators, type and constant parameters into a single `Iterator`
+ pub fn generic_parameters<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ ) -> impl Iterator<Item = SmolStr> + 'a {
+ // iterate the lifetime
+ self.as_adt()
+ .and_then(|a| a.lifetime(db).and_then(|lt| Some((&lt.name).to_smol_str())))
+ .into_iter()
+            // add the type and const parameters
+ .chain(self.type_and_const_arguments(db))
+ }
+
+ pub fn iterate_method_candidates_with_traits<T>(
&self,
db: &dyn HirDatabase,
scope: &SemanticsScope<'_>,
- // FIXME this can be retrieved from `scope`, except autoimport uses this
- // to specify a different set, so the method needs to be split
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
name: Option<&Name>,
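
A sketch of how the new `Type::generic_parameters` iterator (lifetime plus type and const arguments, already rendered as strings) might be used to format a generic argument list; `format_generic_args` and the output shape are illustrative assumptions.

```rust
use crate::{db::HirDatabase, Type};

fn format_generic_args(db: &dyn HirDatabase, ty: &Type) -> Option<String> {
    // `generic_parameters` chains the ADT's lifetime (if any) with its type and
    // const arguments.
    let args: Vec<String> = ty.generic_parameters(db).map(|arg| arg.to_string()).collect();
    if args.is_empty() {
        None
    } else {
        // e.g. a `Foo<'a, String, 12>` might come back as `<'a, String, 12>`.
        Some(format!("<{}>", args.join(", ")))
    }
}
```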
@@ -3364,6 +3671,24 @@ impl Type {
slot
}
+ pub fn iterate_method_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: impl FnMut(Function) -> Option<T>,
+ ) -> Option<T> {
+ self.iterate_method_candidates_with_traits(
+ db,
+ scope,
+ &scope.visible_traits().0,
+ with_local_impls,
+ name,
+ callback,
+ )
+ }
+
fn iterate_method_candidates_dyn(
&self,
db: &dyn HirDatabase,
@@ -3632,11 +3957,13 @@ impl Type {
}
}
+// FIXME: Document this
#[derive(Debug)]
pub struct Callable {
ty: Type,
sig: CallableSig,
callee: Callee,
+ /// Whether this is a method that was called with method call syntax.
pub(crate) is_bound_method: bool,
}
@@ -3670,14 +3997,14 @@ impl Callable {
Other => CallableKind::Other,
}
}
- pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
+ pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(ast::SelfParam, Type)> {
let func = match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
_ => return None,
};
let src = func.lookup(db.upcast()).source(db.upcast());
let param_list = src.value.param_list()?;
- param_list.self_param()
+ Some((param_list.self_param()?, self.ty.derived(self.sig.params()[0].clone())))
}
pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
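
A sketch of consuming the enriched `Callable::receiver_param`, which now also hands back the receiver's type alongside the `self` parameter; `describe_receiver` and its output format are assumptions for illustration.

```rust
use crate::{db::HirDatabase, Callable, HirDisplay};

fn describe_receiver(db: &dyn HirDatabase, callable: &Callable) -> Option<String> {
    // Only calls bound as method calls have a receiver parameter.
    let (self_param, self_ty) = callable.receiver_param(db)?;
    Some(format!("{self_param}: {}", self_ty.display(db)))
}
```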
@@ -3936,6 +4263,12 @@ impl HasCrate for Trait {
}
}
+impl HasCrate for TraitAlias {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
impl HasCrate for Static {
fn krate(&self, db: &dyn HirDatabase) -> Crate {
self.module(db).krate()
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 486b7ee62..407ba6f65 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -12,10 +12,10 @@ use hir_def::{
macro_id_to_def_id,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
- AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
+ AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
- db::AstDatabase,
+ db::ExpandDatabase,
name::{known, AsName},
ExpansionInfo, MacroCallId,
};
@@ -68,7 +68,8 @@ impl PathResolution {
| ModuleDef::Function(_)
| ModuleDef::Module(_)
| ModuleDef::Static(_)
- | ModuleDef::Trait(_),
+ | ModuleDef::Trait(_)
+ | ModuleDef::TraitAlias(_),
) => None,
PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
Some(TypeNs::TypeAliasId((*alias).into()))
@@ -365,6 +366,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_method_call(call).map(Function::from)
}
+    /// Attempts to resolve this call expression as a method call, falling back to resolving it as a field.
+ pub fn resolve_method_call_field_fallback(
+ &self,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Either<Function, Field>> {
+ self.imp
+ .resolve_method_call_fallback(call)
+ .map(|it| it.map_left(Function::from).map_right(Field::from))
+ }
+
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
}
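
A sketch of the new method-or-field fallback resolution, written as it might appear in an IDE crate that depends on `hir` and `ide_db`; the `callee_name` helper is a hypothetical consumer, not part of the diff.

```rust
use either::Either;
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::ast;

fn callee_name(sema: &Semantics<'_, RootDatabase>, call: &ast::MethodCallExpr) -> Option<String> {
    Some(match sema.resolve_method_call_field_fallback(call)? {
        // Resolved as an actual method.
        Either::Left(func) => func.name(sema.db).to_smol_str().to_string(),
        // No such method, but a field with that name exists (e.g. a closure-typed field).
        Either::Right(field) => field.name(sema.db).to_smol_str().to_string(),
    })
}
```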
@@ -400,7 +411,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_record_field(field)
}
- pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.imp.resolve_record_pat_field(field)
}
@@ -527,8 +538,8 @@ impl<'db> SemanticsImpl<'db> {
}
fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
- let src = self.wrap_node_infile(attr.clone());
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let src = self.wrap_node_infile(attr.clone());
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?;
@@ -1092,7 +1103,10 @@ impl<'db> SemanticsImpl<'db> {
let kind = match adjust.kind {
hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
- Adjust::Deref(Some(OverloadedDeref(mutability(m))))
+ // FIXME: Should we handle unknown mutability better?
+ Adjust::Deref(Some(OverloadedDeref(
+ m.map(mutability).unwrap_or(Mutability::Shared),
+ )))
}
hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
@@ -1145,6 +1159,13 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
+ fn resolve_method_call_fallback(
+ &self,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Either<FunctionId, FieldId>> {
+ self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
+ }
+
fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
}
@@ -1180,7 +1201,7 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_record_field(self.db, field)
}
- fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}
@@ -1330,6 +1351,7 @@ impl<'db> SemanticsImpl<'db> {
})
}
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::TraitAliasId(it) => it.resolver(self.db.upcast()),
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
@@ -1514,7 +1536,7 @@ impl<'db> SemanticsImpl<'db> {
fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>,
- db: &dyn AstDatabase,
+ db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
let loc = db.lookup_intern_macro_call(macro_call_id);
@@ -1556,6 +1578,7 @@ to_def_impls![
(crate::Enum, ast::Enum, enum_to_def),
(crate::Union, ast::Union, union_to_def),
(crate::Trait, ast::Trait, trait_to_def),
+ (crate::TraitAlias, ast::TraitAlias, trait_alias_to_def),
(crate::Impl, ast::Impl, impl_to_def),
(crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
(crate::Const, ast::Const, const_to_def),
@@ -1634,8 +1657,8 @@ impl<'a> SemanticsScope<'a> {
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
- resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
- Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+ resolver::ScopeDef::Local(binding_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
None => continue,
},
resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
@@ -1673,6 +1696,7 @@ impl<'a> SemanticsScope<'a> {
}
}
+#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);
impl ops::Deref for VisibleTraits {
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index 2b5bfda1d..f6f8c9a25 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -89,16 +89,16 @@ use base_db::FileId;
use hir_def::{
child_by_source::ChildBySource,
dyn_map::DynMap,
- expr::{LabelId, PatId},
+ expr::{BindingId, LabelId},
keys::{self, Key},
AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
- TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+ TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
-use stdx::impl_from;
+use stdx::{impl_from, never};
use syntax::{
ast::{self, HasName},
AstNode, SyntaxNode,
@@ -159,6 +159,12 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
self.to_def(src, keys::TRAIT)
}
+ pub(super) fn trait_alias_to_def(
+ &mut self,
+ src: InFile<ast::TraitAlias>,
+ ) -> Option<TraitAliasId> {
+ self.to_def(src, keys::TRAIT_ALIAS)
+ }
pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
self.to_def(src, keys::IMPL)
}
@@ -210,14 +216,14 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn bind_pat_to_def(
&mut self,
src: InFile<ast::IdentPat>,
- ) -> Option<(DefWithBodyId, PatId)> {
+ ) -> Option<(DefWithBodyId, BindingId)> {
let container = self.find_pat_or_label_container(src.syntax())?;
let (body, source_map) = self.db.body_with_source_map(container);
let src = src.map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that that is not the case
- if let crate::Pat::Bind { .. } = body[pat_id] {
- Some((container, pat_id))
+ if let crate::Pat::Bind { id, .. } = body[pat_id] {
+ Some((container, id))
} else {
None
}
@@ -225,11 +231,16 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn self_param_to_def(
&mut self,
src: InFile<ast::SelfParam>,
- ) -> Option<(DefWithBodyId, PatId)> {
+ ) -> Option<(DefWithBodyId, BindingId)> {
let container = self.find_pat_or_label_container(src.syntax())?;
- let (_body, source_map) = self.db.body_with_source_map(container);
+ let (body, source_map) = self.db.body_with_source_map(container);
let pat_id = source_map.node_self_param(src.as_ref())?;
- Some((container, pat_id))
+ if let crate::Pat::Bind { id, .. } = body[pat_id] {
+ Some((container, id))
+ } else {
+ never!();
+ None
+ }
}
pub(super) fn label_to_def(
&mut self,
@@ -353,6 +364,9 @@ impl SourceToDefCtx<'_, '_> {
match item {
ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
+ ast::Item::TraitAlias(it) => {
+ self.trait_alias_to_def(container.with_value(it))?.into()
+ }
ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
ast::Item::TypeAlias(it) => {
@@ -400,6 +414,9 @@ impl SourceToDefCtx<'_, '_> {
ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::TraitAlias(it) => {
+ self.trait_alias_to_def(InFile::new(file_id, it))?.into()
+ }
ast::Item::TypeAlias(it) => {
self.type_alias_to_def(InFile::new(file_id, it))?.into()
}
@@ -435,6 +452,7 @@ pub(crate) enum ChildContainer {
DefWithBodyId(DefWithBodyId),
ModuleId(ModuleId),
TraitId(TraitId),
+ TraitAliasId(TraitAliasId),
ImplId(ImplId),
EnumId(EnumId),
VariantId(VariantId),
@@ -447,6 +465,7 @@ impl_from! {
DefWithBodyId,
ModuleId,
TraitId,
+ TraitAliasId,
ImplId,
EnumId,
VariantId,
@@ -462,6 +481,7 @@ impl ChildContainer {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
+ ChildContainer::TraitAliasId(_) => DynMap::default(),
ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index 3b39e9fa9..c24d196e1 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -10,6 +10,7 @@ use std::{
sync::Arc,
};
+use either::Either;
use hir_def::{
body::{
self,
@@ -51,7 +52,7 @@ use syntax::{
use crate::{
db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
- Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+ Struct, ToolModule, Trait, TraitAlias, Type, TypeAlias, Variant,
};
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
@@ -266,6 +267,21 @@ impl SourceAnalyzer {
Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
}
+ pub(crate) fn resolve_method_call_fallback(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Either<FunctionId, FieldId>> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let inference_result = self.infer.as_ref()?;
+ match inference_result.method_resolution(expr_id) {
+ Some((f_in_trait, substs)) => {
+ Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
+ }
+ None => inference_result.field_resolution(expr_id).map(Either::Right),
+ }
+ }
+
pub(crate) fn resolve_await_to_poll(
&self,
db: &dyn HirDatabase,
@@ -406,8 +422,8 @@ impl SourceAnalyzer {
// Shorthand syntax, resolve to the local
let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
- Some(ValueNs::LocalBinding(pat_id)) => {
- Some(Local { pat_id, parent: self.resolver.body_owner()? })
+ Some(ValueNs::LocalBinding(binding_id)) => {
+ Some(Local { binding_id, parent: self.resolver.body_owner()? })
}
_ => None,
}
@@ -425,14 +441,17 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
field: &ast::RecordPatField,
- ) -> Option<Field> {
+ ) -> Option<(Field, Type)> {
let field_name = field.field_name()?.as_name();
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
let variant_data = variant.variant_data(db.upcast());
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
- Some(field.into())
+ let (_, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?;
+ let field_ty =
+ db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
+ Some((field.into(), Type::new_with_resolver(db, &self.resolver, field_ty)))
}
pub(crate) fn resolve_macro_call(
@@ -791,7 +810,7 @@ impl SourceAnalyzer {
|| Arc::new(hir_ty::TraitEnvironment::empty(krate)),
|d| db.trait_environment(d),
);
- method_resolution::lookup_impl_method(db, env, func, substs)
+ method_resolution::lookup_impl_method(db, env, func, substs).0
}
fn resolve_impl_const_or_trait_def(
@@ -809,7 +828,7 @@ impl SourceAnalyzer {
|| Arc::new(hir_ty::TraitEnvironment::empty(krate)),
|d| db.trait_environment(d),
);
- method_resolution::lookup_impl_const(db, env, const_id, subs)
+ method_resolution::lookup_impl_const(db, env, const_id, subs).0
}
fn lang_trait_fn(
@@ -943,17 +962,17 @@ fn resolve_hir_path_(
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
- let (ty, remaining) =
+ let (ty, remaining_idx) =
resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
- match remaining {
- Some(remaining) if remaining > 1 => {
- if remaining + 1 == path.segments().len() {
+ match remaining_idx {
+ Some(remaining_idx) => {
+ if remaining_idx + 1 == path.segments().len() {
Some((ty, path.segments().last()))
} else {
None
}
}
- _ => Some((ty, path.segments().get(1))),
+ None => Some((ty, None)),
}
}
}?;
@@ -978,6 +997,7 @@ fn resolve_hir_path_(
TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
};
match unresolved {
Some(unresolved) => resolver
@@ -1001,8 +1021,8 @@ fn resolve_hir_path_(
let values = || {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
let res = match val {
- ValueNs::LocalBinding(pat_id) => {
- let var = Local { parent: body_owner?, pat_id };
+ ValueNs::LocalBinding(binding_id) => {
+ let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
@@ -1065,6 +1085,7 @@ fn resolve_hir_path_qualifier(
TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
})
.or_else(|| {
resolver
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index fd78decda..a9afa1c6f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -68,6 +68,7 @@ pub enum FileSymbolKind {
Static,
Struct,
Trait,
+ TraitAlias,
TypeAlias,
Union,
}
@@ -153,6 +154,9 @@ impl<'a> SymbolCollector<'a> {
self.push_decl(id, FileSymbolKind::Trait);
self.collect_from_trait(id);
}
+ ModuleDefId::TraitAliasId(id) => {
+ self.push_decl(id, FileSymbolKind::TraitAlias);
+ }
ModuleDefId::TypeAliasId(id) => {
self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
index 0057f439f..785ae3d09 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
@@ -211,10 +211,8 @@ fn main() {
check_assist_not_applicable(
add_explicit_type,
r#"
-//- minicore: option
-
fn main() {
- let $0l = [0.0; Some(2).unwrap()];
+ let $0l = [0.0; unresolved_function(5)];
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
index f32ef2d59..9e1d9a702 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
@@ -157,19 +157,12 @@ fn is_ref_and_impls_iter_method(
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
let has_wanted_method = ty
- .iterate_method_candidates(
- sema.db,
- &scope,
- &scope.visible_traits().0,
- None,
- Some(&wanted_method),
- |func| {
- if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
- return Some(());
- }
- None
- },
- )
+ .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
+ if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
+ return Some(());
+ }
+ None
+ })
.is_some();
if !has_wanted_method {
return None;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
index 65c2479e9..7f2c01772 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
@@ -1,6 +1,6 @@
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
- ast::{self, HasName},
+ ast::{self, HasName, Name},
ted, AstNode, SyntaxNode,
};
@@ -48,7 +48,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
other => format!("{{ {other} }}"),
};
let extracting_arm_pat = extracting_arm.pat()?;
- let extracted_variable = find_extracted_variable(ctx, &extracting_arm)?;
+ let extracted_variable_positions = find_extracted_variable(ctx, &extracting_arm)?;
acc.add(
AssistId("convert_match_to_let_else", AssistKind::RefactorRewrite),
@@ -56,7 +56,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
let_stmt.syntax().text_range(),
|builder| {
let extracting_arm_pat =
- rename_variable(&extracting_arm_pat, extracted_variable, binding);
+ rename_variable(&extracting_arm_pat, &extracted_variable_positions, binding);
builder.replace(
let_stmt.syntax().text_range(),
format!("let {extracting_arm_pat} = {initializer_expr} else {diverging_arm_expr};"),
@@ -95,14 +95,15 @@ fn find_arms(
}
// Given an extracting arm, find the extracted variable.
-fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Option<ast::Name> {
+fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Option<Vec<Name>> {
match arm.expr()? {
ast::Expr::PathExpr(path) => {
let name_ref = path.syntax().descendants().find_map(ast::NameRef::cast)?;
match NameRefClass::classify(&ctx.sema, &name_ref)? {
NameRefClass::Definition(Definition::Local(local)) => {
- let source = local.source(ctx.db()).value.left()?;
- Some(source.name()?)
+ let source =
+ local.sources(ctx.db()).into_iter().map(|x| x.into_ident_pat()?.name());
+ source.collect()
}
_ => None,
}
@@ -115,27 +116,34 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
}
// Rename `extracted` with `binding` in `pat`.
-fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::Pat) -> SyntaxNode {
+fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode {
let syntax = pat.syntax().clone_for_update();
- let extracted_syntax = syntax.covering_element(extracted.syntax().text_range());
-
- // If `extracted` variable is a record field, we should rename it to `binding`,
- // otherwise we just need to replace `extracted` with `binding`.
-
- if let Some(record_pat_field) = extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
- {
- if let Some(name_ref) = record_pat_field.field_name() {
- ted::replace(
- record_pat_field.syntax(),
- ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding)
+ let extracted = extracted
+ .iter()
+ .map(|e| syntax.covering_element(e.syntax().text_range()))
+ .collect::<Vec<_>>();
+ for extracted_syntax in extracted {
+ // If `extracted` variable is a record field, we should rename it to `binding`,
+ // otherwise we just need to replace `extracted` with `binding`.
+
+ if let Some(record_pat_field) =
+ extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
+ {
+ if let Some(name_ref) = record_pat_field.field_name() {
+ ted::replace(
+ record_pat_field.syntax(),
+ ast::make::record_pat_field(
+ ast::make::name_ref(&name_ref.text()),
+ binding.clone(),
+ )
.syntax()
.clone_for_update(),
- );
+ );
+ }
+ } else {
+ ted::replace(extracted_syntax, binding.clone().syntax().clone_for_update());
}
- } else {
- ted::replace(extracted_syntax, binding.syntax().clone_for_update());
}
-
syntax
}
@@ -163,6 +171,39 @@ fn foo(opt: Option<()>) {
}
#[test]
+ fn or_pattern_multiple_binding() {
+ check_assist(
+ convert_match_to_let_else,
+ r#"
+//- minicore: option
+enum Foo {
+ A(u32),
+ B(u32),
+ C(String),
+}
+
+fn foo(opt: Option<Foo>) -> Result<u32, ()> {
+ let va$0lue = match opt {
+ Some(Foo::A(it) | Foo::B(it)) => it,
+ _ => return Err(()),
+ };
+}
+ "#,
+ r#"
+enum Foo {
+ A(u32),
+ B(u32),
+ C(String),
+}
+
+fn foo(opt: Option<Foo>) -> Result<u32, ()> {
+ let Some(Foo::A(value) | Foo::B(value)) = opt else { return Err(()) };
+}
+ "#,
+ );
+ }
+
+ #[test]
fn should_not_be_applicable_if_extracting_arm_is_not_an_identity_expr() {
cov_mark::check_count!(extracting_arm_is_not_an_identity_expr, 2);
check_assist_not_applicable(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index e04a1dabb..0b90c9ba3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -3,7 +3,8 @@ use std::iter;
use ast::make;
use either::Either;
use hir::{
- HasSource, HirDisplay, InFile, Local, ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
+ HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
+ TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@@ -710,7 +711,7 @@ impl FunctionBody {
) => local_ref,
_ => return,
};
- let InFile { file_id, value } = local_ref.source(sema.db);
+ let InFile { file_id, value } = local_ref.primary_source(sema.db).source;
// locals defined inside macros are not relevant to us
if !file_id.is_macro() {
match value {
@@ -972,11 +973,11 @@ impl FunctionBody {
locals: impl Iterator<Item = Local>,
) -> Vec<Param> {
locals
- .map(|local| (local, local.source(ctx.db())))
+ .map(|local| (local, local.primary_source(ctx.db())))
.filter(|(_, src)| is_defined_outside_of_body(ctx, self, src))
- .filter_map(|(local, src)| match src.value {
- Either::Left(src) => Some((local, src)),
- Either::Right(_) => {
+ .filter_map(|(local, src)| match src.into_ident_pat() {
+ Some(src) => Some((local, src)),
+ None => {
stdx::never!(false, "Local::is_self returned false, but source is SelfParam");
None
}
@@ -1238,17 +1239,9 @@ fn local_outlives_body(
fn is_defined_outside_of_body(
ctx: &AssistContext<'_>,
body: &FunctionBody,
- src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
+ src: &LocalSource,
) -> bool {
- src.file_id.original_file(ctx.db()) == ctx.file_id()
- && !body.contains_node(either_syntax(&src.value))
-}
-
-fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
- match value {
- Either::Left(pat) => pat.syntax(),
- Either::Right(it) => it.syntax(),
- }
+ src.original_file(ctx.db()) == ctx.file_id() && !body.contains_node(src.syntax())
}
/// find where to put extracted function definition
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
index a738deffb..163561412 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -287,7 +287,7 @@ fn foo() {
extract_variable,
r"
fn foo() {
- $0{ let x = 0; x }$0
+ $0{ let x = 0; x }$0;
something_else();
}",
r"
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
index d9e00435e..4c61678ea 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -192,6 +192,10 @@ fn target_data_for_def(
target_name = Some(t.name(db));
offset_target_and_file_id(db, t)?
}
+ hir::ModuleDef::TraitAlias(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
hir::ModuleDef::TypeAlias(t) => {
target_name = Some(t.name(db));
offset_target_and_file_id(db, t)?
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index 45b27a63c..076838928 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -5,6 +5,7 @@ use ide_db::{
base_db::FileId,
defs::{Definition, NameRefClass},
famous_defs::FamousDefs,
+ helpers::is_editable_crate,
path_transform::PathTransform,
FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
@@ -65,6 +66,13 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let fn_name = &*name_ref.text();
let TargetInfo { target_module, adt_name, target, file, insert_offset } =
fn_target_info(ctx, path, &call, fn_name)?;
+
+ if let Some(m) = target_module {
+ if !is_editable_crate(m.krate(), ctx.db()) {
+ return None;
+ }
+ }
+
let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
let text_range = call.syntax().text_range();
let label = format!("Generate {} function", function_builder.fn_name);
@@ -141,12 +149,11 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let receiver_ty = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references();
let adt = receiver_ty.as_adt()?;
- let current_module = ctx.sema.scope(call.syntax())?.module();
let target_module = adt.module(ctx.sema.db);
-
- if current_module.krate() != target_module.krate() {
+ if !is_editable_crate(target_module.krate(), ctx.db()) {
return None;
}
+
let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
@@ -253,7 +260,7 @@ struct FunctionBuilder {
params: ast::ParamList,
ret_type: Option<ast::RetType>,
should_focus_return_type: bool,
- needs_pub: bool,
+ visibility: Visibility,
is_async: bool,
}
@@ -264,12 +271,14 @@ impl FunctionBuilder {
ctx: &AssistContext<'_>,
call: &ast::CallExpr,
fn_name: &str,
- target_module: Option<hir::Module>,
+ target_module: Option<Module>,
target: GeneratedFunctionTarget,
) -> Option<Self> {
- let needs_pub = target_module.is_some();
let target_module =
target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
let fn_name = make::name(fn_name);
let mut necessary_generic_params = FxHashSet::default();
let params = fn_args(
@@ -300,7 +309,7 @@ impl FunctionBuilder {
params,
ret_type,
should_focus_return_type,
- needs_pub,
+ visibility,
is_async,
})
}
@@ -313,8 +322,9 @@ impl FunctionBuilder {
target_module: Module,
target: GeneratedFunctionTarget,
) -> Option<Self> {
- let needs_pub =
- !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
+
let fn_name = make::name(&name.text());
let mut necessary_generic_params = FxHashSet::default();
necessary_generic_params.extend(receiver_ty.generic_params(ctx.db()));
@@ -346,7 +356,7 @@ impl FunctionBuilder {
params,
ret_type,
should_focus_return_type,
- needs_pub,
+ visibility,
is_async,
})
}
@@ -354,7 +364,11 @@ impl FunctionBuilder {
fn render(self, is_method: bool) -> FunctionTemplate {
let placeholder_expr = make::ext::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
- let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
+ let visibility = match self.visibility {
+ Visibility::None => None,
+ Visibility::Crate => Some(make::visibility_pub_crate()),
+ Visibility::Pub => Some(make::visibility_pub()),
+ };
let mut fn_def = make::fn_(
visibility,
self.fn_name,
@@ -527,7 +541,7 @@ impl GeneratedFunctionTarget {
/// Computes parameter list for the generated function.
fn fn_args(
ctx: &AssistContext<'_>,
- target_module: hir::Module,
+ target_module: Module,
call: ast::CallableExpr,
necessary_generic_params: &mut FxHashSet<hir::GenericParam>,
) -> Option<ast::ParamList> {
@@ -957,13 +971,13 @@ fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> Stri
fn fn_arg_type(
ctx: &AssistContext<'_>,
- target_module: hir::Module,
+ target_module: Module,
fn_arg: &ast::Expr,
generic_params: &mut FxHashSet<hir::GenericParam>,
) -> String {
fn maybe_displayed_type(
ctx: &AssistContext<'_>,
- target_module: hir::Module,
+ target_module: Module,
fn_arg: &ast::Expr,
generic_params: &mut FxHashSet<hir::GenericParam>,
) -> Option<String> {
@@ -1013,7 +1027,7 @@ fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<Generate
}
fn next_space_for_fn_in_module(
- db: &dyn hir::db::AstDatabase,
+ db: &dyn hir::db::ExpandDatabase,
module_source: &hir::InFile<hir::ModuleSource>,
) -> Option<(FileId, GeneratedFunctionTarget)> {
let file = module_source.file_id.original_file(db);
@@ -1048,16 +1062,29 @@ fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarge
}
}
-fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistContext<'_>) -> bool {
- if module == ans {
- return true;
- }
- for c in ans.children(ctx.sema.db) {
- if module_is_descendant(module, &c, ctx) {
- return true;
- }
+#[derive(Clone, Copy)]
+enum Visibility {
+ None,
+ Crate,
+ Pub,
+}
+
+fn calculate_necessary_visibility(
+ current_module: Module,
+ target_module: Module,
+ ctx: &AssistContext<'_>,
+) -> Visibility {
+ let db = ctx.db();
+ let current_module = current_module.nearest_non_block_module(db);
+ let target_module = target_module.nearest_non_block_module(db);
+
+ if target_module.krate() != current_module.krate() {
+ Visibility::Pub
+ } else if current_module.path_to_root(db).contains(&target_module) {
+ Visibility::None
+ } else {
+ Visibility::Crate
}
- false
}
// This is never intended to be used as a generic graph structure. If there's ever another need of
@@ -2656,4 +2683,79 @@ fn main() {
",
)
}
+
+ #[test]
+ fn applicable_in_different_local_crate() {
+ check_assist(
+ generate_function,
+ r"
+//- /lib.rs crate:lib new_source_root:local
+fn dummy() {}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+ lib::foo$0();
+}
+",
+ r"
+fn dummy() {}
+
+pub fn foo() ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_in_different_local_crate_method() {
+ check_assist(
+ generate_function,
+ r"
+//- /lib.rs crate:lib new_source_root:local
+pub struct S;
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+ lib::S.foo$0();
+}
+",
+ r"
+pub struct S;
+impl S {
+ pub fn foo(&self) ${0:-> _} {
+ todo!()
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_in_different_library_crate() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+//- /lib.rs crate:lib new_source_root:library
+fn dummy() {}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+ lib::foo$0();
+}
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_in_different_library_crate_method() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+//- /lib.rs crate:lib new_source_root:library
+pub struct S;
+//- /main.rs crate:main deps:lib new_source_root:local
+fn main() {
+ lib::S.foo$0();
+}
+",
+ );
+ }
}
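
A self-contained sketch of the rule the new `calculate_necessary_visibility` encodes, with illustrative helpers standing in for the `krate()` and `path_to_root()` queries used above (these names are not the hir API): a target in another crate needs `pub`, a target module on the path to the root of the current module needs no visibility qualifier, and everything else gets `pub(crate)`.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Visibility {
    None,
    Crate,
    Pub,
}

// `same_crate` and `target_is_ancestor_of_current` are illustrative stand-ins for
// the `krate()` comparison and the `path_to_root()` containment check in the assist.
fn necessary_visibility(same_crate: bool, target_is_ancestor_of_current: bool) -> Visibility {
    if !same_crate {
        Visibility::Pub
    } else if target_is_ancestor_of_current {
        Visibility::None
    } else {
        Visibility::Crate
    }
}

fn main() {
    assert_eq!(necessary_visibility(false, false), Visibility::Pub);
    assert_eq!(necessary_visibility(true, true), Visibility::None);
    assert_eq!(necessary_visibility(true, false), Visibility::Crate);
}
```
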
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
index 9ce525ca3..442918619 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
@@ -95,14 +95,7 @@ fn get_impl_method(
let scope = ctx.sema.scope(impl_.syntax())?;
let ty = impl_def.self_ty(db);
- ty.iterate_method_candidates(
- db,
- &scope,
- &scope.visible_traits().0,
- None,
- Some(fn_name),
- |func| Some(func),
- )
+ ty.iterate_method_candidates(db, &scope, None, Some(fn_name), |func| Some(func))
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
index 8d311262a..e30a3e942 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -1,5 +1,5 @@
use ide_db::{
- imports::import_assets::item_for_path_search, use_trivial_contructor::use_trivial_constructor,
+ imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
};
use itertools::Itertools;
use stdx::format_to;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index 5ac18727c..28d815e81 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -363,10 +363,10 @@ fn inline(
.collect();
if function.self_param(sema.db).is_some() {
- let this = || make::name_ref("this").syntax().clone_for_update();
+ let this = || make::name_ref("this").syntax().clone_for_update().first_token().unwrap();
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
- .flat_map(|FileReference { name, range, .. }| match name {
+ .filter_map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
@@ -691,6 +691,42 @@ fn main() {
}
#[test]
+ fn generic_method_by_ref() {
+ check_assist(
+ inline_call,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add<T>(&self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = Foo(3).add$0::<usize>(2);
+}
+"#,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add<T>(&self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = {
+ let ref this = Foo(3);
+ Foo(this.0 + 2)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
fn method_by_ref_mut() {
check_assist(
inline_call,
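
The `flat_map` → `filter_map` change above is behavior-preserving: for a closure returning `Option`, both keep exactly the `Some` values (an `Option` iterates over zero or one items), and `filter_map` simply states that intent directly. A standalone check of the equivalence:

```rust
fn main() {
    let names = ["self", "x", "self"];

    let via_flat_map: Vec<&str> =
        names.iter().flat_map(|n| (*n == "self").then_some(*n)).collect();
    let via_filter_map: Vec<&str> =
        names.iter().filter_map(|n| (*n == "self").then_some(*n)).collect();

    // Both keep exactly the entries for which the closure returned Some.
    assert_eq!(via_flat_map, via_filter_map);
    assert_eq!(via_filter_map, vec!["self", "self"]);
}
```
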
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
index ce44100e3..e69d1a296 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -1,4 +1,3 @@
-use either::Either;
use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
@@ -205,12 +204,14 @@ fn inline_usage(
return None;
}
- // FIXME: Handle multiple local definitions
- let bind_pat = match local.source(sema.db).value {
- Either::Left(ident) => ident,
- _ => return None,
+ let sources = local.sources(sema.db);
+ let [source] = sources.as_slice() else {
+ // Not applicable with locals with multiple definitions (i.e. or patterns)
+ return None;
};
+ let bind_pat = source.as_ident_pat()?;
+
let let_stmt = ast::LetStmt::cast(bind_pat.syntax().parent()?)?;
let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all();
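
The new `let [source] = sources.as_slice() else { … }` above uses a slice pattern in `let`-`else` to accept exactly one definition and bail out otherwise. A minimal standalone illustration of that idiom:

```rust
fn only<T>(items: &[T]) -> Option<&T> {
    // Succeeds only when the slice has exactly one element; otherwise return early,
    // mirroring how the assist rejects locals with multiple definitions (or-patterns).
    let [item] = items else { return None };
    Some(item)
}

fn main() {
    assert_eq!(only(&[42]), Some(&42));
    assert_eq!(only::<i32>(&[]), None);
    assert_eq!(only(&[1, 2]), None);
}
```
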
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
index 52dd670ec..a403d5bc6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -1,7 +1,7 @@
use itertools::Itertools;
use syntax::{
- ast::{self, AstNode, AstToken},
- match_ast, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
+ ast::{self, make, AstNode, AstToken},
+ match_ast, ted, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -12,24 +12,28 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
//
// ```
// fn main() {
-// $0dbg!(92);
+// let x = $0dbg!(42 * dbg!(4 + 2));$0
// }
// ```
// ->
// ```
// fn main() {
-// 92;
+// let x = 42 * (4 + 2);
// }
// ```
pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let macro_calls = if ctx.has_empty_selection() {
- vec![ctx.find_node_at_offset::<ast::MacroCall>()?]
+ vec![ctx.find_node_at_offset::<ast::MacroExpr>()?]
} else {
ctx.covering_element()
.as_node()?
.descendants()
.filter(|node| ctx.selection_trimmed().contains_range(node.text_range()))
+ // When the selection exactly covers the macro call to be removed, `covering_element()`
+ // returns `ast::MacroCall` instead of its parent `ast::MacroExpr` that we want. So
+ // first try finding `ast::MacroCall`s and then retrieve their parent.
.filter_map(ast::MacroCall::cast)
+ .filter_map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
.collect()
};
@@ -42,16 +46,27 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
acc.add(
AssistId("remove_dbg", AssistKind::Refactor),
"Remove dbg!()",
- ctx.selection_trimmed(),
+ replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range)).unwrap(),
|builder| {
- for (range, text) in replacements {
- builder.replace(range, text);
+ for (range, expr) in replacements {
+ if let Some(expr) = expr {
+ builder.replace(range, expr.to_string());
+ } else {
+ builder.delete(range);
+ }
}
},
)
}
-fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, String)> {
+/// Returns `None` when either
+/// - macro call is not `dbg!()`
+/// - any node inside `dbg!()` could not be parsed as an expression
+/// - (`macro_expr` has no parent - is that possible?)
+///
+/// Returns `Some(_, None)` when the macro call should just be removed.
+fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Option<ast::Expr>)> {
+ let macro_call = macro_expr.macro_call()?;
let tt = macro_call.token_tree()?;
let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
if macro_call.path()?.segment()?.name_ref()?.text() != "dbg"
@@ -68,20 +83,19 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
.map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
.collect::<Option<Vec<ast::Expr>>>()?;
- let macro_expr = ast::MacroExpr::cast(macro_call.syntax().parent()?)?;
let parent = macro_expr.syntax().parent()?;
Some(match &*input_expressions {
// dbg!()
[] => {
match_ast! {
match parent {
- ast::StmtList(__) => {
+ ast::StmtList(_) => {
let range = macro_expr.syntax().text_range();
let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
Some(start) => range.cover_offset(start),
None => range,
};
- (range, String::new())
+ (range, None)
},
ast::ExprStmt(it) => {
let range = it.syntax().text_range();
@@ -89,19 +103,23 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
Some(start) => range.cover_offset(start),
None => range,
};
- (range, String::new())
+ (range, None)
},
- _ => (macro_call.syntax().text_range(), "()".to_owned())
+ _ => (macro_call.syntax().text_range(), Some(make::expr_unit())),
}
}
}
// dbg!(expr0)
[expr] => {
+ // dbg!(expr, &parent);
let wrap = match ast::Expr::cast(parent) {
Some(parent) => match (expr, parent) {
(ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false,
(
- ast::Expr::BoxExpr(_) | ast::Expr::PrefixExpr(_) | ast::Expr::RefExpr(_),
+ ast::Expr::BoxExpr(_)
+ | ast::Expr::PrefixExpr(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::MacroExpr(_),
ast::Expr::AwaitExpr(_)
| ast::Expr::CallExpr(_)
| ast::Expr::CastExpr(_)
@@ -112,7 +130,10 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
| ast::Expr::TryExpr(_),
) => true,
(
- ast::Expr::BinExpr(_) | ast::Expr::CastExpr(_) | ast::Expr::RangeExpr(_),
+ ast::Expr::BinExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::MacroExpr(_),
ast::Expr::AwaitExpr(_)
| ast::Expr::BinExpr(_)
| ast::Expr::CallExpr(_)
@@ -129,16 +150,61 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
},
None => false,
};
- (
- macro_call.syntax().text_range(),
- if wrap { format!("({expr})") } else { expr.to_string() },
- )
+ let expr = replace_nested_dbgs(expr.clone());
+ let expr = if wrap { make::expr_paren(expr) } else { expr.clone_subtree() };
+ (macro_call.syntax().text_range(), Some(expr))
}
// dbg!(expr0, expr1, ...)
- exprs => (macro_call.syntax().text_range(), format!("({})", exprs.iter().format(", "))),
+ exprs => {
+ let exprs = exprs.iter().cloned().map(replace_nested_dbgs);
+ let expr = make::expr_tuple(exprs);
+ (macro_call.syntax().text_range(), Some(expr))
+ }
})
}
+fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
+ if let ast::Expr::MacroExpr(mac) = &expanded {
+ // Special-case when `expanded` itself is `dbg!()` since we cannot replace the whole tree
+ // with `ted`. It should be fairly rare as it means the user wrote `dbg!(dbg!(..))` but you
+ // never know how code ends up being!
+ let replaced = if let Some((_, expr_opt)) = compute_dbg_replacement(mac.clone()) {
+ match expr_opt {
+ Some(expr) => expr,
+ None => {
+ stdx::never!("dbg! inside dbg! should not be just removed");
+ expanded
+ }
+ }
+ } else {
+ expanded
+ };
+
+ return replaced;
+ }
+
+ let expanded = expanded.clone_for_update();
+
+ // We need to collect to avoid mutation during traversal.
+ let macro_exprs: Vec<_> =
+ expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect();
+
+ for mac in macro_exprs {
+ let expr_opt = match compute_dbg_replacement(mac.clone()) {
+ Some((_, expr)) => expr,
+ None => continue,
+ };
+
+ if let Some(expr) = expr_opt {
+ ted::replace(mac.syntax(), expr.syntax().clone_for_update());
+ } else {
+ ted::remove(mac.syntax());
+ }
+ }
+
+ expanded
+}
+
fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
}
@@ -287,4 +353,32 @@ fn f() {
check_assist_not_applicable(remove_dbg, r#"$0dbg$0!(0)"#);
check_assist_not_applicable(remove_dbg, r#"$0dbg!(0$0)"#);
}
+
+ #[test]
+ fn test_nested_dbg() {
+ check(
+ r#"$0let x = dbg!(dbg!(dbg!(dbg!(0 + 1)) * 2) + dbg!(3));$0"#,
+ r#"let x = ((0 + 1) * 2) + 3;"#,
+ );
+ check(r#"$0dbg!(10, dbg!(), dbg!(20, 30))$0"#, r#"(10, (), (20, 30))"#);
+ }
+
+ #[test]
+ fn test_multiple_nested_dbg() {
+ check(
+ r#"
+fn f() {
+ $0dbg!();
+ let x = dbg!(dbg!(dbg!(0 + 1)) + 2) + dbg!(3);
+ dbg!(10, dbg!(), dbg!(20, 30));$0
+}
+"#,
+ r#"
+fn f() {
+ let x = ((0 + 1) + 2) + 3;
+ (10, (), (20, 30));
+}
+"#,
+ );
+ }
}
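
The comment in `replace_nested_dbgs` above ("collect to avoid mutation during traversal") reflects a general pattern when rewriting trees in place with `ted`: snapshot the nodes to be rewritten first, then mutate. A plain-Rust analogue, with a `Vec<String>` standing in for the syntax tree and a toy string transformation standing in for the real replacement:

```rust
fn strip_wrapped(items: &mut Vec<String>) {
    // First pass: read-only traversal, record which positions need rewriting.
    let targets: Vec<usize> = items
        .iter()
        .enumerate()
        .filter(|(_, s)| s.starts_with("dbg!(") && s.ends_with(')'))
        .map(|(i, _)| i)
        .collect();

    // Second pass: mutate, now that no iterator borrows `items`.
    for i in targets {
        let inner = items[i]["dbg!(".len()..items[i].len() - 1].to_string();
        items[i] = inner;
    }
}

fn main() {
    let mut v = vec!["dbg!(x + 1)".to_string(), "y".to_string()];
    strip_wrapped(&mut v);
    assert_eq!(v, vec!["x + 1".to_string(), "y".to_string()]);
}
```
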
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index 457559656..5e31d38fb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -102,9 +102,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
return None;
}
+ let let_ = if pat_seen { " let" } else { "" };
+
acc.add(
AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
- "Replace if let with match",
+ format!("Replace if{let_} with match"),
available_range,
move |edit| {
let match_expr = {
@@ -210,8 +212,17 @@ fn make_else_arm(
// ```
pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let match_expr: ast::MatchExpr = ctx.find_node_at_offset()?;
+ let match_arm_list = match_expr.match_arm_list()?;
+ let available_range = TextRange::new(
+ match_expr.syntax().text_range().start(),
+ match_arm_list.syntax().text_range().start(),
+ );
+ let cursor_in_range = available_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
- let mut arms = match_expr.match_arm_list()?.arms();
+ let mut arms = match_arm_list.arms();
let (first_arm, second_arm) = (arms.next()?, arms.next()?);
if arms.next().is_some() || first_arm.guard().is_some() || second_arm.guard().is_some() {
return None;
@@ -226,10 +237,20 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
)?;
let scrutinee = match_expr.expr()?;
+ let let_ = match &if_let_pat {
+ ast::Pat::LiteralPat(p)
+ if p.literal()
+ .map(|it| it.token().kind())
+ .map_or(false, |it| it == T![true] || it == T![false]) =>
+ {
+ ""
+ }
+ _ => " let",
+ };
let target = match_expr.syntax().text_range();
acc.add(
AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite),
- "Replace match with if let",
+ format!("Replace match with if{let_}"),
target,
move |edit| {
fn make_block_expr(expr: ast::Expr) -> ast::BlockExpr {
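
The label change above distinguishes the two shapes this rewrite can take: a match over the `true`/`false` literal patterns is offered as "Replace match with if", while a real pattern is offered as "Replace match with if let". Both target forms, for reference:

```rust
fn main() {
    let flag = true;
    let opt: Option<i32> = Some(1);

    // `match flag { true => …, false => … }` becomes a plain `if` (no `let`).
    if flag {
        println!("yes");
    } else {
        println!("no");
    }

    // `match opt { Some(n) => …, None => … }` becomes `if let`.
    if let Some(n) = opt {
        println!("{n}");
    } else {
        println!("none");
    }
}
```
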
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
new file mode 100644
index 000000000..a7e3ed793
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
@@ -0,0 +1,310 @@
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{
+ ast::{self, make, Expr, HasArgList},
+ AstNode,
+};
+
+use crate::{AssistContext, Assists};
+
+// Assist: replace_with_lazy_method
+//
+// Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
+//
+// ```
+// # //- minicore:option, fn
+// fn foo() {
+// let a = Some(1);
+// a.unwra$0p_or(2);
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+// let a = Some(1);
+// a.unwrap_or_else(|| 2);
+// }
+// ```
+pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ let scope = ctx.sema.scope(call.syntax())?;
+
+ let last_arg = call.arg_list()?.args().next()?;
+ let method_name = call.name_ref()?;
+
+ let callable = ctx.sema.resolve_method_call_as_callable(&call)?;
+ let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
+ let n_params = callable.n_params() + 1;
+
+ let method_name_lazy = format!(
+ "{method_name}{}",
+ if method_name.text().ends_with("or") { "_else" } else { "_with" }
+ );
+
+ receiver_ty.iterate_method_candidates_with_traits(
+ ctx.sema.db,
+ &scope,
+ &scope.visible_traits().0,
+ None,
+ None,
+ |func| {
+ let valid = func.name(ctx.sema.db).as_str() == Some(&*method_name_lazy)
+ && func.num_params(ctx.sema.db) == n_params
+ && {
+ let params = func.params_without_self(ctx.sema.db);
+ let last_p = params.first()?;
+ // FIXME: Check that this has the form of `() -> T` where T is the current type of the argument
+ last_p.ty().impls_fnonce(ctx.sema.db)
+ };
+ valid.then_some(func)
+ },
+ )?;
+
+ acc.add(
+ AssistId("replace_with_lazy_method", AssistKind::RefactorRewrite),
+ format!("Replace {method_name} with {method_name_lazy}"),
+ call.syntax().text_range(),
+ |builder| {
+ builder.replace(method_name.syntax().text_range(), method_name_lazy);
+ let closured = into_closure(&last_arg);
+ builder.replace_ast(last_arg, closured);
+ },
+ )
+}
+
+fn into_closure(param: &Expr) -> Expr {
+ (|| {
+ if let ast::Expr::CallExpr(call) = param {
+ if call.arg_list()?.args().count() == 0 {
+ Some(call.expr()?)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })()
+ .unwrap_or_else(|| make::expr_closure(None, param.clone()))
+}
+
+// Assist: replace_with_eager_method
+//
+// Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
+//
+// ```
+// # //- minicore:option, fn
+// fn foo() {
+// let a = Some(1);
+// a.unwra$0p_or_else(|| 2);
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+// let a = Some(1);
+// a.unwrap_or(2);
+// }
+// ```
+pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ let scope = ctx.sema.scope(call.syntax())?;
+
+ let last_arg = call.arg_list()?.args().next()?;
+ let method_name = call.name_ref()?;
+
+ let callable = ctx.sema.resolve_method_call_as_callable(&call)?;
+ let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
+ let n_params = callable.n_params() + 1;
+ let params = callable.params(ctx.sema.db);
+
+ // FIXME: Check that the arg is of the form `() -> T`
+ if !params.first()?.1.impls_fnonce(ctx.sema.db) {
+ return None;
+ }
+
+ let method_name_text = method_name.text();
+ let method_name_eager = method_name_text
+ .strip_suffix("_else")
+ .or_else(|| method_name_text.strip_suffix("_with"))?;
+
+ receiver_ty.iterate_method_candidates_with_traits(
+ ctx.sema.db,
+ &scope,
+ &scope.visible_traits().0,
+ None,
+ None,
+ |func| {
+ let valid = func.name(ctx.sema.db).as_str() == Some(&*method_name_eager)
+ && func.num_params(ctx.sema.db) == n_params;
+ valid.then_some(func)
+ },
+ )?;
+
+ acc.add(
+ AssistId("replace_with_eager_method", AssistKind::RefactorRewrite),
+ format!("Replace {method_name} with {method_name_eager}"),
+ call.syntax().text_range(),
+ |builder| {
+ builder.replace(method_name.syntax().text_range(), method_name_eager);
+ let called = into_call(&last_arg);
+ builder.replace_ast(last_arg, called);
+ },
+ )
+}
+
+fn into_call(param: &Expr) -> Expr {
+ (|| {
+ if let ast::Expr::ClosureExpr(closure) = param {
+ if closure.param_list()?.params().count() == 0 {
+ Some(closure.body()?)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })()
+ .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn replace_or_with_or_else_simple() {
+ check_assist(
+ replace_with_lazy_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or(2);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or_else(|| 2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_with_or_else_call() {
+ check_assist(
+ replace_with_lazy_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or(x());
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or_else(x);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_with_or_else_block() {
+ check_assist(
+ replace_with_lazy_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or({
+ let mut x = bar();
+ for i in 0..10 {
+ x += i;
+ }
+ x
+ });
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or_else(|| {
+ let mut x = bar();
+ for i in 0..10 {
+ x += i;
+ }
+ x
+ });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_simple() {
+ check_assist(
+ replace_with_eager_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or_else(|| 2);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or(2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_call() {
+ check_assist(
+ replace_with_eager_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or_else(x);
+}
+
+fn x() -> i32 { 0 }
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or(x());
+}
+
+fn x() -> i32 { 0 }
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_map() {
+ check_assist(
+ replace_with_eager_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some("foo");
+ return foo.map$0_or_else(|| 42, |v| v.len());
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some("foo");
+ return foo.map_or(42, |v| v.len());
+}
+"#,
+ )
+ }
+}
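
Both new assists rewrite between the eager and lazy spellings of these std methods; for a side-effect-free default the two forms are equivalent, the difference being when the default expression is evaluated. A plain-std reference for the argument rewrites that `into_closure` and `into_call` perform:

```rust
fn compute_default() -> i32 {
    // In the lazy form this only runs when the Option is actually None.
    42
}

fn main() {
    let missing: Option<i32> = None;

    // Eager: the argument is evaluated before the call.
    assert_eq!(missing.unwrap_or(compute_default()), 42);

    // Lazy: `replace_with_lazy_method` rewrites the call above to this, turning the
    // zero-argument call `compute_default()` into the bare path `compute_default`.
    assert_eq!(missing.unwrap_or_else(compute_default), 42);

    // When the argument is not a plain call, a closure is introduced instead.
    assert_eq!(missing.unwrap_or_else(|| 40 + 2), 42);
}
```
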
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs
deleted file mode 100644
index f0ed3c4fe..000000000
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs
+++ /dev/null
@@ -1,364 +0,0 @@
-use ide_db::{
- assists::{AssistId, AssistKind},
- famous_defs::FamousDefs,
-};
-use syntax::{
- ast::{self, make, Expr, HasArgList},
- AstNode,
-};
-
-use crate::{AssistContext, Assists};
-
-// Assist: replace_or_with_or_else
-//
-// Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
-//
-// ```
-// # //- minicore:option
-// fn foo() {
-// let a = Some(1);
-// a.unwra$0p_or(2);
-// }
-// ```
-// ->
-// ```
-// fn foo() {
-// let a = Some(1);
-// a.unwrap_or_else(|| 2);
-// }
-// ```
-pub(crate) fn replace_or_with_or_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
-
- let kind = is_option_or_result(call.receiver()?, ctx)?;
-
- let (name, arg_list) = (call.name_ref()?, call.arg_list()?);
-
- let mut map_or = false;
-
- let replace = match &*name.text() {
- "unwrap_or" => "unwrap_or_else".to_string(),
- "or" => "or_else".to_string(),
- "ok_or" if kind == Kind::Option => "ok_or_else".to_string(),
- "map_or" => {
- map_or = true;
- "map_or_else".to_string()
- }
- _ => return None,
- };
-
- let arg = match arg_list.args().collect::<Vec<_>>().as_slice() {
- [] => make::arg_list(Vec::new()),
- [first] => {
- let param = into_closure(first);
- make::arg_list(vec![param])
- }
- [first, second] if map_or => {
- let param = into_closure(first);
- make::arg_list(vec![param, second.clone()])
- }
- _ => return None,
- };
-
- acc.add(
- AssistId("replace_or_with_or_else", AssistKind::RefactorRewrite),
- format!("Replace {name} with {replace}"),
- call.syntax().text_range(),
- |builder| {
- builder.replace(name.syntax().text_range(), replace);
- builder.replace_ast(arg_list, arg)
- },
- )
-}
-
-fn into_closure(param: &Expr) -> Expr {
- (|| {
- if let ast::Expr::CallExpr(call) = param {
- if call.arg_list()?.args().count() == 0 {
- Some(call.expr()?)
- } else {
- None
- }
- } else {
- None
- }
- })()
- .unwrap_or_else(|| make::expr_closure(None, param.clone()))
-}
-
-// Assist: replace_or_else_with_or
-//
-// Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
-//
-// ```
-// # //- minicore:option
-// fn foo() {
-// let a = Some(1);
-// a.unwra$0p_or_else(|| 2);
-// }
-// ```
-// ->
-// ```
-// fn foo() {
-// let a = Some(1);
-// a.unwrap_or(2);
-// }
-// ```
-pub(crate) fn replace_or_else_with_or(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
-
- let kind = is_option_or_result(call.receiver()?, ctx)?;
-
- let (name, arg_list) = (call.name_ref()?, call.arg_list()?);
-
- let mut map_or = false;
- let replace = match &*name.text() {
- "unwrap_or_else" => "unwrap_or".to_string(),
- "or_else" => "or".to_string(),
- "ok_or_else" if kind == Kind::Option => "ok_or".to_string(),
- "map_or_else" => {
- map_or = true;
- "map_or".to_string()
- }
- _ => return None,
- };
-
- let arg = match arg_list.args().collect::<Vec<_>>().as_slice() {
- [] => make::arg_list(Vec::new()),
- [first] => {
- let param = into_call(first);
- make::arg_list(vec![param])
- }
- [first, second] if map_or => {
- let param = into_call(first);
- make::arg_list(vec![param, second.clone()])
- }
- _ => return None,
- };
-
- acc.add(
- AssistId("replace_or_else_with_or", AssistKind::RefactorRewrite),
- format!("Replace {name} with {replace}"),
- call.syntax().text_range(),
- |builder| {
- builder.replace(name.syntax().text_range(), replace);
- builder.replace_ast(arg_list, arg)
- },
- )
-}
-
-fn into_call(param: &Expr) -> Expr {
- (|| {
- if let ast::Expr::ClosureExpr(closure) = param {
- if closure.param_list()?.params().count() == 0 {
- Some(closure.body()?)
- } else {
- None
- }
- } else {
- None
- }
- })()
- .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())))
-}
-
-#[derive(PartialEq, Eq)]
-enum Kind {
- Option,
- Result,
-}
-
-fn is_option_or_result(receiver: Expr, ctx: &AssistContext<'_>) -> Option<Kind> {
- let ty = ctx.sema.type_of_expr(&receiver)?.adjusted().as_adt()?.as_enum()?;
- let option_enum =
- FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_option_Option();
-
- if let Some(option_enum) = option_enum {
- if ty == option_enum {
- return Some(Kind::Option);
- }
- }
-
- let result_enum =
- FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_result_Result();
-
- if let Some(result_enum) = result_enum {
- if ty == result_enum {
- return Some(Kind::Result);
- }
- }
-
- None
-}
-
-#[cfg(test)]
-mod tests {
- use crate::tests::{check_assist, check_assist_not_applicable};
-
- use super::*;
-
- #[test]
- fn replace_or_with_or_else_simple() {
- check_assist(
- replace_or_with_or_else,
- r#"
-//- minicore: option
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_$0or(2);
-}
-"#,
- r#"
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_or_else(|| 2);
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_with_or_else_call() {
- check_assist(
- replace_or_with_or_else,
- r#"
-//- minicore: option
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_$0or(x());
-}
-"#,
- r#"
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_or_else(x);
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_with_or_else_block() {
- check_assist(
- replace_or_with_or_else,
- r#"
-//- minicore: option
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_$0or({
- let mut x = bar();
- for i in 0..10 {
- x += i;
- }
- x
- });
-}
-"#,
- r#"
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_or_else(|| {
- let mut x = bar();
- for i in 0..10 {
- x += i;
- }
- x
- });
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_else_with_or_simple() {
- check_assist(
- replace_or_else_with_or,
- r#"
-//- minicore: option
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_$0or_else(|| 2);
-}
-"#,
- r#"
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_or(2);
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_else_with_or_call() {
- check_assist(
- replace_or_else_with_or,
- r#"
-//- minicore: option
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_$0or_else(x);
-}
-"#,
- r#"
-fn foo() {
- let foo = Some(1);
- return foo.unwrap_or(x());
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_else_with_or_result() {
- check_assist(
- replace_or_else_with_or,
- r#"
-//- minicore: result
-fn foo() {
- let foo = Ok(1);
- return foo.unwrap_$0or_else(x);
-}
-"#,
- r#"
-fn foo() {
- let foo = Ok(1);
- return foo.unwrap_or(x());
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_else_with_or_map() {
- check_assist(
- replace_or_else_with_or,
- r#"
-//- minicore: result
-fn foo() {
- let foo = Ok("foo");
- return foo.map$0_or_else(|| 42, |v| v.len());
-}
-"#,
- r#"
-fn foo() {
- let foo = Ok("foo");
- return foo.map_or(42, |v| v.len());
-}
-"#,
- )
- }
-
- #[test]
- fn replace_or_else_with_or_not_applicable() {
- check_assist_not_applicable(
- replace_or_else_with_or,
- r#"
-fn foo() {
- let foo = Ok(1);
- return foo.unwrap_$0or_else(x);
-}
-"#,
- )
- }
-}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 276cf5f5d..8b07e29a5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -188,7 +188,7 @@ mod handlers {
mod replace_try_expr_with_match;
mod replace_derive_with_manual_impl;
mod replace_if_let_with_match;
- mod replace_or_with_or_else;
+ mod replace_method_eager_lazy;
mod replace_arith_op;
mod introduce_named_generic;
mod replace_let_with_if_let;
@@ -265,7 +265,6 @@ mod handlers {
inline_local_variable::inline_local_variable,
inline_type_alias::inline_type_alias,
inline_type_alias::inline_type_alias_uses,
- inline_macro::inline_macro,
introduce_named_generic::introduce_named_generic,
introduce_named_lifetime::introduce_named_lifetime,
invert_if::invert_if,
@@ -286,7 +285,6 @@ mod handlers {
raw_string::add_hash,
raw_string::make_usual_string,
raw_string::remove_hash,
- remove_dbg::remove_dbg,
remove_mut::remove_mut,
remove_unused_param::remove_unused_param,
remove_parentheses::remove_parentheses,
@@ -297,8 +295,8 @@ mod handlers {
replace_if_let_with_match::replace_if_let_with_match,
replace_if_let_with_match::replace_match_with_if_let,
replace_let_with_if_let::replace_let_with_if_let,
- replace_or_with_or_else::replace_or_else_with_or,
- replace_or_with_or_else::replace_or_with_or_else,
+ replace_method_eager_lazy::replace_with_eager_method,
+ replace_method_eager_lazy::replace_with_lazy_method,
replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
replace_qualified_name_with_use::replace_qualified_name_with_use,
replace_arith_op::replace_arith_with_wrapping,
@@ -335,6 +333,9 @@ mod handlers {
generate_setter::generate_setter,
generate_delegate_methods::generate_delegate_methods,
generate_deref::generate_deref,
+ //
+ remove_dbg::remove_dbg,
+ inline_macro::inline_macro,
// Are you sure you want to add new assist here, and not to the
// sorted list above?
]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index 8a25e1f64..e5a8d675a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -2006,12 +2006,12 @@ fn doctest_remove_dbg() {
"remove_dbg",
r#####"
fn main() {
- $0dbg!(92);
+ let x = $0dbg!(42 * dbg!(4 + 2));$0
}
"#####,
r#####"
fn main() {
- 92;
+ let x = 42 * (4 + 2);
}
"#####,
)
@@ -2314,46 +2314,6 @@ fn handle(action: Action) {
}
#[test]
-fn doctest_replace_or_else_with_or() {
- check_doc_test(
- "replace_or_else_with_or",
- r#####"
-//- minicore:option
-fn foo() {
- let a = Some(1);
- a.unwra$0p_or_else(|| 2);
-}
-"#####,
- r#####"
-fn foo() {
- let a = Some(1);
- a.unwrap_or(2);
-}
-"#####,
- )
-}
-
-#[test]
-fn doctest_replace_or_with_or_else() {
- check_doc_test(
- "replace_or_with_or_else",
- r#####"
-//- minicore:option
-fn foo() {
- let a = Some(1);
- a.unwra$0p_or(2);
-}
-"#####,
- r#####"
-fn foo() {
- let a = Some(1);
- a.unwrap_or_else(|| 2);
-}
-"#####,
- )
-}
-
-#[test]
fn doctest_replace_qualified_name_with_use() {
check_doc_test(
"replace_qualified_name_with_use",
@@ -2428,6 +2388,46 @@ fn main() {
}
#[test]
+fn doctest_replace_with_eager_method() {
+ check_doc_test(
+ "replace_with_eager_method",
+ r#####"
+//- minicore:option, fn
+fn foo() {
+ let a = Some(1);
+ a.unwra$0p_or_else(|| 2);
+}
+"#####,
+ r#####"
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or(2);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_with_lazy_method() {
+ check_doc_test(
+ "replace_with_lazy_method",
+ r#####"
+//- minicore:option, fn
+fn foo() {
+ let a = Some(1);
+ a.unwra$0p_or(2);
+}
+"#####,
+ r#####"
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or_else(|| 2);
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_sort_items() {
check_doc_test(
"sort_items",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
index eb87d6c58..c3136f6df 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -23,7 +23,7 @@ pub(crate) mod env_vars;
use std::iter;
-use hir::{known, ScopeDef};
+use hir::{known, ScopeDef, Variant};
use ide_db::{imports::import_assets::LocatedImport, SymbolKind};
use syntax::ast;
@@ -537,17 +537,20 @@ fn enum_variants_with_paths(
impl_: &Option<ast::Impl>,
cb: impl Fn(&mut Completions, &CompletionContext<'_>, hir::Variant, hir::ModPath),
) {
+ let mut process_variant = |variant: Variant| {
+ let self_path = hir::ModPath::from_segments(
+ hir::PathKind::Plain,
+ iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
+ );
+
+ cb(acc, ctx, variant, self_path);
+ };
+
let variants = enum_.variants(ctx.db);
if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
- for &variant in &variants {
- let self_path = hir::ModPath::from_segments(
- hir::PathKind::Plain,
- iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
- );
- cb(acc, ctx, variant, self_path);
- }
+ variants.iter().for_each(|variant| process_variant(*variant));
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
index 7c6e5e100..77246379e 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -122,7 +122,7 @@ fn complete_methods(
mut f: impl FnMut(hir::Function),
) {
let mut seen_methods = FxHashSet::default();
- receiver.iterate_method_candidates(
+ receiver.iterate_method_candidates_with_traits(
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
@@ -415,7 +415,6 @@ fn foo(a: lib::A) { a.$0 }
fn test_local_impls() {
check(
r#"
-//- /lib.rs crate:lib
pub struct A {}
mod m {
impl super::A {
@@ -427,9 +426,8 @@ mod m {
}
}
}
-//- /main.rs crate:main deps:lib
-fn foo(a: lib::A) {
- impl lib::A {
+fn foo(a: A) {
+ impl A {
fn local_method(&self) {}
}
a.$0
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index 364969af9..0979f6a6d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -5,10 +5,7 @@ use ide_db::imports::{
insert_use::ImportScope,
};
use itertools::Itertools;
-use syntax::{
- ast::{self},
- AstNode, SyntaxNode, T,
-};
+use syntax::{ast, AstNode, SyntaxNode, T};
use crate::{
context::{
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
index 37849c251..69c05a76d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
@@ -33,7 +33,9 @@ pub(crate) fn complete_type_path(
// Don't suggest attribute macros and derives.
ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db),
// Type things are fine
- ScopeDef::ModuleDef(BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TypeAlias(_))
+ ScopeDef::ModuleDef(
+ BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_),
+ )
| ScopeDef::AdtSelfType(_)
| ScopeDef::Unknown
| ScopeDef::GenericParam(TypeParam(_)) => true,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index ea54068b0..8cbf89e9c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -6,13 +6,13 @@ mod tests;
use std::iter;
-use base_db::SourceDatabaseExt;
use hir::{
HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
};
use ide_db::{
base_db::{FilePosition, SourceDatabase},
famous_defs::FamousDefs,
+ helpers::is_editable_crate,
FxHashMap, FxHashSet, RootDatabase,
};
use syntax::{
@@ -220,6 +220,8 @@ pub(super) struct PatternContext {
/// The record pattern this name or ref is a field of
pub(super) record_pat: Option<ast::RecordPat>,
pub(super) impl_: Option<ast::Impl>,
+ /// List of missing variants in a match expr
+ pub(super) missing_variants: Vec<hir::Variant>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -387,8 +389,7 @@ pub(crate) struct CompletionContext<'a> {
impl<'a> CompletionContext<'a> {
/// The range of the identifier that is being completed.
pub(crate) fn source_range(&self) -> TextRange {
- // check kind of macro-expanded token, but use range of original token
- let kind = self.token.kind();
+ let kind = self.original_token.kind();
match kind {
CHAR => {
// assume we are completing a lifetime but the user has only typed the '
@@ -416,6 +417,7 @@ impl<'a> CompletionContext<'a> {
hir::ModuleDef::Const(it) => self.is_visible(it),
hir::ModuleDef::Static(it) => self.is_visible(it),
hir::ModuleDef::Trait(it) => self.is_visible(it),
+ hir::ModuleDef::TraitAlias(it) => self.is_visible(it),
hir::ModuleDef::TypeAlias(it) => self.is_visible(it),
hir::ModuleDef::Macro(it) => self.is_visible(it),
hir::ModuleDef::BuiltinType(_) => Visible::Yes,
@@ -525,10 +527,11 @@ impl<'a> CompletionContext<'a> {
return Visible::No;
}
// If the definition location is editable, also show private items
- let root_file = defining_crate.root_file(self.db);
- let source_root_id = self.db.file_source_root(root_file);
- let is_editable = !self.db.source_root(source_root_id).is_library;
- return if is_editable { Visible::Editable } else { Visible::No };
+ return if is_editable_crate(defining_crate, self.db) {
+ Visible::Editable
+ } else {
+ Visible::No
+ };
}
if self.is_doc_hidden(attrs, defining_crate) {
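
The three removed lines above are the check that `is_editable_crate` now centralizes. A sketch of such a helper, reconstructed from exactly that removed code (the real function lives in `ide_db::helpers` and may differ in detail; `file_source_root`/`source_root` come from `base_db::SourceDatabaseExt`):

```rust
// Reconstructed from the inlined check this diff removes; treat it as a sketch,
// not the definitive ide_db implementation.
pub fn is_editable_crate(krate: hir::Crate, db: &RootDatabase) -> bool {
    let root_file = krate.root_file(db);
    let source_root_id = db.file_source_root(root_file);
    // Library source roots (sysroot, registry dependencies) are not editable.
    !db.source_root(source_root_id).is_library
}
```
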
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index db0045aef..a94c40458 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -1,7 +1,7 @@
//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;
-use hir::{Semantics, Type, TypeInfo};
+use hir::{Semantics, Type, TypeInfo, Variant};
use ide_db::{active_parameter::ActiveParameter, RootDatabase};
use syntax::{
algo::{find_node_at_offset, non_trivia_sibling},
@@ -353,7 +353,7 @@ fn expected_type_and_name(
_ => ty,
};
- loop {
+ let (ty, name) = loop {
break match_ast! {
match node {
ast::LetStmt(it) => {
@@ -385,9 +385,7 @@ fn expected_type_and_name(
token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
-
- let ty = strip_refs(ap.ty);
- (Some(ty), name)
+ (Some(ap.ty), name)
})
.unwrap_or((None, None))
},
@@ -489,7 +487,8 @@ fn expected_type_and_name(
},
}
};
- }
+ };
+ (ty.map(strip_refs), name)
}
fn classify_lifetime(
@@ -1133,6 +1132,9 @@ fn pattern_context_for(
pat: ast::Pat,
) -> PatternContext {
let mut param_ctx = None;
+
+ let mut missing_variants = vec![];
+
let (refutability, has_type_ascription) =
pat
.syntax()
@@ -1162,7 +1164,52 @@ fn pattern_context_for(
})();
return (PatternRefutability::Irrefutable, has_type_ascription)
},
- ast::MatchArm(_) => PatternRefutability::Refutable,
+ ast::MatchArm(match_arm) => {
+ let missing_variants_opt = match_arm
+ .syntax()
+ .parent()
+ .and_then(ast::MatchArmList::cast)
+ .and_then(|match_arm_list| {
+ match_arm_list
+ .syntax()
+ .parent()
+ .and_then(ast::MatchExpr::cast)
+ .and_then(|match_expr| {
+ let expr_opt = find_opt_node_in_file(&original_file, match_expr.expr());
+
+ expr_opt.and_then(|expr| {
+ sema.type_of_expr(&expr)?
+ .adjusted()
+ .autoderef(sema.db)
+ .find_map(|ty| match ty.as_adt() {
+ Some(hir::Adt::Enum(e)) => Some(e),
+ _ => None,
+ }).and_then(|enum_| {
+ Some(enum_.variants(sema.db))
+ })
+ })
+ }).and_then(|variants| {
+ Some(variants.iter().filter_map(|variant| {
+ let variant_name = variant.name(sema.db).to_string();
+
+ let variant_already_present = match_arm_list.arms().any(|arm| {
+ arm.pat().and_then(|pat| {
+ let pat_already_present = pat.syntax().to_string().contains(&variant_name);
+ pat_already_present.then(|| pat_already_present)
+ }).is_some()
+ });
+
+ (!variant_already_present).then_some(variant.clone())
+ }).collect::<Vec<Variant>>())
+ })
+ });
+
+ if let Some(missing_variants_) = missing_variants_opt {
+ missing_variants = missing_variants_;
+ };
+
+ PatternRefutability::Refutable
+ },
ast::LetExpr(_) => PatternRefutability::Refutable,
ast::ForExpr(_) => PatternRefutability::Irrefutable,
_ => PatternRefutability::Irrefutable,
@@ -1184,6 +1231,7 @@ fn pattern_context_for(
ref_token,
record_pat: None,
impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
+ missing_variants,
}
}
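
The `missing_variants` computation above keeps only the enum variants whose name does not already appear textually in an existing arm (the `pat.syntax().to_string().contains(&variant_name)` check). The same filtering idea detached from the hir types, using plain strings:

```rust
fn missing_variants(all_variants: &[&str], arm_patterns: &[&str]) -> Vec<String> {
    all_variants
        .iter()
        .copied()
        // A variant is "missing" when no existing arm pattern mentions its name,
        // mirroring the textual `contains` heuristic used in the diff above.
        .filter(|variant| !arm_patterns.iter().any(|pat| pat.contains(*variant)))
        .map(|variant| variant.to_string())
        .collect()
}

fn main() {
    let variants = ["Foo", "Bar"];
    let arms = ["Baz::Foo => ()"];
    assert_eq!(missing_variants(&variants, &arms), vec!["Bar".to_string()]);
}
```
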
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
index a654a5db5..82a1c10c5 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
@@ -411,3 +411,15 @@ fn main() {
expect!["ty: i32, name: ?"],
);
}
+
+#[test]
+fn expected_type_ref_return_pos() {
+ check_expected_type_and_name(
+ r#"
+fn f(thing: u32) -> &u32 {
+ &thin$0
+}
+"#,
+ expect!["ty: u32, name: ?"],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index 2f65491d8..bb9fa7cca 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -288,7 +288,7 @@ impl_from!(SymbolKind for CompletionItemKind);
impl CompletionItemKind {
#[cfg(test)]
- pub(crate) fn tag(&self) -> &'static str {
+ pub(crate) fn tag(self) -> &'static str {
match self {
CompletionItemKind::SymbolKind(kind) => match kind {
SymbolKind::Attribute => "at",
@@ -312,6 +312,7 @@ impl CompletionItemKind {
SymbolKind::Struct => "st",
SymbolKind::ToolModule => "tm",
SymbolKind::Trait => "tt",
+ SymbolKind::TraitAlias => "tr",
SymbolKind::TypeAlias => "ta",
SymbolKind::TypeParam => "tp",
SymbolKind::Union => "un",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index d99ad5f9f..c1f51aabb 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -367,6 +367,9 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::SymbolKind(SymbolKind::Const),
ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::SymbolKind(SymbolKind::Static),
ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::SymbolKind(SymbolKind::Trait),
+ ScopeDef::ModuleDef(TraitAlias(..)) => {
+ CompletionItemKind::SymbolKind(SymbolKind::TraitAlias)
+ }
ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::SymbolKind(SymbolKind::TypeAlias),
ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType,
ScopeDef::GenericParam(param) => CompletionItemKind::SymbolKind(match param {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
index ffcad1185..44e886076 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
@@ -267,4 +267,63 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn complete_missing_macro_arg() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/14246
+ check_edit(
+ "BAR",
+ r#"
+macro_rules! foo {
+ ($val:ident, $val2: ident) => {
+ $val $val2
+ };
+}
+
+const BAR: u32 = 9;
+fn main() {
+ foo!(BAR, $0)
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($val:ident, $val2: ident) => {
+ $val $val2
+ };
+}
+
+const BAR: u32 = 9;
+fn main() {
+ foo!(BAR, BAR)
+}
+"#,
+ );
+ check_edit(
+ "BAR",
+ r#"
+macro_rules! foo {
+ ($val:ident, $val2: ident) => {
+ $val $val2
+ };
+}
+
+const BAR: u32 = 9;
+fn main() {
+ foo!($0)
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($val:ident, $val2: ident) => {
+ $val $val2
+ };
+}
+
+const BAR: u32 = 9;
+fn main() {
+ foo!(BAR)
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
index 21b4bc217..9225c91be 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -37,7 +37,9 @@ pub(crate) fn render_struct_pat(
let lookup = format_literal_lookup(name.as_str(), kind);
let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
- Some(build_completion(ctx, label, lookup, pat, strukt))
+ let db = ctx.db();
+
+ Some(build_completion(ctx, label, lookup, pat, strukt, strukt.ty(db), false))
}
pub(crate) fn render_variant_pat(
@@ -52,6 +54,7 @@ pub(crate) fn render_variant_pat(
let fields = variant.fields(ctx.db());
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
+ let enum_ty = variant.parent_enum(ctx.db()).ty(ctx.db());
let (name, escaped_name) = match path {
Some(path) => (path.unescaped().to_string().into(), path.to_string().into()),
@@ -81,7 +84,15 @@ pub(crate) fn render_variant_pat(
}
};
- Some(build_completion(ctx, label, lookup, pat, variant))
+ Some(build_completion(
+ ctx,
+ label,
+ lookup,
+ pat,
+ variant,
+ enum_ty,
+ pattern_ctx.missing_variants.contains(&variant),
+ ))
}
fn build_completion(
@@ -90,13 +101,22 @@ fn build_completion(
lookup: SmolStr,
pat: String,
def: impl HasAttrs + Copy,
+ adt_ty: hir::Type,
+ // Missing in context of match statement completions
+ is_variant_missing: bool,
) -> CompletionItem {
+ let mut relevance = ctx.completion_relevance();
+
+ if is_variant_missing {
+ relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty);
+ }
+
let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label);
item.set_documentation(ctx.docs(def))
.set_deprecated(ctx.is_deprecated(def))
.detail(&pat)
.lookup_by(lookup)
- .set_relevance(ctx.completion_relevance());
+ .set_relevance(relevance);
match ctx.snippet_cap() {
Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
None => item.insert_text(pat),
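
The relevance tweak above marks a missing-variant pattern completion as a type match so clients can rank it ahead of unrelated candidates. The ranking idea in isolation, with an illustrative struct standing in for the real `CompletionRelevance`:

```rust
#[derive(Debug)]
struct Candidate {
    label: &'static str,
    // Stand-in for the `type_match` bump set in `build_completion` above.
    type_match: bool,
}

fn main() {
    let mut candidates = vec![
        Candidate { label: "Err(…)", type_match: false },
        Candidate { label: "Baz::Bar", type_match: true },
        Candidate { label: "ref", type_match: false },
    ];

    // Candidates with a type match sort first; the rest keep their relative order.
    candidates.sort_by_key(|c| !c.type_match);
    assert_eq!(candidates[0].label, "Baz::Bar");
}
```
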
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
index 4e60820dd..c97144b61 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
@@ -857,9 +857,9 @@ mod lint {
#[test]
fn lint_feature() {
check_edit(
- "box_syntax",
+ "box_patterns",
r#"#[feature(box_$0)] struct Test;"#,
- r#"#[feature(box_syntax)] struct Test;"#,
+ r#"#[feature(box_patterns)] struct Test;"#,
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
index ad9254e7f..c0e485c36 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
@@ -614,6 +614,7 @@ fn f(u: U) {
check_empty(
r#"
+#![rustc_coherence_is_core]
#[lang = "u32"]
impl u32 {
pub const MIN: Self = 0;
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
index 328faaa06..65cefdb08 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -47,6 +47,66 @@ fn foo(s: Struct) {
}
#[test]
+fn record_pattern_field_enum() {
+ check(
+ r#"
+//- minicore:result
+enum Baz { Foo, Bar }
+
+fn foo(baz: Baz) {
+ match baz {
+ Baz::Foo => (),
+ $0
+ }
+}
+"#,
+ expect![[r#"
+ en Baz
+ en Result
+ md core
+ ev Err
+ ev Ok
+ bn Baz::Bar Baz::Bar$0
+ bn Baz::Foo Baz::Foo$0
+ bn Err(…) Err($1)$0
+ bn Ok(…) Ok($1)$0
+ kw mut
+ kw ref
+ "#]],
+ );
+
+ check(
+ r#"
+//- minicore:result
+enum Baz { Foo, Bar }
+
+fn foo(baz: Baz) {
+ use Baz::*;
+ match baz {
+ Foo => (),
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ en Baz
+ en Result
+ md core
+ ev Bar
+ ev Err
+ ev Foo
+ ev Ok
+ bn Bar Bar$0
+ bn Err(…) Err($1)$0
+ bn Foo Foo$0
+ bn Ok(…) Ok($1)$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
fn pattern_enum_variant() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index cb71c7b2b..f8a6f6cd3 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -608,6 +608,7 @@ fn f() {
}
//- /core.rs crate:core
+#![rustc_coherence_is_core]
#[lang = "u8"]
impl u8 {
pub const MAX: Self = 255;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
index 2b6b60547..0da4e729a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
@@ -96,6 +96,7 @@ pub fn generic_def_for_node(
hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(),
hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(),
hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::TraitAlias(it)) => it.into(),
hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(),
hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(),
hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index b1ee9b58d..ea1d9cc49 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -71,70 +71,98 @@ impl RootDatabase {
base_db::SourceRootQuery
base_db::SourceRootCratesQuery
- // AstDatabase
+ // ExpandDatabase
hir::db::AstIdMapQuery
+ hir::db::ParseMacroExpansionQuery
+ hir::db::InternMacroCallQuery
hir::db::MacroArgTextQuery
hir::db::MacroDefQuery
- hir::db::ParseMacroExpansionQuery
hir::db::MacroExpandQuery
+ hir::db::ExpandProcMacroQuery
+ hir::db::MacroExpandErrorQuery
hir::db::HygieneFrameQuery
- hir::db::InternMacroCallQuery
// DefDatabase
hir::db::FileItemTreeQuery
- hir::db::BlockDefMapQuery
hir::db::CrateDefMapQueryQuery
- hir::db::FieldsAttrsQuery
- hir::db::VariantsAttrsQuery
- hir::db::FieldsAttrsSourceMapQuery
- hir::db::VariantsAttrsSourceMapQuery
+ hir::db::BlockDefMapQuery
hir::db::StructDataQuery
+ hir::db::StructDataWithDiagnosticsQuery
hir::db::UnionDataQuery
+ hir::db::UnionDataWithDiagnosticsQuery
hir::db::EnumDataQuery
+ hir::db::EnumDataWithDiagnosticsQuery
hir::db::ImplDataQuery
+ hir::db::ImplDataWithDiagnosticsQuery
hir::db::TraitDataQuery
+ hir::db::TraitDataWithDiagnosticsQuery
+ hir::db::TraitAliasDataQuery
hir::db::TypeAliasDataQuery
hir::db::FunctionDataQuery
hir::db::ConstDataQuery
hir::db::StaticDataQuery
+ hir::db::Macro2DataQuery
+ hir::db::MacroRulesDataQuery
+ hir::db::ProcMacroDataQuery
hir::db::BodyWithSourceMapQuery
hir::db::BodyQuery
hir::db::ExprScopesQuery
hir::db::GenericParamsQuery
+ hir::db::VariantsAttrsQuery
+ hir::db::FieldsAttrsQuery
+ hir::db::VariantsAttrsSourceMapQuery
+ hir::db::FieldsAttrsSourceMapQuery
hir::db::AttrsQuery
hir::db::CrateLangItemsQuery
hir::db::LangItemQuery
hir::db::ImportMapQuery
+ hir::db::FieldVisibilitiesQuery
+ hir::db::FunctionVisibilityQuery
+ hir::db::ConstVisibilityQuery
+ hir::db::CrateSupportsNoStdQuery
// HirDatabase
hir::db::InferQueryQuery
+ hir::db::MirBodyQuery
+ hir::db::BorrowckQuery
hir::db::TyQuery
hir::db::ValueTyQuery
hir::db::ImplSelfTyQuery
+ hir::db::ConstParamTyQuery
+ hir::db::ConstEvalQuery
+ hir::db::ConstEvalDiscriminantQuery
hir::db::ImplTraitQuery
hir::db::FieldTypesQuery
+ hir::db::LayoutOfAdtQuery
+ hir::db::TargetDataLayoutQuery
hir::db::CallableItemSignatureQuery
+ hir::db::ReturnTypeImplTraitsQuery
hir::db::GenericPredicatesForParamQuery
hir::db::GenericPredicatesQuery
+ hir::db::TraitEnvironmentQuery
hir::db::GenericDefaultsQuery
hir::db::InherentImplsInCrateQuery
- hir::db::TraitEnvironmentQuery
+ hir::db::InherentImplsInBlockQuery
+ hir::db::IncoherentInherentImplCratesQuery
hir::db::TraitImplsInCrateQuery
+ hir::db::TraitImplsInBlockQuery
hir::db::TraitImplsInDepsQuery
- hir::db::AssociatedTyDataQuery
+ hir::db::InternCallableDefQuery
+ hir::db::InternLifetimeParamIdQuery
+ hir::db::InternImplTraitIdQuery
+ hir::db::InternTypeOrConstParamIdQuery
+ hir::db::InternClosureQuery
+ hir::db::InternGeneratorQuery
hir::db::AssociatedTyDataQuery
hir::db::TraitDatumQuery
hir::db::StructDatumQuery
hir::db::ImplDatumQuery
hir::db::FnDefDatumQuery
- hir::db::ReturnTypeImplTraitsQuery
- hir::db::InternCallableDefQuery
- hir::db::InternTypeOrConstParamIdQuery
- hir::db::InternImplTraitIdQuery
- hir::db::InternClosureQuery
+ hir::db::FnDefVarianceQuery
+ hir::db::AdtVarianceQuery
hir::db::AssociatedTyValueQuery
hir::db::TraitSolveQueryQuery
- hir::db::InternTypeOrConstParamIdQuery
+ hir::db::ProgramClausesForChalkEnvQuery
// SymbolsDatabase
crate::symbol_index::ModuleSymbolsQuery
@@ -153,8 +181,14 @@ impl RootDatabase {
hir::db::InternConstQuery
hir::db::InternStaticQuery
hir::db::InternTraitQuery
+ hir::db::InternTraitAliasQuery
hir::db::InternTypeAliasQuery
hir::db::InternImplQuery
+ hir::db::InternExternBlockQuery
+ hir::db::InternBlockQuery
+ hir::db::InternMacro2Query
+ hir::db::InternProcMacroQuery
+ hir::db::InternMacroRulesQuery
];
acc.sort_by_key(|it| std::cmp::Reverse(it.1));
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index ed7f04fd8..4071c490b 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -9,7 +9,8 @@ use arrayvec::ArrayVec;
use hir::{
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
Function, GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef,
- Name, PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility,
+ Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TypeAlias, Variant,
+ Visibility,
};
use stdx::impl_from;
use syntax::{
@@ -31,6 +32,7 @@ pub enum Definition {
Const(Const),
Static(Static),
Trait(Trait),
+ TraitAlias(TraitAlias),
TypeAlias(TypeAlias),
BuiltinType(BuiltinType),
SelfType(Impl),
@@ -64,6 +66,7 @@ impl Definition {
Definition::Const(it) => it.module(db),
Definition::Static(it) => it.module(db),
Definition::Trait(it) => it.module(db),
+ Definition::TraitAlias(it) => it.module(db),
Definition::TypeAlias(it) => it.module(db),
Definition::Variant(it) => it.module(db),
Definition::SelfType(it) => it.module(db),
@@ -87,6 +90,7 @@ impl Definition {
Definition::Const(it) => it.visibility(db),
Definition::Static(it) => it.visibility(db),
Definition::Trait(it) => it.visibility(db),
+ Definition::TraitAlias(it) => it.visibility(db),
Definition::TypeAlias(it) => it.visibility(db),
Definition::Variant(it) => it.visibility(db),
Definition::BuiltinType(_) => Visibility::Public,
@@ -113,6 +117,7 @@ impl Definition {
Definition::Const(it) => it.name(db)?,
Definition::Static(it) => it.name(db),
Definition::Trait(it) => it.name(db),
+ Definition::TraitAlias(it) => it.name(db),
Definition::TypeAlias(it) => it.name(db),
Definition::BuiltinType(it) => it.name(),
Definition::SelfType(_) => return None,
@@ -300,6 +305,7 @@ impl NameClass {
ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?),
ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?),
ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?),
+ ast::Item::TraitAlias(it) => Definition::TraitAlias(sema.to_def(&it)?),
ast::Item::TypeAlias(it) => Definition::TypeAlias(sema.to_def(&it)?),
ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
@@ -321,7 +327,7 @@ impl NameClass {
let pat_parent = ident_pat.syntax().parent();
if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) {
if record_pat_field.name_ref().is_none() {
- if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
+ if let Some((field, _)) = sema.resolve_record_pat_field(&record_pat_field) {
return Some(NameClass::PatFieldShorthand {
local_def: local,
field_ref: field,
@@ -463,9 +469,12 @@ impl NameRefClass {
match_ast! {
match parent {
ast::MethodCallExpr(method_call) => {
- sema.resolve_method_call(&method_call)
- .map(Definition::Function)
- .map(NameRefClass::Definition)
+ sema.resolve_method_call_field_fallback(&method_call)
+ .map(|it| {
+ it.map_left(Definition::Function)
+ .map_right(Definition::Field)
+ .either(NameRefClass::Definition, NameRefClass::Definition)
+ })
},
ast::FieldExpr(field_expr) => {
sema.resolve_field(&field_expr)
@@ -474,6 +483,13 @@ impl NameRefClass {
},
ast::RecordPatField(record_pat_field) => {
sema.resolve_record_pat_field(&record_pat_field)
+ .map(|(field, ..)|field)
+ .map(Definition::Field)
+ .map(NameRefClass::Definition)
+ },
+ ast::RecordExprField(record_expr_field) => {
+ sema.resolve_record_field(&record_expr_field)
+ .map(|(field, ..)|field)
.map(Definition::Field)
.map(NameRefClass::Definition)
},
@@ -542,7 +558,7 @@ impl NameRefClass {
}
impl_from!(
- Field, Module, Function, Adt, Variant, Const, Static, Trait, TypeAlias, BuiltinType, Local,
+ Field, Module, Function, Adt, Variant, Const, Static, Trait, TraitAlias, TypeAlias, BuiltinType, Local,
GenericParam, Label, Macro
for Definition
);
@@ -599,6 +615,7 @@ impl From<ModuleDef> for Definition {
ModuleDef::Const(it) => Definition::Const(it),
ModuleDef::Static(it) => Definition::Static(it),
ModuleDef::Trait(it) => Definition::Trait(it),
+ ModuleDef::TraitAlias(it) => Definition::TraitAlias(it),
ModuleDef::TypeAlias(it) => Definition::TypeAlias(it),
ModuleDef::Macro(it) => Definition::Macro(it),
ModuleDef::BuiltinType(it) => Definition::BuiltinType(it),
@@ -616,6 +633,7 @@ impl From<Definition> for Option<ItemInNs> {
Definition::Const(it) => ModuleDef::Const(it),
Definition::Static(it) => ModuleDef::Static(it),
Definition::Trait(it) => ModuleDef::Trait(it),
+ Definition::TraitAlias(it) => ModuleDef::TraitAlias(it),
Definition::TypeAlias(it) => ModuleDef::TypeAlias(it),
Definition::BuiltinType(it) => ModuleDef::BuiltinType(it),
_ => return None,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index 6e56efe34..8e3b1eef1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -2,8 +2,8 @@
use std::collections::VecDeque;
-use base_db::FileId;
-use hir::{ItemInNs, ModuleDef, Name, Semantics};
+use base_db::{FileId, SourceDatabaseExt};
+use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
use syntax::{
ast::{self, make},
AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
@@ -103,3 +103,9 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
false
}
}
+
+pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool {
+ let root_file = krate.root_file(db);
+ let source_root_id = db.file_source_root(root_file);
+ !db.source_root(source_root_id).is_library
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index 994d48385..b26b0a908 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -528,7 +528,7 @@ fn trait_applicable_items(
},
)
} else {
- trait_candidate.receiver_ty.iterate_method_candidates(
+ trait_candidate.receiver_ty.iterate_method_candidates_with_traits(
db,
scope,
&trait_candidates,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 156bbb634..b1df11bf9 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -22,7 +22,7 @@ pub mod source_change;
pub mod symbol_index;
pub mod traits;
pub mod ty_filter;
-pub mod use_trivial_contructor;
+pub mod use_trivial_constructor;
pub mod imports {
pub mod import_assets;
@@ -50,7 +50,7 @@ use base_db::{
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir::{
- db::{AstDatabase, DefDatabase, HirDatabase},
+ db::{DefDatabase, ExpandDatabase, HirDatabase},
symbols::FileSymbolKind,
};
use stdx::hash::NoHashHashSet;
@@ -68,7 +68,7 @@ pub type FxIndexMap<K, V> =
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
- hir::db::AstDatabaseStorage,
+ hir::db::ExpandDatabaseStorage,
hir::db::DefDatabaseStorage,
hir::db::HirDatabaseStorage,
hir::db::InternDatabaseStorage,
@@ -95,8 +95,8 @@ impl fmt::Debug for RootDatabase {
}
}
-impl Upcast<dyn AstDatabase> for RootDatabase {
- fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+impl Upcast<dyn ExpandDatabase> for RootDatabase {
+ fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}
@@ -191,6 +191,7 @@ pub enum SymbolKind {
Struct,
ToolModule,
Trait,
+ TraitAlias,
TypeAlias,
TypeParam,
Union,
@@ -221,6 +222,7 @@ impl From<FileSymbolKind> for SymbolKind {
FileSymbolKind::Static => SymbolKind::Static,
FileSymbolKind::Struct => SymbolKind::Struct,
FileSymbolKind::Trait => SymbolKind::Trait,
+ FileSymbolKind::TraitAlias => SymbolKind::TraitAlias,
FileSymbolKind::TypeAlias => SymbolKind::TypeAlias,
FileSymbolKind::Union => SymbolKind::Union,
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 84d70b258..f710211c8 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -119,15 +119,9 @@ impl Definition {
Definition::Const(it) => name_range(it, sema),
Definition::Static(it) => name_range(it, sema),
Definition::Trait(it) => name_range(it, sema),
+ Definition::TraitAlias(it) => name_range(it, sema),
Definition::TypeAlias(it) => name_range(it, sema),
- Definition::Local(local) => {
- let src = local.source(sema.db);
- let name = match &src.value {
- Either::Left(bind_pat) => bind_pat.name()?,
- Either::Right(_) => return None,
- };
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
- }
+ Definition::Local(it) => name_range(it.primary_source(sema.db), sema),
Definition::GenericParam(generic_param) => match generic_param {
hir::GenericParam::LifetimeParam(lifetime_param) => {
let src = lifetime_param.source(sema.db)?;
@@ -301,13 +295,7 @@ fn rename_reference(
source_change.insert_source_edit(file_id, edit);
Ok(())
};
- match def {
- Definition::Local(l) => l
- .associated_locals(sema.db)
- .iter()
- .try_for_each(|&local| insert_def_edit(Definition::Local(local))),
- def => insert_def_edit(def),
- }?;
+ insert_def_edit(def)?;
Ok(source_change)
}
@@ -470,59 +458,64 @@ fn source_edit_from_def(
def: Definition,
new_name: &str,
) -> Result<(FileId, TextEdit)> {
- let FileRange { file_id, range } = def
- .range_for_rename(sema)
- .ok_or_else(|| format_err!("No identifier available to rename"))?;
-
let mut edit = TextEdit::builder();
if let Definition::Local(local) = def {
- if let Either::Left(pat) = local.source(sema.db).value {
- // special cases required for renaming fields/locals in Record patterns
- if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) {
+ let mut file_id = None;
+ for source in local.sources(sema.db) {
+ let source = source.source;
+ file_id = source.file_id.file_id();
+ if let Either::Left(pat) = source.value {
let name_range = pat.name().unwrap().syntax().text_range();
- if let Some(name_ref) = pat_field.name_ref() {
- if new_name == name_ref.text() && pat.at_token().is_none() {
- // Foo { field: ref mut local } -> Foo { ref mut field }
- // ^^^^^^ delete this
- // ^^^^^ replace this with `field`
- cov_mark::hit!(test_rename_local_put_init_shorthand_pat);
- edit.delete(
- name_ref
- .syntax()
- .text_range()
- .cover_offset(pat.syntax().text_range().start()),
- );
- edit.replace(name_range, name_ref.text().to_string());
+ // special cases required for renaming fields/locals in Record patterns
+ if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) {
+ if let Some(name_ref) = pat_field.name_ref() {
+ if new_name == name_ref.text() && pat.at_token().is_none() {
+ // Foo { field: ref mut local } -> Foo { ref mut field }
+ // ^^^^^^ delete this
+ // ^^^^^ replace this with `field`
+ cov_mark::hit!(test_rename_local_put_init_shorthand_pat);
+ edit.delete(
+ name_ref
+ .syntax()
+ .text_range()
+ .cover_offset(pat.syntax().text_range().start()),
+ );
+ edit.replace(name_range, name_ref.text().to_string());
+ } else {
+ // Foo { field: ref mut local @ local2 } -> Foo { field: ref mut new_name @ local2 }
+ // Foo { field: ref mut local } -> Foo { field: ref mut new_name }
+ // ^^^^^ replace this with `new_name`
+ edit.replace(name_range, new_name.to_string());
+ }
} else {
- // Foo { field: ref mut local @ local 2} -> Foo { field: ref mut new_name @ local2 }
- // Foo { field: ref mut local } -> Foo { field: ref mut new_name }
- // ^^^^^ replace this with `new_name`
+ // Foo { ref mut field } -> Foo { field: ref mut new_name }
+ // ^ insert `field: `
+ // ^^^^^ replace this with `new_name`
+ edit.insert(
+ pat.syntax().text_range().start(),
+ format!("{}: ", pat_field.field_name().unwrap()),
+ );
edit.replace(name_range, new_name.to_string());
}
} else {
- // Foo { ref mut field } -> Foo { field: ref mut new_name }
- // ^ insert `field: `
- // ^^^^^ replace this with `new_name`
- edit.insert(
- pat.syntax().text_range().start(),
- format!("{}: ", pat_field.field_name().unwrap()),
- );
edit.replace(name_range, new_name.to_string());
}
}
}
+ let Some(file_id) = file_id else { bail!("No file available to rename") };
+ return Ok((file_id, edit.finish()));
}
- if edit.is_empty() {
- let (range, new_name) = match def {
- Definition::GenericParam(hir::GenericParam::LifetimeParam(_))
- | Definition::Label(_) => (
- TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
- new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
- ),
- _ => (range, new_name.to_owned()),
- };
- edit.replace(range, new_name);
- }
+ let FileRange { file_id, range } = def
+ .range_for_rename(sema)
+ .ok_or_else(|| format_err!("No identifier available to rename"))?;
+ let (range, new_name) = match def {
+ Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_) => (
+ TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
+ new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
+ ),
+ _ => (range, new_name.to_owned()),
+ };
+ edit.replace(range, new_name);
Ok((file_id, edit.finish()))
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index c18a27f17..12f5e4e2a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -244,14 +244,14 @@ impl Definition {
DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
};
return match def {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
+ Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
None => SearchScope::single_file(file_id),
};
}
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
+ Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
None => SearchScope::single_file(file_id),
};
}
@@ -261,13 +261,14 @@ impl Definition {
hir::GenericDef::Function(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Adt(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Trait(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::TraitAlias(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Variant(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
};
return match def {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
+ Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
None => SearchScope::single_file(file_id),
};
}
@@ -318,10 +319,6 @@ impl Definition {
sema,
scope: None,
include_self_kw_refs: None,
- local_repr: match self {
- Definition::Local(local) => Some(local.representative(sema.db)),
- _ => None,
- },
search_self_mod: false,
}
}
@@ -336,9 +333,6 @@ pub struct FindUsages<'a> {
assoc_item_container: Option<hir::AssocItemContainer>,
/// whether to search for the `Self` type of the definition
include_self_kw_refs: Option<hir::Type>,
- /// the local representative for the local definition we are searching for
- /// (this is required for finding all local declarations in a or-pattern)
- local_repr: Option<hir::Local>,
/// whether to search for the `self` module
search_self_mod: bool,
}
@@ -643,19 +637,6 @@ impl<'a> FindUsages<'a> {
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify(self.sema, name_ref) {
- Some(NameRefClass::Definition(def @ Definition::Local(local)))
- if matches!(
- self.local_repr, Some(repr) if repr == local.representative(self.sema.db)
- ) =>
- {
- let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
- let reference = FileReference {
- range,
- name: ast::NameLike::NameRef(name_ref.clone()),
- category: ReferenceCategory::new(&def, name_ref),
- };
- sink(file_id, reference)
- }
Some(NameRefClass::Definition(def))
if self.def == def
// is our def a trait assoc item? then we want to find all assoc items from trait impls of our trait
@@ -700,14 +681,16 @@ impl<'a> FindUsages<'a> {
}
}
Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => {
- let field = Definition::Field(field);
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+
+ let field = Definition::Field(field);
+ let local = Definition::Local(local);
let access = match self.def {
Definition::Field(_) if field == self.def => {
ReferenceCategory::new(&field, name_ref)
}
- Definition::Local(_) if matches!(self.local_repr, Some(repr) if repr == local.representative(self.sema.db)) => {
- ReferenceCategory::new(&Definition::Local(local), name_ref)
+ Definition::Local(_) if local == self.def => {
+ ReferenceCategory::new(&local, name_ref)
}
_ => return false,
};
@@ -751,21 +734,6 @@ impl<'a> FindUsages<'a> {
};
sink(file_id, reference)
}
- Some(NameClass::Definition(def @ Definition::Local(local))) if def != self.def => {
- if matches!(
- self.local_repr,
- Some(repr) if local.representative(self.sema.db) == repr
- ) {
- let FileRange { file_id, range } = self.sema.original_range(name.syntax());
- let reference = FileReference {
- range,
- name: ast::NameLike::Name(name.clone()),
- category: None,
- };
- return sink(file_id, reference);
- }
- false
- }
Some(NameClass::Definition(def)) if def != self.def => {
match (&self.assoc_item_container, self.def) {
// for type aliases we always want to reference the trait def and all the trait impl counterparts
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 8e338061d..936354f29 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -83,6 +83,14 @@ impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
}
}
+impl FromIterator<(FileId, TextEdit)> for SourceChange {
+ fn from_iter<T: IntoIterator<Item = (FileId, TextEdit)>>(iter: T) -> Self {
+ let mut this = SourceChange::default();
+ this.extend(iter);
+ this
+ }
+}
+
pub struct SourceChangeBuilder {
pub edit: TextEditBuilder,
pub file_id: FileId,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs
index 39431bed3..39431bed3 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
index 10e637979..114face2d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -7,10 +7,15 @@ pub(crate) fn break_outside_of_loop(
ctx: &DiagnosticsContext<'_>,
d: &hir::BreakOutsideOfLoop,
) -> Diagnostic {
- let construct = if d.is_break { "break" } else { "continue" };
+ let message = if d.bad_value_break {
+ "can't break with a value in this position".to_owned()
+ } else {
+ let construct = if d.is_break { "break" } else { "continue" };
+ format!("{construct} outside of loop")
+ };
Diagnostic::new(
"break-outside-of-loop",
- format!("{construct} outside of loop"),
+ message,
ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
)
}
@@ -135,4 +140,18 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn value_break_in_for_loop() {
+ check_diagnostics(
+ r#"
+fn test() {
+ for _ in [()] {
+ break 3;
+ // ^^^^^^^ error: can't break with a value in this position
+ }
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
new file mode 100644
index 000000000..d2f27664d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
@@ -0,0 +1,39 @@
+use hir::HirDisplay;
+
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: expected-function
+//
+// This diagnostic is triggered if a call is made on something that is not callable.
+pub(crate) fn expected_function(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::ExpectedFunction,
+) -> Diagnostic {
+ Diagnostic::new(
+ "expected-function",
+ format!("expected function, found {}", d.found.display(ctx.sema.db)),
+ ctx.sema.diagnostics_display_range(d.call.clone().map(|it| it.into())).range,
+ )
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn smoke_test() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ let x = 3;
+ x();
+ // ^^^ error: expected function, found i32
+ ""();
+ // ^^^^ error: expected function, found &str
+ foo();
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
new file mode 100644
index 000000000..72af9ebfc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
@@ -0,0 +1,77 @@
+use hir::InFile;
+
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: incoherent-impl
+//
+// This diagnostic is triggered if the target type of an impl is from a foreign crate.
+pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic {
+ Diagnostic::new(
+ "incoherent-impl",
+ format!("cannot define inherent `impl` for foreign type"),
+ ctx.sema.diagnostics_display_range(InFile::new(d.file_id, d.impl_.clone().into())).range,
+ )
+ .severity(Severity::Error)
+}
+
+#[cfg(test)]
+mod change_case {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn primitive() {
+ check_diagnostics(
+ r#"
+ impl bool {}
+//^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
+"#,
+ );
+ }
+
+ #[test]
+ fn primitive_rustc_allow_incoherent_impl() {
+ check_diagnostics(
+ r#"
+impl bool {
+ #[rustc_allow_incoherent_impl]
+ fn falsch(self) -> Self { false }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn rustc_allow_incoherent_impl() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo
+#[rustc_has_incoherent_inherent_impls]
+pub struct S;
+//- /main.rs crate:main deps:foo
+impl foo::S {
+ #[rustc_allow_incoherent_impl]
+ fn func(self) {}
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo
+pub struct S;
+//- /main.rs crate:main deps:foo
+ impl foo::S { #[rustc_allow_incoherent_impl] fn func(self) {} }
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
+"#,
+ );
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo
+#[rustc_has_incoherent_inherent_impls]
+pub struct S;
+//- /main.rs crate:main deps:foo
+ impl foo::S { fn func(self) {} }
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 6a78c08d4..db88bf7b9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -1,4 +1,4 @@
-use hir::{db::AstDatabase, InFile};
+use hir::{db::ExpandDatabase, InFile};
use ide_db::{assists::Assist, defs::NameClass};
use syntax::AstNode;
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 43af4d4f1..5c4327ff9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -1,11 +1,11 @@
use either::Either;
use hir::{
- db::{AstDatabase, HirDatabase},
+ db::{ExpandDatabase, HirDatabase},
known, AssocItem, HirDisplay, InFile, Type,
};
use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
- source_change::SourceChange, use_trivial_contructor::use_trivial_constructor, FxHashMap,
+ source_change::SourceChange, use_trivial_constructor::use_trivial_constructor, FxHashMap,
};
use stdx::format_to;
use syntax::{
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index c24430ce6..ac4463331 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -1,5 +1,3 @@
-use hir::InFile;
-
use crate::{Diagnostic, DiagnosticsContext};
// Diagnostic: missing-match-arm
@@ -12,7 +10,7 @@ pub(crate) fn missing_match_arms(
Diagnostic::new(
"missing-match-arm",
format!("missing match arm: {}", d.uncovered_patterns),
- ctx.sema.diagnostics_display_range(InFile::new(d.file, d.match_expr.clone().into())).range,
+ ctx.sema.diagnostics_display_range(d.scrutinee_expr.clone().map(Into::into)).range,
)
}
@@ -1038,7 +1036,6 @@ fn main() {
#[test]
fn reference_patterns_in_fields() {
cov_mark::check_count!(validate_match_bailed_out, 2);
-
check_diagnostics(
r#"
fn main() {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index ea1ea5a21..eb32db250 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1,4 +1,10 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use hir::db::ExpandDatabase;
+use ide_db::{assists::Assist, source_change::SourceChange};
+use syntax::{ast, SyntaxNode};
+use syntax::{match_ast, AstNode};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, DiagnosticsContext};
// Diagnostic: missing-unsafe
//
@@ -9,11 +15,83 @@ pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsaf
"this operation is unsafe and requires an unsafe function or block",
ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option<Vec<Assist>> {
+ // The fixit will not work correctly for macro expansions, so we don't offer it in that case.
+ if d.expr.file_id.is_macro() {
+ return None;
+ }
+
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr = d.expr.value.to_node(&root);
+
+ let node_to_add_unsafe_block = pick_best_node_to_add_unsafe_block(&expr)?;
+
+ let replacement = format!("unsafe {{ {} }}", node_to_add_unsafe_block.text());
+ let edit = TextEdit::replace(node_to_add_unsafe_block.text_range(), replacement);
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ Some(vec![fix("add_unsafe", "Add unsafe block", source_change, expr.syntax().text_range())])
+}
+
+// Pick the first ancestor expression of the unsafe `expr` that is not a
+// receiver of a method call, a field access, the left-hand side of an
+// assignment, or a reference, as all of those cases would incur a forced move
+// if wrapped, which might not be wanted. That is:
+// - `unsafe_expr.foo` -> `unsafe { unsafe_expr.foo }`
+// - `unsafe_expr.foo.bar` -> `unsafe { unsafe_expr.foo.bar }`
+// - `unsafe_expr.foo()` -> `unsafe { unsafe_expr.foo() }`
+// - `unsafe_expr.foo.bar()` -> `unsafe { unsafe_expr.foo.bar() }`
+// - `unsafe_expr += 1` -> `unsafe { unsafe_expr += 1 }`
+// - `&unsafe_expr` -> `unsafe { &unsafe_expr }`
+// - `&&unsafe_expr` -> `unsafe { &&unsafe_expr }`
+fn pick_best_node_to_add_unsafe_block(unsafe_expr: &ast::Expr) -> Option<SyntaxNode> {
+ // The `unsafe_expr` might be:
+ // - `ast::CallExpr`: call an unsafe function
+ // - `ast::MethodCallExpr`: call an unsafe method
+ // - `ast::PrefixExpr`: dereference a raw pointer
+ // - `ast::PathExpr`: access a static mut variable
+ for (node, parent) in
+ unsafe_expr.syntax().ancestors().zip(unsafe_expr.syntax().ancestors().skip(1))
+ {
+ match_ast! {
+ match parent {
+ // If the `parent` is a `MethodCallExpr`, that means the `node`
+ // is the receiver of the method call, because only the receiver
+ // can be a direct child of a method call. The method name
+ // itself is not an expression but a `NameRef`, and an argument
+ // is a direct child of an `ArgList`.
+ ast::MethodCallExpr(_) => continue,
+ ast::FieldExpr(_) => continue,
+ ast::RefExpr(_) => continue,
+ ast::BinExpr(it) => {
+ // Check if the `node` is the left-hand side of an
+ // assignment, if so, we don't want to wrap it in an unsafe
+ // block, e.g. `unsafe_expr += 1`
+ let is_left_hand_side_of_assignment = {
+ if let Some(ast::BinaryOp::Assignment { .. }) = it.op_kind() {
+ it.lhs().map(|lhs| lhs.syntax().text_range().contains_range(node.text_range())).unwrap_or(false)
+ } else {
+ false
+ }
+ };
+ if !is_left_hand_side_of_assignment {
+ return Some(node);
+ }
+ },
+ _ => { return Some(node); }
+
+ }
+ }
+ }
+ None
}
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::tests::{check_diagnostics, check_fix, check_no_fix};
#[test]
fn missing_unsafe_diagnostic_with_raw_ptr() {
@@ -23,7 +101,7 @@ fn main() {
let x = &5 as *const usize;
unsafe { let y = *x; }
let z = *x;
-} //^^ error: this operation is unsafe and requires an unsafe function or block
+} //^^💡 error: this operation is unsafe and requires an unsafe function or block
"#,
)
}
@@ -48,9 +126,9 @@ unsafe fn unsafe_fn() {
fn main() {
unsafe_fn();
- //^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ //^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
HasUnsafe.unsafe_fn();
- //^^^^^^^^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ //^^^^^^^^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
unsafe_fn();
HasUnsafe.unsafe_fn();
@@ -72,7 +150,7 @@ static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = STATIC_MUT.a;
- //^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ //^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
let x = STATIC_MUT.a;
}
@@ -94,9 +172,298 @@ extern "rust-intrinsic" {
fn main() {
let _ = bitreverse(12);
let _ = floorf32(12.0);
- //^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ //^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
}
"#,
);
}
+
+ #[test]
+ fn add_unsafe_block_when_dereferencing_a_raw_pointer() {
+ check_fix(
+ r#"
+fn main() {
+ let x = &5 as *const usize;
+ let z = *x$0;
+}
+"#,
+ r#"
+fn main() {
+ let x = &5 as *const usize;
+ let z = unsafe { *x };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_unsafe_block_when_calling_unsafe_function() {
+ check_fix(
+ r#"
+unsafe fn func() {
+ let x = &5 as *const usize;
+ let z = *x;
+}
+fn main() {
+ func$0();
+}
+"#,
+ r#"
+unsafe fn func() {
+ let x = &5 as *const usize;
+ let z = *x;
+}
+fn main() {
+ unsafe { func() };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_unsafe_block_when_calling_unsafe_method() {
+ check_fix(
+ r#"
+struct S(usize);
+impl S {
+ unsafe fn func(&self) {
+ let x = &self.0 as *const usize;
+ let z = *x;
+ }
+}
+fn main() {
+ let s = S(5);
+ s.func$0();
+}
+"#,
+ r#"
+struct S(usize);
+impl S {
+ unsafe fn func(&self) {
+ let x = &self.0 as *const usize;
+ let z = *x;
+ }
+}
+fn main() {
+ let s = S(5);
+ unsafe { s.func() };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_unsafe_block_when_accessing_mutable_static() {
+ check_fix(
+ r#"
+struct Ty {
+ a: u8,
+}
+
+static mut STATIC_MUT: Ty = Ty { a: 0 };
+
+fn main() {
+ let x = STATIC_MUT$0.a;
+}
+"#,
+ r#"
+struct Ty {
+ a: u8,
+}
+
+static mut STATIC_MUT: Ty = Ty { a: 0 };
+
+fn main() {
+ let x = unsafe { STATIC_MUT.a };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_unsafe_block_when_calling_unsafe_intrinsic() {
+ check_fix(
+ r#"
+extern "rust-intrinsic" {
+ pub fn floorf32(x: f32) -> f32;
+}
+
+fn main() {
+ let _ = floorf32$0(12.0);
+}
+"#,
+ r#"
+extern "rust-intrinsic" {
+ pub fn floorf32(x: f32) -> f32;
+}
+
+fn main() {
+ let _ = unsafe { floorf32(12.0) };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unsafe_expr_as_a_receiver_of_a_method_call() {
+ check_fix(
+ r#"
+unsafe fn foo() -> String {
+ "string".to_string()
+}
+
+fn main() {
+ foo$0().len();
+}
+"#,
+ r#"
+unsafe fn foo() -> String {
+ "string".to_string()
+}
+
+fn main() {
+ unsafe { foo().len() };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unsafe_expr_as_an_argument_of_a_method_call() {
+ check_fix(
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let mut v = vec![];
+ v.push(STATIC_MUT$0);
+}
+"#,
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let mut v = vec![];
+ v.push(unsafe { STATIC_MUT });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unsafe_expr_as_left_hand_side_of_assignment() {
+ check_fix(
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ STATIC_MUT$0 = 1;
+}
+"#,
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ unsafe { STATIC_MUT = 1 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unsafe_expr_as_right_hand_side_of_assignment() {
+ check_fix(
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x;
+ x = STATIC_MUT$0;
+}
+"#,
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x;
+ x = unsafe { STATIC_MUT };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unsafe_expr_in_binary_plus() {
+ check_fix(
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x = STATIC_MUT$0 + 1;
+}
+"#,
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x = unsafe { STATIC_MUT } + 1;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn ref_to_unsafe_expr() {
+ check_fix(
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x = &STATIC_MUT$0;
+}
+"#,
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x = unsafe { &STATIC_MUT };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn ref_ref_to_unsafe_expr() {
+ check_fix(
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x = &&STATIC_MUT$0;
+}
+"#,
+ r#"
+static mut STATIC_MUT: u8 = 0;
+
+fn main() {
+ let x = unsafe { &&STATIC_MUT };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unsafe_expr_in_macro_call() {
+ check_no_fix(
+ r#"
+unsafe fn foo() -> u8 {
+ 0
+}
+
+fn main() {
+ let x = format!("foo: {}", foo$0());
+}
+ "#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
new file mode 100644
index 000000000..96470265d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -0,0 +1,649 @@
+use ide_db::source_change::SourceChange;
+use syntax::{AstNode, SyntaxKind, SyntaxNode, SyntaxToken, T};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: need-mut
+//
+// This diagnostic is triggered on mutating an immutable variable.
+pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagnostic {
+ let fixes = (|| {
+ if d.local.is_ref(ctx.sema.db) {
+ // There is no simple way to add `mut` to `ref x` and `ref mut x`
+ return None;
+ }
+ let file_id = d.span.file_id.file_id()?;
+ let mut edit_builder = TextEdit::builder();
+ let use_range = d.span.value.text_range();
+ for source in d.local.sources(ctx.sema.db) {
+ let Some(ast) = source.name() else { continue };
+ edit_builder.insert(ast.syntax().text_range().start(), "mut ".to_string());
+ }
+ let edit = edit_builder.finish();
+ Some(vec![fix(
+ "add_mut",
+ "Change it to be mutable",
+ SourceChange::from_text_edit(file_id, edit),
+ use_range,
+ )])
+ })();
+ Diagnostic::new(
+ "need-mut",
+ format!("cannot mutate immutable variable `{}`", d.local.name(ctx.sema.db)),
+ ctx.sema.diagnostics_display_range(d.span.clone()).range,
+ )
+ .with_fixes(fixes)
+}
+
+// Diagnostic: unused-mut
+//
+// This diagnostic is triggered when a mutable variable isn't actually mutated.
+pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Diagnostic {
+ let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
+ let fixes = (|| {
+ let file_id = ast.file_id.file_id()?;
+ let mut edit_builder = TextEdit::builder();
+ let use_range = ast.value.text_range();
+ for source in d.local.sources(ctx.sema.db) {
+ let ast = source.syntax();
+ let Some(mut_token) = token(ast, T![mut]) else { continue };
+ edit_builder.delete(mut_token.text_range());
+ if let Some(token) = mut_token.next_token() {
+ if token.kind() == SyntaxKind::WHITESPACE {
+ edit_builder.delete(token.text_range());
+ }
+ }
+ }
+ let edit = edit_builder.finish();
+ Some(vec![fix(
+ "remove_mut",
+ "Remove unnecessary `mut`",
+ SourceChange::from_text_edit(file_id, edit),
+ use_range,
+ )])
+ })();
+ let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
+ Diagnostic::new(
+ "unused-mut",
+ "variable does not need to be mutable",
+ ctx.sema.diagnostics_display_range(ast).range,
+ )
+ .severity(Severity::WeakWarning)
+ .experimental() // Not supporting `#[allow(unused_mut)]` leads to false positives.
+ .with_fixes(fixes)
+}
+
+pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
+ parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn unused_mut_simple() {
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mut x = 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ f(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_false_positive_simple() {
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let x = 2;
+ f(x);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mut x = 2;
+ x = 5;
+ f(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_errors_for_single_variable() {
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let x = 2;
+ x = 10;
+ //^^^^^^ 💡 error: cannot mutate immutable variable `x`
+ x = 5;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ &mut x;
+ //^^^^^^ 💡 error: cannot mutate immutable variable `x`
+ f(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unused_mut_fix() {
+ check_fix(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mu$0t x = 2;
+ f(x);
+}
+"#,
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let x = 2;
+ f(x);
+}
+"#,
+ );
+ check_fix(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let ((mu$0t x, _) | (_, mut x)) = (2, 3);
+ f(x);
+}
+"#,
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let ((x, _) | (_, x)) = (2, 3);
+ f(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn need_mut_fix() {
+ check_fix(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let x = 2;
+ x$0 = 5;
+ f(x);
+}
+"#,
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mut x = 2;
+ x = 5;
+ f(x);
+}
+"#,
+ );
+ check_fix(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let ((x, _) | (_, x)) = (2, 3);
+ x =$0 4;
+ f(x);
+}
+"#,
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let ((mut x, _) | (_, mut x)) = (2, 3);
+ x = 4;
+ f(x);
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+struct Foo(i32);
+
+impl Foo {
+ fn foo(self) {
+ self = Fo$0o(5);
+ }
+}
+"#,
+ r#"
+struct Foo(i32);
+
+impl Foo {
+ fn foo(mut self) {
+ self = Foo(5);
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn need_mut_fix_not_applicable_on_ref() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let ref x = 2;
+ x = &5;
+ //^^^^^^ error: cannot mutate immutable variable `x`
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn main() {
+ let ref mut x = 2;
+ x = &mut 5;
+ //^^^^^^^^^^ error: cannot mutate immutable variable `x`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn field_mutate() {
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mut x = (2, 7);
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ f(x.1);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mut x = (2, 7);
+ x.0 = 5;
+ f(x.1);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let x = (2, 7);
+ x.0 = 5;
+ //^^^^^^^ 💡 error: cannot mutate immutable variable `x`
+ f(x.1);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mutable_reference() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let mut x = &mut 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ *x = 5;
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn main() {
+ let x = 2;
+ &mut x;
+ //^^^^^^ 💡 error: cannot mutate immutable variable `x`
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn main() {
+ let x_own = 2;
+ let ref mut x_ref = x_own;
+ //^^^^^^^^^^^^^ 💡 error: cannot mutate immutable variable `x_own`
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+struct Foo;
+impl Foo {
+ fn method(&mut self, x: i32) {}
+}
+fn main() {
+ let x = Foo;
+ x.method(2);
+ //^ 💡 error: cannot mutate immutable variable `x`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn regression_14310() {
+ check_diagnostics(
+ r#"
+ fn clone(mut i: &!) -> ! {
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ *i
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn match_bindings() {
+ check_diagnostics(
+ r#"
+fn main() {
+ match (2, 3) {
+ (x, mut y) => {
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ x = 7;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mutation_in_dead_code() {
+ // This one is interesting. Dead code is not represented at all in the MIR, so
+ // there would be no mutability error for locals in dead code. Rustc tries not
+ // to emit `unused_mut` in this case, but since the code works without `mut` and
+ // special-casing it is not trivial, we emit it.
+ check_diagnostics(
+ r#"
+fn main() {
+ return;
+ let mut x = 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ &mut x;
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn main() {
+ loop {}
+ let mut x = 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ &mut x;
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+enum X {}
+fn g() -> X {
+ loop {}
+}
+fn f() -> ! {
+ loop {}
+}
+fn main(b: bool) {
+ if b {
+ f();
+ } else {
+ g();
+ }
+ let mut x = 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ &mut x;
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn main(b: bool) {
+ if b {
+ loop {}
+ } else {
+ return;
+ }
+ let mut x = 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ &mut x;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn initialization_is_not_mutation() {
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let mut x;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ x = 5;
+ f(x);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main(b: bool) {
+ let mut x;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ if b {
+ x = 1;
+ } else {
+ x = 3;
+ }
+ f(x);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main(b: bool) {
+ let x;
+ if b {
+ x = 1;
+ }
+ x = 3;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ f(x);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ let x;
+ loop {
+ x = 1;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ f(x);
+ }
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ loop {
+ let mut x = 1;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ f(x);
+ if let mut y = 2 {
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ f(y);
+ }
+ match 3 {
+ mut z => f(z),
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn initialization_is_not_mutation_in_loop() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let a;
+ loop {
+ let c @ (
+ mut b,
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ mut d
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ );
+ a = 1;
+ //^^^^^ 💡 error: cannot mutate immutable variable `a`
+ b = 1;
+ c = (2, 3);
+ d = 3;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_arguments_are_initialized() {
+ check_diagnostics(
+ r#"
+fn f(mut x: i32) {
+ //^^^^^ 💡 weak: variable does not need to be mutable
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+fn f(x: i32) {
+ x = 5;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn for_loop() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn f(x: [(i32, u8); 10]) {
+ for (a, mut b) in x {
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ a = 2;
+ //^^^^^ 💡 error: cannot mutate immutable variable `a`
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn overloaded_deref() {
+ // FIXME: check for false negative
+ check_diagnostics(
+ r#"
+//- minicore: deref_mut
+use core::ops::{Deref, DerefMut};
+
+struct Foo;
+impl Deref for Foo {
+ type Target = i32;
+ fn deref(&self) -> &i32 {
+ &5
+ }
+}
+impl DerefMut for Foo {
+ fn deref_mut(&mut self) -> &mut i32 {
+ &mut 5
+ }
+}
+fn f() {
+ let x = Foo;
+ let y = &*x;
+ let x = Foo;
+ let mut x = Foo;
+ let y: &mut i32 = &mut x;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn or_pattern() {
+ check_diagnostics(
+ r#"
+//- minicore: option
+fn f(_: i32) {}
+fn main() {
+ let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7));
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ f(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn or_pattern_no_terminator() {
+ check_diagnostics(
+ r#"
+enum Foo {
+ A, B, C, D
+}
+
+use Foo::*;
+
+fn f(inp: (Foo, Foo, Foo, Foo)) {
+ let ((A, B, _, x) | (B, C | D, x, _)) = inp else {
+ return;
+ };
+ x = B;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn respect_allow_unused_mut() {
+ // FIXME: respect
+ check_diagnostics(
+ r#"
+fn f(_: i32) {}
+fn main() {
+ #[allow(unused_mut)]
+ let mut x = 2;
+ //^^^^^ 💡 weak: variable does not need to be mutable
+ f(x);
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index 8da04e628..24c521ed1 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,4 +1,4 @@
-use hir::{db::AstDatabase, HasSource, HirDisplay, Semantics};
+use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use syntax::{
ast::{self, edit::IndentLevel, make},
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index 0b3121c76..67da5c7f2 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -118,4 +118,42 @@ fn main(s: module::Struct) {
"#,
);
}
+
+ #[test]
+ fn can_see_through_top_level_anonymous_const() {
+ // regression test for #14046.
+ check_diagnostics(
+ r#"
+struct S;
+mod m {
+ const _: () = {
+ impl crate::S {
+ pub(crate) fn method(self) {}
+ pub(crate) const A: usize = 42;
+ }
+ };
+ mod inner {
+ const _: () = {
+ impl crate::S {
+ pub(crate) fn method2(self) {}
+ pub(crate) const B: usize = 42;
+ pub(super) fn private(self) {}
+ pub(super) const PRIVATE: usize = 42;
+ }
+ };
+ }
+}
+fn main() {
+ S.method();
+ S::A;
+ S.method2();
+ S::B;
+ S.private();
+ //^^^^^^^^^^^ error: function `private` is private
+ S::PRIVATE;
+ //^^^^^^^^^^ error: const `PRIVATE` is private
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
index e630ae368..be83ad6aa 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -65,4 +65,24 @@ fn main(s: module::Struct) {
"#,
);
}
+
+ #[test]
+ fn block_module_madness() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let strukt = {
+ use crate as ForceParentBlockDefMap;
+ {
+ pub struct Struct {
+ field: (),
+ }
+ Struct { field: () }
+ }
+ };
+ strukt.field;
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 9826e1c70..9b1c65983 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -1,4 +1,4 @@
-use hir::{db::AstDatabase, InFile};
+use hir::{db::ExpandDatabase, InFile};
use ide_db::source_change::SourceChange;
use syntax::{
ast::{self, HasArgList},
@@ -55,7 +55,18 @@ fn fixes(
#[cfg(test)]
mod tests {
- use crate::tests::{check_diagnostics, check_fix};
+ use crate::{
+ tests::{check_diagnostics_with_config, check_fix},
+ DiagnosticsConfig,
+ };
+
+ #[track_caller]
+ pub(crate) fn check_diagnostics(ra_fixture: &str) {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("inactive-code".to_string());
+ config.disabled.insert("unresolved-method".to_string());
+ check_diagnostics_with_config(config, ra_fixture)
+ }
#[test]
fn replace_filter_map_next_with_find_map2() {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 2adae165e..4abc25a28 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,8 +1,9 @@
-use hir::{db::AstDatabase, HirDisplay, Type};
+use either::Either;
+use hir::{db::ExpandDatabase, HirDisplay, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
ast::{self, BlockExpr, ExprStmt},
- AstNode,
+ AstNode, AstPtr,
};
use text_edit::TextEdit;
@@ -10,19 +11,23 @@ use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticsContext}
// Diagnostic: type-mismatch
//
-// This diagnostic is triggered when the type of an expression does not match
+// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
- let display_range = adjusted_display_range::<ast::BlockExpr>(
- ctx,
- d.expr.clone().map(|it| it.into()),
- &|block| {
- let r_curly_range = block.stmt_list()?.r_curly_token()?.text_range();
- cov_mark::hit!(type_mismatch_on_block);
- Some(r_curly_range)
- },
- );
-
+ let display_range = match &d.expr_or_pat {
+ Either::Left(expr) => adjusted_display_range::<ast::BlockExpr>(
+ ctx,
+ expr.clone().map(|it| it.into()),
+ &|block| {
+ let r_curly_range = block.stmt_list()?.r_curly_token()?.text_range();
+ cov_mark::hit!(type_mismatch_on_block);
+ Some(r_curly_range)
+ },
+ ),
+ Either::Right(pat) => {
+ ctx.sema.diagnostics_display_range(pat.clone().map(|it| it.into())).range
+ }
+ };
let mut diag = Diagnostic::new(
"type-mismatch",
format!(
@@ -42,10 +47,15 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
- add_reference(ctx, d, &mut fixes);
- add_missing_ok_or_some(ctx, d, &mut fixes);
- remove_semicolon(ctx, d, &mut fixes);
- str_ref_to_owned(ctx, d, &mut fixes);
+ match &d.expr_or_pat {
+ Either::Left(expr_ptr) => {
+ add_reference(ctx, d, expr_ptr, &mut fixes);
+ add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
+ remove_semicolon(ctx, d, expr_ptr, &mut fixes);
+ str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
+ }
+ Either::Right(_pat_ptr) => {}
+ }
if fixes.is_empty() {
None
@@ -57,9 +67,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assi
fn add_reference(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
+ expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
- let range = ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range;
+ let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())).range;
let (_, mutability) = d.expected.as_reference()?;
let actual_with_ref = Type::reference(&d.actual, mutability);
@@ -71,7 +82,7 @@ fn add_reference(
let edit = TextEdit::insert(range.start(), ampersands);
let source_change =
- SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
acc.push(fix("add_reference_here", "Add reference here", source_change, range));
Some(())
}
@@ -79,10 +90,11 @@ fn add_reference(
fn add_missing_ok_or_some(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
+ expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
- let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
- let expr = d.expr.value.to_node(&root);
+ let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+ let expr = expr_ptr.value.to_node(&root);
let expr_range = expr.syntax().text_range();
let scope = ctx.sema.scope(expr.syntax())?;
@@ -109,7 +121,7 @@ fn add_missing_ok_or_some(
builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
builder.insert(expr.syntax().text_range().end(), ")".to_string());
let source_change =
- SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), builder.finish());
+ SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), builder.finish());
let name = format!("Wrap in {variant_name}");
acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
Some(())
@@ -118,10 +130,11 @@ fn add_missing_ok_or_some(
fn remove_semicolon(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
+ expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
- let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
- let expr = d.expr.value.to_node(&root);
+ let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+ let expr = expr_ptr.value.to_node(&root);
if !d.actual.is_unit() {
return None;
}
@@ -136,7 +149,7 @@ fn remove_semicolon(
let edit = TextEdit::delete(semicolon_range);
let source_change =
- SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
acc.push(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon_range));
Some(())
@@ -145,24 +158,26 @@ fn remove_semicolon(
fn str_ref_to_owned(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
+ expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
let expected = d.expected.display(ctx.sema.db);
let actual = d.actual.display(ctx.sema.db);
+ // FIXME do this properly
if expected.to_string() != "String" || actual.to_string() != "&str" {
return None;
}
- let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
- let expr = d.expr.value.to_node(&root);
+ let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+ let expr = expr_ptr.value.to_node(&root);
let expr_range = expr.syntax().text_range();
let to_owned = format!(".to_owned()");
let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
let source_change =
- SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range));
Some(())
@@ -595,4 +610,19 @@ fn f() -> i32 {
"#,
);
}
+
+ #[test]
+ fn type_mismatch_pat_smoke_test() {
+ check_diagnostics(
+ r#"
+fn f() {
+ let &() = &mut ();
+ match &() {
+ &9 => ()
+ //^ error: expected (), found i32
+ }
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
new file mode 100644
index 000000000..cefa74e52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -0,0 +1,148 @@
+use hir::{db::ExpandDatabase, HirDisplay, InFile};
+use ide_db::{
+ assists::{Assist, AssistId, AssistKind},
+ base_db::FileRange,
+ label::Label,
+ source_change::SourceChange,
+};
+use syntax::{ast, AstNode, AstPtr};
+use text_edit::TextEdit;
+
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-field
+//
+// This diagnostic is triggered if a field does not exist on a given type.
+pub(crate) fn unresolved_field(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedField,
+) -> Diagnostic {
+ let method_suffix = if d.method_with_same_name_exists {
+ ", but a method with a similar name exists"
+ } else {
+ ""
+ };
+ Diagnostic::new(
+ "unresolved-field",
+ format!(
+ "no field `{}` on type `{}`{method_suffix}",
+ d.name,
+ d.receiver.display(ctx.sema.db)
+ ),
+ ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+ .experimental()
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> {
+ if d.method_with_same_name_exists {
+ method_fix(ctx, &d.expr)
+ } else {
+ // FIXME: add quickfix
+
+ None
+ }
+}
+
+// FIXME: We should fill out the call here, move the cursor and trigger signature help
+fn method_fix(
+ ctx: &DiagnosticsContext<'_>,
+ expr_ptr: &InFile<AstPtr<ast::Expr>>,
+) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+ let expr = expr_ptr.value.to_node(&root);
+ let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?;
+ Some(vec![Assist {
+ id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix),
+ label: Label::new("Use parentheses to call the method".to_string()),
+ group: None,
+ target: range,
+ source_change: Some(SourceChange::from_text_edit(
+ file_id,
+ TextEdit::insert(range.end(), "()".to_owned()),
+ )),
+ trigger_signature_help: false,
+ }])
+}
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn smoke_test() {
+ check_diagnostics(
+ r#"
+fn main() {
+ ().foo;
+ // ^^^^^^ error: no field `foo` on type `()`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_clash() {
+ check_diagnostics(
+ r#"
+struct Foo;
+impl Foo {
+ fn bar(&self) {}
+}
+fn foo() {
+ Foo.bar;
+ // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_trait_() {
+ check_diagnostics(
+ r#"
+struct Foo;
+trait Bar {
+ fn bar(&self) {}
+}
+impl Bar for Foo {}
+fn foo() {
+ Foo.bar;
+ // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_trait_2() {
+ check_diagnostics(
+ r#"
+struct Foo;
+trait Bar {
+ fn bar(&self);
+}
+impl Bar for Foo {
+ fn bar(&self) {}
+}
+fn foo() {
+ Foo.bar;
+ // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_diagnostic_on_unknown() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ x.foo;
+ (&x).foo;
+ (&((x,),),).foo;
+}
+"#,
+ );
+ }
+}
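
A minimal, hedged sketch (not part of the patch) of the scenario the handler above targets: a field access that actually names a method, where `method_fix` offers to append call parentheses.

```rust
// Hypothetical user code; the diagnostic and rewrite in the comments are
// assumptions based on the handler above, not captured rust-analyzer output.
struct Foo;
impl Foo {
    fn bar(&self) {}
}

fn f() {
    // Foo.bar;  // 💡 error: no field `bar` on type `Foo`, but a method with a
    //           //    similar name exists; the quickfix inserts `()` at the end:
    Foo.bar(); // result after applying "Use parentheses to call the method"
}
```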
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
new file mode 100644
index 000000000..f3ec6efa7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -0,0 +1,131 @@
+use hir::{db::ExpandDatabase, HirDisplay};
+use ide_db::{
+ assists::{Assist, AssistId, AssistKind},
+ base_db::FileRange,
+ label::Label,
+ source_change::SourceChange,
+};
+use syntax::{ast, AstNode, TextRange};
+use text_edit::TextEdit;
+
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-method
+//
+// This diagnostic is triggered if a method does not exist on a given type.
+pub(crate) fn unresolved_method(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMethodCall,
+) -> Diagnostic {
+ let field_suffix = if d.field_with_same_name.is_some() {
+ ", but a field with a similar name exists"
+ } else {
+ ""
+ };
+ Diagnostic::new(
+ "unresolved-method",
+ format!(
+ "no method `{}` on type `{}`{field_suffix}",
+ d.name,
+ d.receiver.display(ctx.sema.db)
+ ),
+ ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+ .experimental()
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> {
+ if let Some(ty) = &d.field_with_same_name {
+ field_fix(ctx, d, ty)
+ } else {
+ // FIXME: add quickfix
+ None
+ }
+}
+
+fn field_fix(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMethodCall,
+ ty: &hir::Type,
+) -> Option<Vec<Assist>> {
+ if !ty.impls_fnonce(ctx.sema.db) {
+ return None;
+ }
+ let expr_ptr = &d.expr;
+ let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+ let expr = expr_ptr.value.to_node(&root);
+ let (file_id, range) = match expr {
+ ast::Expr::MethodCallExpr(mcall) => {
+ let FileRange { range, file_id } =
+ ctx.sema.original_range_opt(mcall.receiver()?.syntax())?;
+ let FileRange { range: range2, file_id: file_id2 } =
+ ctx.sema.original_range_opt(mcall.name_ref()?.syntax())?;
+ if file_id != file_id2 {
+ return None;
+ }
+ (file_id, TextRange::new(range.start(), range2.end()))
+ }
+ _ => return None,
+ };
+ Some(vec![Assist {
+ id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix),
+ label: Label::new("Use parentheses to call the value of the field".to_string()),
+ group: None,
+ target: range,
+ source_change: Some(SourceChange::from_iter([
+ (file_id, TextEdit::insert(range.start(), "(".to_owned())),
+ (file_id, TextEdit::insert(range.end(), ")".to_owned())),
+ ])),
+ trigger_signature_help: false,
+ }])
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn smoke_test() {
+ check_diagnostics(
+ r#"
+fn main() {
+ ().foo();
+ // ^^^^^^^^ error: no method `foo` on type `()`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn field() {
+ check_diagnostics(
+ r#"
+struct Foo { bar: i32 }
+fn foo() {
+ Foo { bar: i32 }.bar();
+ // ^^^^^^^^^^^^^^^^^^^^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn callable_field() {
+ check_fix(
+ r#"
+//- minicore: fn
+struct Foo { bar: fn() }
+fn foo() {
+ Foo { bar: foo }.b$0ar();
+}
+"#,
+ r#"
+struct Foo { bar: fn() }
+fn foo() {
+ (Foo { bar: foo }.bar)();
+}
+"#,
+ );
+ }
+}
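
A hedged sketch (not part of the patch) of the `field_fix` behavior above: when a callable field is invoked as if it were a method, the fix wraps the field access in parentheses so the stored function is called instead.

```rust
// Hypothetical example; the before/after in the comments mirrors the
// `callable_field` test above rather than captured tool output.
struct Handler {
    on_click: fn(),
}

fn run(h: Handler) {
    // h.on_click();  // error: no method `on_click` on type `Handler`,
    //                // but a field with a similar name exists
    (h.on_click)(); // result after "Use parentheses to call the value of the field"
}
```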
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 91395f1d8..94614f11c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -1,4 +1,4 @@
-use hir::db::AstDatabase;
+use hir::db::ExpandDatabase;
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools;
use syntax::AstNode;
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index 64ba08ac8..71f136b8c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -27,7 +27,9 @@
mod handlers {
pub(crate) mod break_outside_of_loop;
+ pub(crate) mod expected_function;
pub(crate) mod inactive_code;
+ pub(crate) mod incoherent_impl;
pub(crate) mod incorrect_case;
pub(crate) mod invalid_derive_target;
pub(crate) mod macro_error;
@@ -36,6 +38,7 @@ mod handlers {
pub(crate) mod missing_fields;
pub(crate) mod missing_match_arms;
pub(crate) mod missing_unsafe;
+ pub(crate) mod mutability_errors;
pub(crate) mod no_such_field;
pub(crate) mod private_assoc_item;
pub(crate) mod private_field;
@@ -43,6 +46,8 @@ mod handlers {
pub(crate) mod type_mismatch;
pub(crate) mod unimplemented_builtin_macro;
pub(crate) mod unresolved_extern_crate;
+ pub(crate) mod unresolved_field;
+ pub(crate) mod unresolved_method;
pub(crate) mod unresolved_import;
pub(crate) mod unresolved_macro_call;
pub(crate) mod unresolved_module;
@@ -248,7 +253,9 @@ pub fn diagnostics(
#[rustfmt::skip]
let d = match diag {
AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
+ AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d),
AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
+ AnyDiagnostic::IncoherentImpl(d) => handlers::incoherent_impl::incoherent_impl(&ctx, &d),
AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d),
AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d),
@@ -267,7 +274,10 @@ pub fn diagnostics(
AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
AnyDiagnostic::InvalidDeriveTarget(d) => handlers::invalid_derive_target::invalid_derive_target(&ctx, &d),
-
+ AnyDiagnostic::UnresolvedField(d) => handlers::unresolved_field::unresolved_field(&ctx, &d),
+ AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d),
+ AnyDiagnostic::NeedMut(d) => handlers::mutability_errors::need_mut(&ctx, &d),
+ AnyDiagnostic::UnusedMut(d) => handlers::mutability_errors::unused_mut(&ctx, &d),
AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) {
Some(it) => it,
None => continue,
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index 57b5ab6ab..a8e883690 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -561,7 +561,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
.sema
.resolve_method_call_as_callable(code)
.and_then(|callable| callable.receiver_param(self.sema.db))
- .map(|self_param| self_param.kind())
+ .map(|(self_param, _)| self_param.kind())
.unwrap_or(ast::SelfParamKind::Owned);
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index b4a7f2b91..fae25f310 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -107,7 +107,18 @@ pub(crate) fn remove_links(markdown: &str) -> String {
out
}
-/// Retrieve a link to documentation for the given symbol.
+// Feature: Open Docs
+//
+// Retrieve a link to documentation for the given symbol.
+//
+// The simplest way to use this feature is via the context menu. Right-click on
+// the selected item. The context menu opens. Select **Open Docs**.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: Open Docs**
+// |===
pub(crate) fn external_docs(
db: &RootDatabase,
position: &FilePosition,
@@ -181,6 +192,7 @@ pub(crate) fn resolve_doc_path_for_def(
Definition::Const(it) => it.resolve_doc_path(db, link, ns),
Definition::Static(it) => it.resolve_doc_path(db, link, ns),
Definition::Trait(it) => it.resolve_doc_path(db, link, ns),
+ Definition::TraitAlias(it) => it.resolve_doc_path(db, link, ns),
Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
Definition::Field(it) => it.resolve_doc_path(db, link, ns),
@@ -493,6 +505,7 @@ fn filename_and_frag_for_def(
None => String::from("index.html"),
},
Definition::Trait(t) => format!("trait.{}.html", t.name(db)),
+ Definition::TraitAlias(t) => format!("traitalias.{}.html", t.name(db)),
Definition::TypeAlias(t) => format!("type.{}.html", t.name(db)),
Definition::BuiltinType(t) => format!("primitive.{}.html", t.name()),
Definition::Function(f) => format!("fn.{}.html", f.name(db)),
diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
index b23763dce..a32ac3549 100644
--- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
@@ -149,6 +149,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
ast::Enum(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Enum)),
ast::Variant(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Variant)),
ast::Trait(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Trait)),
+ ast::TraitAlias(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::TraitAlias)),
ast::Module(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Module)),
ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::TypeAlias)),
ast::RecordField(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Field)),
@@ -262,6 +263,8 @@ enum E { X, Y(i32) }
type T = ();
static S: i32 = 92;
const C: i32 = 92;
+trait Tr {}
+trait Alias = Tr;
impl E {}
@@ -459,9 +462,31 @@ fn g() {}
},
StructureNode {
parent: None,
+ label: "Tr",
+ navigation_range: 239..241,
+ node_range: 233..244,
+ kind: SymbolKind(
+ Trait,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "Alias",
+ navigation_range: 251..256,
+ node_range: 245..262,
+ kind: SymbolKind(
+ TraitAlias,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
label: "impl E",
- navigation_range: 239..240,
- node_range: 234..243,
+ navigation_range: 269..270,
+ node_range: 264..273,
kind: SymbolKind(
Impl,
),
@@ -471,8 +496,8 @@ fn g() {}
StructureNode {
parent: None,
label: "impl fmt::Debug for E",
- navigation_range: 265..266,
- node_range: 245..269,
+ navigation_range: 295..296,
+ node_range: 275..299,
kind: SymbolKind(
Impl,
),
@@ -482,8 +507,8 @@ fn g() {}
StructureNode {
parent: None,
label: "mc",
- navigation_range: 284..286,
- node_range: 271..303,
+ navigation_range: 314..316,
+ node_range: 301..333,
kind: SymbolKind(
Macro,
),
@@ -493,8 +518,8 @@ fn g() {}
StructureNode {
parent: None,
label: "mcexp",
- navigation_range: 334..339,
- node_range: 305..356,
+ navigation_range: 364..369,
+ node_range: 335..386,
kind: SymbolKind(
Macro,
),
@@ -504,8 +529,8 @@ fn g() {}
StructureNode {
parent: None,
label: "mcexp",
- navigation_range: 387..392,
- node_range: 358..409,
+ navigation_range: 417..422,
+ node_range: 388..439,
kind: SymbolKind(
Macro,
),
@@ -515,8 +540,8 @@ fn g() {}
StructureNode {
parent: None,
label: "obsolete",
- navigation_range: 428..436,
- node_range: 411..441,
+ navigation_range: 458..466,
+ node_range: 441..471,
kind: SymbolKind(
Function,
),
@@ -528,8 +553,8 @@ fn g() {}
StructureNode {
parent: None,
label: "very_obsolete",
- navigation_range: 481..494,
- node_range: 443..499,
+ navigation_range: 511..524,
+ node_range: 473..529,
kind: SymbolKind(
Function,
),
@@ -541,8 +566,8 @@ fn g() {}
StructureNode {
parent: None,
label: "Some region name",
- navigation_range: 501..528,
- node_range: 501..528,
+ navigation_range: 531..558,
+ node_range: 531..558,
kind: Region,
detail: None,
deprecated: false,
@@ -550,8 +575,8 @@ fn g() {}
StructureNode {
parent: None,
label: "m",
- navigation_range: 568..569,
- node_range: 543..606,
+ navigation_range: 598..599,
+ node_range: 573..636,
kind: SymbolKind(
Module,
),
@@ -560,22 +585,22 @@ fn g() {}
},
StructureNode {
parent: Some(
- 20,
+ 22,
),
label: "dontpanic",
- navigation_range: 543..563,
- node_range: 543..563,
+ navigation_range: 573..593,
+ node_range: 573..593,
kind: Region,
detail: None,
deprecated: false,
},
StructureNode {
parent: Some(
- 20,
+ 22,
),
label: "f",
- navigation_range: 575..576,
- node_range: 572..581,
+ navigation_range: 605..606,
+ node_range: 602..611,
kind: SymbolKind(
Function,
),
@@ -586,11 +611,11 @@ fn g() {}
},
StructureNode {
parent: Some(
- 20,
+ 22,
),
label: "g",
- navigation_range: 598..599,
- node_range: 582..604,
+ navigation_range: 628..629,
+ node_range: 612..634,
kind: SymbolKind(
Function,
),
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 93019527f..cf0819a25 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -766,6 +766,13 @@ trait Foo$0 { }
check(
r#"
+trait Foo$0 = ;
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
mod bar$0 { }
//^^^
"#,
@@ -1066,6 +1073,23 @@ fn f() -> impl Sub<Item$0 = u8> {}
}
#[test]
+ fn goto_def_for_module_declaration_in_path_if_types_and_values_same_name() {
+ check(
+ r#"
+mod bar {
+ pub struct Foo {}
+ //^^^
+ pub fn Foo() {}
+}
+
+fn baz() {
+ let _foo_enum: bar::Foo$0 = bar::Foo {};
+}
+ "#,
+ )
+ }
+
+ #[test]
fn unknown_assoc_ty() {
check_unresolved(
r#"
@@ -1406,7 +1430,6 @@ include!("included.rs$0");
);
}
- #[cfg(test)]
mod goto_impl_of_trait_fn {
use super::check;
#[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index 190ab80ba..a1a119629 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -297,6 +297,7 @@ impl Foo<str> {}
//- /lib.rs crate:main deps:core
fn foo(_: bool$0) {{}}
//- /libcore.rs crate:core
+#![rustc_coherence_is_core]
#[lang = "bool"]
impl bool {}
//^^^^
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
index 55cdb3200..6d2d0bd63 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -55,7 +55,7 @@ pub(crate) fn goto_type_definition(
ty
} else {
let record_field = ast::RecordPatField::for_field_name_ref(&it)?;
- sema.resolve_record_pat_field(&record_field)?.ty(db)
+ sema.resolve_record_pat_field(&record_field)?.1
}
},
_ => return None,
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index c889eb930..d88ffd25c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -14,7 +14,7 @@ use syntax::{
SyntaxNode, SyntaxToken, TextRange, T,
};
-use crate::{references, NavigationTarget, TryToNav};
+use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
#[derive(PartialEq, Eq, Hash)]
pub struct HighlightedRange {
@@ -98,32 +98,39 @@ fn highlight_references(
category: access,
});
let mut res = FxHashSet::default();
-
- let mut def_to_hl_range = |def| {
- let hl_range = match def {
- Definition::Module(module) => {
- Some(NavigationTarget::from_module_to_decl(sema.db, module))
- }
- def => def.try_to_nav(sema.db),
- }
- .filter(|decl| decl.file_id == file_id)
- .and_then(|decl| decl.focus_range)
- .map(|range| {
- let category =
- references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write);
- HighlightedRange { range, category }
- });
- if let Some(hl_range) = hl_range {
- res.insert(hl_range);
- }
- };
for &def in &defs {
match def {
- Definition::Local(local) => local
- .associated_locals(sema.db)
- .iter()
- .for_each(|&local| def_to_hl_range(Definition::Local(local))),
- def => def_to_hl_range(def),
+ Definition::Local(local) => {
+ let category = local.is_mut(sema.db).then_some(ReferenceCategory::Write);
+ local
+ .sources(sema.db)
+ .into_iter()
+ .map(|x| x.to_nav(sema.db))
+ .filter(|decl| decl.file_id == file_id)
+ .filter_map(|decl| decl.focus_range)
+ .map(|range| HighlightedRange { range, category })
+ .for_each(|x| {
+ res.insert(x);
+ });
+ }
+ def => {
+ let hl_range = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .filter(|decl| decl.file_id == file_id)
+ .and_then(|decl| decl.focus_range)
+ .map(|range| {
+ let category = references::decl_mutability(&def, node, range)
+ .then_some(ReferenceCategory::Write);
+ HighlightedRange { range, category }
+ });
+ if let Some(hl_range) = hl_range {
+ res.insert(hl_range);
+ }
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 5f2c61f5b..64b2221bd 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -30,6 +30,7 @@ pub struct HoverConfig {
pub documentation: bool,
pub keywords: bool,
pub format: HoverDocFormat,
+ pub interpret_tests: bool,
}
#[derive(Clone, Debug, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 22611cfb8..da725ce50 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -3,7 +3,8 @@ use std::fmt::Display;
use either::Either;
use hir::{
- Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo,
+ db::DefDatabase, Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay,
+ MirEvalError, Semantics, TypeInfo,
};
use ide_db::{
base_db::SourceDatabase,
@@ -402,7 +403,20 @@ pub(super) fn definition(
))
}),
Definition::Module(it) => label_and_docs(db, it),
- Definition::Function(it) => label_and_docs(db, it),
+ Definition::Function(it) => label_and_layout_info_and_docs(db, it, |_| {
+ if !config.interpret_tests {
+ return None;
+ }
+ match it.eval(db) {
+ Ok(()) => Some("pass".into()),
+ Err(MirEvalError::Panic) => Some("fail".into()),
+ Err(MirEvalError::MirLowerError(f, e)) => {
+ let name = &db.function_data(f).name;
+                    Some(format!("error: failed to lower {name} due to {e:?}"))
+ }
+ Err(e) => Some(format!("error: {e:?}")),
+ }
+ }),
Definition::Adt(it) => label_and_layout_info_and_docs(db, it, |&it| {
let layout = it.layout(db).ok()?;
Some(format!("size = {}, align = {}", layout.size.bytes(), layout.align.abi.bytes()))
@@ -410,7 +424,7 @@ pub(super) fn definition(
Definition::Variant(it) => label_value_and_docs(db, it, |&it| {
if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) {
- Ok(x) => Some(format!("{x}")),
+ Ok(x) => Some(if x >= 10 { format!("{x} ({x:#X})") } else { format!("{x}") }),
Err(_) => it.value(db).map(|x| format!("{x:?}")),
}
} else {
@@ -418,9 +432,9 @@ pub(super) fn definition(
}
}),
Definition::Const(it) => label_value_and_docs(db, it, |it| {
- let body = it.eval(db);
+ let body = it.render_eval(db);
match body {
- Ok(x) => Some(format!("{x}")),
+ Ok(x) => Some(x),
Err(_) => {
let source = it.source(db)?;
let mut body = source.value.body()?.syntax().clone();
@@ -440,6 +454,7 @@ pub(super) fn definition(
Some(body.to_string())
}),
Definition::Trait(it) => label_and_docs(db, it),
+ Definition::TraitAlias(it) => label_and_docs(db, it),
Definition::TypeAlias(it) => label_and_docs(db, it),
Definition::BuiltinType(it) => {
return famous_defs
@@ -620,8 +635,8 @@ fn local(db: &RootDatabase, it: hir::Local) -> Option<Markup> {
let ty = it.ty(db);
let ty = ty.display_truncated(db, None);
let is_mut = if it.is_mut(db) { "mut " } else { "" };
- let desc = match it.source(db).value {
- Either::Left(ident) => {
+ let desc = match it.primary_source(db).into_ident_pat() {
+ Some(ident) => {
let name = it.name(db);
let let_kw = if ident
.syntax()
@@ -634,7 +649,7 @@ fn local(db: &RootDatabase, it: hir::Local) -> Option<Markup> {
};
format!("{let_kw}{is_mut}{name}: {ty}")
}
- Either::Right(_) => format!("{is_mut}self: {ty}"),
+ None => format!("{is_mut}self: {ty}"),
};
markup(None, desc, None)
}
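
A small, self-contained sketch of the discriminant-rendering rule introduced in the hunk above (and mirrored later in `inlay_hints/discriminant.rs`): values of 10 and above get a hexadecimal suffix, smaller ones stay decimal. The helper name is hypothetical, not from the patch.

```rust
// `render_discriminant` is an assumed name; the formatting mirrors the
// `Ok(x) => if x >= 10 { ... }` arm in the hunk above.
fn render_discriminant(x: i128) -> String {
    if x >= 10 {
        format!("{x} ({x:#X})") // e.g. 16 -> "16 (0x10)"
    } else {
        format!("{x}") // e.g. 3 -> "3"
    }
}

fn main() {
    assert_eq!(render_discriminant(3), "3");
    assert_eq!(render_discriminant(16), "16 (0x10)");
}
```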
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index bd7ce2f1d..57bf0f9ad 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -4,16 +4,19 @@ use syntax::TextRange;
use crate::{fixture, HoverConfig, HoverDocFormat};
+const HOVER_BASE_CONFIG: HoverConfig = HoverConfig {
+ links_in_hover: false,
+ documentation: true,
+ format: HoverDocFormat::Markdown,
+ keywords: true,
+ interpret_tests: false,
+};
+
fn check_hover_no_result(ra_fixture: &str) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig {
- links_in_hover: true,
- documentation: true,
- keywords: true,
- format: HoverDocFormat::Markdown,
- },
+ &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap();
@@ -25,12 +28,7 @@ fn check(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig {
- links_in_hover: true,
- documentation: true,
- keywords: true,
- format: HoverDocFormat::Markdown,
- },
+ &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
@@ -47,12 +45,7 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig {
- links_in_hover: false,
- documentation: true,
- keywords: true,
- format: HoverDocFormat::Markdown,
- },
+ &HOVER_BASE_CONFIG,
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
@@ -71,9 +64,8 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
.hover(
&HoverConfig {
links_in_hover: true,
- documentation: true,
- keywords: true,
format: HoverDocFormat::PlainText,
+ ..HOVER_BASE_CONFIG
},
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
@@ -91,12 +83,7 @@ fn check_actions(ra_fixture: &str, expect: Expect) {
let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig {
- links_in_hover: true,
- documentation: true,
- keywords: true,
- format: HoverDocFormat::Markdown,
- },
+ &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG },
FileRange { file_id, range: position.range_or_empty() },
)
.unwrap()
@@ -106,34 +93,13 @@ fn check_actions(ra_fixture: &str, expect: Expect) {
fn check_hover_range(ra_fixture: &str, expect: Expect) {
let (analysis, range) = fixture::range(ra_fixture);
- let hover = analysis
- .hover(
- &HoverConfig {
- links_in_hover: false,
- documentation: true,
- keywords: true,
- format: HoverDocFormat::Markdown,
- },
- range,
- )
- .unwrap()
- .unwrap();
+ let hover = analysis.hover(&HOVER_BASE_CONFIG, range).unwrap().unwrap();
expect.assert_eq(hover.info.markup.as_str())
}
fn check_hover_range_no_results(ra_fixture: &str) {
let (analysis, range) = fixture::range(ra_fixture);
- let hover = analysis
- .hover(
- &HoverConfig {
- links_in_hover: false,
- documentation: true,
- keywords: true,
- format: HoverDocFormat::Markdown,
- },
- range,
- )
- .unwrap();
+ let hover = analysis.hover(&HOVER_BASE_CONFIG, range).unwrap();
assert!(hover.is_none());
}
@@ -490,7 +456,6 @@ fn hover_field_offset() {
// Hovering over the field when instantiating
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }
"#,
expect![[r#"
@@ -512,7 +477,6 @@ fn hover_shows_struct_field_info() {
// Hovering over the field when instantiating
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
struct Foo { field_a: u32 }
fn main() {
@@ -535,7 +499,6 @@ fn main() {
// Hovering over the field in the definition
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
struct Foo { field_a$0: u32 }
fn main() {
@@ -611,6 +574,27 @@ const foo$0: u32 = {
}
#[test]
+fn hover_eval_complex_constants() {
+ check(
+ r#"
+ struct X { f1: (), f2: i32 }
+ const foo$0: (i8, X, i64) = (1, X { f2: 5 - 1, f1: () }, 1 - 2);
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: (i8, X, i64) = (1, X { f1: (), f2: 4 }, -1)
+ ```
+ "#]],
+ );
+}
+
+#[test]
fn hover_default_generic_types() {
check(
r#"
@@ -1467,8 +1451,6 @@ fn my() {}
fn test_hover_struct_doc_comment() {
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
-
/// This is an example
/// multiline doc
///
@@ -1527,7 +1509,7 @@ fn foo() { let bar = Ba$0r; }
```
```rust
- struct Bar
+ struct Bar // size = 0, align = 1
```
---
@@ -1556,7 +1538,7 @@ fn foo() { let bar = Ba$0r; }
```
```rust
- struct Bar
+ struct Bar // size = 0, align = 1
```
---
@@ -1584,7 +1566,7 @@ pub struct B$0ar
```
```rust
- pub struct Bar
+ pub struct Bar // size = 0, align = 1
```
---
@@ -1611,7 +1593,7 @@ pub struct B$0ar
```
```rust
- pub struct Bar
+ pub struct Bar // size = 0, align = 1
```
---
@@ -2913,8 +2895,6 @@ fn main() { let foo_test = name_with_dashes::wrapper::Thing::new$0(); }
fn hover_field_pat_shorthand_ref_match_ergonomics() {
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
-
struct S {
f: i32,
}
@@ -3506,8 +3486,8 @@ impl<const LEN: usize> Foo<LEN$0> {}
}
#[test]
-fn hover_const_eval_variant() {
- // show hex for <10
+fn hover_const_eval_discriminant() {
+ // Don't show hex for <10
check(
r#"
#[repr(u8)]
@@ -3532,7 +3512,7 @@ enum E {
This is a doc
"#]],
);
- // show hex for >10
+ // Show hex for >10
check(
r#"
#[repr(u8)]
@@ -3656,7 +3636,7 @@ trait T {
}
impl T for i32 {
const AA: A = A {
- i: 2
+ i: 2 + 3
}
}
fn main() {
@@ -3671,9 +3651,7 @@ fn main() {
```
```rust
- const AA: A = A {
- i: 2
- }
+ const AA: A = A { i: 5 }
```
"#]],
);
@@ -3792,7 +3770,6 @@ const FOO$0: usize = 1 << 3;
This is a doc
"#]],
);
- // show hex for >10
check(
r#"
/// This is a doc
@@ -3850,7 +3827,7 @@ const FOO$0: i32 = 2 - 3;
```
```rust
- const FOO: i32 = -1
+ const FOO: i32 = -1 (0xFFFFFFFF)
```
---
@@ -4011,6 +3988,28 @@ const FOO$0: f32 = 1f32;
This is a doc
"#]],
);
+ // Don't show `<ref-not-supported>` in const hover
+ check(
+ r#"
+/// This is a doc
+const FOO$0: &i32 = &2;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: &i32 = &2
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
//show f64 typecasted from float
check(
r#"
@@ -4354,8 +4353,6 @@ fn main() {
fn hover_intra_doc_links() {
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
-
pub mod theitem {
/// This is the item. Cool!
pub struct TheItem;
@@ -4496,7 +4493,7 @@ trait A where
fn string_shadowed_with_inner_items() {
check(
r#"
-//- /main.rs crate:main deps:alloc target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
+//- /main.rs crate:main deps:alloc
/// Custom `String` type.
struct String;
@@ -5191,7 +5188,7 @@ foo_macro!(
```
```rust
- pub struct Foo
+ pub struct Foo // size = 0, align = 1
```
---
@@ -5205,8 +5202,6 @@ foo_macro!(
fn hover_intra_in_attr() {
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
-
#[doc = "Doc comment for [`Foo$0`]"]
pub struct Foo(i32);
"#,
@@ -5295,7 +5290,7 @@ pub struct Type;
```
```rust
- const KONST: dep::Type = $crate::Type
+ const KONST: dep::Type = Type
```
"#]],
);
@@ -5327,8 +5322,6 @@ enum Enum {
fn hover_record_variant_field() {
check(
r#"
-//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
-
enum Enum {
RecordV { field$0: u32 }
}
@@ -5647,3 +5640,204 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn assoc_fn_in_block_local_impl() {
+ check(
+ r#"
+struct S;
+mod m {
+ const _: () = {
+ impl crate::S {
+ pub(crate) fn foo() {}
+ }
+ };
+}
+fn test() {
+ S::foo$0();
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ pub(crate) fn foo()
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+struct S;
+mod m {
+ const _: () = {
+ const _: () = {
+ impl crate::S {
+ pub(crate) fn foo() {}
+ }
+ };
+ };
+}
+fn test() {
+ S::foo$0();
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ pub(crate) fn foo()
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+struct S;
+mod m {
+ mod inner {
+ const _: () = {
+ impl crate::S {
+ pub(super) fn foo() {}
+ }
+ };
+ }
+
+ fn test() {
+ crate::S::foo$0();
+ }
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ pub(super) fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn assoc_const_in_block_local_impl() {
+ check(
+ r#"
+struct S;
+mod m {
+ const _: () = {
+ impl crate::S {
+ pub(crate) const A: () = ();
+ }
+ };
+}
+fn test() {
+ S::A$0;
+}
+"#,
+ expect![[r#"
+ *A*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub(crate) const A: () = ()
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+struct S;
+mod m {
+ const _: () = {
+ const _: () = {
+ impl crate::S {
+ pub(crate) const A: () = ();
+ }
+ };
+ };
+}
+fn test() {
+ S::A$0;
+}
+"#,
+ expect![[r#"
+ *A*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub(crate) const A: () = ()
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+struct S;
+mod m {
+ mod inner {
+ const _: () = {
+ impl crate::S {
+ pub(super) const A: () = ();
+ }
+ };
+ }
+
+ fn test() {
+ crate::S::A$0;
+ }
+}
+"#,
+ expect![[r#"
+ *A*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub(super) const A: () = ()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn field_as_method_call_fallback() {
+ check(
+ r#"
+struct S { f: u32 }
+fn test() {
+ S { f: 0 }.f$0();
+}
+"#,
+ expect![[r#"
+ *f*
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ f: u32 // size = 4, align = 4, offset = 0
+ ```
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
index 188eb7f97..46505b304 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
@@ -31,19 +31,31 @@ pub(super) fn hints(
return None;
}
- // These inherit from the inner expression which would result in duplicate hints
- if let ast::Expr::ParenExpr(_)
- | ast::Expr::IfExpr(_)
- | ast::Expr::BlockExpr(_)
- | ast::Expr::MatchExpr(_) = expr
- {
+    // ParenExprs resolve to their contained expression's HIR, so they would duplicate these hints
+ if let ast::Expr::ParenExpr(_) = expr {
return None;
}
+ if let ast::Expr::BlockExpr(b) = expr {
+ if !b.is_standalone() {
+ return None;
+ }
+ }
let descended = sema.descend_node_into_attributes(expr.clone()).pop();
let desc_expr = descended.as_ref().unwrap_or(expr);
let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?;
+ if let ast::Expr::BlockExpr(_) | ast::Expr::IfExpr(_) | ast::Expr::MatchExpr(_) = desc_expr {
+ if let [Adjustment { kind: Adjust::Deref(_), source, .. }, Adjustment { kind: Adjust::Borrow(_), source: _, target }] =
+ &*adjustments
+ {
+ // Don't show unnecessary reborrows for these, they will just repeat the inner ones again
+ if source == target {
+ return None;
+ }
+ }
+ }
+
let (postfix, needs_outer_parens, needs_inner_parens) =
mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode);
@@ -67,6 +79,7 @@ pub(super) fn hints(
for Adjustment { source, target, kind } in iter {
if source == target {
+ cov_mark::hit!(same_type_adjustment);
continue;
}
@@ -251,7 +264,7 @@ mod tests {
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"
-//- minicore: coerce_unsized, fn
+//- minicore: coerce_unsized, fn, eq
fn main() {
let _: u32 = loop {};
//^^^^^^^<never-to-any>
@@ -332,7 +345,7 @@ fn main() {
loop {}
//^^^^^^^<never-to-any>
};
- let _: &mut [u32] = match () { () => &mut [] }
+ let _: &mut [u32] = match () { () => &mut [] };
//^^^^^^^<unsize>
//^^^^^^^&mut $
//^^^^^^^*
@@ -341,6 +354,12 @@ fn main() {
//^^^^^^^^^^<unsize>
//^^^^^^^^^^&mut $
//^^^^^^^^^^*
+ () == ();
+ // ^^&
+ // ^^&
+ (()) == {()};
+ // ^^&
+ // ^^^^&
}
#[derive(Copy, Clone)]
@@ -363,7 +382,7 @@ impl Struct {
..DISABLED_CONFIG
},
r#"
-//- minicore: coerce_unsized, fn
+//- minicore: coerce_unsized, fn, eq
fn main() {
Struct.consume();
@@ -419,7 +438,7 @@ fn main() {
loop {}
//^^^^^^^.<never-to-any>
};
- let _: &mut [u32] = match () { () => &mut [] }
+ let _: &mut [u32] = match () { () => &mut [] };
//^^^^^^^(
//^^^^^^^)
//^^^^^^^.*
@@ -432,6 +451,12 @@ fn main() {
//^^^^^^^^^^.*
//^^^^^^^^^^.&mut
//^^^^^^^^^^.<unsize>
+ () == ();
+ // ^^.&
+ // ^^.&
+ (()) == {()};
+ // ^^.&
+ // ^^^^.&
}
#[derive(Copy, Clone)]
@@ -499,6 +524,7 @@ fn main() {
#[test]
fn never_to_never_is_never_shown() {
+ cov_mark::check!(same_type_adjustment);
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"
@@ -606,14 +632,13 @@ fn a() {
}
#[test]
- fn bug() {
+ fn let_stmt_explicit_ty() {
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"
fn main() {
- // These should be identical, but they are not...
-
let () = return;
+ //^^^^^^<never-to-any>
let (): () = return;
//^^^^^^<never-to-any>
}
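
A hedged, self-contained analogue of the check added above, with simplified stand-in types rather than the real `hir` ones: a `Deref` immediately followed by a `Borrow` whose source and target types agree is a no-op reborrow, so no adjustment hint is emitted for block/if/match expressions in that case.

```rust
// Simplified stand-ins for hir::Adjustment / Adjust, used only to illustrate
// the two-element slice pattern from the hunk above.
#[derive(PartialEq)]
enum Ty {
    Unit,
    RefUnit,
}
enum Kind {
    Deref,
    Borrow,
}
struct Adjustment {
    kind: Kind,
    source: Ty,
    target: Ty,
}

// True when the pair of adjustments just dereferences and re-borrows back to
// the same type, i.e. a `*&`-style reborrow that changes nothing.
fn is_noop_reborrow(adjustments: &[Adjustment]) -> bool {
    matches!(
        adjustments,
        [Adjustment { kind: Kind::Deref, source, .. },
         Adjustment { kind: Kind::Borrow, target, .. }]
            if source == target
    )
}

fn main() {
    let noop = [
        Adjustment { kind: Kind::Deref, source: Ty::RefUnit, target: Ty::Unit },
        Adjustment { kind: Kind::Borrow, source: Ty::Unit, target: Ty::RefUnit },
    ];
    assert!(is_noop_reborrow(&noop));
}
```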
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
index 4af7f9bdb..6a5092733 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
@@ -176,15 +176,12 @@ fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir
mod tests {
// This module also contains tests for super::closure_ret
- use expect_test::expect;
use syntax::{TextRange, TextSize};
use test_utils::extract_annotations;
use crate::{fixture, inlay_hints::InlayHintsConfig};
- use crate::inlay_hints::tests::{
- check, check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
- };
+ use crate::inlay_hints::tests::{check, check_with_config, DISABLED_CONFIG, TEST_CONFIG};
use crate::ClosureReturnTypeHints;
#[track_caller]
@@ -278,8 +275,7 @@ fn main() {
#[test]
fn iterator_hint_regression_issue_12674() {
// Ensure we don't crash while solving the projection type of iterators.
- check_expect(
- InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ let (analysis, file_id) = fixture::file(
r#"
//- minicore: iterators
struct S<T>(T);
@@ -302,107 +298,18 @@ impl<'a, T> Iterator for SliceIter<'a, T> {
fn main(a: SliceIter<'_, Container>) {
a
- .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v))))
- .map(|e| e);
+ .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v))))
+ .map(|e| e);
}
- "#,
- expect![[r#"
- [
- InlayHint {
- range: 484..554,
- kind: Chaining,
- label: [
- "impl ",
- InlayHintLabelPart {
- text: "Iterator",
- linked_location: Some(
- FileRange {
- file_id: FileId(
- 1,
- ),
- range: 2611..2619,
- },
- ),
- tooltip: "",
- },
- "<",
- InlayHintLabelPart {
- text: "Item",
- linked_location: Some(
- FileRange {
- file_id: FileId(
- 1,
- ),
- range: 2643..2647,
- },
- ),
- tooltip: "",
- },
- " = impl ",
- InlayHintLabelPart {
- text: "Iterator",
- linked_location: Some(
- FileRange {
- file_id: FileId(
- 1,
- ),
- range: 2611..2619,
- },
- ),
- tooltip: "",
- },
- "<",
- InlayHintLabelPart {
- text: "Item",
- linked_location: Some(
- FileRange {
- file_id: FileId(
- 1,
- ),
- range: 2643..2647,
- },
- ),
- tooltip: "",
- },
- " = &&str>>",
- ],
- },
- InlayHint {
- range: 484..485,
- kind: Chaining,
- label: [
- "",
- InlayHintLabelPart {
- text: "SliceIter",
- linked_location: Some(
- FileRange {
- file_id: FileId(
- 0,
- ),
- range: 289..298,
- },
- ),
- tooltip: "",
- },
- "<",
- InlayHintLabelPart {
- text: "Container",
- linked_location: Some(
- FileRange {
- file_id: FileId(
- 0,
- ),
- range: 238..247,
- },
- ),
- tooltip: "",
- },
- ">",
- ],
- },
- ]
- "#]],
+"#,
);
+ analysis
+ .inlay_hints(
+ &InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ file_id,
+ None,
+ )
+ .unwrap();
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 0c54f084c..1e1771259 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -435,7 +435,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 2611..2619,
+ range: 3415..3423,
},
),
tooltip: "",
@@ -448,7 +448,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 2643..2647,
+ range: 3447..3451,
},
),
tooltip: "",
@@ -468,7 +468,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 2611..2619,
+ range: 3415..3423,
},
),
tooltip: "",
@@ -481,7 +481,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 2643..2647,
+ range: 3447..3451,
},
),
tooltip: "",
@@ -501,7 +501,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 2611..2619,
+ range: 3415..3423,
},
),
tooltip: "",
@@ -514,7 +514,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 2643..2647,
+ range: 3447..3451,
},
),
tooltip: "",
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
index 5dd51ad11..67eaa553a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
@@ -59,8 +59,14 @@ fn variant_hints(
},
kind: InlayKind::Discriminant,
label: InlayHintLabel::simple(
- match &d {
- Ok(v) => format!("{}", v),
+ match d {
+ Ok(x) => {
+ if x >= 10 {
+ format!("{x} ({x:#X})")
+ } else {
+ format!("{x}")
+ }
+ }
Err(_) => "?".into(),
},
Some(InlayTooltip::String(match &d {
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index f2b535bdc..078b66dd3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -55,6 +55,7 @@ mod syntax_tree;
mod typing;
mod view_crate_graph;
mod view_hir;
+mod view_mir;
mod view_item_tree;
mod shuffle_crate_graph;
@@ -308,6 +309,10 @@ impl Analysis {
self.with_db(|db| view_hir::view_hir(db, position))
}
+ pub fn view_mir(&self, position: FilePosition) -> Cancellable<String> {
+ self.with_db(|db| view_mir::view_mir(db, position))
+ }
+
pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/markup.rs b/src/tools/rust-analyzer/crates/ide/src/markup.rs
index de9fef61a..411eb695f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/markup.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/markup.rs
@@ -32,7 +32,7 @@ impl Markup {
pub fn as_str(&self) -> &str {
self.text.as_str()
}
- pub fn fenced_block(contents: &impl fmt::Display) -> Markup {
+ pub fn fenced_block(contents: impl fmt::Display) -> Markup {
format!("```rust\n{contents}\n```").into()
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index af5e96d23..349e79ecf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -208,6 +208,9 @@ pub(crate) fn def_to_moniker(
Definition::Trait(trait_) => {
MonikerDescriptor { name: trait_.name(db), desc: MonikerDescriptorKind::Type }
}
+ Definition::TraitAlias(ta) => {
+ MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::Type }
+ }
Definition::TypeAlias(ta) => {
MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::TypeParameter }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
index ffc4bdd7d..b955ea99f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
@@ -73,6 +73,7 @@ fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -
SyntaxKind::MACRO_CALL,
SyntaxKind::TYPE_ALIAS,
SyntaxKind::TRAIT,
+ SyntaxKind::TRAIT_ALIAS,
SyntaxKind::IMPL,
SyntaxKind::MACRO_DEF,
SyntaxKind::STRUCT,
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 3aa799d43..6aae82f98 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -5,7 +5,7 @@ use std::fmt;
use either::Either;
use hir::{
symbols::FileSymbol, AssocItem, Documentation, FieldSource, HasAttrs, HasSource, HirDisplay,
- InFile, ModuleSource, Semantics,
+ InFile, LocalSource, ModuleSource, Semantics,
};
use ide_db::{
base_db::{FileId, FileRange},
@@ -192,6 +192,7 @@ impl TryToNav for Definition {
Definition::Const(it) => it.try_to_nav(db),
Definition::Static(it) => it.try_to_nav(db),
Definition::Trait(it) => it.try_to_nav(db),
+ Definition::TraitAlias(it) => it.try_to_nav(db),
Definition::TypeAlias(it) => it.try_to_nav(db),
Definition::BuiltinType(_) => None,
Definition::ToolModule(_) => None,
@@ -212,6 +213,7 @@ impl TryToNav for hir::ModuleDef {
hir::ModuleDef::Const(it) => it.try_to_nav(db),
hir::ModuleDef::Static(it) => it.try_to_nav(db),
hir::ModuleDef::Trait(it) => it.try_to_nav(db),
+ hir::ModuleDef::TraitAlias(it) => it.try_to_nav(db),
hir::ModuleDef::TypeAlias(it) => it.try_to_nav(db),
hir::ModuleDef::Macro(it) => it.try_to_nav(db),
hir::ModuleDef::BuiltinType(_) => None,
@@ -249,6 +251,9 @@ impl ToNavFromAst for hir::TypeAlias {
impl ToNavFromAst for hir::Trait {
const KIND: SymbolKind = SymbolKind::Trait;
}
+impl ToNavFromAst for hir::TraitAlias {
+ const KIND: SymbolKind = SymbolKind::TraitAlias;
+}
impl<D> TryToNav for D
where
@@ -382,9 +387,11 @@ impl TryToNav for hir::GenericParam {
}
}
-impl ToNav for hir::Local {
+impl ToNav for LocalSource {
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
- let InFile { file_id, value } = self.source(db);
+ let InFile { file_id, value } = &self.source;
+ let file_id = *file_id;
+ let local = self.local;
let (node, name) = match &value {
Either::Left(bind_pat) => (bind_pat.syntax(), bind_pat.name()),
Either::Right(it) => (it.syntax(), it.name()),
@@ -393,10 +400,10 @@ impl ToNav for hir::Local {
let FileRange { file_id, range: full_range } =
InFile::new(file_id, node).original_file_range(db);
- let name = self.name(db).to_smol_str();
- let kind = if self.is_self(db) {
+ let name = local.name(db).to_smol_str();
+ let kind = if local.is_self(db) {
SymbolKind::SelfParam
- } else if self.is_param(db) {
+ } else if local.is_param(db) {
SymbolKind::ValueParam
} else {
SymbolKind::Local
@@ -414,6 +421,12 @@ impl ToNav for hir::Local {
}
}
+impl ToNav for hir::Local {
+ fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ self.primary_source(db).to_nav(db)
+ }
+}
+
impl ToNav for hir::Label {
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
let InFile { file_id, value } = self.source(db);
@@ -544,6 +557,7 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) ->
ast::Struct(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
ast::Enum(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
ast::Trait(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::TraitAlias(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
ast::Module(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
ast::TypeAlias(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
ast::Const(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index cabbc2872..3684c1033 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -1356,6 +1356,38 @@ impl Foo {
}
#[test]
+ fn test_trait_alias() {
+ check(
+ r#"
+trait Foo {}
+trait Bar$0 = Foo where Self: ;
+fn foo<T: Bar>(_: impl Bar, _: &dyn Bar) {}
+"#,
+ expect![[r#"
+ Bar TraitAlias FileId(0) 13..42 19..22
+
+ FileId(0) 53..56
+ FileId(0) 66..69
+ FileId(0) 79..82
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait_alias_self() {
+ check(
+ r#"
+trait Foo = where Self$0: ;
+"#,
+ expect![[r#"
+ Self TypeParam FileId(0) 6..9 6..9
+
+ FileId(0) 18..22
+ "#]],
+ );
+ }
+
+ #[test]
fn test_attr_differs_from_fn_with_same_name() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index c0237e1ed..e10c46381 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -353,6 +353,11 @@ mod tests {
fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
let ra_fixture_after = &trim_indent(ra_fixture_after);
let (analysis, position) = fixture::position(ra_fixture_before);
+ if !ra_fixture_after.starts_with("error: ") {
+ if let Err(err) = analysis.prepare_rename(position).unwrap() {
+                panic!("Prepare rename to '{new_name}' failed: {err}")
+ }
+ }
let rename_result = analysis
.rename(position, new_name)
.unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}"));
@@ -1710,6 +1715,23 @@ fn foo(bar: i32) -> Foo {
}
#[test]
+ fn test_rename_local_simple() {
+ check(
+ "i",
+ r#"
+fn foo(bar$0: i32) -> i32 {
+ bar
+}
+"#,
+ r#"
+fn foo(i: i32) -> i32 {
+ i
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_rename_local_put_init_shorthand() {
cov_mark::check!(test_rename_local_put_init_shorthand);
check(
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 5b35262aa..8a8a9151c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -2,7 +2,7 @@ use std::fmt;
use ast::HasName;
use cfg::CfgExpr;
-use hir::{AsAssocItem, HasAttrs, HasSource, HirDisplay, Semantics};
+use hir::{AsAssocItem, HasAttrs, HasSource, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
@@ -195,14 +195,13 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
//
// Provides a sneak peek of all tests where the current item is used.
//
-// The simplest way to use this feature is via the context menu:
-// - Right-click on the selected item. The context menu opens.
-// - Select **Peek related tests**
+// The simplest way to use this feature is via the context menu. Right-click on
+// the selected item. The context menu opens. Select **Peek Related Tests**.
//
// |===
// | Editor | Action Name
//
-// | VS Code | **rust-analyzer: Peek related tests**
+// | VS Code | **rust-analyzer: Peek Related Tests**
// |===
pub(crate) fn related_tests(
db: &RootDatabase,
@@ -371,9 +370,9 @@ pub(crate) fn runnable_impl(
let nav = def.try_to_nav(sema.db)?;
let ty = def.self_ty(sema.db);
let adt_name = ty.as_adt()?.name(sema.db);
- let mut ty_args = ty.type_arguments().peekable();
+ let mut ty_args = ty.generic_parameters(sema.db).peekable();
let params = if ty_args.peek().is_some() {
- format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty.display(sema.db))))
+ format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)))
} else {
String::new()
};
@@ -417,6 +416,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
Definition::Const(it) => it.attrs(db),
Definition::Static(it) => it.attrs(db),
Definition::Trait(it) => it.attrs(db),
+ Definition::TraitAlias(it) => it.attrs(db),
Definition::TypeAlias(it) => it.attrs(db),
Definition::Macro(it) => it.attrs(db),
Definition::SelfType(it) => it.attrs(db),
@@ -437,14 +437,10 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
let ty = imp.self_ty(db);
if let Some(adt) = ty.as_adt() {
let name = adt.name(db);
- let mut ty_args = ty.type_arguments().peekable();
+ let mut ty_args = ty.generic_parameters(db).peekable();
format_to!(path, "{}", name);
if ty_args.peek().is_some() {
- format_to!(
- path,
- "<{}>",
- ty_args.format_with(",", |ty, cb| cb(&ty.display(db)))
- );
+ format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)));
}
format_to!(path, "::{}", def_name);
path.retain(|c| c != ' ');
@@ -1001,6 +997,221 @@ impl Data {
}
#[test]
+ fn test_runnables_doc_test_in_impl_with_lifetime() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data<'a>;
+impl Data<'a> {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 52..106,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data<'a>::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test_in_impl_with_lifetime_and_types() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data<'a, T, U>;
+impl<T, U> Data<'a, T, U> {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 70..124,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data<'a,T,U>::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test_in_impl_with_const() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data<const N: usize>;
+impl<const N: usize> Data<N> {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 79..133,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data<N>::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test_in_impl_with_lifetime_types_and_const() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data<'a, T, const N: usize>;
+impl<'a, T, const N: usize> Data<'a, T, N> {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 100..154,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data<'a,T,N>::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+ #[test]
fn test_runnables_module() {
check(
r#"
@@ -2063,6 +2274,59 @@ mod tests {
}
#[test]
+ fn test_runnables_doc_test_in_impl_with_lifetime_type_const_value() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data<'a, A, const B: usize, C, const D: u32>;
+impl<A, C, const D: u32> Data<'a, A, 12, C, D> {
+ /// ```
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 121..156,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data<'a,A,12,C,D>::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
fn doc_test_type_params() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index f70ca55a5..4b2c139f6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -16,7 +16,7 @@ use stdx::format_to;
use syntax::{
algo,
ast::{self, HasArgList},
- match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
+ match_ast, AstNode, Direction, SyntaxElementChildren, SyntaxToken, TextRange, TextSize,
};
use crate::RootDatabase;
@@ -102,6 +102,20 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio
}
return signature_help_for_record_lit(&sema, record, token);
},
+ ast::RecordPat(record) => {
+ let cursor_outside = record.record_pat_field_list().and_then(|list| list.r_curly_token()).as_ref() == Some(&token);
+ if cursor_outside {
+ continue;
+ }
+ return signature_help_for_record_pat(&sema, record, token);
+ },
+ ast::TupleStructPat(tuple_pat) => {
+ let cursor_outside = tuple_pat.r_paren_token().as_ref() == Some(&token);
+ if cursor_outside {
+ continue;
+ }
+ return signature_help_for_tuple_struct_pat(&sema, tuple_pat, token);
+ },
_ => (),
}
}
@@ -172,7 +186,7 @@ fn signature_help_for_call(
res.signature.push('(');
{
- if let Some(self_param) = callable.receiver_param(db) {
+ if let Some((self_param, _)) = callable.receiver_param(db) {
format_to!(res.signature, "{}", self_param)
}
let mut buf = String::new();
@@ -252,6 +266,10 @@ fn signature_help_for_generics(
res.doc = it.docs(db).map(|it| it.into());
format_to!(res.signature, "trait {}", it.name(db));
}
+ hir::GenericDef::TraitAlias(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "trait {}", it.name(db));
+ }
hir::GenericDef::TypeAlias(it) => {
res.doc = it.docs(db).map(|it| it.into());
format_to!(res.signature, "type {}", it.name(db));
@@ -342,11 +360,112 @@ fn signature_help_for_record_lit(
record: ast::RecordExpr,
token: SyntaxToken,
) -> Option<SignatureHelp> {
- let active_parameter = record
- .record_expr_field_list()?
+ signature_help_for_record_(
+ sema,
+ record.record_expr_field_list()?.syntax().children_with_tokens(),
+ &record.path()?,
+ record
+ .record_expr_field_list()?
+ .fields()
+ .filter_map(|field| sema.resolve_record_field(&field))
+ .map(|(field, _, ty)| (field, ty)),
+ token,
+ )
+}
+
+fn signature_help_for_record_pat(
+ sema: &Semantics<'_, RootDatabase>,
+ record: ast::RecordPat,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ signature_help_for_record_(
+ sema,
+ record.record_pat_field_list()?.syntax().children_with_tokens(),
+ &record.path()?,
+ record
+ .record_pat_field_list()?
+ .fields()
+ .filter_map(|field| sema.resolve_record_pat_field(&field)),
+ token,
+ )
+}
+
+fn signature_help_for_tuple_struct_pat(
+ sema: &Semantics<'_, RootDatabase>,
+ pat: ast::TupleStructPat,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ let rest_pat = pat.fields().find(|it| matches!(it, ast::Pat::RestPat(_)));
+ let is_left_of_rest_pat =
+ rest_pat.map_or(true, |it| token.text_range().start() < it.syntax().text_range().end());
+
+ let mut res = SignatureHelp {
+ doc: None,
+ signature: String::new(),
+ parameters: vec![],
+ active_parameter: None,
+ };
+
+ let db = sema.db;
+ let path_res = sema.resolve_path(&pat.path()?)?;
+ let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
+ let en = variant.parent_enum(db);
+
+ res.doc = en.docs(db).map(|it| it.into());
+ format_to!(res.signature, "enum {}::{} (", en.name(db), variant.name(db));
+ variant.fields(db)
+ } else {
+ let adt = match path_res {
+ PathResolution::SelfType(imp) => imp.self_ty(db).as_adt()?,
+ PathResolution::Def(ModuleDef::Adt(adt)) => adt,
+ _ => return None,
+ };
+
+ match adt {
+ hir::Adt::Struct(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "struct {} (", it.name(db));
+ it.fields(db)
+ }
+ _ => return None,
+ }
+ };
+ let commas = pat
.syntax()
.children_with_tokens()
.filter_map(syntax::NodeOrToken::into_token)
+ .filter(|t| t.kind() == syntax::T![,]);
+ res.active_parameter = Some(if is_left_of_rest_pat {
+ commas.take_while(|t| t.text_range().start() <= token.text_range().start()).count()
+ } else {
+ let n_commas = commas
+ .collect::<Vec<_>>()
+ .into_iter()
+ .rev()
+ .take_while(|t| t.text_range().start() > token.text_range().start())
+ .count();
+ fields.len().saturating_sub(1).saturating_sub(n_commas)
+ });
+
+ let mut buf = String::new();
+ for ty in fields.into_iter().map(|it| it.ty(db)) {
+ format_to!(buf, "{}", ty.display_truncated(db, Some(20)));
+ res.push_call_param(&buf);
+ buf.clear();
+ }
+ res.signature.push_str(")");
+ Some(res)
+}
+
+fn signature_help_for_record_(
+ sema: &Semantics<'_, RootDatabase>,
+ field_list_children: SyntaxElementChildren,
+ path: &ast::Path,
+ fields2: impl Iterator<Item = (hir::Field, hir::Type)>,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ let active_parameter = field_list_children
+ .filter_map(syntax::NodeOrToken::into_token)
.filter(|t| t.kind() == syntax::T![,])
.take_while(|t| t.text_range().start() <= token.text_range().start())
.count();
@@ -361,7 +480,7 @@ fn signature_help_for_record_lit(
let fields;
let db = sema.db;
- let path_res = sema.resolve_path(&record.path()?)?;
+ let path_res = sema.resolve_path(path)?;
if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
fields = variant.fields(db);
let en = variant.parent_enum(db);
@@ -393,8 +512,7 @@ fn signature_help_for_record_lit(
let mut fields =
fields.into_iter().map(|field| (field.name(db), Some(field))).collect::<FxIndexMap<_, _>>();
let mut buf = String::new();
- for field in record.record_expr_field_list()?.fields() {
- let Some((field, _, ty)) = sema.resolve_record_field(&field) else { continue };
+ for (field, ty) in fields2 {
let name = field.name(db);
format_to!(buf, "{name}: {}", ty.display_truncated(db, Some(20)));
res.push_record_field(&buf);
@@ -435,6 +553,7 @@ mod tests {
(database, FilePosition { file_id, offset })
}
+ #[track_caller]
fn check(ra_fixture: &str, expect: Expect) {
let fixture = format!(
r#"
@@ -887,6 +1006,119 @@ fn main() {
}
#[test]
+ fn tuple_struct_pat() {
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32);
+fn main() {
+ let S(0, $0);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32)
+ --- ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn tuple_struct_pat_rest() {
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32, f32, u16);
+fn main() {
+ let S(0, .., $0);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32, f32, u16)
+ --- --- --- ^^^
+ "#]],
+ );
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32, f32, u16, u8);
+fn main() {
+ let S(0, .., $0, 0);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32, f32, u16, u8)
+ --- --- --- ^^^ --
+ "#]],
+ );
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32, f32, u16);
+fn main() {
+ let S($0, .., 1);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32, f32, u16)
+ ^^^ --- --- ---
+ "#]],
+ );
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32, f32, u16, u8);
+fn main() {
+ let S(1, .., 1, $0, 2);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32, f32, u16, u8)
+ --- --- --- ^^^ --
+ "#]],
+ );
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32, f32, u16);
+fn main() {
+ let S(1, $0.., 1);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32, f32, u16)
+ --- ^^^ --- ---
+ "#]],
+ );
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32, f32, u16);
+fn main() {
+ let S(1, ..$0, 1);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S (u32, i32, f32, u16)
+ --- ^^^ --- ---
+ "#]],
+ );
+ }
+
+ #[test]
fn generic_struct() {
check(
r#"
@@ -1547,6 +1779,29 @@ impl S {
}
#[test]
+ fn record_pat() {
+ check(
+ r#"
+struct Strukt<T, U = ()> {
+ t: T,
+ u: U,
+ unit: (),
+}
+fn f() {
+ let Strukt {
+ u: 0,
+ $0
+ }
+}
+"#,
+ expect![[r#"
+ struct Strukt { u: i32, t: T, unit: () }
+ ------ ^^^^ --------
+ "#]],
+ );
+ }
+
+ #[test]
fn test_enum_in_nested_method_in_lambda() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index 3f7f6885f..c97691b14 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -139,6 +139,7 @@ impl StaticIndex<'_> {
documentation: true,
keywords: true,
format: crate::HoverDocFormat::Markdown,
+ interpret_tests: false,
};
let tokens = tokens.filter(|token| {
matches!(
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 892e6a9bb..2111baad7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -217,7 +217,9 @@ fn highlight_name_ref(
// to anything when used.
// We can fix this for derive attributes since derive helpers are recorded, but not for
// general attributes.
- None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR) => {
+ None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR)
+ && !sema.hir_file_for(name_ref.syntax()).is_derive_attr_pseudo_expansion(sema.db) =>
+ {
return HlTag::Symbol(SymbolKind::Attribute).into();
}
None => return HlTag::UnresolvedReference.into(),
@@ -410,6 +412,7 @@ fn highlight_def(
h
}
Definition::Trait(_) => Highlight::new(HlTag::Symbol(SymbolKind::Trait)),
+ Definition::TraitAlias(_) => Highlight::new(HlTag::Symbol(SymbolKind::TraitAlias)),
Definition::TypeAlias(type_) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias));
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 9139528c7..3c4cfc781 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -274,6 +274,7 @@ fn module_def_to_hl_tag(def: Definition) -> HlTag {
Definition::Const(_) => SymbolKind::Const,
Definition::Static(_) => SymbolKind::Static,
Definition::Trait(_) => SymbolKind::Trait,
+ Definition::TraitAlias(_) => SymbolKind::TraitAlias,
Definition::TypeAlias(_) => SymbolKind::TypeAlias,
Definition::BuiltinType(_) => return HlTag::BuiltinType,
Definition::Macro(_) => SymbolKind::Macro,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
index 3949f1189..a81c4ee0c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -150,6 +150,7 @@ impl HlTag {
SymbolKind::Struct => "struct",
SymbolKind::ToolModule => "tool_module",
SymbolKind::Trait => "trait",
+ SymbolKind::TraitAlias => "trait_alias",
SymbolKind::TypeAlias => "type_alias",
SymbolKind::TypeParam => "type_param",
SymbolKind::Union => "union",
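Trait aliases now carry their own symbol kind end to end. A tiny sketch of the mapping this adds, with a simplified stand-in enum instead of the real `SymbolKind`/`HlTag` types:

    enum SymbolKind { Trait, TraitAlias, TypeAlias }

    // Mirrors the new "trait_alias" arm added to the highlight tag table above.
    fn css_class(kind: SymbolKind) -> &'static str {
        match kind {
            SymbolKind::Trait => "trait",
            SymbolKind::TraitAlias => "trait_alias",
            SymbolKind::TypeAlias => "type_alias",
        }
    }

    fn main() {
        assert_eq!(css_class(SymbolKind::TraitAlias), "trait_alias");
    }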
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
index 1a4398814..567ab8ccc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -53,6 +53,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment">// This is another normal comment</span>
<span class="comment documentation">/// This is another doc comment</span>
<span class="comment">// This is another normal comment</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="comma attribute">,</span> <span class="unresolved_reference attribute">Unresolved</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="comment">// The reason for these being here is to test AttrIds</span>
<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="semicolon">;</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index fc9b5d3ba..ac9bd8e39 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -34,7 +34,7 @@ fn attributes() {
// This is another normal comment
/// This is another doc comment
// This is another normal comment
-#[derive(Copy)]
+#[derive(Copy, Unresolved)]
// The reason for these being here is to test AttrIds
struct Foo;
"#,
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
new file mode 100644
index 000000000..a36aba58b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
@@ -0,0 +1,29 @@
+use hir::{DefWithBody, Semantics};
+use ide_db::base_db::FilePosition;
+use ide_db::RootDatabase;
+use syntax::{algo::find_node_at_offset, ast, AstNode};
+
+// Feature: View Mir
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: View Mir**
+// |===
+pub(crate) fn view_mir(db: &RootDatabase, position: FilePosition) -> String {
+ body_mir(db, position).unwrap_or_else(|| "Not inside a function body".to_string())
+}
+
+fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+
+ let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let def: DefWithBody = match item {
+ ast::Item::Fn(it) => sema.to_def(&it)?.into(),
+ ast::Item::Const(it) => sema.to_def(&it)?.into(),
+ ast::Item::Static(it) => sema.to_def(&it)?.into(),
+ _ => return None,
+ };
+ Some(def.debug_mir(db))
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index 15ec9e167..15435a26c 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -198,6 +198,10 @@ impl BlockLike {
fn is_block(self) -> bool {
self == BlockLike::Block
}
+
+ fn is_blocklike(kind: SyntaxKind) -> bool {
+ matches!(kind, BLOCK_EXPR | IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR)
+ }
}
const VISIBILITY_FIRST: TokenSet = TokenSet::new(&[T![pub], T![crate]]);
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
index 4ecaa6e6a..c13a19437 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
@@ -43,7 +43,7 @@ pub(super) fn meta(p: &mut Parser<'_>) {
match p.current() {
T![=] => {
p.bump(T![=]);
- if !expressions::expr(p) {
+ if expressions::expr(p).is_none() {
p.error("expected expression");
}
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
index 4b080102a..a884d8b6e 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -16,9 +16,9 @@ pub(super) enum Semicolon {
const EXPR_FIRST: TokenSet = LHS_FIRST;
-pub(super) fn expr(p: &mut Parser<'_>) -> bool {
+pub(super) fn expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
let r = Restrictions { forbid_structs: false, prefer_stmt: false };
- expr_bp(p, None, r, 1).is_some()
+ expr_bp(p, None, r, 1).map(|(m, _)| m)
}
pub(super) fn expr_stmt(
@@ -120,16 +120,27 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
// fn f() { let x: i32; }
types::ascription(p);
}
+
+ let mut expr_after_eq: Option<CompletedMarker> = None;
if p.eat(T![=]) {
// test let_stmt_init
// fn f() { let x = 92; }
- expressions::expr(p);
+ expr_after_eq = expressions::expr(p);
}
if p.at(T![else]) {
+ // test_err let_else_right_curly_brace
+ // fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
+ if let Some(expr) = expr_after_eq {
+ if BlockLike::is_blocklike(expr.kind()) {
+ p.error(
+ "right curly brace `}` before `else` in a `let...else` statement not allowed",
+ )
+ }
+ }
+
// test let_else
// fn f() { let Some(x) = opt else { return }; }
-
let m = p.start();
p.bump(T![else]);
block_expr(p);
@@ -578,7 +589,14 @@ fn arg_list(p: &mut Parser<'_>) {
// fn main() {
// foo(#[attr] 92)
// }
- delimited(p, T!['('], T![')'], T![,], EXPR_FIRST.union(ATTRIBUTE_FIRST), expr);
+ delimited(
+ p,
+ T!['('],
+ T![')'],
+ T![,],
+ EXPR_FIRST.union(ATTRIBUTE_FIRST),
+ |p: &mut Parser<'_>| expr(p).is_some(),
+ );
m.complete(p, ARG_LIST);
}
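With `expr` now returning the `CompletedMarker` of the parsed initializer, the `let ... else` hunk above can reject block-like initializers before the `else`. A simplified sketch of that check, using a hypothetical `ExprKind` enum in place of the parser's `SyntaxKind`:

    enum ExprKind { Block, If, While, For, Loop, Match, Other }

    // Counterpart of `BlockLike::is_blocklike` in grammar.rs above.
    fn is_blocklike(kind: &ExprKind) -> bool {
        matches!(
            kind,
            ExprKind::Block | ExprKind::If | ExprKind::While | ExprKind::For | ExprKind::Loop | ExprKind::Match
        )
    }

    // Diagnostic produced right before the `else` of `let ... else` when the
    // initializer ended in `}` (e.g. `let _ = loop {} else { return };`).
    fn let_else_error(init_kind: Option<&ExprKind>) -> Option<&'static str> {
        match init_kind {
            Some(kind) if is_blocklike(kind) => {
                Some("right curly brace `}` before `else` in a `let...else` statement not allowed")
            }
            _ => None,
        }
    }

    fn main() {
        assert!(let_else_error(Some(&ExprKind::Loop)).is_some());
        assert!(let_else_error(Some(&ExprKind::Other)).is_none());
        assert!(let_else_error(None).is_none());
    }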
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
index efc260383..d051dd268 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
@@ -163,10 +163,8 @@ pub(super) fn atom_expr(
return None;
}
};
- let blocklike = match done.kind() {
- IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR => BlockLike::Block,
- _ => BlockLike::NotBlock,
- };
+ let blocklike =
+ if BlockLike::is_blocklike(done.kind()) { BlockLike::Block } else { BlockLike::NotBlock };
Some((done, blocklike))
}
@@ -188,7 +186,7 @@ fn tuple_expr(p: &mut Parser<'_>) -> CompletedMarker {
// test tuple_attrs
// const A: (i64, i64) = (1, #[cfg(test)] 2);
- if !expr(p) {
+ if expr(p).is_none() {
break;
}
@@ -221,7 +219,7 @@ fn array_expr(p: &mut Parser<'_>) -> CompletedMarker {
// test array_attrs
// const A: &[i64] = &[1, #[cfg(test)] 2];
- if !expr(p) {
+ if expr(p).is_none() {
break;
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
index c982e2d56..a8a1ccb15 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
@@ -20,7 +20,7 @@ pub(super) fn trait_(p: &mut Parser<'_>, m: Marker) {
// trait Z<U> = where Self: T<U>;
generic_params::opt_where_clause(p);
p.expect(T![;]);
- m.complete(p, TRAIT);
+ m.complete(p, TRAIT_ALIAS);
return;
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
index 1064ae997..26490aa97 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
@@ -77,6 +77,9 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
// type X = <A as B>::Output;
// fn foo() { <usize as Default>::default(); }
if first && p.eat(T![<]) {
+ // test_err angled_path_without_qual
+ // type X = <()>;
+ // type Y = <A as B>;
types::type_(p);
if p.eat(T![as]) {
if is_use_path_start(p) {
@@ -86,6 +89,9 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
}
}
p.expect(T![>]);
+ if !p.at(T![::]) {
+ p.error("expected `::`");
+ }
} else {
let empty = if first {
p.eat(T![::]);
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
index abcefffa2..5f4977886 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
@@ -431,14 +431,15 @@ fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
while !p.at(EOF) && !p.at(ket) {
- if !p.at_ts(PAT_TOP_FIRST) {
- p.error("expected a pattern");
- break;
- }
-
pattern_top(p);
- if !p.at(ket) {
- p.expect(T![,]);
+ if !p.at(T![,]) {
+ if p.at_ts(PAT_TOP_FIRST) {
+ p.error(format!("expected {:?}, got {:?}", T![,], p.current()));
+ } else {
+ break;
+ }
+ } else {
+ p.bump(T![,]);
}
}
}
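The reworked `pat_list` loop changes how missing commas are recovered: after each pattern it bumps a comma, reports an "expected `,`" error if another pattern follows directly, and otherwise stops. A rough sketch over plain string tokens rather than the real parser state:

    // Returns the parsed "patterns" plus any recovery errors, stopping at `ket`.
    fn split_pats<'a>(tokens: &[&'a str], ket: &str) -> (Vec<&'a str>, Vec<String>) {
        let mut pats = Vec::new();
        let mut errors = Vec::new();
        let mut i = 0;
        while i < tokens.len() && tokens[i] != ket {
            pats.push(tokens[i]); // stands in for `pattern_top(p)`
            i += 1;
            match tokens.get(i) {
                Some(&",") => i += 1,
                Some(t) if *t != ket => errors.push(format!("expected `,`, got `{t}`")),
                _ => break,
            }
        }
        (pats, errors)
    }

    fn main() {
        // `[a b, c]`: the missing comma is reported but parsing keeps going.
        let (pats, errors) = split_pats(&["a", "b", ",", "c", "]"], "]");
        assert_eq!(pats, ["a", "b", "c"]);
        assert_eq!(errors, ["expected `,`, got `b`"]);
    }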
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
index 52b3fc23d..cd87b304a 100644
--- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -135,6 +135,7 @@ pub enum SyntaxKind {
STATIC,
CONST,
TRAIT,
+ TRAIT_ALIAS,
IMPL,
TYPE_ALIAS,
MACRO_CALL,
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rast
new file mode 100644
index 000000000..026fecf4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rast
@@ -0,0 +1,58 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ DOT2 ".."
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 43: right curly brace `}` before `else` in a `let...else` statement not allowed
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rs
new file mode 100644
index 000000000..d41027419
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0049_let_else_right_curly_brace_for.rs
@@ -0,0 +1,6 @@
+fn f() {
+ let _ = for _ in 0..10 {
+ } else {
+ return
+ };
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rast
new file mode 100644
index 000000000..102321954
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rast
@@ -0,0 +1,46 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 33: right curly brace `}` before `else` in a `let...else` statement not allowed
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rs
new file mode 100644
index 000000000..28b892869
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0050_let_else_right_curly_brace_loop.rs
@@ -0,0 +1,6 @@
+fn f() {
+ let _ = loop {
+ } else {
+ return
+ };
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rast
new file mode 100644
index 000000000..6e1181246
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rast
@@ -0,0 +1,85 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 83: right curly brace `}` before `else` in a `let...else` statement not allowed
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rs
new file mode 100644
index 000000000..902d70dae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0051_let_else_right_curly_brace_match.rs
@@ -0,0 +1,8 @@
+fn f() {
+ let _ = match Some(1) {
+ Some(_) => 1,
+ None => 2,
+ } else {
+ return
+ };
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rast
new file mode 100644
index 000000000..298d47d53
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 39: right curly brace `}` before `else` in a `let...else` statement not allowed
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rs
new file mode 100644
index 000000000..a52343d8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0052_let_else_right_curly_brace_while.rs
@@ -0,0 +1,6 @@
+fn f() {
+ let _ = while true {
+ } else {
+ return
+ };
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rast
new file mode 100644
index 000000000..c0a4b0400
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 49: right curly brace `}` before `else` in a `let...else` statement not allowed
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rs
new file mode 100644
index 000000000..9a87aecbd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0053_let_else_right_curly_brace_if.rs
@@ -0,0 +1,7 @@
+fn f() {
+ let _ = if true {
+ } else {
+ } else {
+ return
+ };
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rast
new file mode 100644
index 000000000..0529e9750
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Y"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 13: expected `::`
+error 32: expected `::`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rs
new file mode 100644
index 000000000..802d6cc14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rs
@@ -0,0 +1,2 @@
+type X = <()>;
+type Y = <A as B>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rast
new file mode 100644
index 000000000..6ec580212
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rast
@@ -0,0 +1,69 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "func"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ R_CURLY "}"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "panic"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"h\""
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 35: right curly brace `}` before `else` in a `let...else` statement not allowed
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rs
new file mode 100644
index 000000000..30d52fea3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rs
@@ -0,0 +1 @@
+fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
index 2ef66484a..c45f87089 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
@@ -1,5 +1,5 @@
SOURCE_FILE
- TRAIT
+ TRAIT_ALIAS
TRAIT_KW "trait"
WHITESPACE " "
NAME
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
index 4443d9d14..8f6782477 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
@@ -1,5 +1,5 @@
SOURCE_FILE
- TRAIT
+ TRAIT_ALIAS
TRAIT_KW "trait"
WHITESPACE " "
NAME
@@ -50,7 +50,7 @@ SOURCE_FILE
IDENT "Copy"
SEMICOLON ";"
WHITESPACE "\n"
- TRAIT
+ TRAIT_ALIAS
TRAIT_KW "trait"
WHITESPACE " "
NAME
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
index 6550cf27e..4e5d640f1 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -15,13 +15,13 @@ use std::{
use cargo_metadata::{camino::Utf8Path, Message};
use la_arena::ArenaMap;
-use paths::AbsPathBuf;
+use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use semver::Version;
use serde::Deserialize;
use crate::{
- cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
+ cfg_flag::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
InvocationStrategy, Package,
};
@@ -67,6 +67,7 @@ impl WorkspaceBuildScripts {
let mut cmd = Command::new(toolchain::cargo());
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
+ cmd.args(&config.extra_args);
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --target
@@ -250,7 +251,7 @@ impl WorkspaceBuildScripts {
if tracing::enabled!(tracing::Level::INFO) {
for package in workspace.packages() {
- let package_build_data = &mut outputs[package];
+ let package_build_data = &outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
"{}: {:?}",
@@ -378,6 +379,84 @@ impl WorkspaceBuildScripts {
pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
self.outputs.get(idx)
}
+
+ pub(crate) fn rustc_crates(
+ rustc: &CargoWorkspace,
+ current_dir: &AbsPath,
+ extra_env: &FxHashMap<String, String>,
+ ) -> Self {
+ let mut bs = WorkspaceBuildScripts::default();
+ for p in rustc.packages() {
+ bs.outputs.insert(p, BuildScriptOutput::default());
+ }
+ let res = (|| {
+ let target_libdir = (|| {
+ let mut cargo_config = Command::new(toolchain::cargo());
+ cargo_config.envs(extra_env);
+ cargo_config
+ .current_dir(current_dir)
+ .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if let Ok(it) = utf8_stdout(cargo_config) {
+ return Ok(it);
+ }
+ let mut cmd = Command::new(toolchain::rustc());
+ cmd.envs(extra_env);
+ cmd.args(["--print", "target-libdir"]);
+ utf8_stdout(cmd)
+ })()?;
+
+ let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir))
+ .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?;
+ tracing::info!("Loading rustc proc-macro paths from {}", target_libdir.display());
+
+ let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)?
+ .filter_map(|entry| {
+ let dir_entry = entry.ok()?;
+ if dir_entry.file_type().ok()?.is_file() {
+ let path = dir_entry.path();
+ tracing::info!("p{:?}", path);
+ let extension = path.extension()?;
+ if extension == std::env::consts::DLL_EXTENSION {
+ let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
+ let path = AbsPathBuf::try_from(path).ok()?;
+ return Some((name, path));
+ }
+ }
+ None
+ })
+ .collect();
+ for p in rustc.packages() {
+ let package = &rustc[p];
+ if package.targets.iter().any(|&it| rustc[it].is_proc_macro) {
+ if let Some((_, path)) = proc_macro_dylibs
+ .iter()
+ .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
+ {
+ bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
+ }
+ }
+ }
+
+ if tracing::enabled!(tracing::Level::INFO) {
+ for package in rustc.packages() {
+ let package_build_data = &bs.outputs[package];
+ if !package_build_data.is_unchanged() {
+ tracing::info!(
+ "{}: {:?}",
+ rustc[package].manifest.parent().display(),
+ package_build_data,
+ );
+ }
+ }
+ }
+ Ok(())
+ })();
+ if let Err::<_, anyhow::Error>(e) = res {
+ bs.error = Some(e.to_string());
+ }
+ bs
+ }
}
// FIXME: Find a better way to know if it is a dylib.
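The new `rustc_crates` maps proc-macro dylibs found in the sysroot's target-libdir back to packages of the rustc workspace by their file names. A sketch of just the name-extraction step, under the assumption that dylib files are named `lib<crate>-<hash>.<dll extension>`:

    use std::path::Path;

    // Take the file stem, keep the part before the metadata hash, strip `lib`.
    fn dylib_crate_name(path: &Path) -> Option<String> {
        if path.extension()? != std::env::consts::DLL_EXTENSION {
            return None;
        }
        let stem = path.file_stem()?.to_str()?;
        let (name, _hash) = stem.split_once('-')?;
        Some(name.trim_start_matches("lib").to_owned())
    }

    fn main() {
        // On Linux DLL_EXTENSION is "so"; the hash suffix here is made up.
        #[cfg(target_os = "linux")]
        assert_eq!(
            dylib_crate_name(Path::new("libserde_derive-0123abcd.so")).as_deref(),
            Some("serde_derive")
        );
    }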
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index fdc7859eb..01162b1a8 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -50,7 +50,7 @@ impl ops::Index<Target> for CargoWorkspace {
/// Describes how to set the rustc source directory.
#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum RustcSource {
+pub enum RustLibSource {
/// Explicit path for the rustc source directory.
Path(AbsPathBuf),
/// Try to automatically detect where the rustc source directory is.
@@ -95,16 +95,18 @@ pub struct CargoConfig {
/// rustc target
pub target: Option<String>,
/// Sysroot loading behavior
- pub sysroot: Option<RustcSource>,
+ pub sysroot: Option<RustLibSource>,
pub sysroot_src: Option<AbsPathBuf>,
/// rustc private crate source
- pub rustc_source: Option<RustcSource>,
+ pub rustc_source: Option<RustLibSource>,
/// crates to disable `#[cfg(test)]` on
pub unset_test_crates: UnsetTestCrates,
/// Invoke `cargo check` through the RUSTC_WRAPPER.
pub wrap_rustc_in_build_scripts: bool,
/// The command to run instead of `cargo check` for building build scripts.
pub run_build_script_command: Option<Vec<String>>,
+ /// Extra args to pass to the cargo command.
+ pub extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, String>,
pub invocation_strategy: InvocationStrategy,
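The new `extra_args` field is forwarded verbatim onto the `cargo check` invocation in build_scripts.rs (`cmd.args(&config.extra_args)` above). A small sketch with a stripped-down stand-in for `CargoConfig`:

    use std::process::Command;

    #[derive(Default)]
    struct Config {
        extra_args: Vec<String>,
        extra_env: std::collections::HashMap<String, String>,
    }

    fn check_command(config: &Config) -> Command {
        let mut cmd = Command::new("cargo");
        cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
        cmd.args(&config.extra_args); // e.g. `--offline`, `--locked`, ...
        cmd.envs(&config.extra_env);
        cmd
    }

    fn main() {
        let config = Config { extra_args: vec!["--offline".into()], ..Default::default() };
        let cmd = check_command(&config);
        assert!(cmd.get_args().any(|a| a == "--offline"));
    }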
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index 9b6a71db8..70cb71ae3 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -44,7 +44,7 @@ pub use crate::{
build_scripts::WorkspaceBuildScripts,
cargo_workspace::{
CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency,
- RustcSource, Target, TargetData, TargetKind, UnsetTestCrates,
+ RustLibSource, Target, TargetData, TargetKind, UnsetTestCrates,
},
manifest_path::ManifestPath,
project_json::{ProjectJson, ProjectJsonData},
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
index 328d2fbcf..74e41eda7 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -88,23 +88,17 @@ impl Sysroot {
}
pub fn discover_with_src_override(
- dir: &AbsPath,
+ current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
src: AbsPathBuf,
) -> Result<Sysroot> {
- tracing::debug!("discovering sysroot for {}", dir.display());
- let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
+ tracing::debug!("discovering sysroot for {}", current_dir.display());
+ let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?;
Ok(Sysroot::load(sysroot_dir, src))
}
- pub fn discover_rustc(
- cargo_toml: &ManifestPath,
- extra_env: &FxHashMap<String, String>,
- ) -> Option<ManifestPath> {
- tracing::debug!("discovering rustc source for {}", cargo_toml.display());
- let current_dir = cargo_toml.parent();
- let sysroot_dir = discover_sysroot_dir(current_dir, extra_env).ok()?;
- get_rustc_src(&sysroot_dir)
+ pub fn discover_rustc(&self) -> Option<ManifestPath> {
+ get_rustc_src(&self.root)
}
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> {
@@ -282,4 +276,7 @@ unwind
std_detect
test";
-const PROC_MACRO_DEPS: &str = "std";
+// core is required for our builtin derives to work in the proc_macro lib currently
+const PROC_MACRO_DEPS: &str = "
+std
+core";
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
index 9e9691d11..3754accbb 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -24,8 +24,8 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr
let project_workspace = ProjectWorkspace::Cargo {
cargo: cargo_workspace,
build_scripts: WorkspaceBuildScripts::default(),
- sysroot: None,
- rustc: None,
+ sysroot: Err(None),
+ rustc: Err(None),
rustc_cfg: Vec::new(),
cfg_overrides,
toolchain: None,
@@ -37,7 +37,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr
fn load_rust_project(file: &str) -> CrateGraph {
let data = get_test_json_file(file);
let project = rooted_project_json(data);
- let sysroot = Some(get_fake_sysroot());
+ let sysroot = Ok(get_fake_sysroot());
let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
to_crate_graph(project_workspace)
}
@@ -1547,6 +1547,15 @@ fn rust_project_hello_world_project_model() {
),
prelude: true,
},
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
],
proc_macro: Err(
"no proc macro loaded for sysroot crate",
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index 2a11f1e8e..d1e53e12e 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -17,7 +17,7 @@ use stdx::{always, hash::NoHashHashMap};
use crate::{
build_scripts::BuildScriptOutput,
- cargo_workspace::{DepKind, PackageData, RustcSource},
+ cargo_workspace::{DepKind, PackageData, RustLibSource},
cfg_flag::CfgFlag,
rustc_cfg,
sysroot::SysrootCrate,
@@ -69,8 +69,8 @@ pub enum ProjectWorkspace {
Cargo {
cargo: CargoWorkspace,
build_scripts: WorkspaceBuildScripts,
- sysroot: Option<Sysroot>,
- rustc: Option<CargoWorkspace>,
+ sysroot: Result<Sysroot, Option<String>>,
+ rustc: Result<(CargoWorkspace, WorkspaceBuildScripts), Option<String>>,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
///
@@ -82,7 +82,7 @@ pub enum ProjectWorkspace {
target_layout: Result<String, String>,
},
/// Project workspace was manually specified using a `rust-project.json` file.
- Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
+ Json { project: ProjectJson, sysroot: Result<Sysroot, Option<String>>, rustc_cfg: Vec<CfgFlag> },
// FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
// That's not the end user experience we should strive for.
// Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
@@ -93,7 +93,11 @@ pub enum ProjectWorkspace {
// //
/// Project with a set of disjoint files, not belonging to any particular workspace.
/// Backed by basic sysroot crates for basic completion and highlighting.
- DetachedFiles { files: Vec<AbsPathBuf>, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
+ DetachedFiles {
+ files: Vec<AbsPathBuf>,
+ sysroot: Result<Sysroot, Option<String>>,
+ rustc_cfg: Vec<CfgFlag>,
+ },
}
impl fmt::Debug for ProjectWorkspace {
@@ -113,10 +117,10 @@ impl fmt::Debug for ProjectWorkspace {
.debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name())
.field("n_packages", &cargo.packages().len())
- .field("sysroot", &sysroot.is_some())
+ .field("sysroot", &sysroot.is_ok())
.field(
"n_rustc_compiler_crates",
- &rustc.as_ref().map_or(0, |rc| rc.packages().len()),
+ &rustc.as_ref().map_or(0, |(rc, _)| rc.packages().len()),
)
.field("n_rustc_cfg", &rustc_cfg.len())
.field("n_cfg_overrides", &cfg_overrides.len())
@@ -126,7 +130,7 @@ impl fmt::Debug for ProjectWorkspace {
ProjectWorkspace::Json { project, sysroot, rustc_cfg } => {
let mut debug_struct = f.debug_struct("Json");
debug_struct.field("n_crates", &project.n_crates());
- if let Some(sysroot) = sysroot {
+ if let Ok(sysroot) = sysroot {
debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
}
debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
@@ -135,7 +139,7 @@ impl fmt::Debug for ProjectWorkspace {
ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
.debug_struct("DetachedFiles")
.field("n_files", &files.len())
- .field("sysroot", &sysroot.is_some())
+ .field("sysroot", &sysroot.is_ok())
.field("n_rustc_cfg", &rustc_cfg.len())
.finish(),
}
@@ -191,85 +195,81 @@ impl ProjectWorkspace {
let cargo = CargoWorkspace::new(meta);
let sysroot = match (&config.sysroot, &config.sysroot_src) {
- (Some(RustcSource::Path(path)), None) => {
- match Sysroot::with_sysroot_dir(path.clone()) {
- Ok(it) => Some(it),
- Err(e) => {
- tracing::error!(%e, "Failed to find sysroot at {}.", path.display());
- None
- }
- }
+ (Some(RustLibSource::Path(path)), None) => {
+ Sysroot::with_sysroot_dir(path.clone()).map_err(|e| {
+ Some(format!("Failed to find sysroot at {}:{e}", path.display()))
+ })
}
- (Some(RustcSource::Discover), None) => {
- match Sysroot::discover(cargo_toml.parent(), &config.extra_env) {
- Ok(it) => Some(it),
- Err(e) => {
- tracing::error!(
- %e,
- "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
- cargo_toml.display()
- );
- None
- }
- }
+ (Some(RustLibSource::Discover), None) => {
+ Sysroot::discover(cargo_toml.parent(), &config.extra_env).map_err(|e| {
+ Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
+ })
}
- (Some(RustcSource::Path(sysroot)), Some(sysroot_src)) => {
- Some(Sysroot::load(sysroot.clone(), sysroot_src.clone()))
+ (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
+ Ok(Sysroot::load(sysroot.clone(), sysroot_src.clone()))
}
- (Some(RustcSource::Discover), Some(sysroot_src)) => {
- match Sysroot::discover_with_src_override(
+ (Some(RustLibSource::Discover), Some(sysroot_src)) => {
+ Sysroot::discover_with_src_override(
cargo_toml.parent(),
&config.extra_env,
sysroot_src.clone(),
- ) {
- Ok(it) => Some(it),
- Err(e) => {
- tracing::error!(
- %e,
- "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
- cargo_toml.display()
- );
- None
- }
- }
+ ).map_err(|e| {
+ Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
+ })
}
- (None, _) => None,
+ (None, _) => Err(None),
};
- if let Some(sysroot) = &sysroot {
- tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ if let Ok(sysroot) = &sysroot {
+ tracing::info!(workspace = %cargo_toml.display(), src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
let rustc_dir = match &config.rustc_source {
- Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
- Some(RustcSource::Discover) => {
- Sysroot::discover_rustc(&cargo_toml, &config.extra_env)
+ Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
+ .map_err(|p| {
+ Some(format!("rustc source path is not absolute: {}", p.display()))
+ }),
+ Some(RustLibSource::Discover) => {
+ sysroot.as_ref().ok().and_then(Sysroot::discover_rustc).ok_or_else(|| {
+ Some(format!("Failed to discover rustc source for sysroot."))
+ })
}
- None => None,
+ None => Err(None),
};
- if let Some(rustc_dir) = &rustc_dir {
- tracing::info!(rustc_dir = %rustc_dir.display(), "Using rustc source");
- }
- let rustc = match rustc_dir {
- Some(rustc_dir) => match CargoWorkspace::fetch_metadata(
+ let rustc = rustc_dir.and_then(|rustc_dir| {
+ tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source");
+ match CargoWorkspace::fetch_metadata(
&rustc_dir,
cargo_toml.parent(),
- config,
+ &CargoConfig {
+ features: crate::CargoFeatures::default(),
+ ..config.clone()
+ },
progress,
) {
- Ok(meta) => Some(CargoWorkspace::new(meta)),
+ Ok(meta) => {
+ let workspace = CargoWorkspace::new(meta);
+ let buildscripts = WorkspaceBuildScripts::rustc_crates(
+ &workspace,
+ cargo_toml.parent(),
+ &config.extra_env,
+ );
+ Ok((workspace, buildscripts))
+ }
Err(e) => {
tracing::error!(
%e,
"Failed to read Cargo metadata from rustc source at {}",
rustc_dir.display()
);
- None
+ Err(Some(format!(
+ "Failed to read Cargo metadata from rustc source at {}: {e}",
+ rustc_dir.display())
+ ))
}
- },
- None => None,
- };
+ }
+ });
let rustc_cfg =
rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env);
@@ -305,12 +305,12 @@ impl ProjectWorkspace {
extra_env: &FxHashMap<String, String>,
) -> ProjectWorkspace {
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
- (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)),
+ (Some(sysroot), Some(sysroot_src)) => Ok(Sysroot::load(sysroot, sysroot_src)),
(Some(sysroot), None) => {
// assume sysroot is structured like rustup's and guess `sysroot_src`
let sysroot_src =
sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
- Some(Sysroot::load(sysroot, sysroot_src))
+ Ok(Sysroot::load(sysroot, sysroot_src))
}
(None, Some(sysroot_src)) => {
// assume sysroot is structured like rustup's and guess `sysroot`
@@ -318,11 +318,11 @@ impl ProjectWorkspace {
for _ in 0..5 {
sysroot.pop();
}
- Some(Sysroot::load(sysroot, sysroot_src))
+ Ok(Sysroot::load(sysroot, sysroot_src))
}
- (None, None) => None,
+ (None, None) => Err(None),
};
- if let Some(sysroot) = &sysroot {
+ if let Ok(sysroot) = &sysroot {
tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
@@ -335,33 +335,23 @@ impl ProjectWorkspace {
config: &CargoConfig,
) -> Result<ProjectWorkspace> {
let sysroot = match &config.sysroot {
- Some(RustcSource::Path(path)) => match Sysroot::with_sysroot_dir(path.clone()) {
- Ok(it) => Some(it),
- Err(e) => {
- tracing::error!(%e, "Failed to find sysroot at {}.", path.display());
- None
- }
- },
- Some(RustcSource::Discover) => {
+ Some(RustLibSource::Path(path)) => Sysroot::with_sysroot_dir(path.clone())
+ .map_err(|e| Some(format!("Failed to find sysroot at {}:{e}", path.display()))),
+ Some(RustLibSource::Discover) => {
let dir = &detached_files
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?;
- match Sysroot::discover(dir, &config.extra_env) {
- Ok(it) => Some(it),
- Err(e) => {
- tracing::error!(
- %e,
- "Failed to find sysroot for {}. Is rust-src installed?",
- dir.display()
- );
- None
- }
- }
+ Sysroot::discover(dir, &config.extra_env).map_err(|e| {
+ Some(format!(
+ "Failed to find sysroot for {}. Is rust-src installed? {e}",
+ dir.display()
+ ))
+ })
}
- None => None,
+ None => Err(None),
};
- if let Some(sysroot) = &sysroot {
+ if let Ok(sysroot) = &sysroot {
tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
@@ -442,10 +432,18 @@ impl ProjectWorkspace {
}
}
+ pub fn workspace_definition_path(&self) -> Option<&AbsPath> {
+ match self {
+ ProjectWorkspace::Cargo { cargo, .. } => Some(cargo.workspace_root()),
+ ProjectWorkspace::Json { project, .. } => Some(project.path()),
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ }
+ }
+
pub fn find_sysroot_proc_macro_srv(&self) -> Option<AbsPathBuf> {
match self {
- ProjectWorkspace::Cargo { sysroot: Some(sysroot), .. }
- | ProjectWorkspace::Json { sysroot: Some(sysroot), .. } => {
+ ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
+ | ProjectWorkspace::Json { sysroot: Ok(sysroot), .. } => {
let standalone_server_name =
format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
["libexec", "lib"]
@@ -461,7 +459,7 @@ impl ProjectWorkspace {
/// The return type contains the path and whether or not
/// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> {
- let mk_sysroot = |sysroot: Option<&Sysroot>, project_root: Option<&AbsPath>| {
+ let mk_sysroot = |sysroot: Result<&Sysroot, _>, project_root: Option<&AbsPath>| {
sysroot.map(|sysroot| PackageRoot {
// mark the sysroot as mutable if it is located inside of the project
is_local: project_root
@@ -531,7 +529,7 @@ impl ProjectWorkspace {
PackageRoot { is_local, include, exclude }
})
.chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root())))
- .chain(rustc.iter().flat_map(|rustc| {
+ .chain(rustc.iter().flat_map(|(rustc, _)| {
rustc.packages().map(move |krate| PackageRoot {
is_local: false,
include: vec![rustc[krate].manifest.parent().to_path_buf()],
@@ -559,7 +557,7 @@ impl ProjectWorkspace {
sysroot_package_len + project.n_crates()
}
ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
- let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len());
+ let rustc_package_len = rustc.as_ref().map_or(0, |(it, _)| it.packages().len());
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len());
cargo.packages().len() + sysroot_package_len + rustc_package_len
}
@@ -584,7 +582,7 @@ impl ProjectWorkspace {
load_proc_macro,
load,
project,
- sysroot.as_ref(),
+ sysroot.as_ref().ok(),
extra_env,
Err("rust-project.json projects have no target layout set".into()),
),
@@ -600,9 +598,9 @@ impl ProjectWorkspace {
} => cargo_to_crate_graph(
load_proc_macro,
load,
- rustc,
+ rustc.as_ref().ok(),
cargo,
- sysroot.as_ref(),
+ sysroot.as_ref().ok(),
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
@@ -616,7 +614,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
load,
files,
- sysroot,
+ sysroot.as_ref().ok(),
Err("detached file projects have no target layout set".into()),
)
}
@@ -778,7 +776,7 @@ fn project_json_to_crate_graph(
fn cargo_to_crate_graph(
load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
- rustc: &Option<CargoWorkspace>,
+ rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>,
cargo: &CargoWorkspace,
sysroot: Option<&Sysroot>,
rustc_cfg: Vec<CfgFlag>,
@@ -924,7 +922,7 @@ fn cargo_to_crate_graph(
if has_private {
// If the user provided a path to rustc sources, we add all the rustc_private crates
// and create dependencies on them for the crates which opt-in to that
- if let Some(rustc_workspace) = rustc {
+ if let Some((rustc_workspace, rustc_build_scripts)) = rustc {
handle_rustc_crates(
&mut crate_graph,
&mut pkg_to_lib_crate,
@@ -937,7 +935,13 @@ fn cargo_to_crate_graph(
&pkg_crates,
&cfg_options,
override_cfg,
- build_scripts,
+ if rustc_workspace.workspace_root() == cargo.workspace_root() {
+ // the rustc workspace does not use the installed toolchain's proc-macro server
+ // so we need to make sure we don't use the precompiled proc-macros there either
+ build_scripts
+ } else {
+ rustc_build_scripts
+ },
target_layout,
);
}
@@ -949,7 +953,7 @@ fn detached_files_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
detached_files: &[AbsPathBuf],
- sysroot: &Option<Sysroot>,
+ sysroot: Option<&Sysroot>,
target_layout: TargetLayoutLoadResult,
) -> CrateGraph {
let _p = profile::span("detached_files_to_crate_graph");
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 93297faa6..6ce1de5d3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -7,12 +7,12 @@ use std::{
};
use hir::{
- db::{AstDatabase, DefDatabase, HirDatabase},
+ db::{DefDatabase, ExpandDatabase, HirDatabase},
AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
- expr::ExprId,
+ expr::{ExprId, PatId},
FunctionId,
};
use hir_ty::{Interner, TyExt, TypeFlags};
@@ -24,7 +24,7 @@ use ide_db::base_db::{
use itertools::Itertools;
use oorandom::Rand32;
use profile::{Bytes, StopWatch};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use stdx::format_to;
@@ -57,7 +57,7 @@ impl flags::AnalysisStats {
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = match self.no_sysroot {
true => None,
- false => Some(RustcSource::Discover),
+ false => Some(RustLibSource::Discover),
};
let no_progress = &|_| ();
@@ -222,7 +222,11 @@ impl flags::AnalysisStats {
let mut num_exprs = 0;
let mut num_exprs_unknown = 0;
let mut num_exprs_partially_unknown = 0;
- let mut num_type_mismatches = 0;
+ let mut num_expr_type_mismatches = 0;
+ let mut num_pats = 0;
+ let mut num_pats_unknown = 0;
+ let mut num_pats_partially_unknown = 0;
+ let mut num_pat_type_mismatches = 0;
let analysis = host.analysis();
for f in funcs.iter().copied() {
let name = f.name(db);
@@ -255,6 +259,8 @@ impl flags::AnalysisStats {
let f_id = FunctionId::from(f);
let (body, sm) = db.body_with_source_map(f_id.into());
let inference_result = db.infer(f_id.into());
+
+ // region:expressions
let (previous_exprs, previous_unknown, previous_partially_unknown) =
(num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
for (expr_id, _) in body.exprs.iter() {
@@ -307,12 +313,12 @@ impl flags::AnalysisStats {
if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
println!(
r#"{},type,"{}""#,
- location_csv(db, &analysis, vfs, &sm, expr_id),
+ location_csv_expr(db, &analysis, vfs, &sm, expr_id),
ty.display(db)
);
}
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
- num_type_mismatches += 1;
+ num_expr_type_mismatches += 1;
if verbosity.is_verbose() {
if let Some((path, start, end)) =
expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
@@ -339,7 +345,7 @@ impl flags::AnalysisStats {
if self.output == Some(OutputFormat::Csv) {
println!(
r#"{},mismatch,"{}","{}""#,
- location_csv(db, &analysis, vfs, &sm, expr_id),
+ location_csv_expr(db, &analysis, vfs, &sm, expr_id),
mismatch.expected.display(db),
mismatch.actual.display(db)
);
@@ -355,6 +361,109 @@ impl flags::AnalysisStats {
num_exprs_partially_unknown - previous_partially_unknown
));
}
+ // endregion:expressions
+
+ // region:patterns
+ let (previous_pats, previous_unknown, previous_partially_unknown) =
+ (num_pats, num_pats_unknown, num_pats_partially_unknown);
+ for (pat_id, _) in body.pats.iter() {
+ let ty = &inference_result[pat_id];
+ num_pats += 1;
+ let unknown_or_partial = if ty.is_unknown() {
+ num_pats_unknown += 1;
+ if verbosity.is_spammy() {
+ if let Some((path, start, end)) =
+ pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
+ {
+ bar.println(format!(
+ "{} {}:{}-{}:{}: Unknown type",
+ path,
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ ));
+ } else {
+ bar.println(format!("{name}: Unknown type",));
+ }
+ }
+ true
+ } else {
+ let is_partially_unknown =
+ ty.data(Interner).flags.contains(TypeFlags::HAS_ERROR);
+ if is_partially_unknown {
+ num_pats_partially_unknown += 1;
+ }
+ is_partially_unknown
+ };
+ if self.only.is_some() && verbosity.is_spammy() {
+ // in super-verbose mode for just one function, we print every single pattern
+ if let Some((_, start, end)) = pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
+ {
+ bar.println(format!(
+ "{}:{}-{}:{}: {}",
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ ty.display(db)
+ ));
+ } else {
+ bar.println(format!("unknown location: {}", ty.display(db)));
+ }
+ }
+ if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
+ println!(
+ r#"{},type,"{}""#,
+ location_csv_pat(db, &analysis, vfs, &sm, pat_id),
+ ty.display(db)
+ );
+ }
+ if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) {
+ num_pat_type_mismatches += 1;
+ if verbosity.is_verbose() {
+ if let Some((path, start, end)) =
+ pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
+ {
+ bar.println(format!(
+ "{} {}:{}-{}:{}: Expected {}, got {}",
+ path,
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ ));
+ } else {
+ bar.println(format!(
+ "{}: Expected {}, got {}",
+ name,
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ ));
+ }
+ }
+ if self.output == Some(OutputFormat::Csv) {
+ println!(
+ r#"{},mismatch,"{}","{}""#,
+ location_csv_pat(db, &analysis, vfs, &sm, pat_id),
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ );
+ }
+ }
+ }
+ if verbosity.is_spammy() {
+ bar.println(format!(
+ "In {}: {} pats, {} unknown, {} partial",
+ full_name,
+ num_pats - previous_pats,
+ num_pats_unknown - previous_unknown,
+ num_pats_partially_unknown - previous_partially_unknown
+ ));
+ }
+ // endregion:patterns
bar.inc(1);
}
@@ -366,10 +475,21 @@ impl flags::AnalysisStats {
percentage(num_exprs_unknown, num_exprs),
num_exprs_partially_unknown,
percentage(num_exprs_partially_unknown, num_exprs),
- num_type_mismatches
+ num_expr_type_mismatches
+ );
+ eprintln!(
+ " pats: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
+ num_pats,
+ num_pats_unknown,
+ percentage(num_pats_unknown, num_pats),
+ num_pats_partially_unknown,
+ percentage(num_pats_partially_unknown, num_pats),
+ num_pat_type_mismatches
);
report_metric("unknown type", num_exprs_unknown, "#");
- report_metric("type mismatches", num_type_mismatches, "#");
+ report_metric("type mismatches", num_expr_type_mismatches, "#");
+ report_metric("pattern unknown type", num_pats_unknown, "#");
+ report_metric("pattern type mismatches", num_pat_type_mismatches, "#");
eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());
}
@@ -379,7 +499,7 @@ impl flags::AnalysisStats {
}
}
-fn location_csv(
+fn location_csv_expr(
db: &RootDatabase,
analysis: &Analysis,
vfs: &Vfs,
@@ -401,6 +521,30 @@ fn location_csv(
format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
}
+fn location_csv_pat(
+ db: &RootDatabase,
+ analysis: &Analysis,
+ vfs: &Vfs,
+ sm: &BodySourceMap,
+ pat_id: PatId,
+) -> String {
+ let src = match sm.pat_syntax(pat_id) {
+ Ok(s) => s,
+ Err(SyntheticSyntax) => return "synthetic,,".to_string(),
+ };
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|e| {
+ e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
+ });
+ let original_range = node.as_ref().original_file_range(db);
+ let path = vfs.file_path(original_range.file_id);
+ let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let text_range = original_range.range;
+ let (start, end) =
+ (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+ format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
+}
+
fn expr_syntax_range(
db: &RootDatabase,
analysis: &Analysis,
@@ -423,6 +567,33 @@ fn expr_syntax_range(
None
}
}
+fn pat_syntax_range(
+ db: &RootDatabase,
+ analysis: &Analysis,
+ vfs: &Vfs,
+ sm: &BodySourceMap,
+ pat_id: PatId,
+) -> Option<(VfsPath, LineCol, LineCol)> {
+ let src = sm.pat_syntax(pat_id);
+ if let Ok(src) = src {
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|e| {
+ e.either(
+ |it| it.to_node(&root).syntax().clone(),
+ |it| it.to_node(&root).syntax().clone(),
+ )
+ });
+ let original_range = node.as_ref().original_file_range(db);
+ let path = vfs.file_path(original_range.file_id);
+ let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let text_range = original_range.range;
+ let (start, end) =
+ (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+ Some((path, start, end))
+ } else {
+ None
+ }
+}
fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
for i in 0..slice.len() {
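
Pattern statistics are tallied the same way as the existing expression statistics: every pattern in a body is counted as fully unknown, partially unknown, or resolved, and pattern type mismatches are tracked separately. A small self-contained sketch of that bookkeeping; the `PatTy` enum is an illustrative stand-in for the real inference result:

    fn percentage(n: u64, total: u64) -> u64 {
        if total == 0 { 0 } else { n * 100 / total }
    }

    // Hypothetical classification of a single pattern's inferred type.
    enum PatTy {
        Unknown,
        PartiallyUnknown,
        Known,
    }

    fn report(pats: &[PatTy]) {
        let (mut num_pats, mut unknown, mut partial) = (0u64, 0u64, 0u64);
        for p in pats {
            num_pats += 1;
            match p {
                PatTy::Unknown => unknown += 1,
                PatTy::PartiallyUnknown => partial += 1,
                PatTy::Known => {}
            }
        }
        eprintln!(
            "  pats: {}, ??ty: {} ({}%), ?ty: {} ({}%)",
            num_pats,
            unknown,
            percentage(unknown, num_pats),
            partial,
            percentage(partial, num_pats),
        );
    }

    fn main() {
        report(&[PatTy::Known, PatTy::Unknown, PatTy::PartiallyUnknown, PatTy::Known]);
    }
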
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index ff821be53..4006d023d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -1,6 +1,7 @@
//! Analyze all modules in a project for diagnostics. Exits with a non-zero
//! status code if any errors are found.
+use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
use hir::{db::HirDatabase, Crate, Module};
@@ -14,7 +15,8 @@ use crate::cli::{
impl flags::Diagnostics {
pub fn run(self) -> anyhow::Result<()> {
- let cargo_config = Default::default();
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: !self.disable_build_scripts,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
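
Each of the CLI subcommands touched here and below (`diagnostics`, `lsif`, `scip`, `ssr`) now requests sysroot discovery explicitly instead of relying on `CargoConfig::default()`, which leaves the sysroot unset. The shared setup, sketched under the assumption that the workspace's `project_model` crate is in scope:

    use project_model::{CargoConfig, RustLibSource};

    fn cli_cargo_config() -> CargoConfig {
        let mut cargo_config = CargoConfig::default();
        // Without this, std/core are not loaded and standard-library symbols
        // show up as unresolved in the CLI output.
        cargo_config.sysroot = Some(RustLibSource::Discover);
        cargo_config
    }

    fn main() {
        let _config = cli_cargo_config();
    }
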
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index 3fc1aa4ea..7f5d08449 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -13,7 +13,7 @@ use ide_db::LineIndexDatabase;
use ide_db::base_db::salsa::{self, ParallelDatabase};
use ide_db::line_index::WideEncoding;
use lsp_types::{self, lsif};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use vfs::{AbsPathBuf, Vfs};
use crate::cli::load_cargo::ProcMacroServerChoice;
@@ -289,7 +289,8 @@ impl flags::Lsif {
pub fn run(self) -> Result<()> {
eprintln!("Generating LSIF started...");
let now = Instant::now();
- let cargo_config = CargoConfig::default();
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
let no_progress = &|_| ();
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 9a04fbea7..3e5e40750 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -15,7 +15,7 @@ use ide::{
TokenStaticData,
};
use ide_db::LineIndexDatabase;
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use scip::types as scip_types;
use std::env;
@@ -29,7 +29,8 @@ impl flags::Scip {
pub fn run(self) -> Result<()> {
eprintln!("Generating SCIP start...");
let now = Instant::now();
- let cargo_config = CargoConfig::default();
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index 3552f840a..82a769347 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -1,7 +1,7 @@
//! Applies structured search replace rules from the command line.
use ide_ssr::MatchFinder;
-use project_model::CargoConfig;
+use project_model::{CargoConfig, RustLibSource};
use crate::cli::{
flags,
@@ -12,7 +12,8 @@ use crate::cli::{
impl flags::Ssr {
pub fn run(self) -> Result<()> {
use ide_db::base_db::SourceDatabaseExt;
- let cargo_config = CargoConfig::default();
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index f609a50a0..c35cce103 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -22,7 +22,7 @@ use ide_db::{
use itertools::Itertools;
use lsp_types::{ClientCapabilities, MarkupKind};
use project_model::{
- CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
+ CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustLibSource,
UnsetTestCrates,
};
use rustc_hash::{FxHashMap, FxHashSet};
@@ -101,6 +101,8 @@ config_data! {
/// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
/// avoid checking unnecessary things.
cargo_buildScripts_useRustcWrapper: bool = "true",
+ /// Extra arguments that are passed to every cargo invocation.
+ cargo_extraArgs: Vec<String> = "[]",
/// Extra environment variables that will be set when running cargo, rustc
/// or other commands within the workspace. Useful for setting RUSTFLAGS.
cargo_extraEnv: FxHashMap<String, String> = "{}",
@@ -270,7 +272,6 @@ config_data! {
/// The warnings will be indicated by a blue squiggly underline in code
/// and a blue icon in the `Problems Panel`.
diagnostics_warningsAsInfo: Vec<String> = "[]",
-
/// These directories will be ignored by rust-analyzer. They are
/// relative to the workspace root, and globs are not supported. You may
/// also need to add the folders to Code's `files.watcherExclude`.
@@ -366,6 +367,8 @@ config_data! {
inlayHints_typeHints_hideClosureInitialization: bool = "false",
/// Whether to hide inlay type hints for constructors.
inlayHints_typeHints_hideNamedConstructor: bool = "false",
+ /// Enables the experimental support for interpreting tests.
+ interpret_tests: bool = "false",
/// Join lines merges consecutive declaration and initialization of an assignment.
joinLines_joinAssignments: bool = "true",
@@ -456,7 +459,10 @@ config_data! {
/// Additional arguments to `rustfmt`.
rustfmt_extraArgs: Vec<String> = "[]",
/// Advanced option, fully override the command rust-analyzer uses for
- /// formatting.
+ /// formatting. This should be the equivalent of `rustfmt` here, and
+ /// not that of `cargo fmt`. The file contents will be passed on the
+ /// standard input and the formatted result will be read from the
+ /// standard output.
rustfmt_overrideCommand: Option<Vec<String>> = "null",
/// Enables the use of rustfmt's unstable range formatting command for the
/// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
@@ -849,27 +855,27 @@ impl Config {
}
pub fn linked_projects(&self) -> Vec<LinkedProject> {
match self.data.linkedProjects.as_slice() {
- [] => match self.discovered_projects.as_ref() {
- Some(discovered_projects) => {
- let exclude_dirs: Vec<_> = self
- .data
- .files_excludeDirs
- .iter()
- .map(|p| self.root_path.join(p))
- .collect();
- discovered_projects
+ [] => {
+ match self.discovered_projects.as_ref() {
+ Some(discovered_projects) => {
+ let exclude_dirs: Vec<_> = self
+ .data
+ .files_excludeDirs
+ .iter()
+ .map(|p| self.root_path.join(p))
+ .collect();
+ discovered_projects
.iter()
- .filter(|p| {
- let (ProjectManifest::ProjectJson(path)
- | ProjectManifest::CargoToml(path)) = p;
+ .filter(|(ProjectManifest::ProjectJson(path) | ProjectManifest::CargoToml(path))| {
!exclude_dirs.iter().any(|p| path.starts_with(p))
})
.cloned()
.map(LinkedProject::from)
.collect()
+ }
+ None => Vec::new(),
}
- None => Vec::new(),
- },
+ }
linked_projects => linked_projects
.iter()
.filter_map(|linked_project| match linked_project {
@@ -888,6 +894,15 @@ impl Config {
}
}
+ pub fn add_linked_projects(&mut self, linked_projects: Vec<ProjectJsonData>) {
+ let mut linked_projects = linked_projects
+ .into_iter()
+ .map(ManifestOrProjectJson::ProjectJson)
+ .collect::<Vec<ManifestOrProjectJson>>();
+
+ self.data.linkedProjects.append(&mut linked_projects);
+ }
+
pub fn did_save_text_document_dynamic_registration(&self) -> bool {
let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
@@ -1050,10 +1065,20 @@ impl Config {
}
}
+ pub fn extra_args(&self) -> &Vec<String> {
+ &self.data.cargo_extraArgs
+ }
+
pub fn extra_env(&self) -> &FxHashMap<String, String> {
&self.data.cargo_extraEnv
}
+ pub fn check_extra_args(&self) -> Vec<String> {
+ let mut extra_args = self.extra_args().clone();
+ extra_args.extend_from_slice(&self.data.check_extraArgs);
+ extra_args
+ }
+
pub fn check_extra_env(&self) -> FxHashMap<String, String> {
let mut extra_env = self.data.cargo_extraEnv.clone();
extra_env.extend(self.data.check_extraEnv.clone());
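
`check_extra_args` prepends the cargo-wide `cargo.extraArgs` to the check-specific `check.extraArgs`, so flycheck receives both sets. A minimal sketch of the merge, with plain vectors standing in for the config fields:

    fn merged_check_args(cargo_extra_args: &[String], check_extra_args: &[String]) -> Vec<String> {
        // cargo-wide arguments come first, check-specific arguments are appended after them
        let mut args = cargo_extra_args.to_vec();
        args.extend_from_slice(check_extra_args);
        args
    }

    fn main() {
        let merged = merged_check_args(&["--offline".to_owned()], &["--keep-going".to_owned()]);
        assert_eq!(merged, ["--offline", "--keep-going"]);
    }
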
@@ -1112,16 +1137,16 @@ impl Config {
pub fn cargo(&self) -> CargoConfig {
let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
if rustc_src == "discover" {
- RustcSource::Discover
+ RustLibSource::Discover
} else {
- RustcSource::Path(self.root_path.join(rustc_src))
+ RustLibSource::Path(self.root_path.join(rustc_src))
}
});
let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| {
if sysroot == "discover" {
- RustcSource::Discover
+ RustLibSource::Discover
} else {
- RustcSource::Path(self.root_path.join(sysroot))
+ RustLibSource::Path(self.root_path.join(sysroot))
}
});
let sysroot_src =
@@ -1152,6 +1177,7 @@ impl Config {
InvocationLocation::Workspace => project_model::InvocationLocation::Workspace,
},
run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
+ extra_args: self.data.cargo_extraArgs.clone(),
extra_env: self.data.cargo_extraEnv.clone(),
}
}
@@ -1222,7 +1248,7 @@ impl Config {
CargoFeaturesDef::All => vec![],
CargoFeaturesDef::Selected(it) => it,
},
- extra_args: self.data.check_extraArgs.clone(),
+ extra_args: self.check_extra_args(),
extra_env: self.check_extra_env(),
ansi_color_output: self.color_diagnostic_output(),
},
@@ -1441,6 +1467,7 @@ impl Config {
}
},
keywords: self.data.hover_documentation_keywords_enable,
+ interpret_tests: self.data.interpret_tests,
}
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
index 715804449..313bb2ec8 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
@@ -88,6 +88,42 @@ impl<'a> RequestDispatcher<'a> {
}
/// Dispatches the request onto thread pool
+ pub(crate) fn on_no_retry<R>(
+ &mut self,
+ f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request + 'static,
+ R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+
+ self.global_state.task_pool.handle.spawn({
+ let world = self.global_state.snapshot();
+ move || {
+ let result = panic::catch_unwind(move || {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(world, params)
+ });
+ match thread_result_to_response::<R>(req.id.clone(), result) {
+ Ok(response) => Task::Response(response),
+ Err(_) => Task::Response(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::ContentModified as i32,
+ "content modified".to_string(),
+ )),
+ }
+ }
+ });
+
+ self
+ }
+
+ /// Dispatches the request onto thread pool
pub(crate) fn on<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
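
`on_no_retry` spawns the handler on the thread pool just like `on`, but maps a panic (typically a request cancelled by an edit) to a `ContentModified` error instead of letting the client retry; inlay hint requests are routed through it later in this patch. A toy, self-contained sketch of that panic-to-response mapping:

    use std::panic;

    // Toy stand-ins for the LSP plumbing, for illustration only.
    #[derive(Debug)]
    enum Response {
        Ok(String),
        ContentModified,
    }

    fn dispatch_no_retry(f: impl FnOnce() -> String + panic::UnwindSafe) -> Response {
        match panic::catch_unwind(f) {
            Ok(result) => Response::Ok(result),
            // A cancelled request surfaces as a panic; report ContentModified so the
            // client drops the request instead of retrying it.
            Err(_) => Response::ContentModified,
        }
    }

    fn main() {
        println!("{:?}", dispatch_no_retry(|| "inlay hints".to_owned()));
        // The panic below is caught; the default panic hook still prints its message.
        println!("{:?}", dispatch_no_retry(|| panic!("cancelled")));
    }
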
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
index 4e08bd0a7..2fca2ab85 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
@@ -29,7 +29,7 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
use stdx::{format_to, never};
use syntax::{algo, ast, AstNode, TextRange, TextSize};
-use vfs::AbsPathBuf;
+use vfs::{AbsPath, AbsPathBuf};
use crate::{
cargo_target_spec::CargoTargetSpec,
@@ -46,6 +46,7 @@ use crate::{
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
state.proc_macro_clients.clear();
state.proc_macro_changed = false;
+
state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
state.fetch_build_data_queue.request_op("reload workspace request".to_string());
Ok(())
@@ -84,6 +85,15 @@ pub(crate) fn handle_analyzer_status(
snap.workspaces.len(),
if snap.workspaces.len() == 1 { "" } else { "s" }
);
+
+ format_to!(
+ buf,
+ "Workspace root folders: {:?}",
+ snap.workspaces
+ .iter()
+ .flat_map(|ws| ws.workspace_definition_path())
+ .collect::<Vec<&AbsPath>>()
+ );
}
buf.push_str("\nAnalysis:\n");
buf.push_str(
@@ -134,6 +144,16 @@ pub(crate) fn handle_view_hir(
Ok(res)
}
+pub(crate) fn handle_view_mir(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_mir");
+ let position = from_proto::file_position(&snap, params)?;
+ let res = snap.analysis.view_mir(position)?;
+ Ok(res)
+}
+
pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
index e33589cc5..c7b513db9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
@@ -74,6 +74,14 @@ impl Request for ViewHir {
const METHOD: &'static str = "rust-analyzer/viewHir";
}
+pub enum ViewMir {}
+
+impl Request for ViewMir {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewMir";
+}
+
pub enum ViewFileText {}
impl Request for ViewFileText {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
index 30f1c53c1..12e5caf2c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
@@ -36,11 +36,41 @@ impl Progress {
}
impl GlobalState {
- pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
- let message = message;
- self.send_notification::<lsp_types::notification::ShowMessage>(
- lsp_types::ShowMessageParams { typ, message },
- )
+ pub(crate) fn show_message(
+ &mut self,
+ typ: lsp_types::MessageType,
+ message: String,
+ show_open_log_button: bool,
+ ) {
+ match self.config.open_server_logs() && show_open_log_button {
+ true => self.send_request::<lsp_types::request::ShowMessageRequest>(
+ lsp_types::ShowMessageRequestParams {
+ typ,
+ message,
+ actions: Some(vec![lsp_types::MessageActionItem {
+ title: "Open server logs".to_owned(),
+ properties: Default::default(),
+ }]),
+ },
+ |this, resp| {
+ let lsp_server::Response { error: None, result: Some(result), .. } = resp
+ else { return };
+ if let Ok(Some(_item)) = crate::from_json::<
+ <lsp_types::request::ShowMessageRequest as lsp_types::request::Request>::Result,
+ >(
+ lsp_types::request::ShowMessageRequest::METHOD, &result
+ ) {
+ this.send_notification::<lsp_ext::OpenServerLogs>(());
+ }
+ },
+ ),
+ false => self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams {
+ typ,
+ message,
+ },
+ ),
+ }
}
/// Sends a notification to the client containing the error `message`.
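
`show_message` now optionally attaches an "Open server logs" action: when the client supports it and the message concerns a workspace-fetch or build-script failure, it goes out as a `ShowMessageRequest` whose reply triggers the `OpenServerLogs` notification; otherwise it falls back to a plain `ShowMessage`. A toy sketch of that branching, with booleans standing in for the capability and error checks:

    // Toy stand-ins for the two notification shapes used above.
    #[derive(Debug)]
    enum Outgoing {
        ShowMessage { message: String },
        ShowMessageRequest { message: String, actions: Vec<&'static str> },
    }

    fn show_message(message: String, open_server_logs_supported: bool, show_open_log_button: bool) -> Outgoing {
        if open_server_logs_supported && show_open_log_button {
            Outgoing::ShowMessageRequest { message, actions: vec!["Open server logs"] }
        } else {
            Outgoing::ShowMessage { message }
        }
    }

    fn main() {
        println!("{:?}", show_message("Failed to load workspaces.".into(), true, true));
        println!("{:?}", show_message("Reload required.".into(), false, true));
    }
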
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index d1e38b33c..67a54cde6 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -111,12 +111,7 @@ impl fmt::Debug for Event {
impl GlobalState {
fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
- if self.config.linked_projects().is_empty()
- && self.config.detached_files().is_empty()
- && self.config.notifications().cargo_toml_not_found
- {
- self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
- };
+ self.update_status_or_notify();
if self.config.did_save_text_document_dynamic_registration() {
let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
@@ -323,17 +318,6 @@ impl GlobalState {
if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
for file_id in diagnostic_changes {
- let db = self.analysis_host.raw_database();
- let source_root = db.file_source_root(file_id);
- if db.source_root(source_root).is_library {
- // Only publish diagnostics for files in the workspace, not from crates.io deps
- // or the sysroot.
- // While theoretically these should never have errors, we have quite a few false
- // positives particularly in the stdlib, and those diagnostics would stay around
- // forever if we emitted them here.
- continue;
- }
-
let uri = file_id_to_url(&self.vfs.read().0, file_id);
let mut diagnostics =
self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
@@ -405,25 +389,38 @@ impl GlobalState {
});
}
+ self.update_status_or_notify();
+
+ let loop_duration = loop_start.elapsed();
+ if loop_duration > Duration::from_millis(100) && was_quiescent {
+ tracing::warn!("overly long loop turn: {:?}", loop_duration);
+ self.poke_rust_analyzer_developer(format!("overly long loop turn: {loop_duration:?}"));
+ }
+ Ok(())
+ }
+
+ fn update_status_or_notify(&mut self) {
let status = self.current_status();
if self.last_reported_status.as_ref() != Some(&status) {
self.last_reported_status = Some(status.clone());
- if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
- self.show_message(lsp_types::MessageType::ERROR, message.clone());
- }
-
if self.config.server_status_notification() {
self.send_notification::<lsp_ext::ServerStatusNotification>(status);
+ } else if let (health, Some(message)) = (status.health, &status.message) {
+ let open_log_button = tracing::enabled!(tracing::Level::ERROR)
+ && (self.fetch_build_data_error().is_err()
+ || self.fetch_workspace_error().is_err());
+ self.show_message(
+ match health {
+ lsp_ext::Health::Ok => lsp_types::MessageType::INFO,
+ lsp_ext::Health::Warning => lsp_types::MessageType::WARNING,
+ lsp_ext::Health::Error => lsp_types::MessageType::ERROR,
+ },
+ message.clone(),
+ open_log_button,
+ );
}
}
-
- let loop_duration = loop_start.elapsed();
- if loop_duration > Duration::from_millis(100) && was_quiescent {
- tracing::warn!("overly long loop turn: {:?}", loop_duration);
- self.poke_rust_analyzer_developer(format!("overly long loop turn: {loop_duration:?}"));
- }
- Ok(())
}
fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
@@ -456,6 +453,9 @@ impl GlobalState {
ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
ProjectWorkspaceProgress::End(workspaces) => {
self.fetch_workspaces_queue.op_completed(Some(workspaces));
+ if let Err(e) = self.fetch_workspace_error() {
+ tracing::error!("FetchWorkspaceError:\n{e}");
+ }
let old = Arc::clone(&self.workspaces);
self.switch_workspaces("fetched workspace".to_string());
@@ -477,6 +477,9 @@ impl GlobalState {
BuildDataProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
BuildDataProgress::End(build_data_result) => {
self.fetch_build_data_queue.op_completed(build_data_result);
+ if let Err(e) = self.fetch_build_data_error() {
+ tracing::error!("FetchBuildDataError:\n{e}");
+ }
self.switch_workspaces("fetched build data".to_string());
@@ -509,6 +512,7 @@ impl GlobalState {
self.vfs_progress_n_total = n_total;
self.vfs_progress_n_done = n_done;
+ // if n_total != 0 {
let state = if n_done == 0 {
Progress::Begin
} else if n_done < n_total {
@@ -523,7 +527,8 @@ impl GlobalState {
Some(format!("{n_done}/{n_total}")),
Some(Progress::fraction(n_done, n_total)),
None,
- )
+ );
+ // }
}
}
}
@@ -565,7 +570,10 @@ impl GlobalState {
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
flycheck::Progress::DidCancel => (Progress::End, None),
flycheck::Progress::DidFailToRestart(err) => {
- self.show_and_log_error("cargo check failed".to_string(), Some(err));
+ self.show_and_log_error(
+ "cargo check failed to start".to_string(),
+ Some(err),
+ );
return;
}
flycheck::Progress::DidFinish(result) => {
@@ -634,6 +642,7 @@ impl GlobalState {
.on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
.on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
.on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
+ .on::<lsp_ext::ViewMir>(handlers::handle_view_mir)
.on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
.on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
.on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
@@ -654,7 +663,7 @@ impl GlobalState {
.on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
.on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
- .on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
+ .on_no_retry::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
.on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
.on::<lsp_types::request::Completion>(handlers::handle_completion)
.on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
@@ -920,6 +929,7 @@ impl GlobalState {
this.show_message(
lsp_types::MessageType::WARNING,
error.to_string(),
+ false,
);
}
this.update_configuration(config);
@@ -971,10 +981,20 @@ impl GlobalState {
}
fn update_diagnostics(&mut self) {
+ let db = self.analysis_host.raw_database();
let subscriptions = self
.mem_docs
.iter()
.map(|path| self.vfs.read().0.file_id(path).unwrap())
+ .filter(|&file_id| {
+ let source_root = db.file_source_root(file_id);
+ // Only publish diagnostics for files in the workspace, not from crates.io deps
+ // or the sysroot.
+ // While theoretically these should never have errors, we have quite a few false
+ // positives particularly in the stdlib, and those diagnostics would stay around
+ // forever if we emitted them here.
+ !db.source_root(source_root).is_library
+ })
.collect::<Vec<_>>();
tracing::trace!("updating notifications for {:?}", subscriptions);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index abce0d737..1a6e1af2e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -12,17 +12,21 @@
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
-use std::{mem, sync::Arc};
+use std::{collections::hash_map::Entry, mem, sync::Arc};
use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
-use ide_db::base_db::{
- CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
- ProcMacroLoadResult, SourceRoot, VfsPath,
+use ide_db::{
+ base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, SourceRoot, VfsPath,
+ },
+ FxHashMap,
};
+use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
use syntax::SmolStr;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
@@ -52,7 +56,8 @@ pub(crate) enum BuildDataProgress {
impl GlobalState {
pub(crate) fn is_quiescent(&self) -> bool {
- !(self.fetch_workspaces_queue.op_in_progress()
+ !(self.last_reported_status.is_none()
+ || self.fetch_workspaces_queue.op_in_progress()
|| self.fetch_build_data_queue.op_in_progress()
|| self.vfs_progress_config_version < self.vfs_config_version
|| self.vfs_progress_n_done < self.vfs_progress_n_total)
@@ -85,36 +90,54 @@ impl GlobalState {
quiescent: self.is_quiescent(),
message: None,
};
+ let mut message = String::new();
if self.proc_macro_changed {
status.health = lsp_ext::Health::Warning;
- status.message =
- Some("Reload required due to source changes of a procedural macro.".into())
+ message.push_str("Reload required due to source changes of a procedural macro.\n\n");
}
if let Err(_) = self.fetch_build_data_error() {
status.health = lsp_ext::Health::Warning;
- status.message =
- Some("Failed to run build scripts of some packages, check the logs.".to_string());
+ message.push_str("Failed to run build scripts of some packages.\n\n");
}
if !self.config.cargo_autoreload()
&& self.is_quiescent()
&& self.fetch_workspaces_queue.op_requested()
{
status.health = lsp_ext::Health::Warning;
- status.message = Some("Workspace reload required".to_string())
+ message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n");
}
-
- if let Err(error) = self.fetch_workspace_error() {
- status.health = lsp_ext::Health::Error;
- status.message = Some(error)
- }
-
if self.config.linked_projects().is_empty()
&& self.config.detached_files().is_empty()
&& self.config.notifications().cargo_toml_not_found
{
status.health = lsp_ext::Health::Warning;
- status.message = Some("Workspace reload required".to_string())
+ message.push_str("Failed to discover workspace.\n\n");
+ }
+
+ for ws in self.workspaces.iter() {
+ let (ProjectWorkspace::Cargo { sysroot, .. }
+ | ProjectWorkspace::Json { sysroot, .. }
+ | ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
+ if let Err(Some(e)) = sysroot {
+ status.health = lsp_ext::Health::Warning;
+ message.push_str(e);
+ message.push_str("\n\n");
+ }
+ if let ProjectWorkspace::Cargo { rustc: Err(Some(e)), .. } = ws {
+ status.health = lsp_ext::Health::Warning;
+ message.push_str(e);
+ message.push_str("\n\n");
+ }
+ }
+
+ if let Err(_) = self.fetch_workspace_error() {
+ status.health = lsp_ext::Health::Error;
+ message.push_str("Failed to load workspaces.\n\n");
+ }
+
+ if !message.is_empty() {
+ status.message = Some(message.trim_end().to_owned());
}
status
}
@@ -197,8 +220,7 @@ impl GlobalState {
let _p = profile::span("GlobalState::switch_workspaces");
tracing::info!(%cause, "will switch workspaces");
- if let Err(error_message) = self.fetch_workspace_error() {
- self.show_and_log_error(error_message, None);
+ if let Err(_) = self.fetch_workspace_error() {
if !self.workspaces.is_empty() {
// It only makes sense to switch to a partially broken workspace
// if we don't have any workspace at all yet.
@@ -206,10 +228,6 @@ impl GlobalState {
}
}
- if let Err(error) = self.fetch_build_data_error() {
- self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
- }
-
let Some(workspaces) = self.fetch_workspaces_queue.last_op_result() else { return; };
let workspaces =
workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
@@ -388,7 +406,7 @@ impl GlobalState {
tracing::info!("did switch workspaces");
}
- fn fetch_workspace_error(&self) -> Result<(), String> {
+ pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
let mut buf = String::new();
let Some(last_op_result) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
@@ -409,7 +427,7 @@ impl GlobalState {
Err(buf)
}
- fn fetch_build_data_error(&self) -> Result<(), String> {
+ pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
let mut buf = String::new();
for ws in &self.fetch_build_data_queue.last_op_result().1 {
@@ -494,7 +512,69 @@ impl ProjectFolders {
let mut fsc = FileSetConfig::builder();
let mut local_filesets = vec![];
- for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
+ // Dedup source roots
+ // Depending on the project setup, we can have duplicated source roots, or for example in
+ // the case of the rustc workspace, we can end up with two source roots that are almost the
+ // same but not quite, like:
+ // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
+ // PackageRoot {
+ // is_local: true,
+ // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
+ // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
+ // }
+ //
+ // The first one comes from the explicit rustc workspace setting, which points at the rustc workspace itself.
+ // The second comes from that same rustc workspace when it is loaded as the actual project workspace.
+ // Having `is_local` differ in this way causes problems, especially when filtering diagnostics, as we don't report diagnostics for external libraries.
+ // So we need to deduplicate these roots. Usually deduplicating by `include` would be enough, but as the rustc example shows that is not sufficient here:
+ // we also need to coalesce the includes when they overlap.
+
+ let mut roots: Vec<_> = workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .update(|root| root.include.sort())
+ .sorted_by(|a, b| a.include.cmp(&b.include))
+ .collect();
+
+ // map that tracks indices of overlapping roots
+ let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
+ let mut done = false;
+
+ while !mem::replace(&mut done, true) {
+ // maps include paths to indices of the corresponding root
+ let mut include_to_idx = FxHashMap::default();
+ // Find and note down the indices of overlapping roots
+ for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
+ for include in &root.include {
+ match include_to_idx.entry(include) {
+ Entry::Occupied(e) => {
+ overlap_map.entry(*e.get()).or_default().push(idx);
+ }
+ Entry::Vacant(e) => {
+ e.insert(idx);
+ }
+ }
+ }
+ }
+ for (k, v) in overlap_map.drain() {
+ done = false;
+ for v in v {
+ let r = mem::replace(
+ &mut roots[v],
+ PackageRoot { is_local: false, include: vec![], exclude: vec![] },
+ );
+ roots[k].is_local |= r.is_local;
+ roots[k].include.extend(r.include);
+ roots[k].exclude.extend(r.exclude);
+ }
+ roots[k].include.sort();
+ roots[k].exclude.sort();
+ roots[k].include.dedup();
+ roots[k].exclude.dedup();
+ }
+ }
+
+ for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
let file_set_roots: Vec<VfsPath> =
root.include.iter().cloned().map(VfsPath::from).collect();
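
The loop above keeps merging roots whose `include` sets overlap until a fixed point is reached, then drops the emptied-out entries. A compressed, self-contained sketch of the same idea, with `String` paths standing in for `AbsPathBuf` and without the initial sort-by-include step:

    use std::collections::HashMap;
    use std::mem;

    // Simplified stand-in for project_model::PackageRoot.
    #[derive(Debug, Default)]
    struct PackageRoot {
        is_local: bool,
        include: Vec<String>,
        exclude: Vec<String>,
    }

    // Merge roots that share an include path, keeping the union of their fields,
    // and repeat until no two remaining roots overlap.
    fn coalesce(mut roots: Vec<PackageRoot>) -> Vec<PackageRoot> {
        let mut done = false;
        while !mem::replace(&mut done, true) {
            let mut include_to_idx: HashMap<String, usize> = HashMap::new();
            let mut overlap: HashMap<usize, Vec<usize>> = HashMap::new();
            for (idx, root) in roots.iter().enumerate().filter(|(_, r)| !r.include.is_empty()) {
                for inc in &root.include {
                    match include_to_idx.get(inc) {
                        Some(&first) => overlap.entry(first).or_default().push(idx),
                        None => {
                            include_to_idx.insert(inc.clone(), idx);
                        }
                    }
                }
            }
            for (k, vs) in overlap {
                done = false;
                for v in vs {
                    // Move the overlapping root out, leaving an empty placeholder behind.
                    let r = mem::take(&mut roots[v]);
                    roots[k].is_local |= r.is_local;
                    roots[k].include.extend(r.include);
                    roots[k].exclude.extend(r.exclude);
                }
                roots[k].include.sort();
                roots[k].include.dedup();
                roots[k].exclude.sort();
                roots[k].exclude.dedup();
            }
        }
        roots.into_iter().filter(|r| !r.include.is_empty()).collect()
    }

    fn main() {
        let merged = coalesce(vec![
            PackageRoot { is_local: false, include: vec!["a".into()], exclude: vec![] },
            PackageRoot { is_local: true, include: vec!["a".into(), "b".into()], exclude: vec![] },
        ]);
        assert_eq!(merged.len(), 1);
        assert!(merged[0].is_local);
    }
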
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
index 92029dc1d..7d97b69f8 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
@@ -50,7 +50,7 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
SymbolKind::Struct => lsp_types::SymbolKind::STRUCT,
SymbolKind::Enum => lsp_types::SymbolKind::ENUM,
SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
- SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE,
+ SymbolKind::Trait | SymbolKind::TraitAlias => lsp_types::SymbolKind::INTERFACE,
SymbolKind::Macro
| SymbolKind::BuiltinAttr
| SymbolKind::Attribute
@@ -135,6 +135,7 @@ pub(crate) fn completion_item_kind(
SymbolKind::Static => lsp_types::CompletionItemKind::VALUE,
SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT,
SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE,
+ SymbolKind::TraitAlias => lsp_types::CompletionItemKind::INTERFACE,
SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT,
SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT,
@@ -656,6 +657,7 @@ fn semantic_token_type_and_modifiers(
SymbolKind::Union => semantic_tokens::UNION,
SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
SymbolKind::Trait => semantic_tokens::INTERFACE,
+ SymbolKind::TraitAlias => semantic_tokens::INTERFACE,
SymbolKind::Macro => semantic_tokens::MACRO,
SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index 8fc493a23..305cf2d39 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -14,6 +14,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
+either = "1.7.0"
itertools = "0.10.5"
rowan = "0.15.10"
rustc_lexer = { version = "727.0.0", package = "rustc-ap-rustc_lexer" }
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 36ad5fddf..548b5ba8b 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -97,6 +97,7 @@ Item =
| Static
| Struct
| Trait
+| TraitAlias
| TypeAlias
| Union
| Use
@@ -240,10 +241,11 @@ Trait =
Attr* Visibility?
'unsafe'? 'auto'?
'trait' Name GenericParamList?
- (
- (':' TypeBoundList?)? WhereClause? AssocItemList
- | '=' TypeBoundList? WhereClause? ';'
- )
+ (':' TypeBoundList?)? WhereClause? AssocItemList
+
+TraitAlias =
+ Attr* Visibility?
+ 'trait' Name GenericParamList? '=' TypeBoundList? WhereClause? ';'
AssocItemList =
'{' Attr* AssocItem* '}'
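
The grammar now gives trait aliases their own `TraitAlias` node instead of folding them into `Trait`. The surface syntax this models is the unstable `trait_alias` feature, for example (nightly-only Rust):

    // Requires a nightly toolchain.
    #![feature(trait_alias)]

    // A trait alias: usable as a bound, but it cannot have its own items.
    trait IntIterator = Iterator<Item = i32>;

    fn sum(it: impl IntIterator) -> i32 {
        it.sum()
    }

    fn main() {
        println!("{}", sum([1, 2, 3].into_iter()));
    }
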
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index 385a4e0a3..1e691beff 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -13,7 +13,7 @@ pub mod prec;
use std::marker::PhantomData;
-use itertools::Either;
+use either::Either;
use crate::{
syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
@@ -25,7 +25,8 @@ pub use self::{
generated::{nodes::*, tokens::*},
node_ext::{
AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
- SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+ SlicePatComponents, StructKind, TraitOrAlias, TypeBoundKind, TypeOrConstParam,
+ VisibilityKind,
},
operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
@@ -128,6 +129,13 @@ where
}
}
+impl<L, R> HasAttrs for Either<L, R>
+where
+ L: HasAttrs,
+ R: HasAttrs,
+{
+}
+
mod support {
use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
index db66d08a7..c43d0830b 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -48,23 +48,30 @@ impl From<ast::IfExpr> for ElseBranch {
}
impl ast::IfExpr {
- pub fn then_branch(&self) -> Option<ast::BlockExpr> {
- self.children_after_condition().next()
+ pub fn condition(&self) -> Option<ast::Expr> {
+ // If the condition is a BlockExpr, check if the then body is missing.
+ // If it is, assume the condition is missing and the block is actually the then-branch.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
}
- pub fn else_branch(&self) -> Option<ElseBranch> {
- let res = match self.children_after_condition().nth(1) {
- Some(block) => ElseBranch::Block(block),
- None => {
- let elif = self.children_after_condition().next()?;
- ElseBranch::IfExpr(elif)
- }
- };
- Some(res)
+ pub fn then_branch(&self) -> Option<ast::BlockExpr> {
+ match support::children(self.syntax()).nth(1)? {
+ ast::Expr::BlockExpr(block) => Some(block),
+ _ => None,
+ }
}
- fn children_after_condition<N: AstNode>(&self) -> impl Iterator<Item = N> {
- self.syntax().children().skip(1).filter_map(N::cast)
+ pub fn else_branch(&self) -> Option<ElseBranch> {
+ match support::children(self.syntax()).nth(2)? {
+ ast::Expr::BlockExpr(block) => Some(ElseBranch::Block(block)),
+ ast::Expr::IfExpr(elif) => Some(ElseBranch::IfExpr(elif)),
+ _ => None,
+ }
}
}
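
`ast::IfExpr` now reads its pieces positionally from `support::children`: child 0 is the condition, child 1 the then-branch, child 2 the else-branch, with a special case when the then-block is missing. A usage sketch, assuming the workspace's `syntax` crate (published on crates.io as `ra_ap_syntax`) and the accessors defined above:

    use syntax::{ast, AstNode, SourceFile};

    fn main() {
        let file = SourceFile::parse("fn f() { if true { 1 } else { 2 }; }").tree();
        let if_expr = file.syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
        // condition() and then_branch() pick the first and second expression children.
        println!("cond: {:?}", if_expr.condition().map(|it| it.syntax().text().to_string()));
        println!("then: {:?}", if_expr.then_branch().map(|it| it.syntax().text().to_string()));
    }
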
@@ -356,7 +363,15 @@ impl ast::BlockExpr {
Some(it) => it,
None => return true,
};
- !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR)
+ match parent.kind() {
+ FOR_EXPR | IF_EXPR => parent
+ .children()
+ .filter(|it| ast::Expr::can_cast(it.kind()))
+ .next()
+ .map_or(true, |it| it == *self.syntax()),
+ LET_ELSE | FN | WHILE_EXPR | LOOP_EXPR | CONST_BLOCK_PAT => false,
+ _ => true,
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index 642a3bfc3..fe3248453 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -407,7 +407,21 @@ impl Trait {
pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TraitAlias {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TraitAlias {}
+impl ast::HasName for TraitAlias {}
+impl ast::HasVisibility for TraitAlias {}
+impl ast::HasGenericParams for TraitAlias {}
+impl ast::HasDocComments for TraitAlias {}
+impl TraitAlias {
+ pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
}
@@ -1573,6 +1587,7 @@ pub enum Item {
Static(Static),
Struct(Struct),
Trait(Trait),
+ TraitAlias(TraitAlias),
TypeAlias(TypeAlias),
Union(Union),
Use(Use),
@@ -2058,6 +2073,17 @@ impl AstNode for Trait {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
+impl AstNode for TraitAlias {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT_ALIAS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
impl AstNode for TypeAlias {
fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -3570,6 +3596,9 @@ impl From<Struct> for Item {
impl From<Trait> for Item {
fn from(node: Trait) -> Item { Item::Trait(node) }
}
+impl From<TraitAlias> for Item {
+ fn from(node: TraitAlias) -> Item { Item::TraitAlias(node) }
+}
impl From<TypeAlias> for Item {
fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) }
}
@@ -3596,6 +3625,7 @@ impl AstNode for Item {
| STATIC
| STRUCT
| TRAIT
+ | TRAIT_ALIAS
| TYPE_ALIAS
| UNION
| USE
@@ -3616,6 +3646,7 @@ impl AstNode for Item {
STATIC => Item::Static(Static { syntax }),
STRUCT => Item::Struct(Struct { syntax }),
TRAIT => Item::Trait(Trait { syntax }),
+ TRAIT_ALIAS => Item::TraitAlias(TraitAlias { syntax }),
TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }),
UNION => Item::Union(Union { syntax }),
USE => Item::Use(Use { syntax }),
@@ -3638,6 +3669,7 @@ impl AstNode for Item {
Item::Static(it) => &it.syntax,
Item::Struct(it) => &it.syntax,
Item::Trait(it) => &it.syntax,
+ Item::TraitAlias(it) => &it.syntax,
Item::TypeAlias(it) => &it.syntax,
Item::Union(it) => &it.syntax,
Item::Use(it) => &it.syntax,
@@ -3950,6 +3982,7 @@ impl AstNode for AnyHasAttrs {
| STATIC
| STRUCT
| TRAIT
+ | TRAIT_ALIAS
| TYPE_ALIAS
| UNION
| USE
@@ -4035,6 +4068,7 @@ impl AstNode for AnyHasDocComments {
| STATIC
| STRUCT
| TRAIT
+ | TRAIT_ALIAS
| TYPE_ALIAS
| UNION
| USE
@@ -4056,7 +4090,7 @@ impl AnyHasGenericParams {
}
impl AstNode for AnyHasGenericParams {
fn can_cast(kind: SyntaxKind) -> bool {
- matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION)
+ matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TRAIT_ALIAS | TYPE_ALIAS | UNION)
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
Self::can_cast(syntax.kind()).then_some(AnyHasGenericParams { syntax })
@@ -4108,6 +4142,7 @@ impl AstNode for AnyHasName {
| STATIC
| STRUCT
| TRAIT
+ | TRAIT_ALIAS
| TYPE_ALIAS
| UNION
| RENAME
@@ -4163,6 +4198,7 @@ impl AstNode for AnyHasVisibility {
| STATIC
| STRUCT
| TRAIT
+ | TRAIT_ALIAS
| TYPE_ALIAS
| UNION
| USE
@@ -4391,6 +4427,11 @@ impl std::fmt::Display for Trait {
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for TraitAlias {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for TypeAlias {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index fe82aa907..3308077da 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -680,6 +680,81 @@ impl TypeOrConstParam {
}
}
+impl AstNode for TypeOrConstParam {
+ fn can_cast(kind: SyntaxKind) -> bool
+ where
+ Self: Sized,
+ {
+ matches!(kind, SyntaxKind::TYPE_PARAM | SyntaxKind::CONST_PARAM)
+ }
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ let res = match syntax.kind() {
+ SyntaxKind::TYPE_PARAM => TypeOrConstParam::Type(ast::TypeParam { syntax }),
+ SyntaxKind::CONST_PARAM => TypeOrConstParam::Const(ast::ConstParam { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ TypeOrConstParam::Type(it) => it.syntax(),
+ TypeOrConstParam::Const(it) => it.syntax(),
+ }
+ }
+}
+
+impl HasAttrs for TypeOrConstParam {}
+
+#[derive(Debug, Clone)]
+pub enum TraitOrAlias {
+ Trait(ast::Trait),
+ TraitAlias(ast::TraitAlias),
+}
+
+impl TraitOrAlias {
+ pub fn name(&self) -> Option<ast::Name> {
+ match self {
+ TraitOrAlias::Trait(x) => x.name(),
+ TraitOrAlias::TraitAlias(x) => x.name(),
+ }
+ }
+}
+
+impl AstNode for TraitOrAlias {
+ fn can_cast(kind: SyntaxKind) -> bool
+ where
+ Self: Sized,
+ {
+ matches!(kind, SyntaxKind::TRAIT | SyntaxKind::TRAIT_ALIAS)
+ }
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ let res = match syntax.kind() {
+ SyntaxKind::TRAIT => TraitOrAlias::Trait(ast::Trait { syntax }),
+ SyntaxKind::TRAIT_ALIAS => TraitOrAlias::TraitAlias(ast::TraitAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ TraitOrAlias::Trait(it) => it.syntax(),
+ TraitOrAlias::TraitAlias(it) => it.syntax(),
+ }
+ }
+}
+
+impl HasAttrs for TraitOrAlias {}
+
pub enum VisibilityKind {
In(ast::Path),
PubCrate,
@@ -862,12 +937,6 @@ impl From<ast::Adt> for ast::Item {
}
}
-impl ast::IfExpr {
- pub fn condition(&self) -> Option<ast::Expr> {
- support::child(&self.syntax)
- }
-}
-
impl ast::MatchGuard {
pub fn condition(&self) -> Option<ast::Expr> {
support::child(&self.syntax)
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index aa2b7ed5c..3e43df2d0 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -1,7 +1,7 @@
//! Various traits that are implemented by ast nodes.
//!
//! The implementations are usually trivial, and live in generated.rs
-use itertools::Either;
+use either::Either;
use crate::{
ast::{self, support, AstChildren, AstNode, AstToken},
@@ -134,3 +134,5 @@ impl Iterator for AttrDocCommentIter {
})
}
}
+
+impl<A: HasName, B: HasName> HasName for Either<A, B> {}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
index 3ff6e0300..ccce71966 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
@@ -86,6 +86,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
"STATIC",
"CONST",
"TRAIT",
+ "TRAIT_ALIAS",
"IMPL",
"TYPE_ALIAS",
"MACRO_CALL",
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
index 03aa2c451..e954b5825 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
@@ -783,6 +783,7 @@ fn extract_struct_traits(ast: &mut AstSrc) {
"Enum",
"Variant",
"Trait",
+ "TraitAlias",
"Module",
"Static",
"Const",
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
index d1afd0039..cd1235fa6 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
@@ -180,7 +180,9 @@ impl Fixture {
let mut cfg_key_values = Vec::new();
let mut env = FxHashMap::default();
let mut introduce_new_source_root = None;
- let mut target_data_layout = None;
+ let mut target_data_layout = Some(
+ "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_string(),
+ );
for component in components[1..].iter() {
let (key, value) =
component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}"));
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 3b033e1aa..ca6de4061 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -44,6 +44,8 @@
//! try: infallible
//! unsize: sized
+#![rustc_coherence_is_core]
+
pub mod marker {
// region:sized
#[lang = "sized"]
@@ -510,6 +512,7 @@ pub mod fmt {
pub mod slice {
#[lang = "slice"]
impl<T> [T] {
+ #[lang = "slice_len_fn"]
pub fn len(&self) -> usize {
loop {}
}
@@ -533,6 +536,40 @@ pub mod option {
None => panic!("called `Option::unwrap()` on a `None` value"),
}
}
+
+ pub fn and<U>(self, optb: Option<U>) -> Option<U> {
+ loop {}
+ }
+ pub fn unwrap_or(self, default: T) -> T {
+ loop {}
+ }
+ // region:fn
+ pub fn and_then<U, F>(self, f: F) -> Option<U>
+ where
+ F: FnOnce(T) -> Option<U>,
+ {
+ loop {}
+ }
+ pub fn unwrap_or_else<F>(self, f: F) -> T
+ where
+ F: FnOnce() -> T,
+ {
+ loop {}
+ }
+ pub fn map_or<U, F>(self, default: U, f: F) -> U
+ where
+ F: FnOnce(T) -> U,
+ {
+ loop {}
+ }
+ pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
+ where
+ D: FnOnce() -> U,
+ F: FnOnce(T) -> U,
+ {
+ loop {}
+ }
+ // endregion:fn
}
}
// endregion:option
@@ -727,6 +764,20 @@ pub mod iter {
self
}
}
+ pub struct IntoIter<T, const N: usize>([T; N]);
+ impl<T, const N: usize> IntoIterator for [T; N] {
+ type Item = T;
+ type IntoIter = IntoIter<T, N>;
+ fn into_iter(self) -> I {
+ IntoIter(self)
+ }
+ }
+ impl<T, const N: usize> Iterator for IntoIter<T, N> {
+ type Item = T;
+ fn next(&mut self) -> Option<T> {
+ loop {}
+ }
+ }
}
pub use self::collect::IntoIterator;
}
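
Editorial aside: with the `Option` combinators above gated behind the `fn` region, test fixtures can use the usual closure-taking helpers without pulling in the real standard library. A rough sketch of a fixture body relying on them (the `//- minicore:` flag names are inferred from the region markers; the surrounding test harness is assumed):

    //- minicore: option, fn
    fn compute(flag: Option<i32>) -> i32 {
        // `map_or`, `unwrap_or_else`, and friends now resolve inside fixtures.
        flag.map_or(0, |it| it * 2)
    }
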
diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
index 67bdad2aa..729f84a81 100644
--- a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
@@ -31,8 +31,9 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf {
// example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
// 2) `<executable_name>`
// example: for cargo, this tries just `cargo`, which will succeed if `cargo` is on the $PATH
- // 3) `~/.cargo/bin/<executable_name>`
- // example: for cargo, this tries ~/.cargo/bin/cargo
+ // 3) `$CARGO_HOME/bin/<executable_name>`
+ // where $CARGO_HOME defaults to ~/.cargo (see https://doc.rust-lang.org/cargo/guide/cargo-home.html)
+ // example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
// It seems that this is a reasonable place to try for cargo, rustc, and rustup
let env_var = executable_name.to_ascii_uppercase();
if let Some(path) = env::var_os(env_var) {
@@ -43,8 +44,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf {
return executable_name.into();
}
- if let Some(mut path) = home::home_dir() {
- path.push(".cargo");
+ if let Some(mut path) = get_cargo_home() {
path.push("bin");
path.push(executable_name);
if let Some(path) = probe(path) {
@@ -60,6 +60,19 @@ fn lookup_in_path(exec: &str) -> bool {
env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some()
}
+fn get_cargo_home() -> Option<PathBuf> {
+ if let Some(path) = env::var_os("CARGO_HOME") {
+ return Some(path.into());
+ }
+
+ if let Some(mut path) = home::home_dir() {
+ path.push(".cargo");
+ return Some(path);
+ }
+
+ None
+}
+
fn probe(path: PathBuf) -> Option<PathBuf> {
let with_extension = match env::consts::EXE_EXTENSION {
"" => None,
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
index c3623a5cc..de1422032 100644
--- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp_ext.rs hash: d87477896dfe41d4
+lsp_ext.rs hash: 37f31ae648632897
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@@ -527,6 +527,17 @@ Primarily for debugging, but very useful for all people working on rust-analyzer
Returns a textual representation of the HIR of the function containing the cursor.
For debugging or when working on rust-analyzer itself.
+## View Mir
+
+**Method:** `rust-analyzer/viewMir`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `string`
+
+Returns a textual representation of the MIR of the function containing the cursor.
+For debugging or when working on rust-analyzer itself.
+
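
Editorial aside: a `rust-analyzer/viewMir` round trip looks like any other position-based request. A hedged sketch of the client-side payload (serde_json assumed as a dependency; the URI and position are invented, `TextDocumentPositionParams` is the standard LSP shape):

    fn view_mir_request() -> serde_json::Value {
        serde_json::json!({
            "jsonrpc": "2.0",
            "id": 1,
            "method": "rust-analyzer/viewMir",
            "params": {
                "textDocument": { "uri": "file:///tmp/demo/src/lib.rs" },
                "position": { "line": 5, "character": 10 }
            }
        })
        // The response's `result` is a plain string containing the rendered MIR.
    }
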
## View File Text
**Method:** `rust-analyzer/viewFileText`
diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc
index 50e3670a7..6937a7ed9 100644
--- a/src/tools/rust-analyzer/docs/user/generated_config.adoc
+++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc
@@ -71,6 +71,11 @@ cargo check --quiet --workspace --message-format=json --all-targets
Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
avoid checking unnecessary things.
--
+[[rust-analyzer.cargo.extraArgs]]rust-analyzer.cargo.extraArgs (default: `[]`)::
++
+--
+Extra arguments that are passed to every cargo invocation.
+--
[[rust-analyzer.cargo.extraEnv]]rust-analyzer.cargo.extraEnv (default: `{}`)::
+
--
@@ -537,6 +542,11 @@ Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closure
--
Whether to hide inlay type hints for constructors.
--
+[[rust-analyzer.interpret.tests]]rust-analyzer.interpret.tests (default: `false`)::
++
+--
+Enables the experimental support for interpreting tests.
+--
[[rust-analyzer.joinLines.joinAssignments]]rust-analyzer.joinLines.joinAssignments (default: `true`)::
+
--
@@ -699,7 +709,10 @@ Additional arguments to `rustfmt`.
+
--
Advanced option, fully override the command rust-analyzer uses for
-formatting.
+formatting. This should be the equivalent of `rustfmt` here, and
+not that of `cargo fmt`. The file contents will be passed on the
+standard input and the formatted result will be read from the
+standard output.
--
[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`)::
+
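
Editorial aside: the clarified `rust-analyzer.rustfmt.overrideCommand` wording above is the key contract: the override must behave like `rustfmt` itself, reading the unformatted file from stdin and writing the formatted result to stdout. A minimal sketch of that contract (std-only; the command name and arguments are placeholders, not rust-analyzer's actual code):

    use std::io::Write;
    use std::process::{Command, Stdio};

    // Pipe `source` through an external formatter and return its stdout.
    fn format_with(cmd: &str, args: &[&str], source: &str) -> std::io::Result<String> {
        let mut child = Command::new(cmd)
            .args(args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()?;
        // Write the file contents to stdin; dropping the handle closes it so the tool sees EOF.
        child.stdin.take().expect("stdin piped").write_all(source.as_bytes())?;
        let output = child.wait_with_output()?;
        Ok(String::from_utf8_lossy(&output.stdout).into_owned())
    }
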
diff --git a/src/tools/rust-analyzer/lib/la-arena/src/map.rs b/src/tools/rust-analyzer/lib/la-arena/src/map.rs
index b9d491da3..7fff2b09c 100644
--- a/src/tools/rust-analyzer/lib/la-arena/src/map.rs
+++ b/src/tools/rust-analyzer/lib/la-arena/src/map.rs
@@ -94,6 +94,12 @@ impl<T, V> ArenaMap<Idx<T>, V> {
.filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_mut()?)))
}
+ /// Returns an iterator over the arena indexes and values in the map.
+ // FIXME: Implement `IntoIterator` trait.
+ pub fn into_iter(self) -> impl Iterator<Item = (Idx<T>, V)> {
+ self.v.into_iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o?)))
+ }
+
/// Gets the given key's corresponding entry in the map for in-place manipulation.
pub fn entry(&mut self, idx: Idx<T>) -> Entry<'_, Idx<T>, V> {
let idx = Self::to_idx(idx);
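
Editorial aside: a quick usage sketch for the consuming iterator added above (the `Arena` setup is illustrative; as the FIXME notes, this is a plain method rather than an `IntoIterator` impl, so it must be called explicitly):

    use la_arena::{Arena, ArenaMap};

    fn demo() {
        let mut arena: Arena<String> = Arena::new();
        let a = arena.alloc("a".to_owned());
        let b = arena.alloc("b".to_owned());

        let mut map: ArenaMap<_, u32> = ArenaMap::default();
        map.insert(a, 1);
        map.insert(b, 2);

        // Consumes the map, yielding (Idx<String>, u32) pairs for the entries that are present.
        let pairs: Vec<_> = map.into_iter().collect();
        assert_eq!(pairs.len(), 2);
    }
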
diff --git a/src/tools/rust-installer/.github/workflows/ci.yml b/src/tools/rust-installer/.github/workflows/ci.yml
deleted file mode 100644
index 57a5cb76e..000000000
--- a/src/tools/rust-installer/.github/workflows/ci.yml
+++ /dev/null
@@ -1,23 +0,0 @@
----
-
-name: CI
-on: [push, pull_request]
-
-jobs:
- test:
- name: Test
- runs-on: ubuntu-latest
- env:
- LZMA_API_STATIC: 1
- steps:
- - name: Checkout the source code
- uses: actions/checkout@v2
-
- - name: Install Rust stable
- run: rustup toolchain update stable && rustup default stable
-
- - name: Build the tool
- run: cargo build
-
- - name: Execute the test suite
- run: ./test.sh
diff --git a/src/tools/rust-installer/Cargo.toml b/src/tools/rust-installer/Cargo.toml
index 38b81a1ba..97734f048 100644
--- a/src/tools/rust-installer/Cargo.toml
+++ b/src/tools/rust-installer/Cargo.toml
@@ -13,7 +13,7 @@ path = "src/main.rs"
anyhow = "1.0.19"
flate2 = "1.0.1"
rayon = "1.0"
-tar = "0.4.13"
+tar = "0.4.38"
walkdir = "2"
xz2 = "0.1.4"
num_cpus = "1"
@@ -22,7 +22,3 @@ remove_dir_all = "0.5"
[dependencies.clap]
features = ["derive"]
version = "3.1"
-
-[target."cfg(windows)".dependencies]
-lazy_static = "1"
-winapi = { version = "0.3", features = ["errhandlingapi", "handleapi", "ioapiset", "winerror", "winioctl", "winnt"] }
diff --git a/src/tools/rust-installer/LICENSE-APACHE b/src/tools/rust-installer/LICENSE-APACHE
deleted file mode 100644
index 16fe87b06..000000000
--- a/src/tools/rust-installer/LICENSE-APACHE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/src/tools/rust-installer/LICENSE-MIT b/src/tools/rust-installer/LICENSE-MIT
deleted file mode 100644
index e69282e38..000000000
--- a/src/tools/rust-installer/LICENSE-MIT
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2015 The Rust Project Developers
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/rust-installer/combine-installers.sh b/src/tools/rust-installer/combine-installers.sh
index 4931c34dd..bee5319fd 100755
--- a/src/tools/rust-installer/combine-installers.sh
+++ b/src/tools/rust-installer/combine-installers.sh
@@ -1,13 +1,4 @@
#!/bin/bash
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
set -ue
@@ -21,4 +12,4 @@ abs_path() {
}
src_dir="$(abs_path $(dirname "$0"))"
-cargo run --manifest-path="$src_dir/Cargo.toml" -- combine "$@"
+$CARGO run --manifest-path="$src_dir/Cargo.toml" -- combine "$@"
diff --git a/src/tools/rust-installer/gen-install-script.sh b/src/tools/rust-installer/gen-install-script.sh
index b4559d147..f112fd4b2 100755
--- a/src/tools/rust-installer/gen-install-script.sh
+++ b/src/tools/rust-installer/gen-install-script.sh
@@ -1,13 +1,4 @@
#!/bin/bash
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
set -ue
diff --git a/src/tools/rust-installer/gen-installer.sh b/src/tools/rust-installer/gen-installer.sh
index 198cfe742..eabd8c95c 100755
--- a/src/tools/rust-installer/gen-installer.sh
+++ b/src/tools/rust-installer/gen-installer.sh
@@ -1,13 +1,4 @@
#!/bin/bash
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
set -ue
@@ -21,4 +12,4 @@ abs_path() {
}
src_dir="$(abs_path $(dirname "$0"))"
-cargo run --manifest-path="$src_dir/Cargo.toml" -- generate "$@"
+$CARGO run --manifest-path="$src_dir/Cargo.toml" -- generate "$@"
diff --git a/src/tools/rust-installer/install-template.sh b/src/tools/rust-installer/install-template.sh
index 7790541a4..92a3f1f2c 100644
--- a/src/tools/rust-installer/install-template.sh
+++ b/src/tools/rust-installer/install-template.sh
@@ -1,13 +1,4 @@
#!/bin/bash
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
# No undefined variables
set -u
diff --git a/src/tools/rust-installer/make-tarballs.sh b/src/tools/rust-installer/make-tarballs.sh
index e9f88cc8b..e342007da 100755
--- a/src/tools/rust-installer/make-tarballs.sh
+++ b/src/tools/rust-installer/make-tarballs.sh
@@ -1,13 +1,4 @@
#!/bin/sh
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
set -ue
@@ -21,4 +12,4 @@ abs_path() {
}
src_dir="$(abs_path $(dirname "$0"))"
-cargo run --manifest-path="$src_dir/Cargo.toml" -- tarball "$@"
+$CARGO run --manifest-path="$src_dir/Cargo.toml" -- tarball "$@"
diff --git a/src/tools/rust-installer/src/combiner.rs b/src/tools/rust-installer/src/combiner.rs
index 2ec09d67e..abcf59cfe 100644
--- a/src/tools/rust-installer/src/combiner.rs
+++ b/src/tools/rust-installer/src/combiner.rs
@@ -1,7 +1,7 @@
use super::Scripter;
use super::Tarballer;
use crate::{
- compression::{CompressionFormat, CompressionFormats},
+ compression::{CompressionFormat, CompressionFormats, CompressionProfile},
util::*,
};
use anyhow::{bail, Context, Result};
@@ -48,6 +48,10 @@ actor! {
#[clap(value_name = "DIR")]
output_dir: String = "./dist",
+ /// The profile used to compress the tarball.
+ #[clap(value_name = "FORMAT", default_value_t)]
+ compression_profile: CompressionProfile,
+
/// The formats used to compress the tarball
#[clap(value_name = "FORMAT", default_value_t)]
compression_formats: CompressionFormats,
@@ -153,6 +157,7 @@ impl Combiner {
.work_dir(self.work_dir)
.input(self.package_name)
.output(path_to_str(&output)?.into())
+ .compression_profile(self.compression_profile)
.compression_formats(self.compression_formats.clone());
tarballer.run()?;
diff --git a/src/tools/rust-installer/src/compression.rs b/src/tools/rust-installer/src/compression.rs
index 7e20a9497..7c9c946e0 100644
--- a/src/tools/rust-installer/src/compression.rs
+++ b/src/tools/rust-installer/src/compression.rs
@@ -4,6 +4,37 @@ use rayon::prelude::*;
use std::{convert::TryFrom, fmt, io::Read, io::Write, path::Path, str::FromStr};
use xz2::{read::XzDecoder, write::XzEncoder};
+#[derive(Default, Debug, Copy, Clone)]
+pub enum CompressionProfile {
+ Fast,
+ #[default]
+ Balanced,
+ Best,
+}
+
+impl FromStr for CompressionProfile {
+ type Err = Error;
+
+ fn from_str(input: &str) -> Result<Self, Error> {
+ Ok(match input {
+ "fast" => Self::Fast,
+ "balanced" => Self::Balanced,
+ "best" => Self::Best,
+ other => anyhow::bail!("invalid compression profile: {other}"),
+ })
+ }
+}
+
+impl fmt::Display for CompressionProfile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CompressionProfile::Fast => f.write_str("fast"),
+ CompressionProfile::Balanced => f.write_str("balanced"),
+ CompressionProfile::Best => f.write_str("best"),
+ }
+ }
+}
+
#[derive(Debug, Copy, Clone)]
pub enum CompressionFormat {
Gz,
@@ -26,7 +57,11 @@ impl CompressionFormat {
}
}
- pub(crate) fn encode(&self, path: impl AsRef<Path>) -> Result<Box<dyn Encoder>, Error> {
+ pub(crate) fn encode(
+ &self,
+ path: impl AsRef<Path>,
+ profile: CompressionProfile,
+ ) -> Result<Box<dyn Encoder>, Error> {
let mut os = path.as_ref().as_os_str().to_os_string();
os.push(format!(".{}", self.extension()));
let path = Path::new(&os);
@@ -37,16 +72,33 @@ impl CompressionFormat {
let file = crate::util::create_new_file(path)?;
Ok(match self {
- CompressionFormat::Gz => Box::new(GzEncoder::new(file, flate2::Compression::best())),
+ CompressionFormat::Gz => Box::new(GzEncoder::new(
+ file,
+ match profile {
+ CompressionProfile::Fast => flate2::Compression::fast(),
+ CompressionProfile::Balanced => flate2::Compression::new(6),
+ CompressionProfile::Best => flate2::Compression::best(),
+ },
+ )),
CompressionFormat::Xz => {
- // Note that preset 6 takes about 173MB of memory per thread, so we limit the number of
- // threads to not blow out 32-bit hosts. (We could be more precise with
- // `MtStreamBuilder::memusage()` if desired.)
- let stream = xz2::stream::MtStreamBuilder::new()
- .threads(Ord::min(num_cpus::get(), 8) as u32)
- .preset(6)
- .encoder()?;
- Box::new(XzEncoder::new_stream(file, stream))
+ let encoder = match profile {
+ CompressionProfile::Fast => {
+ xz2::stream::MtStreamBuilder::new().threads(6).preset(1).encoder().unwrap()
+ }
+ CompressionProfile::Balanced => {
+ xz2::stream::MtStreamBuilder::new().threads(6).preset(6).encoder().unwrap()
+ }
+ CompressionProfile::Best => {
+ // Note that these are not actually the best compression settings for the
+ // produced artifacts; the production artifacts on static.rust-lang.org are
+ // produced by rust-lang/promote-release, which hosts the recompression logic
+ // and is tuned for optimal compression.
+ xz2::stream::MtStreamBuilder::new().threads(6).preset(9).encoder().unwrap()
+ }
+ };
+
+ let compressor = XzEncoder::new_stream(std::io::BufWriter::new(file), encoder);
+ Box::new(compressor)
}
})
}
@@ -94,10 +146,13 @@ impl fmt::Display for CompressionFormats {
if i != 0 {
write!(f, ",")?;
}
- fmt::Display::fmt(match format {
- CompressionFormat::Xz => "xz",
- CompressionFormat::Gz => "gz",
- }, f)?;
+ fmt::Display::fmt(
+ match format {
+ CompressionFormat::Xz => "xz",
+ CompressionFormat::Gz => "gz",
+ },
+ f,
+ )?;
}
Ok(())
}
@@ -158,20 +213,14 @@ impl Write for CombinedEncoder {
}
fn flush(&mut self) -> std::io::Result<()> {
- self.encoders
- .par_iter_mut()
- .map(|w| w.flush())
- .collect::<std::io::Result<Vec<()>>>()?;
+ self.encoders.par_iter_mut().map(|w| w.flush()).collect::<std::io::Result<Vec<()>>>()?;
Ok(())
}
}
impl Encoder for CombinedEncoder {
fn finish(self: Box<Self>) -> Result<(), Error> {
- self.encoders
- .into_par_iter()
- .map(|e| e.finish())
- .collect::<Result<Vec<()>, Error>>()?;
+ self.encoders.into_par_iter().map(|e| e.finish()).collect::<Result<Vec<()>, Error>>()?;
Ok(())
}
}
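
Editorial aside: the new `CompressionProfile` round-trips through `FromStr`/`Display` so clap can use it as a default-valued flag, and `encode` maps it onto concrete encoder settings. A sketch of the gzip side of that mapping (levels copied from the hunk above; flate2's `fast()` and `best()` are levels 1 and 9):

    use std::str::FromStr;
    use crate::compression::CompressionProfile;

    // The gzip level each profile selects in `encode` above.
    fn gz_level(profile: CompressionProfile) -> flate2::Compression {
        match profile {
            CompressionProfile::Fast => flate2::Compression::fast(),
            CompressionProfile::Balanced => flate2::Compression::new(6),
            CompressionProfile::Best => flate2::Compression::best(),
        }
    }

    fn profile_round_trip() {
        // `--compression-profile balanced` parses and displays back as "balanced".
        let profile = CompressionProfile::from_str("balanced").unwrap();
        assert_eq!(profile.to_string(), "balanced");
        let _ = gz_level(profile);
    }
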
diff --git a/src/tools/rust-installer/src/generator.rs b/src/tools/rust-installer/src/generator.rs
index 1e4d00b05..ddd105259 100644
--- a/src/tools/rust-installer/src/generator.rs
+++ b/src/tools/rust-installer/src/generator.rs
@@ -1,6 +1,6 @@
use super::Scripter;
use super::Tarballer;
-use crate::compression::CompressionFormats;
+use crate::compression::{CompressionFormats, CompressionProfile};
use crate::util::*;
use anyhow::{bail, format_err, Context, Result};
use std::collections::BTreeSet;
@@ -54,6 +54,10 @@ actor! {
#[clap(value_name = "DIR")]
output_dir: String = "./dist",
+ /// The profile used to compress the tarball.
+ #[clap(value_name = "FORMAT", default_value_t)]
+ compression_profile: CompressionProfile,
+
/// The formats used to compress the tarball
#[clap(value_name = "FORMAT", default_value_t)]
compression_formats: CompressionFormats,
@@ -113,6 +117,7 @@ impl Generator {
.work_dir(self.work_dir)
.input(self.package_name)
.output(path_to_str(&output)?.into())
+ .compression_profile(self.compression_profile)
.compression_formats(self.compression_formats.clone());
tarballer.run()?;
diff --git a/src/tools/rust-installer/src/remove_dir_all.rs b/src/tools/rust-installer/src/remove_dir_all.rs
deleted file mode 100644
index 110976528..000000000
--- a/src/tools/rust-installer/src/remove_dir_all.rs
+++ /dev/null
@@ -1,860 +0,0 @@
-#![allow(non_snake_case)]
-
-use std::io;
-use std::path::Path;
-
-#[cfg(not(windows))]
-pub fn remove_dir_all(path: &Path) -> io::Result<()> {
- ::std::fs::remove_dir_all(path)
-}
-
-#[cfg(windows)]
-pub fn remove_dir_all(path: &Path) -> io::Result<()> {
- win::remove_dir_all(path)
-}
-
-#[cfg(windows)]
-mod win {
- use winapi::ctypes::{c_uint, c_ushort};
- use winapi::shared::minwindef::{BOOL, DWORD, FALSE, FILETIME, LPVOID};
- use winapi::shared::winerror::{
- ERROR_CALL_NOT_IMPLEMENTED, ERROR_INSUFFICIENT_BUFFER, ERROR_NO_MORE_FILES,
- };
- use winapi::um::errhandlingapi::{GetLastError, SetLastError};
- use winapi::um::fileapi::{
- CreateFileW, FindFirstFileW, FindNextFileW, GetFileInformationByHandle,
- };
- use winapi::um::fileapi::{BY_HANDLE_FILE_INFORMATION, CREATE_ALWAYS, CREATE_NEW};
- use winapi::um::fileapi::{FILE_BASIC_INFO, FILE_RENAME_INFO, TRUNCATE_EXISTING};
- use winapi::um::fileapi::{OPEN_ALWAYS, OPEN_EXISTING};
- use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE};
- use winapi::um::ioapiset::DeviceIoControl;
- use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress};
- use winapi::um::minwinbase::{
- FileBasicInfo, FileRenameInfo, FILE_INFO_BY_HANDLE_CLASS, WIN32_FIND_DATAW,
- };
- use winapi::um::winbase::SECURITY_SQOS_PRESENT;
- use winapi::um::winbase::{
- FILE_FLAG_BACKUP_SEMANTICS, FILE_FLAG_DELETE_ON_CLOSE, FILE_FLAG_OPEN_REPARSE_POINT,
- };
- use winapi::um::winioctl::FSCTL_GET_REPARSE_POINT;
- use winapi::um::winnt::{DELETE, FILE_ATTRIBUTE_DIRECTORY, HANDLE, LPCWSTR};
- use winapi::um::winnt::{FILE_ATTRIBUTE_READONLY, FILE_ATTRIBUTE_REPARSE_POINT};
- use winapi::um::winnt::{FILE_GENERIC_WRITE, FILE_WRITE_DATA, GENERIC_READ, GENERIC_WRITE};
- use winapi::um::winnt::{FILE_READ_ATTRIBUTES, FILE_WRITE_ATTRIBUTES};
- use winapi::um::winnt::{FILE_SHARE_DELETE, FILE_SHARE_READ, FILE_SHARE_WRITE};
- use winapi::um::winnt::{IO_REPARSE_TAG_MOUNT_POINT, IO_REPARSE_TAG_SYMLINK, LARGE_INTEGER};
-
- use std::ffi::{OsStr, OsString};
- use std::io;
- use std::mem;
- use std::os::windows::ffi::{OsStrExt, OsStringExt};
- use std::path::{Path, PathBuf};
- use std::ptr;
- use std::sync::Arc;
-
- pub fn remove_dir_all(path: &Path) -> io::Result<()> {
- // On Windows it is not enough to just recursively remove the contents of a
- // directory and then the directory itself. Deleting does not happen
- // instantaneously, but is scheduled.
- // To work around this, we move the file or directory to some `base_dir`
- // right before deletion to avoid races.
- //
- // As `base_dir` we choose the parent dir of the directory we want to
- // remove. We very probably have permission to create files here, as we
- // already need write permission in this dir to delete the directory. And it
- // should be on the same volume.
- //
- // To handle files with names like `CON` and `morse .. .`, and when a
- // directory structure is so deep it needs long path names the path is first
- // converted to a `//?/`-path with `get_path()`.
- //
- // To make sure we don't leave a moved file laying around if the process
- // crashes before we can delete the file, we do all operations on an file
- // handle. By opening a file with `FILE_FLAG_DELETE_ON_CLOSE` Windows will
- // always delete the file when the handle closes.
- //
- // All files are renamed to be in the `base_dir`, and have their name
- // changed to "rm-<counter>". After every rename the counter is increased.
- // Rename should not overwrite possibly existing files in the base dir. So
- // if it fails with `AlreadyExists`, we just increase the counter and try
- // again.
- //
- // For read-only files and directories we first have to remove the read-only
- // attribute before we can move or delete them. This also removes the
- // attribute from possible hardlinks to the file, so just before closing we
- // restore the read-only attribute.
- //
- // If 'path' points to a directory symlink or junction we should not
- // recursively remove the target of the link, but only the link itself.
- //
- // Moving and deleting is guaranteed to succeed if we are able to open the
- // file with `DELETE` permission. If others have the file open we only have
- // `DELETE` permission if they have specified `FILE_SHARE_DELETE`. We can
- // also delete the file now, but it will not disappear until all others have
- // closed the file. But no-one can open the file after we have flagged it
- // for deletion.
-
- // Open the path once to get the canonical path, file type and attributes.
- let (path, metadata) = {
- let mut opts = OpenOptions::new();
- opts.access_mode(FILE_READ_ATTRIBUTES);
- opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT);
- let file = File::open(path, &opts)?;
- (get_path(&file)?, file.file_attr()?)
- };
-
- let mut ctx = RmdirContext {
- base_dir: match path.parent() {
- Some(dir) => dir,
- None => {
- return Err(io::Error::new(
- io::ErrorKind::PermissionDenied,
- "can't delete root directory",
- ))
- }
- },
- readonly: metadata.perm().readonly(),
- counter: 0,
- };
-
- let filetype = metadata.file_type();
- if filetype.is_dir() {
- remove_dir_all_recursive(path.as_ref(), &mut ctx)
- } else if filetype.is_symlink_dir() {
- remove_item(path.as_ref(), &mut ctx)
- } else {
- Err(io::Error::new(
- io::ErrorKind::PermissionDenied,
- "Not a directory",
- ))
- }
- }
-
- fn readdir(p: &Path) -> io::Result<ReadDir> {
- let root = p.to_path_buf();
- let star = p.join("*");
- let path = to_u16s(&star)?;
-
- unsafe {
- let mut wfd = mem::zeroed();
- let find_handle = FindFirstFileW(path.as_ptr(), &mut wfd);
- if find_handle != INVALID_HANDLE_VALUE {
- Ok(ReadDir {
- handle: FindNextFileHandle(find_handle),
- root: Arc::new(root),
- first: Some(wfd),
- })
- } else {
- Err(io::Error::last_os_error())
- }
- }
- }
-
- struct RmdirContext<'a> {
- base_dir: &'a Path,
- readonly: bool,
- counter: u64,
- }
-
- fn remove_dir_all_recursive(path: &Path, ctx: &mut RmdirContext) -> io::Result<()> {
- let dir_readonly = ctx.readonly;
- for child in readdir(path)? {
- let child = child?;
- let child_type = child.file_type()?;
- ctx.readonly = child.metadata()?.perm().readonly();
- if child_type.is_dir() {
- remove_dir_all_recursive(&child.path(), ctx)?;
- } else {
- remove_item(&child.path().as_ref(), ctx)?;
- }
- }
- ctx.readonly = dir_readonly;
- remove_item(path, ctx)
- }
-
- fn remove_item(path: &Path, ctx: &mut RmdirContext) -> io::Result<()> {
- if !ctx.readonly {
- let mut opts = OpenOptions::new();
- opts.access_mode(DELETE);
- opts.custom_flags(
- FILE_FLAG_BACKUP_SEMANTICS | // delete directory
- FILE_FLAG_OPEN_REPARSE_POINT | // delete symlink
- FILE_FLAG_DELETE_ON_CLOSE,
- );
- let file = File::open(path, &opts)?;
- move_item(&file, ctx)
- } else {
- // remove read-only permision
- set_perm(&path, FilePermissions::new())?;
- // move and delete file, similar to !readonly.
- // only the access mode is different.
- let mut opts = OpenOptions::new();
- opts.access_mode(DELETE | FILE_WRITE_ATTRIBUTES);
- opts.custom_flags(
- FILE_FLAG_BACKUP_SEMANTICS
- | FILE_FLAG_OPEN_REPARSE_POINT
- | FILE_FLAG_DELETE_ON_CLOSE,
- );
- let file = File::open(path, &opts)?;
- move_item(&file, ctx)?;
- // restore read-only flag just in case there are other hard links
- let mut perm = FilePermissions::new();
- perm.set_readonly(true);
- let _ = file.set_perm(perm); // ignore if this fails
- Ok(())
- }
- }
-
- macro_rules! compat_fn {
- ($module:ident: $(
- fn $symbol:ident($($argname:ident: $argtype:ty),*)
- -> $rettype:ty {
- $($body:expr);*
- }
- )*) => ($(
- #[allow(unused_variables)]
- unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
- use std::sync::atomic::{AtomicUsize, Ordering};
- use std::mem;
- use std::ffi::CString;
- type F = unsafe extern "system" fn($($argtype),*) -> $rettype;
-
- lazy_static! { static ref PTR: AtomicUsize = AtomicUsize::new(0);}
-
- fn lookup(module: &str, symbol: &str) -> Option<usize> {
- let mut module: Vec<u16> = module.encode_utf16().collect();
- module.push(0);
- let symbol = CString::new(symbol).unwrap();
- unsafe {
- let handle = GetModuleHandleW(module.as_ptr());
- match GetProcAddress(handle, symbol.as_ptr()) as usize {
- 0 => None,
- n => Some(n),
- }
- }
- }
-
- fn store_func(ptr: &AtomicUsize, module: &str, symbol: &str,
- fallback: usize) -> usize {
- let value = lookup(module, symbol).unwrap_or(fallback);
- ptr.store(value, Ordering::SeqCst);
- value
- }
-
- fn load() -> usize {
- store_func(&PTR, stringify!($module), stringify!($symbol), fallback as usize)
- }
- unsafe extern "system" fn fallback($($argname: $argtype),*)
- -> $rettype {
- $($body);*
- }
-
- let addr = match PTR.load(Ordering::SeqCst) {
- 0 => load(),
- n => n,
- };
- mem::transmute::<usize, F>(addr)($($argname),*)
- }
- )*)
- }
-
- compat_fn! {
- kernel32:
- fn GetFinalPathNameByHandleW(_hFile: HANDLE,
- _lpszFilePath: LPCWSTR,
- _cchFilePath: DWORD,
- _dwFlags: DWORD) -> DWORD {
- SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0
- }
- fn SetFileInformationByHandle(_hFile: HANDLE,
- _FileInformationClass: FILE_INFO_BY_HANDLE_CLASS,
- _lpFileInformation: LPVOID,
- _dwBufferSize: DWORD) -> BOOL {
- SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0
- }
- }
-
- fn cvt(i: i32) -> io::Result<i32> {
- if i == 0 {
- Err(io::Error::last_os_error())
- } else {
- Ok(i)
- }
- }
-
- fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
- fn inner(s: &OsStr) -> io::Result<Vec<u16>> {
- let mut maybe_result: Vec<u16> = s.encode_wide().collect();
- if maybe_result.iter().any(|&u| u == 0) {
- return Err(io::Error::new(
- io::ErrorKind::InvalidInput,
- "strings passed to WinAPI cannot contain NULs",
- ));
- }
- maybe_result.push(0);
- Ok(maybe_result)
- }
- inner(s.as_ref())
- }
-
- fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] {
- match v.iter().position(|c| *c == 0) {
- // don't include the 0
- Some(i) => &v[..i],
- None => v,
- }
- }
-
- fn fill_utf16_buf<F1, F2, T>(mut f1: F1, f2: F2) -> io::Result<T>
- where
- F1: FnMut(*mut u16, DWORD) -> DWORD,
- F2: FnOnce(&[u16]) -> T,
- {
- // Start off with a stack buf but then spill over to the heap if we end up
- // needing more space.
- let mut stack_buf = [0u16; 512];
- let mut heap_buf = Vec::new();
- unsafe {
- let mut n = stack_buf.len();
- loop {
- let buf = if n <= stack_buf.len() {
- &mut stack_buf[..]
- } else {
- let extra = n - heap_buf.len();
- heap_buf.reserve(extra);
- heap_buf.set_len(n);
- &mut heap_buf[..]
- };
-
- // This function is typically called on windows API functions which
- // will return the correct length of the string, but these functions
- // also return the `0` on error. In some cases, however, the
- // returned "correct length" may actually be 0!
- //
- // To handle this case we call `SetLastError` to reset it to 0 and
- // then check it again if we get the "0 error value". If the "last
- // error" is still 0 then we interpret it as a 0 length buffer and
- // not an actual error.
- SetLastError(0);
- let k = match f1(buf.as_mut_ptr(), n as DWORD) {
- 0 if GetLastError() == 0 => 0,
- 0 => return Err(io::Error::last_os_error()),
- n => n,
- } as usize;
- if k == n && GetLastError() == ERROR_INSUFFICIENT_BUFFER {
- n *= 2;
- } else if k >= n {
- n = k;
- } else {
- return Ok(f2(&buf[..k]));
- }
- }
- }
- }
-
- #[derive(Clone, PartialEq, Eq, Debug, Default)]
- struct FilePermissions {
- readonly: bool,
- }
-
- impl FilePermissions {
- fn new() -> FilePermissions {
- Default::default()
- }
- fn readonly(&self) -> bool {
- self.readonly
- }
- fn set_readonly(&mut self, readonly: bool) {
- self.readonly = readonly
- }
- }
-
- #[derive(Clone)]
- struct OpenOptions {
- // generic
- read: bool,
- write: bool,
- append: bool,
- truncate: bool,
- create: bool,
- create_new: bool,
- // system-specific
- custom_flags: u32,
- access_mode: Option<DWORD>,
- attributes: DWORD,
- share_mode: DWORD,
- security_qos_flags: DWORD,
- security_attributes: usize, // FIXME: should be a reference
- }
-
- impl OpenOptions {
- fn new() -> OpenOptions {
- OpenOptions {
- // generic
- read: false,
- write: false,
- append: false,
- truncate: false,
- create: false,
- create_new: false,
- // system-specific
- custom_flags: 0,
- access_mode: None,
- share_mode: FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
- attributes: 0,
- security_qos_flags: 0,
- security_attributes: 0,
- }
- }
- fn custom_flags(&mut self, flags: u32) {
- self.custom_flags = flags;
- }
- fn access_mode(&mut self, access_mode: u32) {
- self.access_mode = Some(access_mode);
- }
-
- fn get_access_mode(&self) -> io::Result<DWORD> {
- const ERROR_INVALID_PARAMETER: i32 = 87;
-
- match (self.read, self.write, self.append, self.access_mode) {
- (_, _, _, Some(mode)) => Ok(mode),
- (true, false, false, None) => Ok(GENERIC_READ),
- (false, true, false, None) => Ok(GENERIC_WRITE),
- (true, true, false, None) => Ok(GENERIC_READ | GENERIC_WRITE),
- (false, _, true, None) => Ok(FILE_GENERIC_WRITE & !FILE_WRITE_DATA),
- (true, _, true, None) => Ok(GENERIC_READ | (FILE_GENERIC_WRITE & !FILE_WRITE_DATA)),
- (false, false, false, None) => {
- Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER))
- }
- }
- }
-
- fn get_creation_mode(&self) -> io::Result<DWORD> {
- const ERROR_INVALID_PARAMETER: i32 = 87;
-
- match (self.write, self.append) {
- (true, false) => {}
- (false, false) => {
- if self.truncate || self.create || self.create_new {
- return Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
- }
- }
- (_, true) => {
- if self.truncate && !self.create_new {
- return Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
- }
- }
- }
-
- Ok(match (self.create, self.truncate, self.create_new) {
- (false, false, false) => OPEN_EXISTING,
- (true, false, false) => OPEN_ALWAYS,
- (false, true, false) => TRUNCATE_EXISTING,
- (true, true, false) => CREATE_ALWAYS,
- (_, _, true) => CREATE_NEW,
- })
- }
-
- fn get_flags_and_attributes(&self) -> DWORD {
- self.custom_flags
- | self.attributes
- | self.security_qos_flags
- | if self.security_qos_flags != 0 {
- SECURITY_SQOS_PRESENT
- } else {
- 0
- }
- | if self.create_new {
- FILE_FLAG_OPEN_REPARSE_POINT
- } else {
- 0
- }
- }
- }
-
- struct File {
- handle: Handle,
- }
-
- impl File {
- fn open(path: &Path, opts: &OpenOptions) -> io::Result<File> {
- let path = to_u16s(path)?;
- let handle = unsafe {
- CreateFileW(
- path.as_ptr(),
- opts.get_access_mode()?,
- opts.share_mode,
- opts.security_attributes as *mut _,
- opts.get_creation_mode()?,
- opts.get_flags_and_attributes(),
- ptr::null_mut(),
- )
- };
- if handle == INVALID_HANDLE_VALUE {
- Err(io::Error::last_os_error())
- } else {
- Ok(File {
- handle: Handle::new(handle),
- })
- }
- }
-
- fn file_attr(&self) -> io::Result<FileAttr> {
- unsafe {
- let mut info: BY_HANDLE_FILE_INFORMATION = mem::zeroed();
- cvt(GetFileInformationByHandle(self.handle.raw(), &mut info))?;
- let mut attr = FileAttr {
- attributes: info.dwFileAttributes,
- creation_time: info.ftCreationTime,
- last_access_time: info.ftLastAccessTime,
- last_write_time: info.ftLastWriteTime,
- file_size: ((info.nFileSizeHigh as u64) << 32) | (info.nFileSizeLow as u64),
- reparse_tag: 0,
- };
- if attr.is_reparse_point() {
- let mut b = [0; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
- if let Ok((_, buf)) = self.reparse_point(&mut b) {
- attr.reparse_tag = buf.ReparseTag;
- }
- }
- Ok(attr)
- }
- }
-
- fn set_attributes(&self, attr: DWORD) -> io::Result<()> {
- let zero: LARGE_INTEGER = unsafe { mem::zeroed() };
-
- let mut info = FILE_BASIC_INFO {
- CreationTime: zero, // do not change
- LastAccessTime: zero, // do not change
- LastWriteTime: zero, // do not change
- ChangeTime: zero, // do not change
- FileAttributes: attr,
- };
- let size = mem::size_of_val(&info);
- cvt(unsafe {
- SetFileInformationByHandle(
- self.handle.raw(),
- FileBasicInfo,
- &mut info as *mut _ as *mut _,
- size as DWORD,
- )
- })?;
- Ok(())
- }
-
- fn rename(&self, new: &Path, replace: bool) -> io::Result<()> {
- // &self must be opened with DELETE permission
- use std::iter;
- #[cfg(target_arch = "x86")]
- const STRUCT_SIZE: usize = 12;
- #[cfg(target_arch = "x86_64")]
- const STRUCT_SIZE: usize = 20;
-
- // FIXME: check for internal NULs in 'new'
- let mut data: Vec<u16> = iter::repeat(0u16)
- .take(STRUCT_SIZE / 2)
- .chain(new.as_os_str().encode_wide())
- .collect();
- data.push(0);
- let size = data.len() * 2;
-
- unsafe {
- // Thanks to alignment guarantees on Windows this works
- // (8 for 32-bit and 16 for 64-bit)
- let info = data.as_mut_ptr() as *mut FILE_RENAME_INFO;
- // The type of ReplaceIfExists is BOOL, but it actually expects a
- // BOOLEAN. This means true is -1, not c::TRUE.
- (*info).ReplaceIfExists = if replace { -1 } else { FALSE };
- (*info).RootDirectory = ptr::null_mut();
- (*info).FileNameLength = (size - STRUCT_SIZE) as DWORD;
- cvt(SetFileInformationByHandle(
- self.handle().raw(),
- FileRenameInfo,
- data.as_mut_ptr() as *mut _ as *mut _,
- size as DWORD,
- ))?;
- Ok(())
- }
- }
- fn set_perm(&self, perm: FilePermissions) -> io::Result<()> {
- let attr = self.file_attr()?.attributes;
- if perm.readonly == (attr & FILE_ATTRIBUTE_READONLY != 0) {
- Ok(())
- } else if perm.readonly {
- self.set_attributes(attr | FILE_ATTRIBUTE_READONLY)
- } else {
- self.set_attributes(attr & !FILE_ATTRIBUTE_READONLY)
- }
- }
-
- fn handle(&self) -> &Handle {
- &self.handle
- }
-
- fn reparse_point<'a>(
- &self,
- space: &'a mut [u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE],
- ) -> io::Result<(DWORD, &'a REPARSE_DATA_BUFFER)> {
- unsafe {
- let mut bytes = 0;
- cvt({
- DeviceIoControl(
- self.handle.raw(),
- FSCTL_GET_REPARSE_POINT,
- ptr::null_mut(),
- 0,
- space.as_mut_ptr() as *mut _,
- space.len() as DWORD,
- &mut bytes,
- ptr::null_mut(),
- )
- })?;
- Ok((bytes, &*(space.as_ptr() as *const REPARSE_DATA_BUFFER)))
- }
- }
- }
-
- #[derive(Copy, Clone, PartialEq, Eq, Hash)]
- enum FileType {
- Dir,
- File,
- SymlinkFile,
- SymlinkDir,
- ReparsePoint,
- MountPoint,
- }
-
- impl FileType {
- fn new(attrs: DWORD, reparse_tag: DWORD) -> FileType {
- match (
- attrs & FILE_ATTRIBUTE_DIRECTORY != 0,
- attrs & FILE_ATTRIBUTE_REPARSE_POINT != 0,
- reparse_tag,
- ) {
- (false, false, _) => FileType::File,
- (true, false, _) => FileType::Dir,
- (false, true, IO_REPARSE_TAG_SYMLINK) => FileType::SymlinkFile,
- (true, true, IO_REPARSE_TAG_SYMLINK) => FileType::SymlinkDir,
- (true, true, IO_REPARSE_TAG_MOUNT_POINT) => FileType::MountPoint,
- (_, true, _) => FileType::ReparsePoint,
- // Note: if a _file_ has a reparse tag of the type IO_REPARSE_TAG_MOUNT_POINT it is
- // invalid, as junctions always have to be dirs. We set the filetype to ReparsePoint
- // to indicate it is something symlink-like, but not something you can follow.
- }
- }
-
- fn is_dir(&self) -> bool {
- *self == FileType::Dir
- }
- fn is_symlink_dir(&self) -> bool {
- *self == FileType::SymlinkDir || *self == FileType::MountPoint
- }
- }
-
- impl DirEntry {
- fn new(root: &Arc<PathBuf>, wfd: &WIN32_FIND_DATAW) -> Option<DirEntry> {
- let first_bytes = &wfd.cFileName[0..3];
- if first_bytes.starts_with(&[46, 0]) || first_bytes.starts_with(&[46, 46, 0]) {
- None
- } else {
- Some(DirEntry {
- root: root.clone(),
- data: *wfd,
- })
- }
- }
-
- fn path(&self) -> PathBuf {
- self.root.join(&self.file_name())
- }
-
- fn file_name(&self) -> OsString {
- let filename = truncate_utf16_at_nul(&self.data.cFileName);
- OsString::from_wide(filename)
- }
-
- fn file_type(&self) -> io::Result<FileType> {
- Ok(FileType::new(
- self.data.dwFileAttributes,
- /* reparse_tag = */ self.data.dwReserved0,
- ))
- }
-
- fn metadata(&self) -> io::Result<FileAttr> {
- Ok(FileAttr {
- attributes: self.data.dwFileAttributes,
- creation_time: self.data.ftCreationTime,
- last_access_time: self.data.ftLastAccessTime,
- last_write_time: self.data.ftLastWriteTime,
- file_size: ((self.data.nFileSizeHigh as u64) << 32)
- | (self.data.nFileSizeLow as u64),
- reparse_tag: if self.data.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT != 0 {
- // reserved unless this is a reparse point
- self.data.dwReserved0
- } else {
- 0
- },
- })
- }
- }
-
- struct DirEntry {
- root: Arc<PathBuf>,
- data: WIN32_FIND_DATAW,
- }
-
- struct ReadDir {
- handle: FindNextFileHandle,
- root: Arc<PathBuf>,
- first: Option<WIN32_FIND_DATAW>,
- }
-
- impl Iterator for ReadDir {
- type Item = io::Result<DirEntry>;
- fn next(&mut self) -> Option<io::Result<DirEntry>> {
- if let Some(first) = self.first.take() {
- if let Some(e) = DirEntry::new(&self.root, &first) {
- return Some(Ok(e));
- }
- }
- unsafe {
- let mut wfd = mem::zeroed();
- loop {
- if FindNextFileW(self.handle.0, &mut wfd) == 0 {
- if GetLastError() == ERROR_NO_MORE_FILES {
- return None;
- } else {
- return Some(Err(io::Error::last_os_error()));
- }
- }
- if let Some(e) = DirEntry::new(&self.root, &wfd) {
- return Some(Ok(e));
- }
- }
- }
- }
- }
-
- #[derive(Clone)]
- struct FileAttr {
- attributes: DWORD,
- creation_time: FILETIME,
- last_access_time: FILETIME,
- last_write_time: FILETIME,
- file_size: u64,
- reparse_tag: DWORD,
- }
-
- impl FileAttr {
- fn perm(&self) -> FilePermissions {
- FilePermissions {
- readonly: self.attributes & FILE_ATTRIBUTE_READONLY != 0,
- }
- }
-
- fn file_type(&self) -> FileType {
- FileType::new(self.attributes, self.reparse_tag)
- }
-
- fn is_reparse_point(&self) -> bool {
- self.attributes & FILE_ATTRIBUTE_REPARSE_POINT != 0
- }
- }
-
- #[repr(C)]
- struct REPARSE_DATA_BUFFER {
- ReparseTag: c_uint,
- ReparseDataLength: c_ushort,
- Reserved: c_ushort,
- rest: (),
- }
-
- const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
-
- /// An owned container for `HANDLE` object, closing them on Drop.
- ///
- /// All methods are inherited through a `Deref` impl to `RawHandle`
- struct Handle(RawHandle);
-
- use std::ops::Deref;
-
- /// A wrapper type for `HANDLE` objects to give them proper Send/Sync inference
- /// as well as Rust-y methods.
- ///
- /// This does **not** drop the handle when it goes out of scope, use `Handle`
- /// instead for that.
- #[derive(Copy, Clone)]
- struct RawHandle(HANDLE);
-
- unsafe impl Send for RawHandle {}
- unsafe impl Sync for RawHandle {}
-
- impl Handle {
- fn new(handle: HANDLE) -> Handle {
- Handle(RawHandle::new(handle))
- }
- }
-
- impl Deref for Handle {
- type Target = RawHandle;
- fn deref(&self) -> &RawHandle {
- &self.0
- }
- }
-
- impl Drop for Handle {
- fn drop(&mut self) {
- unsafe {
- let _ = CloseHandle(self.raw());
- }
- }
- }
-
- impl RawHandle {
- fn new(handle: HANDLE) -> RawHandle {
- RawHandle(handle)
- }
-
- fn raw(&self) -> HANDLE {
- self.0
- }
- }
-
- struct FindNextFileHandle(HANDLE);
-
- fn get_path(f: &File) -> io::Result<PathBuf> {
- fill_utf16_buf(
- |buf, sz| unsafe {
- GetFinalPathNameByHandleW(f.handle.raw(), buf, sz, VOLUME_NAME_DOS)
- },
- |buf| PathBuf::from(OsString::from_wide(buf)),
- )
- }
-
- fn move_item(file: &File, ctx: &mut RmdirContext) -> io::Result<()> {
- let mut tmpname = ctx.base_dir.join(format! {"rm-{}", ctx.counter});
- ctx.counter += 1;
- // Try to rename the file. If it already exists, just retry with an other
- // filename.
- while let Err(err) = file.rename(tmpname.as_ref(), false) {
- if err.kind() != io::ErrorKind::AlreadyExists {
- return Err(err);
- };
- tmpname = ctx.base_dir.join(format!("rm-{}", ctx.counter));
- ctx.counter += 1;
- }
- Ok(())
- }
-
- fn set_perm(path: &Path, perm: FilePermissions) -> io::Result<()> {
- let mut opts = OpenOptions::new();
- opts.access_mode(FILE_READ_ATTRIBUTES | FILE_WRITE_ATTRIBUTES);
- opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS);
- let file = File::open(path, &opts)?;
- file.set_perm(perm)
- }
-
- const VOLUME_NAME_DOS: DWORD = 0x0;
-}
diff --git a/src/tools/rust-installer/src/tarballer.rs b/src/tools/rust-installer/src/tarballer.rs
index 76f5af3fa..7353a49fe 100644
--- a/src/tools/rust-installer/src/tarballer.rs
+++ b/src/tools/rust-installer/src/tarballer.rs
@@ -1,12 +1,12 @@
use anyhow::{bail, Context, Result};
use std::fs::{read_link, symlink_metadata};
-use std::io::{empty, BufWriter, Write};
+use std::io::{BufWriter, Write};
use std::path::Path;
use tar::{Builder, Header};
use walkdir::WalkDir;
use crate::{
- compression::{CombinedEncoder, CompressionFormats},
+ compression::{CombinedEncoder, CompressionFormats, CompressionProfile},
util::*,
};
@@ -25,6 +25,10 @@ actor! {
#[clap(value_name = "DIR")]
work_dir: String = "./workdir",
+ /// The profile used to compress the tarball.
+ #[clap(value_name = "FORMAT", default_value_t)]
+ compression_profile: CompressionProfile,
+
/// The formats used to compress the tarball.
#[clap(value_name = "FORMAT", default_value_t)]
compression_formats: CompressionFormats,
@@ -38,7 +42,7 @@ impl Tarballer {
let encoder = CombinedEncoder::new(
self.compression_formats
.iter()
- .map(|f| f.encode(&tarball_name))
+ .map(|f| f.encode(&tarball_name, self.compression_profile))
.collect::<Result<Vec<_>>>()?,
);
@@ -89,8 +93,7 @@ fn append_path<W: Write>(builder: &mut Builder<W>, src: &Path, path: &String) ->
header.set_metadata(&stat);
if stat.file_type().is_symlink() {
let link = read_link(src)?;
- header.set_link_name(&link)?;
- builder.append_data(&mut header, path, &mut empty())?;
+ builder.append_link(&mut header, path, &link)?;
} else {
if cfg!(windows) {
// Windows doesn't really have a mode, so `tar` never marks files executable.
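
Editorial aside: the symlink branch above now relies on `tar::Builder::append_link` (available in the tar 0.4.38 this tree pins) instead of setting the link name by hand and appending empty data. A rough standalone sketch (paths invented; the real code reuses the header filled in by `set_metadata`):

    use tar::{Builder, EntryType, Header};

    fn add_symlink<W: std::io::Write>(builder: &mut Builder<W>) -> std::io::Result<()> {
        let mut header = Header::new_gnu();
        header.set_entry_type(EntryType::Symlink);
        header.set_size(0);
        // Writes the header and the link target in one call; no empty data append needed.
        builder.append_link(&mut header, "bin/cargo", "../lib/cargo/bin/cargo")?;
        Ok(())
    }
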
diff --git a/src/tools/rust-installer/test.sh b/src/tools/rust-installer/test.sh
index bf6de4cb1..dac6f77ef 100755
--- a/src/tools/rust-installer/test.sh
+++ b/src/tools/rust-installer/test.sh
@@ -20,7 +20,6 @@ abs_path() {
S="$(abs_path $(dirname $0))"
TEST_DIR="$S/test"
-TMP_DIR="$S/tmp"
WORK_DIR="$TMP_DIR/workdir"
OUT_DIR="$TMP_DIR/outdir"
PREFIX_DIR="$TMP_DIR/prefix"
diff --git a/src/tools/rust-installer/triagebot.toml b/src/tools/rust-installer/triagebot.toml
deleted file mode 100644
index 493500449..000000000
--- a/src/tools/rust-installer/triagebot.toml
+++ /dev/null
@@ -1,4 +0,0 @@
-[assign]
-
-[assign.owners]
-"*" = ["@Mark-Simulacrum"]
diff --git a/src/tools/rustbook/Cargo.toml b/src/tools/rustbook/Cargo.toml
index b296aa2f4..b9cf2617b 100644
--- a/src/tools/rustbook/Cargo.toml
+++ b/src/tools/rustbook/Cargo.toml
@@ -6,9 +6,9 @@ edition = "2021"
[dependencies]
clap = "4.0.32"
-env_logger = "0.7.1"
+env_logger = "0.10"
[dependencies.mdbook]
-version = "0.4.25"
+version = "0.4.28"
default-features = false
features = ["search"]
diff --git a/src/tools/rustc-workspace-hack/Cargo.toml b/src/tools/rustc-workspace-hack/Cargo.toml
index 84b16a37a..e088ffbbe 100644
--- a/src/tools/rustc-workspace-hack/Cargo.toml
+++ b/src/tools/rustc-workspace-hack/Cargo.toml
@@ -84,11 +84,17 @@ libz-sys = { version = "1.1.2" }
# Ensure default features of regex, which are disabled in some scenarios.
regex = { version = "1.5.6" }
serde_json = { version = "1.0.31", features = ["raw_value", "unbounded_depth"] }
-syn = { version = "1", features = ['full', 'visit'] }
+syn = { version = "1", features = ['full', 'visit', 'visit-mut'] } # `visit-mut` required by Cargo via `gix`
url = { version = "2.0", features = ['serde'] }
# Ensure default features of rand, which are disabled in some scenarios.
rand = { version = "0.8.5" }
+# Ensure features of `hashbrown`, `smallvec`, and `once_cell`,
+# which are used transitively by Cargo (via `gix`).
+hashbrown = { version = "0.12.3", default-features = false, features = ["inline-more"] }
+once_cell = { version = "1.16.0", default-features = false, features = ["unstable"] }
+smallvec = { version = "1.10.0", features = ["write"] }
+
[target.'cfg(not(windows))'.dependencies]
openssl = { version = "0.10.35", optional = true }
diff --git a/src/tools/rustdoc-gui/.eslintrc.js b/src/tools/rustdoc-gui/.eslintrc.js
new file mode 100644
index 000000000..f4aadc071
--- /dev/null
+++ b/src/tools/rustdoc-gui/.eslintrc.js
@@ -0,0 +1,96 @@
+module.exports = {
+ "env": {
+ "browser": true,
+ "node": true,
+ "es6": true
+ },
+ "extends": "eslint:recommended",
+ "parserOptions": {
+ "ecmaVersion": 2018,
+ "sourceType": "module"
+ },
+ "rules": {
+ "linebreak-style": [
+ "error",
+ "unix"
+ ],
+ "semi": [
+ "error",
+ "always"
+ ],
+ "quotes": [
+ "error",
+ "double"
+ ],
+ "linebreak-style": [
+ "error",
+ "unix"
+ ],
+ "no-trailing-spaces": "error",
+ "no-var": ["error"],
+ "prefer-const": ["error"],
+ "prefer-arrow-callback": ["error"],
+ "brace-style": [
+ "error",
+ "1tbs",
+ { "allowSingleLine": false }
+ ],
+ "keyword-spacing": [
+ "error",
+ { "before": true, "after": true }
+ ],
+ "arrow-spacing": [
+ "error",
+ { "before": true, "after": true }
+ ],
+ "key-spacing": [
+ "error",
+ { "beforeColon": false, "afterColon": true, "mode": "strict" }
+ ],
+ "func-call-spacing": ["error", "never"],
+ "space-infix-ops": "error",
+ "space-before-function-paren": ["error", "never"],
+ "space-before-blocks": "error",
+ "comma-dangle": ["error", "always-multiline"],
+ "comma-style": ["error", "last"],
+ "max-len": ["error", { "code": 100, "tabWidth": 4 }],
+ "eol-last": ["error", "always"],
+ "arrow-parens": ["error", "as-needed"],
+ "no-unused-vars": [
+ "error",
+ {
+ "argsIgnorePattern": "^_",
+ "varsIgnorePattern": "^_"
+ }
+ ],
+ "eqeqeq": "error",
+ "no-const-assign": "error",
+ "no-debugger": "error",
+ "no-dupe-args": "error",
+ "no-dupe-else-if": "error",
+ "no-dupe-keys": "error",
+ "no-duplicate-case": "error",
+ "no-ex-assign": "error",
+ "no-fallthrough": "error",
+ "no-invalid-regexp": "error",
+ "no-import-assign": "error",
+ "no-self-compare": "error",
+ "no-template-curly-in-string": "error",
+ "block-scoped-var": "error",
+ "guard-for-in": "error",
+ "no-alert": "error",
+ "no-confusing-arrow": "error",
+ "no-div-regex": "error",
+ "no-floating-decimal": "error",
+ "no-implicit-globals": "error",
+ "no-implied-eval": "error",
+ "no-label-var": "error",
+ "no-lonely-if": "error",
+ "no-mixed-operators": "error",
+ "no-multi-assign": "error",
+ "no-return-assign": "error",
+ "no-script-url": "error",
+ "no-sequences": "error",
+ "no-div-regex": "error",
+ }
+};
diff --git a/src/tools/rustdoc-gui/tester.js b/src/tools/rustdoc-gui/tester.js
index 2f0ca1ec3..72baad606 100644
--- a/src/tools/rustdoc-gui/tester.js
+++ b/src/tools/rustdoc-gui/tester.js
@@ -6,8 +6,8 @@
const fs = require("fs");
const path = require("path");
-const os = require('os');
-const {Options, runTest} = require('browser-ui-test');
+const os = require("os");
+const {Options, runTest} = require("browser-ui-test");
// If a test fails or errors, we will retry it two more times in case it was a flaky failure.
const NB_RETRY = 3;
@@ -31,7 +31,7 @@ function isNumeric(s) {
}
function parseOptions(args) {
- var opts = {
+ const opts = {
"doc_folder": "",
"tests_folder": "",
"files": [],
@@ -42,7 +42,7 @@ function parseOptions(args) {
"executable_path": null,
"no_sandbox": false,
};
- var correspondances = {
+ const correspondances = {
"--doc-folder": "doc_folder",
"--tests-folder": "tests_folder",
"--debug": "debug",
@@ -52,39 +52,41 @@ function parseOptions(args) {
"--no-sandbox": "no_sandbox",
};
- for (var i = 0; i < args.length; ++i) {
- if (args[i] === "--doc-folder"
- || args[i] === "--tests-folder"
- || args[i] === "--file"
- || args[i] === "--jobs"
- || args[i] === "--executable-path") {
+ for (let i = 0; i < args.length; ++i) {
+ const arg = args[i];
+ if (arg === "--doc-folder"
+ || arg === "--tests-folder"
+ || arg === "--file"
+ || arg === "--jobs"
+ || arg === "--executable-path") {
i += 1;
if (i >= args.length) {
- console.log("Missing argument after `" + args[i - 1] + "` option.");
+ console.log("Missing argument after `" + arg + "` option.");
return null;
}
- if (args[i - 1] === "--jobs") {
- if (!isNumeric(args[i])) {
+ const arg_value = args[i];
+ if (arg === "--jobs") {
+ if (!isNumeric(arg_value)) {
console.log(
- "`--jobs` option expects a positive number, found `" + args[i] + "`");
+ "`--jobs` option expects a positive number, found `" + arg_value + "`");
return null;
}
- opts["jobs"] = parseInt(args[i]);
- } else if (args[i - 1] !== "--file") {
- opts[correspondances[args[i - 1]]] = args[i];
+ opts["jobs"] = parseInt(arg_value);
+ } else if (arg !== "--file") {
+ opts[correspondances[arg]] = arg_value;
} else {
- opts["files"].push(args[i]);
+ opts["files"].push(arg_value);
}
- } else if (args[i] === "--help") {
+ } else if (arg === "--help") {
showHelp();
process.exit(0);
- } else if (args[i] === "--no-sandbox") {
+ } else if (arg === "--no-sandbox") {
console.log("`--no-sandbox` is being used. Be very careful!");
- opts[correspondances[args[i]]] = true;
- } else if (correspondances[args[i]]) {
- opts[correspondances[args[i]]] = true;
+ opts[correspondances[arg]] = true;
+ } else if (correspondances[arg]) {
+ opts[correspondances[arg]] = true;
} else {
- console.log("Unknown option `" + args[i] + "`.");
+ console.log("Unknown option `" + arg + "`.");
console.log("Use `--help` to see the list of options");
return null;
}
@@ -186,7 +188,7 @@ function createEmptyResults() {
}
async function main(argv) {
- let opts = parseOptions(argv.slice(2));
+ const opts = parseOptions(argv.slice(2));
if (opts === null) {
process.exit(1);
}
@@ -198,7 +200,7 @@ async function main(argv) {
const framework_options = new Options();
try {
// This is more convenient than setting fields one by one.
- let args = [
+ const args = [
"--variable", "DOC_PATH", opts["doc_folder"], "--enable-fail-on-js-error",
"--allow-file-access-from-files",
];
@@ -232,7 +234,7 @@ async function main(argv) {
} else {
files = opts["files"];
}
- files = files.filter(file => path.extname(file) == ".goml");
+ files = files.filter(file => path.extname(file) === ".goml");
if (files.length === 0) {
console.error("rustdoc-gui: No test selected");
process.exit(2);
@@ -257,7 +259,7 @@ async function main(argv) {
// We catch this "event" to display a nicer message in case of unexpected exit (because of a
// missing `--no-sandbox`).
- const exitHandling = (code) => {
+ const exitHandling = () => {
if (!opts["no_sandbox"]) {
console.log("");
console.log(
@@ -266,10 +268,10 @@ async function main(argv) {
console.log("");
}
};
- process.on('exit', exitHandling);
+ process.on("exit", exitHandling);
const originalFilesLen = files.length;
- let results = createEmptyResults();
+ const results = createEmptyResults();
const status_bar = char_printer(files.length);
let new_results;
@@ -279,7 +281,7 @@ async function main(argv) {
Array.prototype.push.apply(results.successful, new_results.successful);
// We generate the new list of files with the previously failing tests.
files = Array.prototype.concat(new_results.failed, new_results.errored).map(
- f => f['file_name']);
+ f => f["file_name"]);
if (files.length > originalFilesLen / 2) {
// If we have too many failing tests, it's very likely not flaky failures anymore so
// no need to retry.
diff --git a/src/tools/rustdoc-js/.eslintrc.js b/src/tools/rustdoc-js/.eslintrc.js
new file mode 100644
index 000000000..4ab3a3157
--- /dev/null
+++ b/src/tools/rustdoc-js/.eslintrc.js
@@ -0,0 +1,96 @@
+module.exports = {
+ "env": {
+ "browser": true,
+ "node": true,
+ "es6": true
+ },
+ "extends": "eslint:recommended",
+ "parserOptions": {
+ "ecmaVersion": 2015,
+ "sourceType": "module"
+ },
+ "rules": {
+ "linebreak-style": [
+ "error",
+ "unix"
+ ],
+ "semi": [
+ "error",
+ "always"
+ ],
+ "quotes": [
+ "error",
+ "double"
+ ],
+ "linebreak-style": [
+ "error",
+ "unix"
+ ],
+ "no-trailing-spaces": "error",
+ "no-var": ["error"],
+ "prefer-const": ["error"],
+ "prefer-arrow-callback": ["error"],
+ "brace-style": [
+ "error",
+ "1tbs",
+ { "allowSingleLine": false }
+ ],
+ "keyword-spacing": [
+ "error",
+ { "before": true, "after": true }
+ ],
+ "arrow-spacing": [
+ "error",
+ { "before": true, "after": true }
+ ],
+ "key-spacing": [
+ "error",
+ { "beforeColon": false, "afterColon": true, "mode": "strict" }
+ ],
+ "func-call-spacing": ["error", "never"],
+ "space-infix-ops": "error",
+ "space-before-function-paren": ["error", "never"],
+ "space-before-blocks": "error",
+ "comma-dangle": ["error", "always-multiline"],
+ "comma-style": ["error", "last"],
+ "max-len": ["error", { "code": 100, "tabWidth": 4 }],
+ "eol-last": ["error", "always"],
+ "arrow-parens": ["error", "as-needed"],
+ "no-unused-vars": [
+ "error",
+ {
+ "argsIgnorePattern": "^_",
+ "varsIgnorePattern": "^_"
+ }
+ ],
+ "eqeqeq": "error",
+ "no-const-assign": "error",
+ "no-debugger": "error",
+ "no-dupe-args": "error",
+ "no-dupe-else-if": "error",
+ "no-dupe-keys": "error",
+ "no-duplicate-case": "error",
+ "no-ex-assign": "error",
+ "no-fallthrough": "error",
+ "no-invalid-regexp": "error",
+ "no-import-assign": "error",
+ "no-self-compare": "error",
+ "no-template-curly-in-string": "error",
+ "block-scoped-var": "error",
+ "guard-for-in": "error",
+ "no-alert": "error",
+ "no-confusing-arrow": "error",
+ "no-div-regex": "error",
+ "no-floating-decimal": "error",
+ "no-implicit-globals": "error",
+ "no-implied-eval": "error",
+ "no-label-var": "error",
+ "no-lonely-if": "error",
+ "no-mixed-operators": "error",
+ "no-multi-assign": "error",
+ "no-return-assign": "error",
+ "no-script-url": "error",
+ "no-sequences": "error",
+ "no-div-regex": "error",
+ }
+};
diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js
index ea5780f66..6b9a9b66a 100644
--- a/src/tools/rustdoc-js/tester.js
+++ b/src/tools/rustdoc-js/tester.js
@@ -1,9 +1,9 @@
-const fs = require('fs');
-const path = require('path');
+const fs = require("fs");
+const path = require("path");
function loadContent(content) {
- var Module = module.constructor;
- var m = new Module();
+ const Module = module.constructor;
+ const m = new Module();
m._compile(content, "tmp.js");
m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1 ||
content.startsWith("// ignore-order\n");
@@ -15,7 +15,7 @@ function loadContent(content) {
}
function readFile(filePath) {
- return fs.readFileSync(filePath, 'utf8');
+ return fs.readFileSync(filePath, "utf8");
}
function contentToDiffLine(key, value) {
@@ -25,41 +25,41 @@ function contentToDiffLine(key, value) {
// This function is only called when no matching result was found and therefore will only display
// the diff between the two items.
function betterLookingDiff(entry, data) {
- let output = ' {\n';
- let spaces = ' ';
- for (let key in entry) {
- if (!entry.hasOwnProperty(key)) {
+ let output = " {\n";
+ const spaces = " ";
+ for (const key in entry) {
+ if (!Object.prototype.hasOwnProperty.call(entry, key)) {
continue;
}
- if (!data || !data.hasOwnProperty(key)) {
- output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
+ if (!data || !Object.prototype.hasOwnProperty.call(data, key)) {
+ output += "-" + spaces + contentToDiffLine(key, entry[key]) + "\n";
continue;
}
- let value = data[key];
+ const value = data[key];
if (value !== entry[key]) {
- output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
- output += '+' + spaces + contentToDiffLine(key, value) + '\n';
+ output += "-" + spaces + contentToDiffLine(key, entry[key]) + "\n";
+ output += "+" + spaces + contentToDiffLine(key, value) + "\n";
} else {
- output += spaces + contentToDiffLine(key, value) + '\n';
+ output += spaces + contentToDiffLine(key, value) + "\n";
}
}
- return output + ' }';
+ return output + " }";
}
function lookForEntry(entry, data) {
- for (var i = 0; i < data.length; ++i) {
- var allGood = true;
- for (var key in entry) {
- if (!entry.hasOwnProperty(key)) {
+ return data.findIndex(data_entry => {
+ let allGood = true;
+ for (const key in entry) {
+ if (!Object.prototype.hasOwnProperty.call(entry, key)) {
continue;
}
- var value = data[i][key];
+ let value = data_entry[key];
// To make our life easier, if there is a "parent" type, we add it to the path.
- if (key === 'path' && data[i]['parent'] !== undefined) {
+ if (key === "path" && data_entry["parent"] !== undefined) {
if (value.length > 0) {
- value += '::' + data[i]['parent']['name'];
+ value += "::" + data_entry["parent"]["name"];
} else {
- value = data[i]['parent']['name'];
+ value = data_entry["parent"]["name"];
}
}
if (value !== entry[key]) {
@@ -67,11 +67,8 @@ function lookForEntry(entry, data) {
break;
}
}
- if (allGood === true) {
- return i;
- }
- }
- return null;
+ return allGood === true;
+ });
}
// This function checks if `expected` has all the required fields needed for the checks.
@@ -82,11 +79,18 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
"foundElems",
"original",
"returned",
- "typeFilter",
"userQuery",
"error",
];
- } else if (fullPath.endsWith("elems") || fullPath.endsWith("generics")) {
+ } else if (fullPath.endsWith("elems") || fullPath.endsWith("returned")) {
+ fieldsToCheck = [
+ "name",
+ "fullPath",
+ "pathWithoutLast",
+ "pathLast",
+ "generics",
+ ];
+ } else if (fullPath.endsWith("generics")) {
fieldsToCheck = [
"name",
"fullPath",
@@ -97,13 +101,12 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
} else {
fieldsToCheck = [];
}
- for (var i = 0; i < fieldsToCheck.length; ++i) {
- const field = fieldsToCheck[i];
- if (!expected.hasOwnProperty(field)) {
+ for (const field of fieldsToCheck) {
+ if (!Object.prototype.hasOwnProperty.call(expected, field)) {
let text = `${queryName}==> Mandatory key \`${field}\` is not present`;
if (fullPath.length > 0) {
text += ` in field \`${fullPath}\``;
- if (position != null) {
+ if (position !== null) {
text += ` (position ${position})`;
}
}
@@ -114,28 +117,29 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
function valueCheck(fullPath, expected, result, error_text, queryName) {
if (Array.isArray(expected)) {
- for (var i = 0; i < expected.length; ++i) {
+ let i;
+ for (i = 0; i < expected.length; ++i) {
checkNeededFields(fullPath, expected[i], error_text, queryName, i);
if (i >= result.length) {
error_text.push(`${queryName}==> EXPECTED has extra value in array from field ` +
`\`${fullPath}\` (position ${i}): \`${JSON.stringify(expected[i])}\``);
} else {
- valueCheck(fullPath + '[' + i + ']', expected[i], result[i], error_text, queryName);
+ valueCheck(fullPath + "[" + i + "]", expected[i], result[i], error_text, queryName);
}
}
for (; i < result.length; ++i) {
error_text.push(`${queryName}==> RESULT has extra value in array from field ` +
`\`${fullPath}\` (position ${i}): \`${JSON.stringify(result[i])}\` ` +
- 'compared to EXPECTED');
+ "compared to EXPECTED");
}
} else if (expected !== null && typeof expected !== "undefined" &&
- expected.constructor == Object) {
+ expected.constructor == Object) { // eslint-disable-line eqeqeq
for (const key in expected) {
- if (!expected.hasOwnProperty(key)) {
+ if (!Object.prototype.hasOwnProperty.call(expected, key)) {
continue;
}
- if (!result.hasOwnProperty(key)) {
- error_text.push('==> Unknown key "' + key + '"');
+ if (!Object.prototype.hasOwnProperty.call(result, key)) {
+ error_text.push("==> Unknown key \"" + key + "\"");
break;
}
let result_v = result[key];
@@ -150,13 +154,13 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
});
result_v = result_v.join("");
}
- const obj_path = fullPath + (fullPath.length > 0 ? '.' : '') + key;
+ const obj_path = fullPath + (fullPath.length > 0 ? "." : "") + key;
valueCheck(obj_path, expected[key], result_v, error_text, queryName);
}
} else {
- expectedValue = JSON.stringify(expected);
- resultValue = JSON.stringify(result);
- if (expectedValue != resultValue) {
+ const expectedValue = JSON.stringify(expected);
+ const resultValue = JSON.stringify(result);
+ if (expectedValue !== resultValue) {
error_text.push(`${queryName}==> Different values for field \`${fullPath}\`:\n` +
`EXPECTED: \`${expectedValue}\`\nRESULT: \`${resultValue}\``);
}
@@ -164,10 +168,10 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
}
function runParser(query, expected, parseQuery, queryName) {
- var error_text = [];
+ const error_text = [];
checkNeededFields("", expected, error_text, queryName, null);
if (error_text.length === 0) {
- valueCheck('', expected, parseQuery(query), error_text, queryName);
+ valueCheck("", expected, parseQuery(query), error_text, queryName);
}
return error_text;
}
@@ -176,48 +180,48 @@ function runSearch(query, expected, doSearch, loadedFile, queryName) {
const ignore_order = loadedFile.ignore_order;
const exact_check = loadedFile.exact_check;
- var results = doSearch(query, loadedFile.FILTER_CRATE);
- var error_text = [];
+ const results = doSearch(query, loadedFile.FILTER_CRATE);
+ const error_text = [];
- for (var key in expected) {
- if (!expected.hasOwnProperty(key)) {
+ for (const key in expected) {
+ if (!Object.prototype.hasOwnProperty.call(expected, key)) {
continue;
}
- if (!results.hasOwnProperty(key)) {
- error_text.push('==> Unknown key "' + key + '"');
+ if (!Object.prototype.hasOwnProperty.call(results, key)) {
+ error_text.push("==> Unknown key \"" + key + "\"");
break;
}
- var entry = expected[key];
+ const entry = expected[key];
- if (exact_check == true && entry.length !== results[key].length) {
+ if (exact_check && entry.length !== results[key].length) {
error_text.push(queryName + "==> Expected exactly " + entry.length +
" results but found " + results[key].length + " in '" + key + "'");
}
- var prev_pos = -1;
- for (var i = 0; i < entry.length; ++i) {
- var entry_pos = lookForEntry(entry[i], results[key]);
- if (entry_pos === null) {
+ let prev_pos = -1;
+ entry.forEach((elem, index) => {
+ const entry_pos = lookForEntry(elem, results[key]);
+ if (entry_pos === -1) {
error_text.push(queryName + "==> Result not found in '" + key + "': '" +
- JSON.stringify(entry[i]) + "'");
+ JSON.stringify(elem) + "'");
// By default, we just compare the first two items.
let item_to_diff = 0;
- if ((ignore_order === false || exact_check === true) && i < results[key].length) {
- item_to_diff = i;
+ if ((!ignore_order || exact_check) && index < results[key].length) {
+ item_to_diff = index;
}
error_text.push("Diff of first error:\n" +
- betterLookingDiff(entry[i], results[key][item_to_diff]));
+ betterLookingDiff(elem, results[key][item_to_diff]));
} else if (exact_check === true && prev_pos + 1 !== entry_pos) {
error_text.push(queryName + "==> Exact check failed at position " + (prev_pos + 1) +
- ": expected '" + JSON.stringify(entry[i]) + "' but found '" +
- JSON.stringify(results[key][i]) + "'");
+ ": expected '" + JSON.stringify(elem) + "' but found '" +
+ JSON.stringify(results[key][index]) + "'");
} else if (ignore_order === false && entry_pos < prev_pos) {
- error_text.push(queryName + "==> '" + JSON.stringify(entry[i]) + "' was supposed " +
+ error_text.push(queryName + "==> '" + JSON.stringify(elem) + "' was supposed " +
"to be before '" + JSON.stringify(results[key][entry_pos]) + "'");
} else {
prev_pos = entry_pos;
}
- }
+ });
}
return error_text;
}
@@ -252,15 +256,15 @@ function runCheck(loadedFile, key, callback) {
console.log(`==> QUERY variable should have the same length as ${key}`);
return 1;
}
- for (var i = 0; i < query.length; ++i) {
- var error_text = callback(query[i], expected[i], "[ query `" + query[i] + "`]");
+ for (let i = 0; i < query.length; ++i) {
+ const error_text = callback(query[i], expected[i], "[ query `" + query[i] + "`]");
if (checkResult(error_text, loadedFile, false) !== 0) {
return 1;
}
}
console.log("OK");
} else {
- var error_text = callback(query, expected, "");
+ const error_text = callback(query, expected, "");
if (checkResult(error_text, loadedFile, true) !== 0) {
return 1;
}
@@ -269,9 +273,9 @@ function runCheck(loadedFile, key, callback) {
}
function runChecks(testFile, doSearch, parseQuery) {
- var checkExpected = false;
- var checkParsed = false;
- var testFileContent = readFile(testFile) + 'exports.QUERY = QUERY;';
+ let checkExpected = false;
+ let checkParsed = false;
+ let testFileContent = readFile(testFile) + "exports.QUERY = QUERY;";
if (testFileContent.indexOf("FILTER_CRATE") !== -1) {
testFileContent += "exports.FILTER_CRATE = FILTER_CRATE;";
@@ -280,11 +284,11 @@ function runChecks(testFile, doSearch, parseQuery) {
}
if (testFileContent.indexOf("\nconst EXPECTED") !== -1) {
- testFileContent += 'exports.EXPECTED = EXPECTED;';
+ testFileContent += "exports.EXPECTED = EXPECTED;";
checkExpected = true;
}
if (testFileContent.indexOf("\nconst PARSED") !== -1) {
- testFileContent += 'exports.PARSED = PARSED;';
+ testFileContent += "exports.PARSED = PARSED;";
checkParsed = true;
}
if (!checkParsed && !checkExpected) {
@@ -294,7 +298,7 @@ function runChecks(testFile, doSearch, parseQuery) {
}
const loadedFile = loadContent(testFileContent);
- var res = 0;
+ let res = 0;
if (checkExpected) {
res += runCheck(loadedFile, "EXPECTED", (query, expected, text) => {
@@ -323,18 +327,17 @@ function loadSearchJS(doc_folder, resource_suffix) {
const searchIndex = require(searchIndexJs);
const staticFiles = path.join(doc_folder, "static.files");
- const searchJs = fs.readdirSync(staticFiles).find(
- f => f.match(/search.*\.js$/));
+ const searchJs = fs.readdirSync(staticFiles).find(f => f.match(/search.*\.js$/));
const searchModule = require(path.join(staticFiles, searchJs));
const searchWords = searchModule.initSearch(searchIndex.searchIndex);
return {
- doSearch: function (queryStr, filterCrate, currentCrate) {
+ doSearch: function(queryStr, filterCrate, currentCrate) {
return searchModule.execQuery(searchModule.parseQuery(queryStr), searchWords,
filterCrate, currentCrate);
},
parseQuery: searchModule.parseQuery,
- }
+ };
}
function showHelp() {
@@ -349,14 +352,14 @@ function showHelp() {
}
function parseOptions(args) {
- var opts = {
+ const opts = {
"crate_name": "",
"resource_suffix": "",
"doc_folder": "",
"test_folder": "",
"test_file": [],
};
- var correspondences = {
+ const correspondences = {
"--resource-suffix": "resource_suffix",
"--doc-folder": "doc_folder",
"--test-folder": "test_folder",
@@ -364,23 +367,25 @@ function parseOptions(args) {
"--crate-name": "crate_name",
};
- for (var i = 0; i < args.length; ++i) {
- if (correspondences.hasOwnProperty(args[i])) {
+ for (let i = 0; i < args.length; ++i) {
+ const arg = args[i];
+ if (Object.prototype.hasOwnProperty.call(correspondences, arg)) {
i += 1;
if (i >= args.length) {
- console.log("Missing argument after `" + args[i - 1] + "` option.");
+ console.log("Missing argument after `" + arg + "` option.");
return null;
}
- if (args[i - 1] !== "--test-file") {
- opts[correspondences[args[i - 1]]] = args[i];
+ const arg_value = args[i];
+ if (arg !== "--test-file") {
+ opts[correspondences[arg]] = arg_value;
} else {
- opts[correspondences[args[i - 1]]].push(args[i]);
+ opts[correspondences[arg]].push(arg_value);
}
- } else if (args[i] === "--help") {
+ } else if (arg === "--help") {
showHelp();
process.exit(0);
} else {
- console.log("Unknown option `" + args[i] + "`.");
+ console.log("Unknown option `" + arg + "`.");
console.log("Use `--help` to see the list of options");
return null;
}
@@ -398,27 +403,28 @@ function parseOptions(args) {
}
function main(argv) {
- var opts = parseOptions(argv.slice(2));
+ const opts = parseOptions(argv.slice(2));
if (opts === null) {
return 1;
}
- let parseAndSearch = loadSearchJS(
+ const parseAndSearch = loadSearchJS(
opts["doc_folder"],
- opts["resource_suffix"]);
- var errors = 0;
+ opts["resource_suffix"]
+ );
+ let errors = 0;
- let doSearch = function (queryStr, filterCrate) {
+ const doSearch = function(queryStr, filterCrate) {
return parseAndSearch.doSearch(queryStr, filterCrate, opts["crate_name"]);
};
if (opts["test_file"].length !== 0) {
- opts["test_file"].forEach(function (file) {
+ opts["test_file"].forEach(file => {
process.stdout.write(`Testing ${file} ... `);
errors += runChecks(file, doSearch, parseAndSearch.parseQuery);
});
} else if (opts["test_folder"].length !== 0) {
- fs.readdirSync(opts["test_folder"]).forEach(function (file) {
+ fs.readdirSync(opts["test_folder"]).forEach(file => {
if (!file.endsWith(".js")) {
return;
}
diff --git a/src/tools/rustfmt/src/attr.rs b/src/tools/rustfmt/src/attr.rs
index 5648e1254..22e45082a 100644
--- a/src/tools/rustfmt/src/attr.rs
+++ b/src/tools/rustfmt/src/attr.rs
@@ -2,7 +2,7 @@
use rustc_ast::ast;
use rustc_ast::HasAttrs;
-use rustc_span::{symbol::sym, Span, Symbol};
+use rustc_span::{symbol::sym, Span};
use self::doc_comment::DocCommentFormatter;
use crate::comment::{contains_comment, rewrite_doc_comment, CommentStyle};
@@ -19,20 +19,6 @@ use crate::utils::{count_newlines, mk_sp};
mod doc_comment;
-pub(crate) fn contains_name(attrs: &[ast::Attribute], name: Symbol) -> bool {
- attrs.iter().any(|attr| attr.has_name(name))
-}
-
-pub(crate) fn first_attr_value_str_by_name(
- attrs: &[ast::Attribute],
- name: Symbol,
-) -> Option<Symbol> {
- attrs
- .iter()
- .find(|attr| attr.has_name(name))
- .and_then(|attr| attr.value_str())
-}
-
/// Returns attributes on the given statement.
pub(crate) fn get_attrs_from_stmt(stmt: &ast::Stmt) -> &[ast::Attribute] {
stmt.attrs()
diff --git a/src/tools/rustfmt/src/closures.rs b/src/tools/rustfmt/src/closures.rs
index 340113866..c95e9a97b 100644
--- a/src/tools/rustfmt/src/closures.rs
+++ b/src/tools/rustfmt/src/closures.rs
@@ -195,7 +195,6 @@ fn rewrite_closure_expr(
| ast::ExprKind::Struct(..) => true,
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr)
| ast::ExprKind::Cast(ref expr, _) => allow_multi_line(expr),
@@ -441,7 +440,6 @@ fn is_block_closure_forced_inner(expr: &ast::Expr, version: Version) -> bool {
ast::ExprKind::If(..) | ast::ExprKind::While(..) | ast::ExprKind::ForLoop(..) => true,
ast::ExprKind::Loop(..) if version == Version::Two => true,
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr)
| ast::ExprKind::Cast(ref expr, _) => is_block_closure_forced_inner(expr, version),
diff --git a/src/tools/rustfmt/src/expr.rs b/src/tools/rustfmt/src/expr.rs
index 3f0f217f8..ac96bedf2 100644
--- a/src/tools/rustfmt/src/expr.rs
+++ b/src/tools/rustfmt/src/expr.rs
@@ -236,7 +236,6 @@ pub(crate) fn format_expr(
ast::ExprKind::Yeet(Some(ref expr)) => {
rewrite_unary_prefix(context, "do yeet ", &**expr, shape)
}
- ast::ExprKind::Box(ref expr) => rewrite_unary_prefix(context, "box ", &**expr, shape),
ast::ExprKind::AddrOf(borrow_kind, mutability, ref expr) => {
rewrite_expr_addrof(context, borrow_kind, mutability, expr, shape)
}
@@ -367,7 +366,7 @@ pub(crate) fn format_expr(
))
}
}
- ast::ExprKind::Async(capture_by, _node_id, ref block) => {
+ ast::ExprKind::Async(capture_by, ref block) => {
let mover = if capture_by == ast::CaptureBy::Value {
"move "
} else {
@@ -1299,7 +1298,6 @@ pub(crate) fn is_simple_expr(expr: &ast::Expr) -> bool {
ast::ExprKind::Lit(..) => true,
ast::ExprKind::Path(ref qself, ref path) => qself.is_none() && path.segments.len() <= 1,
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Cast(ref expr, _)
| ast::ExprKind::Field(ref expr, _)
| ast::ExprKind::Try(ref expr)
@@ -1361,7 +1359,6 @@ pub(crate) fn can_be_overflowed_expr(
// Handle unary-like expressions
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr)
| ast::ExprKind::Cast(ref expr, _) => can_be_overflowed_expr(context, expr, args_len),
@@ -1373,7 +1370,6 @@ pub(crate) fn is_nested_call(expr: &ast::Expr) -> bool {
match expr.kind {
ast::ExprKind::Call(..) | ast::ExprKind::MacCall(..) => true,
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr)
| ast::ExprKind::Cast(ref expr, _) => is_nested_call(expr),
@@ -2133,7 +2129,6 @@ pub(crate) fn is_method_call(expr: &ast::Expr) -> bool {
match expr.kind {
ast::ExprKind::MethodCall(..) => true,
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Cast(ref expr, _)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr) => is_method_call(expr),
diff --git a/src/tools/rustfmt/src/items.rs b/src/tools/rustfmt/src/items.rs
index 25e8a0248..43779cfae 100644
--- a/src/tools/rustfmt/src/items.rs
+++ b/src/tools/rustfmt/src/items.rs
@@ -1804,13 +1804,15 @@ pub(crate) struct StaticParts<'a> {
impl<'a> StaticParts<'a> {
pub(crate) fn from_item(item: &'a ast::Item) -> Self {
- let (defaultness, prefix, ty, mutability, expr) = match item.kind {
- ast::ItemKind::Static(ref ty, mutability, ref expr) => {
- (None, "static", ty, mutability, expr)
- }
- ast::ItemKind::Const(defaultness, ref ty, ref expr) => {
- (Some(defaultness), "const", ty, ast::Mutability::Not, expr)
- }
+ let (defaultness, prefix, ty, mutability, expr) = match &item.kind {
+ ast::ItemKind::Static(s) => (None, "static", &s.ty, s.mutability, &s.expr),
+ ast::ItemKind::Const(c) => (
+ Some(c.defaultness),
+ "const",
+ &c.ty,
+ ast::Mutability::Not,
+ &c.expr,
+ ),
_ => unreachable!(),
};
StaticParts {
@@ -1826,10 +1828,8 @@ impl<'a> StaticParts<'a> {
}
pub(crate) fn from_trait_item(ti: &'a ast::AssocItem) -> Self {
- let (defaultness, ty, expr_opt) = match ti.kind {
- ast::AssocItemKind::Const(defaultness, ref ty, ref expr_opt) => {
- (defaultness, ty, expr_opt)
- }
+ let (defaultness, ty, expr_opt) = match &ti.kind {
+ ast::AssocItemKind::Const(c) => (c.defaultness, &c.ty, &c.expr),
_ => unreachable!(),
};
StaticParts {
@@ -1845,8 +1845,8 @@ impl<'a> StaticParts<'a> {
}
pub(crate) fn from_impl_item(ii: &'a ast::AssocItem) -> Self {
- let (defaultness, ty, expr) = match ii.kind {
- ast::AssocItemKind::Const(defaultness, ref ty, ref expr) => (defaultness, ty, expr),
+ let (defaultness, ty, expr) = match &ii.kind {
+ ast::AssocItemKind::Const(c) => (c.defaultness, &c.ty, &c.expr),
_ => unreachable!(),
};
StaticParts {
diff --git a/src/tools/rustfmt/src/matches.rs b/src/tools/rustfmt/src/matches.rs
index 85d9c5d2b..aac5e59b8 100644
--- a/src/tools/rustfmt/src/matches.rs
+++ b/src/tools/rustfmt/src/matches.rs
@@ -592,7 +592,6 @@ fn can_flatten_block_around_this(body: &ast::Expr) -> bool {
| ast::ExprKind::Struct(..)
| ast::ExprKind::Tup(..) => true,
ast::ExprKind::AddrOf(_, _, ref expr)
- | ast::ExprKind::Box(ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr)
| ast::ExprKind::Index(ref expr, _)
diff --git a/src/tools/rustfmt/src/parse/parser.rs b/src/tools/rustfmt/src/parse/parser.rs
index 7ab042506..6bc53159b 100644
--- a/src/tools/rustfmt/src/parse/parser.rs
+++ b/src/tools/rustfmt/src/parse/parser.rs
@@ -2,13 +2,12 @@ use std::panic::{catch_unwind, AssertUnwindSafe};
use std::path::{Path, PathBuf};
use rustc_ast::token::TokenKind;
-use rustc_ast::{ast, ptr};
+use rustc_ast::{ast, attr, ptr};
use rustc_errors::Diagnostic;
use rustc_parse::{new_parser_from_file, parser::Parser as RawParser};
use rustc_span::{sym, Span};
use thin_vec::ThinVec;
-use crate::attr::first_attr_value_str_by_name;
use crate::parse::session::ParseSess;
use crate::Input;
@@ -93,7 +92,7 @@ pub(crate) enum ParserError {
impl<'a> Parser<'a> {
pub(crate) fn submod_path_from_attr(attrs: &[ast::Attribute], path: &Path) -> Option<PathBuf> {
- let path_sym = first_attr_value_str_by_name(attrs, sym::path)?;
+ let path_sym = attr::first_attr_value_str_by_name(attrs, sym::path)?;
let path_str = path_sym.as_str();
// On windows, the base path might have the form
diff --git a/src/tools/rustfmt/src/reorder.rs b/src/tools/rustfmt/src/reorder.rs
index 9e4a668aa..3bddf4c1b 100644
--- a/src/tools/rustfmt/src/reorder.rs
+++ b/src/tools/rustfmt/src/reorder.rs
@@ -8,7 +8,7 @@
use std::cmp::{Ord, Ordering};
-use rustc_ast::ast;
+use rustc_ast::{ast, attr};
use rustc_span::{symbol::sym, Span};
use crate::config::{Config, GroupImportsTactic};
@@ -167,7 +167,7 @@ fn rewrite_reorderable_or_regroupable_items(
}
fn contains_macro_use_attr(item: &ast::Item) -> bool {
- crate::attr::contains_name(&item.attrs, sym::macro_use)
+ attr::contains_name(&item.attrs, sym::macro_use)
}
/// Divides imports into three groups, corresponding to standard, external
diff --git a/src/tools/rustfmt/src/utils.rs b/src/tools/rustfmt/src/utils.rs
index 1e89f3ae7..a26375ee6 100644
--- a/src/tools/rustfmt/src/utils.rs
+++ b/src/tools/rustfmt/src/utils.rs
@@ -492,7 +492,6 @@ pub(crate) fn is_block_expr(context: &RewriteContext<'_>, expr: &ast::Expr, repr
| ast::ExprKind::Assign(..)
| ast::ExprKind::AssignOp(..)
| ast::ExprKind::Await(..)
- | ast::ExprKind::Box(..)
| ast::ExprKind::Break(..)
| ast::ExprKind::Cast(..)
| ast::ExprKind::Continue(..)
diff --git a/src/tools/rustfmt/tests/source/expr.rs b/src/tools/rustfmt/tests/source/expr.rs
index 21f8a4a43..879c551ea 100644
--- a/src/tools/rustfmt/tests/source/expr.rs
+++ b/src/tools/rustfmt/tests/source/expr.rs
@@ -3,7 +3,6 @@
// Test expressions
fn foo() -> bool {
- let boxed: Box<i32> = box 5;
let referenced = &5 ;
let very_long_variable_name = ( a + first + simple + test );
@@ -132,12 +131,6 @@ fn qux() {
}
}
-fn issue227() {
- {
- let handler = box DocumentProgressHandler::new(addr, DocumentProgressTask::DOMContentLoaded);
- }
-}
-
fn issue184(source: &str) {
for c in source.chars() {
if index < 'a' {
@@ -413,10 +406,6 @@ fn issue2704() {
.concat(&requires1)
.concat(&requires2)
.distinct_total());
- let requires = requires.set(box requires0
- .concat(&requires1)
- .concat(&requires2)
- .distinct_total());
let requires = requires.set(requires0
.concat(&requires1)
.concat(&requires2)
diff --git a/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs b/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs
index 5ada9b1dd..0ab820249 100644
--- a/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs
+++ b/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs
@@ -108,12 +108,6 @@ fn main() {
bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
));
- // Box
- foo(box Bar {
- aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
- bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
- });
-
// Unary
foo(!bar(
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
diff --git a/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs b/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs
index 52acd2649..aa41e021f 100644
--- a/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs
+++ b/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs
@@ -96,12 +96,6 @@ fn main() {
bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
));
- // Box
- foo(box Bar {
- aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
- bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
- });
-
// Unary
foo(!bar(
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
diff --git a/src/tools/rustfmt/tests/target/expr.rs b/src/tools/rustfmt/tests/target/expr.rs
index 84df802bc..187a1dc97 100644
--- a/src/tools/rustfmt/tests/target/expr.rs
+++ b/src/tools/rustfmt/tests/target/expr.rs
@@ -3,7 +3,6 @@
// Test expressions
fn foo() -> bool {
- let boxed: Box<i32> = box 5;
let referenced = &5;
let very_long_variable_name = (a + first + simple + test);
@@ -179,13 +178,6 @@ fn qux() {
}
}
-fn issue227() {
- {
- let handler =
- box DocumentProgressHandler::new(addr, DocumentProgressTask::DOMContentLoaded);
- }
-}
-
fn issue184(source: &str) {
for c in source.chars() {
if index < 'a' {
@@ -455,12 +447,6 @@ fn issue2704() {
.distinct_total(),
);
let requires = requires.set(
- box requires0
- .concat(&requires1)
- .concat(&requires2)
- .distinct_total(),
- );
- let requires = requires.set(
requires0
.concat(&requires1)
.concat(&requires2)
diff --git a/src/tools/suggest-tests/Cargo.toml b/src/tools/suggest-tests/Cargo.toml
new file mode 100644
index 000000000..f4f4d548b
--- /dev/null
+++ b/src/tools/suggest-tests/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "suggest-tests"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+glob = "0.3.0"
+build_helper = { version = "0.1.0", path = "../build_helper" }
+once_cell = "1.17.1"
diff --git a/src/tools/suggest-tests/src/dynamic_suggestions.rs b/src/tools/suggest-tests/src/dynamic_suggestions.rs
new file mode 100644
index 000000000..2b0213cdc
--- /dev/null
+++ b/src/tools/suggest-tests/src/dynamic_suggestions.rs
@@ -0,0 +1,23 @@
+use std::path::Path;
+
+use crate::Suggestion;
+
+type DynamicSuggestion = fn(&Path) -> Vec<Suggestion>;
+
+pub(crate) const DYNAMIC_SUGGESTIONS: &[DynamicSuggestion] = &[|path: &Path| -> Vec<Suggestion> {
+ if path.starts_with("compiler/") || path.starts_with("library/") {
+ let path = path.components().take(2).collect::<Vec<_>>();
+
+ vec![Suggestion::with_single_path(
+ "test",
+ None,
+ &format!(
+ "{}/{}",
+ path[0].as_os_str().to_str().unwrap(),
+ path[1].as_os_str().to_str().unwrap()
+ ),
+ )]
+ } else {
+ Vec::new()
+ }
+}];
diff --git a/src/tools/suggest-tests/src/lib.rs b/src/tools/suggest-tests/src/lib.rs
new file mode 100644
index 000000000..44cd3c7f6
--- /dev/null
+++ b/src/tools/suggest-tests/src/lib.rs
@@ -0,0 +1,96 @@
+use std::{
+ fmt::{self, Display},
+ path::Path,
+};
+
+use dynamic_suggestions::DYNAMIC_SUGGESTIONS;
+use glob::Pattern;
+use static_suggestions::STATIC_SUGGESTIONS;
+
+mod dynamic_suggestions;
+mod static_suggestions;
+
+#[cfg(test)]
+mod tests;
+
+macro_rules! sug {
+ ($cmd:expr) => {
+ Suggestion::new($cmd, None, &[])
+ };
+
+ ($cmd:expr, $paths:expr) => {
+ Suggestion::new($cmd, None, $paths.as_slice())
+ };
+
+ ($cmd:expr, $stage:expr, $paths:expr) => {
+ Suggestion::new($cmd, Some($stage), $paths.as_slice())
+ };
+}
+
+pub(crate) use sug;
+
+pub fn get_suggestions<T: AsRef<str>>(modified_files: &[T]) -> Vec<Suggestion> {
+ let mut suggestions = Vec::new();
+
+ // static suggestions
+ for sug in STATIC_SUGGESTIONS.iter() {
+ let glob = Pattern::new(&sug.0).expect("Found invalid glob pattern!");
+
+ for file in modified_files {
+ if glob.matches(file.as_ref()) {
+ suggestions.extend_from_slice(&sug.1);
+ }
+ }
+ }
+
+ // dynamic suggestions
+ for sug in DYNAMIC_SUGGESTIONS {
+ for file in modified_files {
+ let sugs = sug(Path::new(file.as_ref()));
+
+ suggestions.extend_from_slice(&sugs);
+ }
+ }
+
+ suggestions.sort();
+ suggestions.dedup();
+
+ suggestions
+}
+
+#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
+pub struct Suggestion {
+ pub cmd: String,
+ pub stage: Option<u32>,
+ pub paths: Vec<String>,
+}
+
+impl Suggestion {
+ pub fn new(cmd: &str, stage: Option<u32>, paths: &[&str]) -> Self {
+ Self { cmd: cmd.to_owned(), stage, paths: paths.iter().map(|p| p.to_string()).collect() }
+ }
+
+ pub fn with_single_path(cmd: &str, stage: Option<u32>, path: &str) -> Self {
+ Self::new(cmd, stage, &[path])
+ }
+}
+
+impl Display for Suggestion {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ write!(f, "{} ", self.cmd)?;
+
+ for path in &self.paths {
+ write!(f, "{} ", path)?;
+ }
+
+ if let Some(stage) = self.stage {
+ write!(f, "{}", stage)?;
+ } else {
+ // write a sentinel value here (in place of a stage) to be consumed
+ // by the shim in bootstrap; it will be read and ignored.
+ write!(f, "N/A")?;
+ }
+
+ Ok(())
+ }
+}
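As a rough illustration of the `suggest-tests` API added above, a hypothetical caller could look like the sketch below. The file names are invented; the real entry point, added next in this diff, feeds in the git-modified files instead.

```rust
// Hypothetical caller of `suggest_tests::get_suggestions`, assuming the
// `suggest-tests` crate above is available as a dependency.
use suggest_tests::get_suggestions;

fn main() {
    let modified = ["compiler/rustc_parse/src/lib.rs", "src/doc/book/src/ch01-00.md"];
    for suggestion in get_suggestions(&modified) {
        // Each `Suggestion` displays as `<cmd> <paths...> <stage-or-N/A>`,
        // which is the format the bootstrap shim consumes.
        println!("{suggestion}");
    }
}
```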
diff --git a/src/tools/suggest-tests/src/main.rs b/src/tools/suggest-tests/src/main.rs
new file mode 100644
index 000000000..0b541b60c
--- /dev/null
+++ b/src/tools/suggest-tests/src/main.rs
@@ -0,0 +1,27 @@
+use std::process::ExitCode;
+
+use build_helper::git::get_git_modified_files;
+use suggest_tests::get_suggestions;
+
+fn main() -> ExitCode {
+ let modified_files = get_git_modified_files(None, &Vec::new());
+ let modified_files = match modified_files {
+ Ok(Some(files)) => files,
+ Ok(None) => {
+ eprintln!("git error");
+ return ExitCode::FAILURE;
+ }
+ Err(err) => {
+ eprintln!("Could not get modified files from git: \"{err}\"");
+ return ExitCode::FAILURE;
+ }
+ };
+
+ let suggestions = get_suggestions(&modified_files);
+
+ for sug in &suggestions {
+ println!("{sug}");
+ }
+
+ ExitCode::SUCCESS
+}
diff --git a/src/tools/suggest-tests/src/static_suggestions.rs b/src/tools/suggest-tests/src/static_suggestions.rs
new file mode 100644
index 000000000..d8166ead8
--- /dev/null
+++ b/src/tools/suggest-tests/src/static_suggestions.rs
@@ -0,0 +1,24 @@
+use crate::{sug, Suggestion};
+
+// FIXME: perhaps this could use `std::lazy` when it is stabilized
+macro_rules! static_suggestions {
+ ($( $glob:expr => [ $( $suggestion:expr ),* ] ),*) => {
+ pub(crate) const STATIC_SUGGESTIONS: ::once_cell::unsync::Lazy<Vec<(&'static str, Vec<Suggestion>)>>
+ = ::once_cell::unsync::Lazy::new(|| vec![ $( ($glob, vec![ $($suggestion),* ]) ),*]);
+ }
+}
+
+static_suggestions! {
+ "*.md" => [
+ sug!("test", 0, ["linkchecker"])
+ ],
+
+ "compiler/*" => [
+ sug!("check"),
+ sug!("test", 1, ["src/test/ui", "src/test/run-make"])
+ ],
+
+ "src/librustdoc/*" => [
+ sug!("test", 1, ["rustdoc"])
+ ]
+}
diff --git a/src/tools/suggest-tests/src/tests.rs b/src/tools/suggest-tests/src/tests.rs
new file mode 100644
index 000000000..5bc1a7df7
--- /dev/null
+++ b/src/tools/suggest-tests/src/tests.rs
@@ -0,0 +1,21 @@
+macro_rules! sugg_test {
+ ( $( $name:ident: $paths:expr => $suggestions:expr ),* ) => {
+ $(
+ #[test]
+ fn $name() {
+ let suggestions = crate::get_suggestions(&$paths).into_iter().map(|s| s.to_string()).collect::<Vec<_>>();
+ assert_eq!(suggestions, $suggestions);
+ }
+ )*
+ };
+}
+
+sugg_test! {
+ test_error_code_docs: ["compiler/rustc_error_codes/src/error_codes/E0000.md"] =>
+ ["check N/A", "test compiler/rustc_error_codes N/A", "test linkchecker 0", "test src/test/ui src/test/run-make 1"],
+
+ test_rustdoc: ["src/librustdoc/src/lib.rs"] => ["test rustdoc 1"],
+
+ test_rustdoc_and_libstd: ["src/librustdoc/src/lib.rs", "library/std/src/lib.rs"] =>
+ ["test library/std N/A", "test rustdoc 1"]
+}
diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml
index cdf1dd366..8c6b1eb22 100644
--- a/src/tools/tidy/Cargo.toml
+++ b/src/tools/tidy/Cargo.toml
@@ -5,7 +5,7 @@ edition = "2021"
autobins = false
[dependencies]
-cargo_metadata = "0.14"
+cargo_metadata = "0.15"
cargo-platform = "0.1.2"
regex = "1"
miropt-test-tools = { path = "../miropt-test-tools" }
diff --git a/src/tools/tidy/src/alphabetical.rs b/src/tools/tidy/src/alphabetical.rs
index f913f6cde..fdc411c89 100644
--- a/src/tools/tidy/src/alphabetical.rs
+++ b/src/tools/tidy/src/alphabetical.rs
@@ -95,7 +95,7 @@ fn check_section<'a>(
}
pub fn check(path: &Path, bad: &mut bool) {
- walk(path, &mut filter_dirs, &mut |entry, contents| {
+ walk(path, |path, _is_dir| filter_dirs(path), &mut |entry, contents| {
let file = &entry.path().display();
let mut lines = contents.lines().enumerate().peekable();
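This and the following tidy hunks all move the skip predicate from a `&mut |path| ...` closure to a by-value `|path, is_dir| ...` closure. The sketch below only illustrates the shape of that calling convention; the `walk` signature is inferred from these call sites, not copied from tidy's `walk.rs`, and the stub body is a placeholder.

```rust
use std::path::Path;

// Inferred shape of the new `walk` API: skip predicate passed by value,
// receiving the path plus an `is_dir` flag.
fn walk(root: &Path, skip: impl Fn(&Path, bool) -> bool, callback: &mut dyn FnMut(&Path, &str)) {
    // The real helper recurses over `root`; this stub visits one hard-coded
    // path just to exercise the closures.
    let file = root.join("lib.rs");
    if !skip(&file, false) {
        callback(&file, "// contents read from disk in the real tool");
    }
}

fn main() {
    walk(
        Path::new("library/std/src"),
        |path, is_dir| is_dir && path.ends_with("tests"),
        &mut |path, _contents| println!("checking {}", path.display()),
    );
}
```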
diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs
index b898f20a5..197e9a996 100644
--- a/src/tools/tidy/src/bins.rs
+++ b/src/tools/tidy/src/bins.rs
@@ -57,8 +57,8 @@ mod os_impl {
match fs::File::create(&path) {
Ok(file) => {
let exec = is_executable(&path).unwrap_or(false);
- std::mem::drop(file);
- std::fs::remove_file(&path).expect("Deleted temp file");
+ drop(file);
+ fs::remove_file(&path).expect("Deleted temp file");
// If the file is executable, then we assume that this
// filesystem does not track executability, so skip this check.
return if exec { Unsupported } else { Supported };
@@ -101,23 +101,11 @@ mod os_impl {
const ALLOWED: &[&str] = &["configure", "x"];
+ // FIXME: we don't need to look at all binaries, only files that have been modified in this branch
+ // (e.g. using `git ls-files`).
walk_no_read(
- path,
- &mut |path| {
- filter_dirs(path)
- || path.ends_with("src/etc")
- // This is a list of directories that we almost certainly
- // don't need to walk. A future PR will likely want to
- // remove these in favor of crate::walk_no_read using git
- // ls-files to discover the paths we should check, which
- // would naturally ignore all of these directories. It's
- // also likely faster than walking the directory tree
- // directly (since git is just reading from a couple files
- // to produce the results).
- || path.ends_with("target")
- || path.ends_with("build")
- || path.ends_with(".git")
- },
+ &[path],
+ |path, _is_dir| filter_dirs(path) || path.ends_with("src/etc"),
&mut |entry| {
let file = entry.path();
let extension = file.extension();
diff --git a/src/tools/tidy/src/debug_artifacts.rs b/src/tools/tidy/src/debug_artifacts.rs
index 0dd9c1e16..582014d50 100644
--- a/src/tools/tidy/src/debug_artifacts.rs
+++ b/src/tools/tidy/src/debug_artifacts.rs
@@ -1,22 +1,26 @@
//! Tidy check to prevent creation of unnecessary debug artifacts while running tests.
-use crate::walk::{filter_dirs, walk};
+use crate::walk::{filter_dirs, filter_not_rust, walk};
use std::path::Path;
const GRAPHVIZ_POSTFLOW_MSG: &str = "`borrowck_graphviz_postflow` attribute in test";
pub fn check(test_dir: &Path, bad: &mut bool) {
- walk(test_dir, &mut filter_dirs, &mut |entry, contents| {
- let filename = entry.path();
- let is_rust = filename.extension().map_or(false, |ext| ext == "rs");
- if !is_rust {
- return;
- }
-
- for (i, line) in contents.lines().enumerate() {
- if line.contains("borrowck_graphviz_postflow") {
- tidy_error!(bad, "{}:{}: {}", filename.display(), i + 1, GRAPHVIZ_POSTFLOW_MSG);
+ walk(
+ test_dir,
+ |path, _is_dir| filter_dirs(path) || filter_not_rust(path),
+ &mut |entry, contents| {
+ for (i, line) in contents.lines().enumerate() {
+ if line.contains("borrowck_graphviz_postflow") {
+ tidy_error!(
+ bad,
+ "{}:{}: {}",
+ entry.path().display(),
+ i + 1,
+ GRAPHVIZ_POSTFLOW_MSG
+ );
+ }
}
- }
- });
+ },
+ );
}
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index bddfdcfaf..a9eb6c8d0 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -42,13 +42,16 @@ const EXCEPTIONS: &[(&str, &str)] = &[
("bitmaps", "MPL-2.0+"), // cargo via im-rc
("fiat-crypto", "MIT OR Apache-2.0 OR BSD-1-Clause"), // cargo via pasetors
("subtle", "BSD-3-Clause"), // cargo via pasetors
+ ("dunce", "CC0-1.0 OR MIT-0"), // cargo via gix (and dev dependency)
+ ("imara-diff", "Apache-2.0"), // cargo via gix
+ ("sha1_smol", "BSD-3-Clause"), // cargo via gix
+ ("unicode-bom", "Apache-2.0"), // cargo via gix
("instant", "BSD-3-Clause"), // rustc_driver/tracing-subscriber/parking_lot
("snap", "BSD-3-Clause"), // rustc
("fluent-langneg", "Apache-2.0"), // rustc (fluent translations)
("self_cell", "Apache-2.0"), // rustc (fluent translations)
// FIXME: this dependency violates the documentation comment above:
("fortanix-sgx-abi", "MPL-2.0"), // libstd but only for `sgx` target
- ("dunce", "CC0-1.0"), // cargo (dev dependency)
("similar", "Apache-2.0"), // cargo (dev dependency)
("normalize-line-endings", "Apache-2.0"), // cargo (dev dependency)
("dissimilar", "Apache-2.0"), // rustdoc, rustc_lexer (few tests) via expect-test, (dev deps)
@@ -257,6 +260,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"valuable",
"version_check",
"wasi",
+ "windows",
"winapi",
"winapi-i686-pc-windows-gnu",
"winapi-util",
diff --git a/src/tools/tidy/src/edition.rs b/src/tools/tidy/src/edition.rs
index 8172e3d29..f28f677e0 100644
--- a/src/tools/tidy/src/edition.rs
+++ b/src/tools/tidy/src/edition.rs
@@ -9,27 +9,20 @@ fn is_edition_2021(mut line: &str) -> bool {
}
pub fn check(path: &Path, bad: &mut bool) {
- walk(
- path,
- &mut |path| {
- filter_dirs(path)
- || (path.ends_with("tests") && path.join("COMPILER_TESTS.md").exists())
- },
- &mut |entry, contents| {
- let file = entry.path();
- let filename = file.file_name().unwrap();
- if filename != "Cargo.toml" {
- return;
- }
+ walk(path, |path, _is_dir| filter_dirs(path), &mut |entry, contents| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap();
+ if filename != "Cargo.toml" {
+ return;
+ }
- let is_2021 = contents.lines().any(is_edition_2021);
- if !is_2021 {
- tidy_error!(
- bad,
- "{} doesn't have `edition = \"2021\"` on a separate line",
- file.display()
- );
- }
- },
- );
+ let is_2021 = contents.lines().any(is_edition_2021);
+ if !is_2021 {
+ tidy_error!(
+ bad,
+ "{} doesn't have `edition = \"2021\"` on a separate line",
+ file.display()
+ );
+ }
+ });
}
diff --git a/src/tools/tidy/src/error_codes.rs b/src/tools/tidy/src/error_codes.rs
index c60caa0d4..417ace58c 100644
--- a/src/tools/tidy/src/error_codes.rs
+++ b/src/tools/tidy/src/error_codes.rs
@@ -46,8 +46,10 @@ pub fn check(root_path: &Path, search_paths: &[&Path], verbose: bool, bad: &mut
// Stage 1: create list
let error_codes = extract_error_codes(root_path, &mut errors);
- println!("Found {} error codes", error_codes.len());
- println!("Highest error code: `{}`", error_codes.iter().max().unwrap());
+ if verbose {
+ println!("Found {} error codes", error_codes.len());
+ println!("Highest error code: `{}`", error_codes.iter().max().unwrap());
+ }
// Stage 2: check list has docs
let no_longer_emitted = check_error_codes_docs(root_path, &error_codes, &mut errors, verbose);
@@ -127,7 +129,7 @@ fn check_error_codes_docs(
let mut no_longer_emitted_codes = Vec::new();
- walk(&docs_path, &mut |_| false, &mut |entry, contents| {
+ walk(&docs_path, |_, _| false, &mut |entry, contents| {
let path = entry.path();
// Error if the file isn't markdown.
@@ -319,7 +321,7 @@ fn check_error_codes_used(
let mut found_codes = Vec::new();
- walk_many(search_paths, &mut filter_dirs, &mut |entry, contents| {
+ walk_many(search_paths, |path, _is_dir| filter_dirs(path), &mut |entry, contents| {
let path = entry.path();
// Return early if we aren't looking at a source file.
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index af92e6eb8..2fd4c797b 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -9,8 +9,9 @@
//! * All unstable lang features have tests to ensure they are actually unstable.
//! * Language features in a group are sorted by feature name.
-use crate::walk::{filter_dirs, walk, walk_many};
+use crate::walk::{filter_dirs, filter_not_rust, walk, walk_many};
use std::collections::hash_map::{Entry, HashMap};
+use std::ffi::OsStr;
use std::fmt;
use std::fs;
use std::num::NonZeroU32;
@@ -101,17 +102,15 @@ pub fn check(
&tests_path.join("rustdoc-ui"),
&tests_path.join("rustdoc"),
],
- &mut filter_dirs,
+ |path, _is_dir| {
+ filter_dirs(path)
+ || filter_not_rust(path)
+ || path.file_name() == Some(OsStr::new("features.rs"))
+ || path.file_name() == Some(OsStr::new("diagnostic_list.rs"))
+ },
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
- if !filename.ends_with(".rs")
- || filename == "features.rs"
- || filename == "diagnostic_list.rs"
- {
- return;
- }
-
let filen_underscore = filename.replace('-', "_").replace(".rs", "");
let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);
@@ -219,8 +218,6 @@ pub fn check(
for line in lines {
println!("* {line}");
}
- } else {
- println!("* {} features", features.len());
}
CollectedFeatures { lib: lib_features, lang: features }
@@ -477,11 +474,11 @@ fn get_and_check_lib_features(
fn map_lib_features(
base_src_path: &Path,
- mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize),
+ mf: &mut (dyn Send + Sync + FnMut(Result<(&str, Feature), &str>, &Path, usize)),
) {
walk(
base_src_path,
- &mut |path| filter_dirs(path) || path.ends_with("tests"),
+ |path, _is_dir| filter_dirs(path) || path.ends_with("tests"),
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
index 505f9d724..f59406c40 100644
--- a/src/tools/tidy/src/main.rs
+++ b/src/tools/tidy/src/main.rs
@@ -13,7 +13,7 @@ use std::path::PathBuf;
use std::process;
use std::str::FromStr;
use std::sync::atomic::{AtomicBool, Ordering};
-use std::thread::{scope, ScopedJoinHandle};
+use std::thread::{self, scope, ScopedJoinHandle};
fn main() {
let root_path: PathBuf = env::args_os().nth(1).expect("need path to root of repo").into();
@@ -55,16 +55,28 @@ fn main() {
VecDeque::with_capacity(concurrency.get());
macro_rules! check {
- ($p:ident $(, $args:expr)* ) => {
+ ($p:ident) => {
+ check!(@ $p, name=format!("{}", stringify!($p)));
+ };
+ ($p:ident, $path:expr $(, $args:expr)* ) => {
+ let shortened = $path.strip_prefix(&root_path).unwrap();
+ let name = if shortened == std::path::Path::new("") {
+ format!("{} (.)", stringify!($p))
+ } else {
+ format!("{} ({})", stringify!($p), shortened.display())
+ };
+ check!(@ $p, name=name, $path $(,$args)*);
+ };
+ (@ $p:ident, name=$name:expr $(, $args:expr)* ) => {
drain_handles(&mut handles);
- let handle = s.spawn(|| {
+ let handle = thread::Builder::new().name($name).spawn_scoped(s, || {
let mut flag = false;
$p::check($($args, )* &mut flag);
if (flag) {
bad.store(true, Ordering::Relaxed);
}
- });
+ }).unwrap();
handles.push_back(handle);
}
}
@@ -91,7 +103,6 @@ fn main() {
// Checks that need to be done for both the compiler and std libraries.
check!(unit_tests, &src_path);
- check!(unit_tests, &tests_path);
check!(unit_tests, &compiler_path);
check!(unit_tests, &library_path);
@@ -107,10 +118,8 @@ fn main() {
check!(edition, &src_path);
check!(edition, &compiler_path);
check!(edition, &library_path);
- check!(edition, &tests_path);
check!(alphabetical, &src_path);
- check!(alphabetical, &tests_path);
check!(alphabetical, &compiler_path);
check!(alphabetical, &library_path);
diff --git a/src/tools/tidy/src/mir_opt_tests.rs b/src/tools/tidy/src/mir_opt_tests.rs
index 2a9dcac2e..2f6918510 100644
--- a/src/tools/tidy/src/mir_opt_tests.rs
+++ b/src/tools/tidy/src/mir_opt_tests.rs
@@ -3,19 +3,24 @@
use std::collections::HashSet;
use std::path::{Path, PathBuf};
+use crate::walk::walk_no_read;
+
fn check_unused_files(path: &Path, bless: bool, bad: &mut bool) {
let mut rs_files = Vec::<PathBuf>::new();
let mut output_files = HashSet::<PathBuf>::new();
- let files = walkdir::WalkDir::new(&path.join("mir-opt")).into_iter();
- for file in files.filter_map(Result::ok).filter(|e| e.file_type().is_file()) {
- let filepath = file.path();
- if filepath.extension() == Some("rs".as_ref()) {
- rs_files.push(filepath.to_owned());
- } else {
- output_files.insert(filepath.to_owned());
- }
- }
+ walk_no_read(
+ &[&path.join("mir-opt")],
+ |path, _is_dir| path.file_name() == Some("README.md".as_ref()),
+ &mut |file| {
+ let filepath = file.path();
+ if filepath.extension() == Some("rs".as_ref()) {
+ rs_files.push(filepath.to_owned());
+ } else {
+ output_files.insert(filepath.to_owned());
+ }
+ },
+ );
for file in rs_files {
for bw in [32, 64] {
@@ -26,16 +31,14 @@ fn check_unused_files(path: &Path, bless: bool, bad: &mut bool) {
}
for extra in output_files {
- if extra.file_name() != Some("README.md".as_ref()) {
- if !bless {
- tidy_error!(
- bad,
- "the following output file is not associated with any mir-opt test, you can remove it: {}",
- extra.display()
- );
- } else {
- let _ = std::fs::remove_file(extra);
- }
+ if !bless {
+ tidy_error!(
+ bad,
+ "the following output file is not associated with any mir-opt test, you can remove it: {}",
+ extra.display()
+ );
+ } else {
+ let _ = std::fs::remove_file(extra);
}
}
}
diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs
index f4592fdcf..6fd41e833 100644
--- a/src/tools/tidy/src/pal.rs
+++ b/src/tools/tidy/src/pal.rs
@@ -31,7 +31,6 @@
//! this in the long term.
use crate::walk::{filter_dirs, walk};
-use std::iter::Iterator;
use std::path::Path;
// Paths that may contain platform-specific code.
@@ -59,7 +58,6 @@ const EXCEPTION_PATHS: &[&str] = &[
"library/std/src/path.rs",
"library/std/src/sys_common", // Should only contain abstractions over platforms
"library/std/src/net/test.rs", // Utility helpers for tests
- "library/std/src/panic.rs", // fuchsia-specific panic backtrace handling
"library/std/src/personality.rs",
"library/std/src/personality/",
];
@@ -68,7 +66,7 @@ pub fn check(path: &Path, bad: &mut bool) {
// Sanity check that the complex parsing here works.
let mut saw_target_arch = false;
let mut saw_cfg_bang = false;
- walk(path, &mut filter_dirs, &mut |entry, contents| {
+ walk(path, |path, _is_dir| filter_dirs(path), &mut |entry, contents| {
let file = entry.path();
let filestr = file.to_string_lossy().replace("\\", "/");
if !filestr.ends_with(".rs") {
@@ -128,6 +126,7 @@ fn check_cfgs(
|| cfg.contains("target_env")
|| cfg.contains("target_abi")
|| cfg.contains("target_vendor")
+ || cfg.contains("target_family")
|| cfg.contains("unix")
|| cfg.contains("windows");
diff --git a/src/tools/tidy/src/rustdoc_gui_tests.rs b/src/tools/tidy/src/rustdoc_gui_tests.rs
index feb513df3..91776bc98 100644
--- a/src/tools/tidy/src/rustdoc_gui_tests.rs
+++ b/src/tools/tidy/src/rustdoc_gui_tests.rs
@@ -5,10 +5,7 @@ use std::path::Path;
pub fn check(path: &Path, bad: &mut bool) {
crate::walk::walk(
&path.join("rustdoc-gui"),
- &mut |p| {
- // If there is no extension, it's very likely a folder and we want to go into it.
- p.extension().map(|e| e != "goml").unwrap_or(false)
- },
+ |p, is_dir| !is_dir && p.extension().map_or(true, |e| e != "goml"),
&mut |entry, content| {
for line in content.lines() {
if !line.starts_with("// ") {
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index 6a0855405..a2012db90 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -19,7 +19,7 @@
use crate::walk::{filter_dirs, walk};
use regex::{Regex, RegexSet};
-use std::path::Path;
+use std::{ffi::OsStr, path::Path};
/// Error code markdown is restricted to 80 columns because they can be
/// displayed on the console with --example.
@@ -171,9 +171,9 @@ fn contains_ignore_directive(can_contain: bool, contents: &str, check: &str) ->
}
macro_rules! suppressible_tidy_err {
- ($err:ident, $skip:ident, $msg:expr) => {
+ ($err:ident, $skip:ident, $msg:literal) => {
if let Directive::Deny = $skip {
- $err($msg);
+ $err(&format!($msg));
} else {
$skip = Directive::Ignore(true);
}
@@ -227,22 +227,41 @@ fn is_unexplained_ignore(extension: &str, line: &str) -> bool {
}
pub fn check(path: &Path, bad: &mut bool) {
- fn skip(path: &Path) -> bool {
- filter_dirs(path) || skip_markdown_path(path)
+ fn skip(path: &Path, is_dir: bool) -> bool {
+ if path.file_name().map_or(false, |name| name.to_string_lossy().starts_with(".#")) {
+ // vim or emacs temporary file
+ return true;
+ }
+
+ if filter_dirs(path) || skip_markdown_path(path) {
+ return true;
+ }
+
+ // Don't check extensions for directories
+ if is_dir {
+ return false;
+ }
+
+ let extensions = ["rs", "py", "js", "sh", "c", "cpp", "h", "md", "css", "ftl", "goml"];
+
+ // NB: don't skip paths without extensions (or else we'll skip all directories and will only check top level files)
+ if path.extension().map_or(true, |ext| !extensions.iter().any(|e| ext == OsStr::new(e))) {
+ return true;
+ }
+
+ // We only check CSS files in rustdoc.
+ path.extension().map_or(false, |e| e == "css") && !is_in(path, "src", "librustdoc")
}
+
let problematic_consts_strings: Vec<String> = (PROBLEMATIC_CONSTS.iter().map(u32::to_string))
.chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:x}", v)))
.chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:X}", v)))
.collect();
let problematic_regex = RegexSet::new(problematic_consts_strings.as_slice()).unwrap();
- walk(path, &mut skip, &mut |entry, contents| {
+
+ walk(path, skip, &mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
- let extensions =
- [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md", ".css", ".ftl", ".goml"];
- if extensions.iter().all(|e| !filename.ends_with(e)) || filename.starts_with(".#") {
- return;
- }
let is_style_file = filename.ends_with(".css");
let under_rustfmt = filename.ends_with(".rs") &&
@@ -253,11 +272,6 @@ pub fn check(path: &Path, bad: &mut bool) {
a.ends_with("src/doc/book")
});
- if is_style_file && !is_in(file, "src", "librustdoc") {
- // We only check CSS files in rustdoc.
- return;
- }
-
if contents.is_empty() {
tidy_error!(bad, "{}: empty file", file.display());
}
@@ -300,10 +314,13 @@ pub fn check(path: &Path, bad: &mut bool) {
contains_ignore_directive(can_contain, &contents, "leading-newlines");
let mut skip_copyright = contains_ignore_directive(can_contain, &contents, "copyright");
let mut skip_dbg = contains_ignore_directive(can_contain, &contents, "dbg");
+ let mut skip_odd_backticks =
+ contains_ignore_directive(can_contain, &contents, "odd-backticks");
let mut leading_new_lines = false;
let mut trailing_new_lines = 0;
let mut lines = 0;
let mut last_safety_comment = false;
+ let mut comment_block: Option<(usize, usize)> = None;
let is_test = file.components().any(|c| c.as_os_str() == "tests");
// scanning the whole file for multiple needles at once is more efficient than
// executing lines times needles separate searches.
@@ -351,7 +368,7 @@ pub fn check(path: &Path, bad: &mut bool) {
suppressible_tidy_err!(
err,
skip_line_length,
- &format!("line longer than {max_columns} chars")
+ "line longer than {max_columns} chars"
);
}
if !is_style_file && line.contains('\t') {
@@ -415,15 +432,55 @@ pub fn check(path: &Path, bad: &mut bool) {
// For now only enforce in compiler
let is_compiler = || file.components().any(|c| c.as_os_str() == "compiler");
- if is_compiler()
- && line.contains("//")
- && line
- .chars()
- .collect::<Vec<_>>()
- .windows(4)
- .any(|cs| matches!(cs, ['.', ' ', ' ', last] if last.is_alphabetic()))
- {
- err(DOUBLE_SPACE_AFTER_DOT)
+
+ if is_compiler() {
+ if line.contains("//")
+ && line
+ .chars()
+ .collect::<Vec<_>>()
+ .windows(4)
+ .any(|cs| matches!(cs, ['.', ' ', ' ', last] if last.is_alphabetic()))
+ {
+ err(DOUBLE_SPACE_AFTER_DOT)
+ }
+
+ if filename.ends_with(".ftl") {
+ let line_backticks = trimmed.chars().filter(|ch| *ch == '`').count();
+ if line_backticks % 2 == 1 {
+ suppressible_tidy_err!(err, skip_odd_backticks, "odd number of backticks");
+ }
+ } else if trimmed.contains("//") {
+ let (start_line, mut backtick_count) = comment_block.unwrap_or((i + 1, 0));
+ let line_backticks = trimmed.chars().filter(|ch| *ch == '`').count();
+ let comment_text = trimmed.split("//").nth(1).unwrap();
+ // This check ensures that we don't lint for code that has `//` in a string literal
+ if line_backticks % 2 == 1 {
+ backtick_count += comment_text.chars().filter(|ch| *ch == '`').count();
+ }
+ comment_block = Some((start_line, backtick_count));
+ } else {
+ if let Some((start_line, backtick_count)) = comment_block.take() {
+ if backtick_count % 2 == 1 {
+ let mut err = |msg: &str| {
+ tidy_error!(bad, "{}:{start_line}: {msg}", file.display());
+ };
+ let block_len = (i + 1) - start_line;
+ if block_len == 1 {
+ suppressible_tidy_err!(
+ err,
+ skip_odd_backticks,
+ "comment with odd number of backticks"
+ );
+ } else {
+ suppressible_tidy_err!(
+ err,
+ skip_odd_backticks,
+ "{block_len}-line comment block with odd number of backticks"
+ );
+ }
+ }
+ }
+ }
}
}
if leading_new_lines {
@@ -441,7 +498,7 @@ pub fn check(path: &Path, bad: &mut bool) {
n => suppressible_tidy_err!(
err,
skip_trailing_newlines,
- &format!("too many trailing newlines ({n})")
+ "too many trailing newlines ({n})"
),
};
if lines > LINES {
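
The new `odd-backticks` lint above accumulates backtick counts across consecutive `//` comment lines and reports blocks whose total is odd. A rough sketch of that grouping logic in isolation, with the reporting callback and the single-line/multi-line message distinction simplified relative to the diff:

```rust
fn check_odd_backticks(contents: &str, mut report: impl FnMut(usize, String)) {
    let mut comment_block: Option<(usize, usize)> = None; // (start line, backtick count)
    for (i, line) in contents.lines().enumerate() {
        let trimmed = line.trim();
        if trimmed.contains("//") {
            let (start_line, mut count) = comment_block.unwrap_or((i + 1, 0));
            // Only count the comment text when the whole line's backtick total is odd,
            // so `//` inside a string literal with balanced backticks doesn't trip it.
            if trimmed.chars().filter(|&c| c == '`').count() % 2 == 1 {
                count += trimmed.split("//").nth(1).unwrap().chars().filter(|&c| c == '`').count();
            }
            comment_block = Some((start_line, count));
        } else if let Some((start_line, count)) = comment_block.take() {
            if count % 2 == 1 {
                report(
                    start_line,
                    format!("{}-line comment block with odd number of backticks", i + 1 - start_line),
                );
            }
        }
    }
}

fn main() {
    check_odd_backticks(
        "// fine: `code`\nfn f() {}\n// broken: `code\nfn g() {}\n",
        |line, msg| eprintln!("line {line}: {msg}"),
    );
}
```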
diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs
index d7a157672..de022be28 100644
--- a/src/tools/tidy/src/target_specific_tests.rs
+++ b/src/tools/tidy/src/target_specific_tests.rs
@@ -4,6 +4,8 @@
use std::collections::BTreeMap;
use std::path::Path;
+use crate::walk::filter_not_rust;
+
const COMMENT: &str = "//";
const LLVM_COMPONENTS_HEADER: &str = "needs-llvm-components:";
const COMPILE_FLAGS_HEADER: &str = "compile-flags:";
@@ -35,61 +37,57 @@ struct RevisionInfo<'a> {
}
pub fn check(path: &Path, bad: &mut bool) {
- crate::walk::walk(
- path,
- &mut |path| path.extension().map(|p| p == "rs") == Some(false),
- &mut |entry, content| {
- let file = entry.path().display();
- let mut header_map = BTreeMap::new();
- iter_header(content, &mut |cfg, directive| {
- if let Some(value) = directive.strip_prefix(LLVM_COMPONENTS_HEADER) {
- let info = header_map.entry(cfg).or_insert(RevisionInfo::default());
- let comp_vec = info.llvm_components.get_or_insert(Vec::new());
- for component in value.split(' ') {
- let component = component.trim();
- if !component.is_empty() {
- comp_vec.push(component);
- }
- }
- } else if directive.starts_with(COMPILE_FLAGS_HEADER) {
- let compile_flags = &directive[COMPILE_FLAGS_HEADER.len()..];
- if let Some((_, v)) = compile_flags.split_once("--target") {
- if let Some((arch, _)) =
- v.trim_start_matches(|c| c == ' ' || c == '=').split_once("-")
- {
- let info = header_map.entry(cfg).or_insert(RevisionInfo::default());
- info.target_arch.replace(arch);
- } else {
- eprintln!("{file}: seems to have a malformed --target value");
- *bad = true;
- }
+ crate::walk::walk(path, |path, _is_dir| filter_not_rust(path), &mut |entry, content| {
+ let file = entry.path().display();
+ let mut header_map = BTreeMap::new();
+ iter_header(content, &mut |cfg, directive| {
+ if let Some(value) = directive.strip_prefix(LLVM_COMPONENTS_HEADER) {
+ let info = header_map.entry(cfg).or_insert(RevisionInfo::default());
+ let comp_vec = info.llvm_components.get_or_insert(Vec::new());
+ for component in value.split(' ') {
+ let component = component.trim();
+ if !component.is_empty() {
+ comp_vec.push(component);
}
}
- });
- for (rev, RevisionInfo { target_arch, llvm_components }) in &header_map {
- let rev = rev.unwrap_or("[unspecified]");
- match (target_arch, llvm_components) {
- (None, None) => {}
- (Some(_), None) => {
- eprintln!(
- "{}: revision {} should specify `{}` as it has `--target` set",
- file, rev, LLVM_COMPONENTS_HEADER
- );
+ } else if directive.starts_with(COMPILE_FLAGS_HEADER) {
+ let compile_flags = &directive[COMPILE_FLAGS_HEADER.len()..];
+ if let Some((_, v)) = compile_flags.split_once("--target") {
+ if let Some((arch, _)) =
+ v.trim_start_matches(|c| c == ' ' || c == '=').split_once("-")
+ {
+ let info = header_map.entry(cfg).or_insert(RevisionInfo::default());
+ info.target_arch.replace(arch);
+ } else {
+ eprintln!("{file}: seems to have a malformed --target value");
*bad = true;
}
- (None, Some(_)) => {
- eprintln!(
- "{}: revision {} should not specify `{}` as it doesn't need `--target`",
- file, rev, LLVM_COMPONENTS_HEADER
- );
- *bad = true;
- }
- (Some(_), Some(_)) => {
- // FIXME: check specified components against the target architectures we
- // gathered.
- }
}
}
- },
- );
+ });
+ for (rev, RevisionInfo { target_arch, llvm_components }) in &header_map {
+ let rev = rev.unwrap_or("[unspecified]");
+ match (target_arch, llvm_components) {
+ (None, None) => {}
+ (Some(_), None) => {
+ eprintln!(
+ "{}: revision {} should specify `{}` as it has `--target` set",
+ file, rev, LLVM_COMPONENTS_HEADER
+ );
+ *bad = true;
+ }
+ (None, Some(_)) => {
+ eprintln!(
+ "{}: revision {} should not specify `{}` as it doesn't need `--target`",
+ file, rev, LLVM_COMPONENTS_HEADER
+ );
+ *bad = true;
+ }
+ (Some(_), Some(_)) => {
+ // FIXME: check specified components against the target architectures we
+ // gathered.
+ }
+ }
+ }
+ });
}
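
The block above is mostly reindented by the closure change, but it keeps the `--target` parsing that drives the check: take everything after `--target`, strip spaces or `=`, and treat the part before the first `-` as the architecture. A small sketch of just that extraction; the test strings are illustrative:

```rust
fn target_arch(compile_flags: &str) -> Option<&str> {
    let (_, rest) = compile_flags.split_once("--target")?;
    let (arch, _) = rest.trim_start_matches(|c| c == ' ' || c == '=').split_once('-')?;
    Some(arch)
}

fn main() {
    assert_eq!(target_arch("-O --target=x86_64-unknown-linux-gnu"), Some("x86_64"));
    assert_eq!(target_arch("--target thumbv7em-none-eabi"), Some("thumbv7em"));
    assert_eq!(target_arch("-C opt-level=3"), None); // no --target: nothing to check
}
```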
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index 409f75631..29664c854 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -3,87 +3,99 @@
//! - there are no stray `.stderr` files
use ignore::Walk;
-use ignore::WalkBuilder;
+use std::collections::HashMap;
use std::fs;
-use std::path::Path;
+use std::path::{Path, PathBuf};
-const ENTRY_LIMIT: usize = 1000;
// FIXME: The following limits should be reduced eventually.
-const ROOT_ENTRY_LIMIT: usize = 940;
-const ISSUES_ENTRY_LIMIT: usize = 1978;
+const ENTRY_LIMIT: usize = 885;
+const ROOT_ENTRY_LIMIT: usize = 891;
+const ISSUES_ENTRY_LIMIT: usize = 1977;
-fn check_entries(path: &Path, bad: &mut bool) {
- for dir in Walk::new(&path.join("ui")) {
- if let Ok(entry) = dir {
- if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
- let dir_path = entry.path();
- // Use special values for these dirs.
- let is_root = path.join("ui") == dir_path;
- let is_issues_dir = path.join("ui/issues") == dir_path;
- let limit = if is_root {
- ROOT_ENTRY_LIMIT
- } else if is_issues_dir {
- ISSUES_ENTRY_LIMIT
- } else {
- ENTRY_LIMIT
- };
+fn check_entries(tests_path: &Path, bad: &mut bool) {
+ let mut directories: HashMap<PathBuf, usize> = HashMap::new();
- let count = WalkBuilder::new(&dir_path)
- .max_depth(Some(1))
- .build()
- .into_iter()
- .collect::<Vec<_>>()
- .len()
- - 1; // remove the dir itself
+ for dir in Walk::new(&tests_path.join("ui")) {
+ if let Ok(entry) = dir {
+ let parent = entry.path().parent().unwrap().to_path_buf();
+ *directories.entry(parent).or_default() += 1;
+ }
+ }
- if count > limit {
- tidy_error!(
- bad,
- "following path contains more than {} entries, \
- you should move the test to some relevant subdirectory (current: {}): {}",
- limit,
- count,
- dir_path.display()
- );
- }
- }
+ let (mut max, mut max_root, mut max_issues) = (0usize, 0usize, 0usize);
+ for (dir_path, count) in directories {
+ // Use special values for these dirs.
+ let is_root = tests_path.join("ui") == dir_path;
+ let is_issues_dir = tests_path.join("ui/issues") == dir_path;
+ let (limit, maxcnt) = if is_root {
+ (ROOT_ENTRY_LIMIT, &mut max_root)
+ } else if is_issues_dir {
+ (ISSUES_ENTRY_LIMIT, &mut max_issues)
+ } else {
+ (ENTRY_LIMIT, &mut max)
+ };
+ *maxcnt = (*maxcnt).max(count);
+ if count > limit {
+ tidy_error!(
+ bad,
+ "following path contains more than {} entries, \
+ you should move the test to some relevant subdirectory (current: {}): {}",
+ limit,
+ count,
+ dir_path.display()
+ );
}
}
+ if ENTRY_LIMIT > max {
+ tidy_error!(bad, "`ENTRY_LIMIT` is too high (is {ENTRY_LIMIT}, should be {max})");
+ }
+ if ROOT_ENTRY_LIMIT > max_root {
+ tidy_error!(
+ bad,
+ "`ROOT_ENTRY_LIMIT` is too high (is {ROOT_ENTRY_LIMIT}, should be {max_root})"
+ );
+ }
+ if ISSUES_ENTRY_LIMIT > max_issues {
+ tidy_error!(
+ bad,
+ "`ISSUES_ENTRY_LIMIT` is too high (is {ISSUES_ENTRY_LIMIT}, should be {max_issues})"
+ );
+ }
}
pub fn check(path: &Path, bad: &mut bool) {
check_entries(&path, bad);
- for path in &[&path.join("ui"), &path.join("ui-fulldeps")] {
- crate::walk::walk_no_read(path, &mut |_| false, &mut |entry| {
- let file_path = entry.path();
- if let Some(ext) = file_path.extension() {
- if ext == "stderr" || ext == "stdout" {
- // Test output filenames have one of the formats:
- // ```
- // $testname.stderr
- // $testname.$mode.stderr
- // $testname.$revision.stderr
- // $testname.$revision.$mode.stderr
- // ```
- //
- // For now, just make sure that there is a corresponding
- // `$testname.rs` file.
- //
- // NB: We do not use file_stem() as some file names have multiple `.`s and we
- // must strip all of them.
- let testname =
- file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0;
- if !file_path.with_file_name(testname).with_extension("rs").exists() {
- tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path);
- }
+ let (ui, ui_fulldeps) = (path.join("ui"), path.join("ui-fulldeps"));
+ let paths = [ui.as_path(), ui_fulldeps.as_path()];
+ crate::walk::walk_no_read(&paths, |_, _| false, &mut |entry| {
+ let file_path = entry.path();
+ if let Some(ext) = file_path.extension() {
+ if ext == "stderr" || ext == "stdout" {
+ // Test output filenames have one of the formats:
+ // ```
+ // $testname.stderr
+ // $testname.$mode.stderr
+ // $testname.$revision.stderr
+ // $testname.$revision.$mode.stderr
+ // ```
+ //
+ // For now, just make sure that there is a corresponding
+ // `$testname.rs` file.
+ //
+ // NB: We do not use file_stem() as some file names have multiple `.`s and we
+ // must strip all of them.
+ let testname =
+ file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0;
+ if !file_path.with_file_name(testname).with_extension("rs").exists() {
+ tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path);
+ }
- if let Ok(metadata) = fs::metadata(file_path) {
- if metadata.len() == 0 {
- tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path);
- }
+ if let Ok(metadata) = fs::metadata(file_path) {
+ if metadata.len() == 0 {
+ tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path);
}
}
}
- });
- }
+ }
+ });
}
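
The rewritten `check_entries` above replaces one `WalkBuilder` per directory with a single pass that tallies every walked entry under its parent, then compares each directory's count to a limit and also errors when a limit is set higher than the observed maximum, so the constants ratchet downward. A sketch of that single-pass counting under assumed inputs (the `tests/ui` path and `LIMIT` value here are placeholders):

```rust
use ignore::Walk;
use std::collections::HashMap;
use std::path::PathBuf;

fn main() {
    const LIMIT: usize = 1000;
    let mut directories: HashMap<PathBuf, usize> = HashMap::new();
    for entry in Walk::new("tests/ui").flatten() {
        if let Some(parent) = entry.path().parent() {
            *directories.entry(parent.to_path_buf()).or_default() += 1;
        }
    }
    let max = directories.values().copied().max().unwrap_or(0);
    for (dir, count) in &directories {
        if *count > LIMIT {
            eprintln!("{} has {count} entries (limit {LIMIT})", dir.display());
        }
    }
    // Mirror of the new "limit is too high" check: flag a limit that could be lowered.
    if LIMIT > max {
        eprintln!("limit {LIMIT} could be lowered to {max}");
    }
}
```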
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
index 27f36c855..0a5dad887 100644
--- a/src/tools/tidy/src/unit_tests.rs
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -11,18 +11,19 @@ use crate::walk::{filter_dirs, walk};
use std::path::Path;
pub fn check(root_path: &Path, bad: &mut bool) {
- let core = &root_path.join("core");
- let core_tests = &core.join("tests");
- let core_benches = &core.join("benches");
- let is_core = |path: &Path| {
- path.starts_with(core) && !(path.starts_with(core_tests) || path.starts_with(core_benches))
+ let core = root_path.join("core");
+ let core_copy = core.clone();
+ let core_tests = core.join("tests");
+ let core_benches = core.join("benches");
+ let is_core = move |path: &Path| {
+ path.starts_with(&core)
+ && !(path.starts_with(&core_tests) || path.starts_with(&core_benches))
};
- let mut skip = |path: &Path| {
+ let skip = move |path: &Path, is_dir| {
let file_name = path.file_name().unwrap_or_default();
- if path.is_dir() {
+ if is_dir {
filter_dirs(path)
- || path.ends_with("tests")
|| path.ends_with("src/doc")
|| (file_name == "tests" || file_name == "benches") && !is_core(path)
} else {
@@ -35,9 +36,9 @@ pub fn check(root_path: &Path, bad: &mut bool) {
}
};
- walk(root_path, &mut skip, &mut |entry, contents| {
+ walk(root_path, skip, &mut |entry, contents| {
let path = entry.path();
- let is_core = path.starts_with(core);
+ let is_core = path.starts_with(&core_copy);
for (i, line) in contents.lines().enumerate() {
let line = line.trim();
let is_test = || line.contains("#[test]") && !line.contains("`#[test]");
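
The `unit_tests` change above is driven by the new walker signature: the skip closure must now be `Fn + Send + Sync + 'static`, so it takes ownership of the paths via `move`, and a clone (`core_copy`) is kept for use after the closure is built. A toy illustration of that ownership shuffle; the `walk` stub and paths are stand-ins for the tidy ones:

```rust
use std::path::{Path, PathBuf};

fn walk(skip: impl Fn(&Path, bool) -> bool + Send + Sync + 'static, f: &mut dyn FnMut(&Path)) {
    // A real walker would traverse the tree; this just exercises the closure once.
    let candidate = Path::new("library/core/src/lib.rs");
    if !skip(candidate, false) {
        f(candidate);
    }
}

fn main() {
    let core = PathBuf::from("library/core");
    let core_copy = core.clone(); // still usable after `core` moves into the closure
    let core_tests = core.join("tests");

    let skip = move |path: &Path, _is_dir: bool| {
        path.starts_with(&core_tests) || !path.starts_with(&core)
    };

    walk(skip, &mut |path| {
        println!("{} is under {}", path.display(), core_copy.display());
    });
}
```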
diff --git a/src/tools/tidy/src/walk.rs b/src/tools/tidy/src/walk.rs
index 4cfb70fa3..3539943eb 100644
--- a/src/tools/tidy/src/walk.rs
+++ b/src/tools/tidy/src/walk.rs
@@ -1,9 +1,8 @@
-use std::fs::File;
-use std::io::Read;
-use walkdir::{DirEntry, WalkDir};
+use ignore::DirEntry;
-use std::path::Path;
+use std::{ffi::OsStr, fs::File, io::Read, path::Path};
+/// The default directory filter.
pub fn filter_dirs(path: &Path) -> bool {
let skip = [
"tidy-test-file",
@@ -30,40 +29,57 @@ pub fn filter_dirs(path: &Path) -> bool {
// Filter RLS output directories
"target/rls",
"src/bootstrap/target",
+ "vendor",
];
skip.iter().any(|p| path.ends_with(p))
}
-pub fn walk_many(
- paths: &[&Path],
- skip: &mut dyn FnMut(&Path) -> bool,
+/// Filter for only files that end in `.rs`.
+pub fn filter_not_rust(path: &Path) -> bool {
+ path.extension() != Some(OsStr::new("rs")) && !path.is_dir()
+}
+
+pub fn walk(
+ path: &Path,
+ skip: impl Send + Sync + 'static + Fn(&Path, bool) -> bool,
f: &mut dyn FnMut(&DirEntry, &str),
) {
- for path in paths {
- walk(path, skip, f);
- }
+ walk_many(&[path], skip, f);
}
-pub fn walk(path: &Path, skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&DirEntry, &str)) {
- let mut contents = String::new();
- walk_no_read(path, skip, &mut |entry| {
+pub fn walk_many(
+ paths: &[&Path],
+ skip: impl Send + Sync + 'static + Fn(&Path, bool) -> bool,
+ f: &mut dyn FnMut(&DirEntry, &str),
+) {
+ let mut contents = Vec::new();
+ walk_no_read(paths, skip, &mut |entry| {
contents.clear();
- if t!(File::open(entry.path()), entry.path()).read_to_string(&mut contents).is_err() {
- contents.clear();
- }
- f(&entry, &contents);
+ let mut file = t!(File::open(entry.path()), entry.path());
+ t!(file.read_to_end(&mut contents), entry.path());
+ let contents_str = match std::str::from_utf8(&contents) {
+ Ok(s) => s,
+ Err(_) => return, // skip this file
+ };
+ f(&entry, &contents_str);
});
}
pub(crate) fn walk_no_read(
- path: &Path,
- skip: &mut dyn FnMut(&Path) -> bool,
+ paths: &[&Path],
+ skip: impl Send + Sync + 'static + Fn(&Path, bool) -> bool,
f: &mut dyn FnMut(&DirEntry),
) {
- let walker = WalkDir::new(path).into_iter().filter_entry(|e| !skip(e.path()));
- for entry in walker {
+ let mut walker = ignore::WalkBuilder::new(paths[0]);
+ for path in &paths[1..] {
+ walker.add(path);
+ }
+ let walker = walker.filter_entry(move |e| {
+ !skip(e.path(), e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
+ });
+ for entry in walker.build() {
if let Ok(entry) = entry {
- if entry.file_type().is_dir() {
+ if entry.file_type().map_or(true, |kind| kind.is_dir() || kind.is_symlink()) {
continue;
}
f(&entry);
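
The rewritten walker above moves tidy from `walkdir` to the `ignore` crate: one `WalkBuilder` takes several roots, `filter_entry` applies the two-argument skip closure to files and directories alike, and entries without a plain file type (directories, symlinks) are dropped before the callback runs. A minimal sketch of that shape with placeholder roots and skip rule:

```rust
use ignore::WalkBuilder;
use std::path::Path;

fn walk_files(roots: &[&Path], skip: impl Fn(&Path, bool) -> bool + Send + Sync + 'static) {
    let mut builder = WalkBuilder::new(roots[0]);
    for root in &roots[1..] {
        builder.add(root);
    }
    builder.filter_entry(move |entry| {
        let is_dir = entry.file_type().map_or(false, |ft| ft.is_dir());
        !skip(entry.path(), is_dir)
    });
    for entry in builder.build().flatten() {
        // Skip directories, symlinks, and anything without a file type (e.g. stdin).
        if entry.file_type().map_or(true, |ft| ft.is_dir() || ft.is_symlink()) {
            continue;
        }
        println!("{}", entry.path().display());
    }
}

fn main() {
    walk_files(
        &[Path::new("src"), Path::new("library")],
        |path, is_dir| is_dir && path.ends_with("target"),
    );
}
```

Because `ignore` honours `.gitignore` by default, target and vendor directories largely filter themselves; the explicit `"vendor"` entry added to `filter_dirs` above covers checkouts where they are tracked.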
diff --git a/src/tools/unicode-table-generator/src/case_mapping.rs b/src/tools/unicode-table-generator/src/case_mapping.rs
index 992aac1f8..7a978db62 100644
--- a/src/tools/unicode-table-generator/src/case_mapping.rs
+++ b/src/tools/unicode-table-generator/src/case_mapping.rs
@@ -1,36 +1,62 @@
use crate::{fmt_list, UnicodeData};
-use std::fmt;
+use std::{
+ char,
+ collections::BTreeMap,
+ fmt::{self, Write},
+};
+
+const INDEX_MASK: u32 = 1 << 22;
pub(crate) fn generate_case_mapping(data: &UnicodeData) -> String {
let mut file = String::new();
+ write!(file, "const INDEX_MASK: u32 = 0x{:x};", INDEX_MASK).unwrap();
+ file.push_str("\n\n");
file.push_str(HEADER.trim_start());
-
- let decl_type = "&[(char, [char; 3])]";
-
- file.push_str(&format!(
- "static LOWERCASE_TABLE: {} = &[{}];",
- decl_type,
- fmt_list(data.to_lower.iter().map(to_mapping))
- ));
+ file.push('\n');
+ file.push_str(&generate_tables("LOWER", &data.to_lower));
file.push_str("\n\n");
- file.push_str(&format!(
- "static UPPERCASE_TABLE: {} = &[{}];",
- decl_type,
- fmt_list(data.to_upper.iter().map(to_mapping))
- ));
+ file.push_str(&generate_tables("UPPER", &data.to_upper));
file
}
-fn to_mapping((key, (a, b, c)): (&u32, &(u32, u32, u32))) -> (CharEscape, [CharEscape; 3]) {
- (
- CharEscape(std::char::from_u32(*key).unwrap()),
- [
- CharEscape(std::char::from_u32(*a).unwrap()),
- CharEscape(std::char::from_u32(*b).unwrap()),
- CharEscape(std::char::from_u32(*c).unwrap()),
- ],
- )
+fn generate_tables(case: &str, data: &BTreeMap<u32, (u32, u32, u32)>) -> String {
+ let mut mappings = Vec::with_capacity(data.len());
+ let mut multis = Vec::new();
+
+ for (&key, &(a, b, c)) in data.iter() {
+ let key = char::from_u32(key).unwrap();
+
+ if key.is_ascii() {
+ continue;
+ }
+
+ let value = if b == 0 && c == 0 {
+ a
+ } else {
+ multis.push([
+ CharEscape(char::from_u32(a).unwrap()),
+ CharEscape(char::from_u32(b).unwrap()),
+ CharEscape(char::from_u32(c).unwrap()),
+ ]);
+
+ INDEX_MASK | (u32::try_from(multis.len()).unwrap() - 1)
+ };
+
+ mappings.push((CharEscape(key), value));
+ }
+
+ let mut tables = String::new();
+
+ write!(tables, "static {}CASE_TABLE: &[(char, u32)] = &[{}];", case, fmt_list(mappings))
+ .unwrap();
+
+ tables.push_str("\n\n");
+
+ write!(tables, "static {}CASE_TABLE_MULTI: &[[char; 3]] = &[{}];", case, fmt_list(multis))
+ .unwrap();
+
+ tables
}
struct CharEscape(char);
@@ -46,10 +72,16 @@ pub fn to_lower(c: char) -> [char; 3] {
if c.is_ascii() {
[(c as u8).to_ascii_lowercase() as char, '\0', '\0']
} else {
- match bsearch_case_table(c, LOWERCASE_TABLE) {
- None => [c, '\0', '\0'],
- Some(index) => LOWERCASE_TABLE[index].1,
- }
+ LOWERCASE_TABLE
+ .binary_search_by(|&(key, _)| key.cmp(&c))
+ .map(|i| {
+ let u = LOWERCASE_TABLE[i].1;
+ char::from_u32(u).map(|c| [c, '\0', '\0']).unwrap_or_else(|| {
+ // SAFETY: Index comes from statically generated table
+ unsafe { *LOWERCASE_TABLE_MULTI.get_unchecked((u & (INDEX_MASK - 1)) as usize) }
+ })
+ })
+ .unwrap_or([c, '\0', '\0'])
}
}
@@ -57,14 +89,16 @@ pub fn to_upper(c: char) -> [char; 3] {
if c.is_ascii() {
[(c as u8).to_ascii_uppercase() as char, '\0', '\0']
} else {
- match bsearch_case_table(c, UPPERCASE_TABLE) {
- None => [c, '\0', '\0'],
- Some(index) => UPPERCASE_TABLE[index].1,
- }
+ UPPERCASE_TABLE
+ .binary_search_by(|&(key, _)| key.cmp(&c))
+ .map(|i| {
+ let u = UPPERCASE_TABLE[i].1;
+ char::from_u32(u).map(|c| [c, '\0', '\0']).unwrap_or_else(|| {
+ // SAFETY: Index comes from statically generated table
+ unsafe { *UPPERCASE_TABLE_MULTI.get_unchecked((u & (INDEX_MASK - 1)) as usize) }
+ })
+ })
+ .unwrap_or([c, '\0', '\0'])
}
}
-
-fn bsearch_case_table(c: char, table: &[(char, [char; 3])]) -> Option<usize> {
- table.binary_search_by(|&(key, _)| key.cmp(&c)).ok()
-}
";
diff --git a/src/tools/unicode-table-generator/src/raw_emitter.rs b/src/tools/unicode-table-generator/src/raw_emitter.rs
index 890ff986c..7547b49ab 100644
--- a/src/tools/unicode-table-generator/src/raw_emitter.rs
+++ b/src/tools/unicode-table-generator/src/raw_emitter.rs
@@ -1,6 +1,5 @@
use crate::fmt_list;
use std::collections::{BTreeMap, BTreeSet, HashMap};
-use std::convert::TryFrom;
use std::fmt::{self, Write};
use std::ops::Range;
diff --git a/src/tools/unicode-table-generator/src/skiplist.rs b/src/tools/unicode-table-generator/src/skiplist.rs
index 6e439968c..9b613a94c 100644
--- a/src/tools/unicode-table-generator/src/skiplist.rs
+++ b/src/tools/unicode-table-generator/src/skiplist.rs
@@ -1,6 +1,5 @@
use crate::fmt_list;
use crate::raw_emitter::RawEmitter;
-use std::convert::TryInto;
use std::fmt::Write as _;
use std::ops::Range;
diff --git a/src/version b/src/version
index 493498565..832e9afb6 100644
--- a/src/version
+++ b/src/version
@@ -1 +1 @@
-1.69.0
+1.70.0